diff --git a/.github/workflows/semgrep.yml b/.github/workflows/semgrep.yml
index a0e1423..363cd24 100644
--- a/.github/workflows/semgrep.yml
+++ b/.github/workflows/semgrep.yml
@@ -8,7 +8,6 @@ on:
push:
branches:
- main
- - master
paths:
- .github/workflows/semgrep.yml
schedule:
diff --git a/.github/workflows/sonarcloud.yaml b/.github/workflows/sonarcloud.yaml
index 8fa60ce..3f9dd4f 100644
--- a/.github/workflows/sonarcloud.yaml
+++ b/.github/workflows/sonarcloud.yaml
@@ -7,7 +7,7 @@ on:
# a pull request.
push:
branches:
- - master
+ - main
pull_request:
types: [opened, synchronize, reopened]
name: SonarCloud analysis
@@ -19,6 +19,10 @@ jobs:
with:
# Disabling shallow clone is recommended for improving relevancy of reporting
fetch-depth: 0
+ - uses: jdx/mise-action@0c39a522dfbced3ed83924152c45804a71ce216f # v2.0.4
+ with:
+ experimental: true
+ - run: mise run dev:lint --no-fail
- name: SonarCloud Scan
uses: sonarsource/sonarcloud-github-action@6bbd64e0cb2194e04addb429d669a9ee873eeeef
env:
diff --git a/.gitignore b/.gitignore
index 68bc17f..0272eaf 100644
--- a/.gitignore
+++ b/.gitignore
@@ -158,3 +158,60 @@ cython_debug/
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
+
+# User-specific stuff
+.idea/**/workspace.xml
+.idea/**/tasks.xml
+.idea/**/usage.statistics.xml
+.idea/**/dictionaries
+.idea/**/shelf
+
+# AWS User-specific
+.idea/**/aws.xml
+
+# Generated files
+.idea/**/contentModel.xml
+
+# Sensitive or high-churn files
+.idea/**/dataSources/
+.idea/**/dataSources.ids
+.idea/**/dataSources.local.xml
+.idea/**/sqlDataSources.xml
+.idea/**/dynamic.xml
+.idea/**/uiDesigner.xml
+.idea/**/dbnavigator.xml
+
+# Mongo Explorer plugin
+.idea/**/mongoSettings.xml
+
+# File-based project format
+*.iws
+
+# IntelliJ
+out/
+
+# mpeltonen/sbt-idea plugin
+.idea_modules/
+
+# JIRA plugin
+atlassian-ide-plugin.xml
+
+# Cursive Clojure plugin
+.idea/replstate.xml
+
+# SonarLint plugin
+.idea/sonarlint/
+
+# Crashlytics plugin (for Android Studio and IntelliJ)
+com_crashlytics_export_strings.xml
+crashlytics.properties
+crashlytics-build.properties
+fabric.properties
+
+# Editor-based Rest Client
+.idea/httpRequests
+
+# Android studio 3.1+ serialized cache file
+.idea/caches/build_file_checksums.ser
+
+.DS_Store
diff --git a/.golangci.yaml b/.golangci.yaml
new file mode 100644
index 0000000..79bcaef
--- /dev/null
+++ b/.golangci.yaml
@@ -0,0 +1,58 @@
+run:
+ tests: true
+ build-tags:
+ - integration
+output:
+ formats:
+ - format: checkstyle
+ path: build/report.xml
+ - format: colored-line-number
+linters:
+ enable:
+ - errcheck
+ - gosimple
+ - govet
+ - ineffassign
+ - staticcheck
+ - unused
+ - goimports
+ - goheader
+ - gosec
+ - forbidigo
+ - goconst
+ - godot
+issues:
+ exclude-rules:
+ - linters:
+ - goheader
+ path: cache.go # This file already has a header with another license
+linters-settings:
+ goimports:
+ local-prefixes: github.com/madsrc/sophrosyne
+ gofmt:
+ simplify: true
+ rewrite-rules:
+ - pattern: 'interface{}'
+ replacement: 'any'
+ - pattern: 'a[b:len(a)]'
+ replacement: 'a[b:]'
+ goheader:
+ values:
+ const:
+ AUTHOR: 'Mads R. Havmand'
+ template: |-
+ Sophrosyne
+ Copyright (C) {{ YEAR }} {{ AUTHOR }}
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU Affero General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU Affero General Public License for more details.
+
+ You should have received a copy of the GNU Affero General Public License
+ along with this program. If not, see &lt;https://www.gnu.org/licenses/&gt;.
diff --git a/.idea/dataSources.xml b/.idea/dataSources.xml
new file mode 100644
index 0000000..54c2225
--- /dev/null
+++ b/.idea/dataSources.xml
@@ -0,0 +1,12 @@
+
+
+
+
+ postgresql
+ true
+ org.postgresql.Driver
+ jdbc:postgresql://localhost:5432/postgres
+ $ProjectFileDir$
+
+
+
diff --git a/.idea/golinter.xml b/.idea/golinter.xml
new file mode 100644
index 0000000..2a9eee5
--- /dev/null
+++ b/.idea/golinter.xml
@@ -0,0 +1,23 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/.idea/sonarlint.xml b/.idea/sonarlint.xml
new file mode 100644
index 0000000..48327f5
--- /dev/null
+++ b/.idea/sonarlint.xml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
diff --git a/.idea/sophrosyne.iml b/.idea/sophrosyne.iml
new file mode 100644
index 0000000..5a22182
--- /dev/null
+++ b/.idea/sophrosyne.iml
@@ -0,0 +1,12 @@
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/.idea/sqldialects.xml b/.idea/sqldialects.xml
new file mode 100644
index 0000000..3c44614
--- /dev/null
+++ b/.idea/sqldialects.xml
@@ -0,0 +1,7 @@
+
+
+
+
+
+
+
diff --git a/.idea/vcs.xml b/.idea/vcs.xml
new file mode 100644
index 0000000..dcb6b8c
--- /dev/null
+++ b/.idea/vcs.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
diff --git a/.mise.toml b/.mise.toml
new file mode 100644
index 0000000..7646b55
--- /dev/null
+++ b/.mise.toml
@@ -0,0 +1,9 @@
+[tools]
+go = "latest"
+watchexec = "latest"
+"go:github.com/golangci/golangci-lint/cmd/golangci-lint" = "1.58.1"
+"go:golang.org/x/tools/cmd/goimports" = "latest"
+
+[settings]
+experimental = true
+go_set_gobin = true
diff --git a/.mise/tasks/clean b/.mise/tasks/clean
new file mode 100755
index 0000000..1d54cf4
--- /dev/null
+++ b/.mise/tasks/clean
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+rm -rf build
+rm -rf dist
diff --git a/.mise/tasks/dev/db/down b/.mise/tasks/dev/db/down
new file mode 100755
index 0000000..b815069
--- /dev/null
+++ b/.mise/tasks/dev/db/down
@@ -0,0 +1,4 @@
+#!/bin/sh
+# mise outputs=[]
+
+docker compose -f docker-compose.development.yml down
diff --git a/.mise/tasks/dev/db/up b/.mise/tasks/dev/db/up
new file mode 100755
index 0000000..6e35beb
--- /dev/null
+++ b/.mise/tasks/dev/db/up
@@ -0,0 +1,4 @@
+#!/bin/sh
+# mise outputs=[]
+
+docker compose -f docker-compose.development.yml up -d
diff --git a/.mise/tasks/dev/format b/.mise/tasks/dev/format
new file mode 100755
index 0000000..a691af1
--- /dev/null
+++ b/.mise/tasks/dev/format
@@ -0,0 +1,30 @@
+#!/bin/sh
+# mise sources=["go.mod", "go.sum", "internal/**/*", "cmd/**/*", "*.go"]
+# mise outputs=[]
+
+check=1
+
+handle_argument() {
+ echo "Argument '$1' not supported" >&2; exit 1
+}
+
+while test "$#" -gt 0; do
+ case "$1" in
+ --check) check=0; shift 1;;
+
+ -*) echo "unknown option: $1" >&2; exit 1;;
+ *) handle_argument "$1"; shift 1;;
+ esac
+done
+
+if test "$check" = 0; then
+ changes=$(goimports -l -local github.com/madsrc/sophrosyne -e .)
+ if test -n "$changes"; then
+ echo "$changes"
+ exit 1
+ else
+ exit 0
+ fi
+else
+ goimports -w -local github.com/madsrc/sophrosyne -e .
+fi
diff --git a/.mise/tasks/dev/lint b/.mise/tasks/dev/lint
new file mode 100755
index 0000000..e68da60
--- /dev/null
+++ b/.mise/tasks/dev/lint
@@ -0,0 +1,28 @@
+#!/bin/sh
+# mise sources=["go.mod", "go.sum", "internal/**/*", "cmd/**/*", "*.go"]
+# mise outputs=["build/report.xml"]
+
+nofail=1
+fix=1
+
+fixflags=""
+
+handle_argument() {
+ echo "Argument '$1' not supported" >&2; exit 1
+}
+
+while test "$#" -gt 0; do
+ case "$1" in
+ --no-fail) nofail=0; shift 1;;
+ --fix) fix=0; shift 1;;
+
+ -*) echo "unknown option: $1" >&2; exit 1;;
+ *) handle_argument "$1"; shift 1;;
+ esac
+done
+
+if test "$fix" = 0; then
+ fixflags="--fix"
+fi
+
+golangci-lint run --issues-exit-code="$nofail" $fixflags
diff --git a/.mise/tasks/dev/run b/.mise/tasks/dev/run
new file mode 100755
index 0000000..1175fd5
--- /dev/null
+++ b/.mise/tasks/dev/run
@@ -0,0 +1,13 @@
+#!/bin/sh
+# mise outputs=[]
+
+docker compose -f docker-compose.development.yml up -d
+SOPH_LOGGING_LEVEL=debug \
+ SOPH_TRACING_OUTPUT=http \
+ SOPH_METRICS_OUTPUT=http \
+ SOPH_METRICS_ENABLED=true \
+ SOPH_TRACING_ENABLED=true \
+ OTEL_EXPORTER_OTLP_ENDPOINT="http://localhost:4318" \
+ OTEL_TRACES_EXPORTER="otlp" \
+ OTEL_EXPORTER_OTLP_PROTOCOL="http/protobuf" \
+go run cmd/sophrosyne/main.go --config configurations/dev.yaml run
diff --git a/.mise/tasks/sync-go-version b/.mise/tasks/sync-go-version
new file mode 100755
index 0000000..9d28868
--- /dev/null
+++ b/.mise/tasks/sync-go-version
@@ -0,0 +1,14 @@
+#!/bin/sh
+# mise outputs=[]
+
+set -e
+
+version=$(grep -E "^go .*$" go.mod | awk '{print $2}')
+
+echo "Version extracted from go.mod: $version"
+
+echo "Setting go version in .mise.toml"
+mise use "go@$version"
+
+echo "Setting go version in .idea/workspace.xml"
+sed -i "s/.local\/share\/mise\/installs\/go\/.*\"/.local\/share\/mise\/installs\/go\/$version\"/g" .idea/workspace.xml
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 3e2c833..6b82cce 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -6,7 +6,6 @@ repos:
args: ['--maxkb=4000']
- id: end-of-file-fixer
exclude: "snapshots|docs/api.md"
- - id: check-docstring-first
- id: trailing-whitespace
exclude: "snapshots|docs/api.md"
- id: check-merge-conflict
@@ -15,10 +14,4 @@ repos:
- id: check-case-conflict
- id: mixed-line-ending
- id: detect-private-key
- - id: check-ast
- - id: check-builtin-literals
- id: check-executables-have-shebangs
- - id: check-shebang-scripts-are-executable
- - id: name-tests-test
- args: [ "--pytest-test-first" ]
- - id: requirements-txt-fixer
diff --git a/.python-version b/.python-version
deleted file mode 100644
index 8531a3b..0000000
--- a/.python-version
+++ /dev/null
@@ -1 +0,0 @@
-3.12.2
diff --git a/Dockerfile b/Dockerfile
index fd6a247..ba91b60 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,19 +1,3 @@
-FROM cgr.dev/chainguard/python:latest-dev@sha256:40b3a19b7e2a50824b1ff32d61ae5f59af1b0de67f7bb8e56f5804bace0d94b7 AS builder
-
-ARG dist_file
-
-WORKDIR /app
-
-COPY "dist/${dist_file}" "/home/nonroot/${dist_file}"
-
-RUN --mount=type=secret,id=requirements,dst=/home/nonroot/requirements.txt,uid=65532,gid=65532 \
- pip install --no-cache-dir -r "/home/nonroot/requirements.txt"
-RUN pip install --no-cache-dir "/home/nonroot/${dist_file}"
-
-FROM cgr.dev/chainguard/python:latest@sha256:5f16431f56f330925a9c8f5168b31ca65f603de15b127b376f8532bab11583c0
-
-WORKDIR /app
-
-COPY --from=builder /home/nonroot/.local/lib/python3.12/site-packages /home/nonroot/.local/lib/python3.12/site-packages
-
-ENTRYPOINT [ "python", "-m", "sophrosyne.main", "run" ]
+FROM cgr.dev/chainguard/glibc-dynamic:latest
+COPY --chown=nonroot:nonroot dist/sophrosyne /usr/bin/
+ENTRYPOINT ["/usr/bin/sophrosyne"]
diff --git a/Makefile b/Makefile
deleted file mode 100644
index a94ce7e..0000000
--- a/Makefile
+++ /dev/null
@@ -1,105 +0,0 @@
-VERSION ?= $(shell poetry version | cut -d' ' -f2)
-
-.PHONY: protobuf
-protobuf: src/sophrosyne/grpc/checks/checks.proto src/sophrosyne/grpc/checks/checks_pb2.py src/sophrosyne/grpc/checks/checks_pb2_grpc.py src/sophrosyne/grpc/checks/checks_pb2.pyi
-
-src/sophrosyne/grpc/checks/checks_pb2.py: src/sophrosyne/grpc/checks/checks.proto
- poetry run python -m grpc_tools.protoc -I./src --python_out=./src ./src/sophrosyne/grpc/checks/checks.proto
-
-src/sophrosyne/grpc/checks/checks_pb2_grpc.py: src/sophrosyne/grpc/checks/checks.proto
- poetry run python -m grpc_tools.protoc -I./src --grpc_python_out=./src ./src/sophrosyne/grpc/checks/checks.proto
-
-src/sophrosyne/grpc/checks/checks_pb2.pyi: src/sophrosyne/grpc/checks/checks.proto
- poetry run python -m grpc_tools.protoc -I./src --pyi_out=./src ./src/sophrosyne/grpc/checks/checks.proto
-
-build/requirements.txt:
- @mkdir -p $(@D)
- poetry export --without-hashes --format=requirements.txt --with-credentials --output $@
-
-dist/sophrosyne-$(VERSION)-py3-none-any.whl: src/sophrosyne/* src/sophrosyne/grpc/checks/checks.proto src/sophrosyne/grpc/checks/checks_pb2.py src/sophrosyne/grpc/checks/checks_pb2_grpc.py src/sophrosyne/grpc/checks/checks_pb2.pyi
- @mkdir -p $(@D)
- poetry build --format=wheel
-
-dist/sophrosyne.tar: dist/sophrosyne-$(VERSION)-py3-none-any.whl build/requirements.txt
- mkdir -p $(@D)
- docker build --build-arg="dist_file=sophrosyne-$(VERSION)-py3-none-any.whl" --secret id=requirements,src=build/requirements.txt --no-cache --tag sophrosyne:$(VERSION) --attest=type=provenance,mode=max --attest=type=sbom --platform=linux/arm64 --output type=oci,dest=- . > $@
-
-.PHONY: alembic/stamp
-alembic/stamp:
- poetry run alembic stamp "head"
-
-.PHONY: alembic/upgrade
-alembic/upgrade:
- poetry run alembic upgrade "head"
-
-.PHONY: alembic/auto
-alembic/auto:
- poetry run alembic revision --autogenerate
-
-.PHONY: alembic/revision
-alembic/revision:
- poetry run alembic revision
-
-.PHONY: dev/run
-dev/run: build/.certificate_sentinel
- docker compose -f docker-compose.development.yml up -d
- SOPH__CONFIG_YAML_FILE=configurations/dev.yaml poetry run python src/sophrosyne/main.py run
-
-.PHONY: dev/db/up
-dev/db/up:
- docker compose -f docker-compose.development.yml up -d
-
-.PHONY: dev/db/down
-dev/db/down:
- docker compose -f docker-compose.development.yml down
-
-build/.image_loaded_sentinel: dist/sophrosyne.tar
- mkdir -p $(@D)
- docker load --input dist/sophrosyne.tar
- @# For some reason the previous command doesn't include a newline in its output
- @printf "\n"
- touch $@
-
-build/integration/root_token:
- mkdir -p $(@D)
- openssl rand -hex 128 > $@
-
-
-.PHONY: test/integration
-test/integration: test/integration/healthy_instance test/integration/auth01 test/integration/auth_required
-
-.PHONY: test/integration/%
-test/integration/%: build/.certificate_sentinel build/.image_loaded_sentinel build/integration/root_token
- $(MAKE) destroy/test/integration/$*
- VERSION=$(VERSION) ROOT_TOKEN="$$(cat build/integration/root_token)" docker compose -f tests/integration/$*/docker-compose.yml up --exit-code-from tester
- $(MAKE) destroy/test/integration/$*
-
-.PHONY: destroy/test/integration/%
-destroy/test/integration/%:
- VERSION="" ROOT_TOKEN="" docker compose -f tests/integration/$*/docker-compose.yml down
-
-.PHONY: clean
-clean:
- rm -rf src/sophrosyne/grpc/checks/checks_pb2.py src/sophrosyne/grpc/checks/checks_pb2_grpc.py src/sophrosyne/grpc/checks/checks_pb2.pyi
- rm -rf dist
- rm -rf build
- find . -name __pycache__ -exec rm -rf {} +
- rm -rf .pytest_cache
- rm -rf .mypy_cache
- rm -rf .ruff_cache
- -$(MAKE) dev/db/down
-
-build/server.key: build/.certificate_sentinel
-
-build/server.crt: build/.certificate_sentinel
-
-build/.certificate_sentinel:
- @mkdir -p $(@D)
- openssl req -x509 -nodes -days 3650 -newkey ec -pkeyopt ec_paramgen_curve:secp384r1 -keyout build/server.key -out build/server.crt -subj '/CN=localhost' -addext 'subjectAltName = DNS:localhost,IP:127.0.0.1,IP:0.0.0.0,DNS:api'
- chmod 0777 build/server.key
- chmod 0777 build/server.crt
- touch $@
-
-.PHONY:
-dev/install:
- poetry install --with dev,test
diff --git a/alembic.ini b/alembic.ini
deleted file mode 100644
index 1e5d218..0000000
--- a/alembic.ini
+++ /dev/null
@@ -1,114 +0,0 @@
-# A generic, single database configuration.
-
-[alembic]
-# path to migration scripts
-script_location = src/sophrosyne/migrations
-
-# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
-# Uncomment the line below if you want the files to be prepended with date and time
-# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
-
-# sys.path path, will be prepended to sys.path if present.
-# defaults to the current working directory.
-prepend_sys_path = .
-
-# timezone to use when rendering the date within the migration file
-# as well as the filename.
-# If specified, requires the python>=3.9 or backports.zoneinfo library.
-# Any required deps can installed by adding `alembic[tz]` to the pip requirements
-# string value is passed to ZoneInfo()
-# leave blank for localtime
-# timezone =
-
-# max length of characters to apply to the
-# "slug" field
-# truncate_slug_length = 40
-
-# set to 'true' to run the environment during
-# the 'revision' command, regardless of autogenerate
-# revision_environment = false
-
-# set to 'true' to allow .pyc and .pyo files without
-# a source .py file to be detected as revisions in the
-# versions/ directory
-# sourceless = false
-
-# version location specification; This defaults
-# to migrations/versions. When using multiple version
-# directories, initial revisions must be specified with --version-path.
-# The path separator used here should be the separator specified by "version_path_separator" below.
-# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions
-
-# version path separator; As mentioned above, this is the character used to split
-# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
-# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
-# Valid values for version_path_separator are:
-#
-# version_path_separator = :
-# version_path_separator = ;
-# version_path_separator = space
-version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
-
-# set to 'true' to search source files recursively
-# in each "version_locations" directory
-# new in Alembic version 1.10
-# recursive_version_locations = false
-
-# the output encoding used when revision files
-# are written from script.py.mako
-# output_encoding = utf-8
-
-sqlalchemy.url = postgresql+asyncpg://postgres:postgres@localhost:5432/postgres
-
-
-[post_write_hooks]
-# post_write_hooks defines scripts or Python functions that are run
-# on newly generated revision scripts. See the documentation for further
-# detail and examples
-
-# format using "black" - use the console_scripts runner, against the "black" entrypoint
-# hooks = black
-# black.type = console_scripts
-# black.entrypoint = black
-# black.options = -l 79 REVISION_SCRIPT_FILENAME
-
-# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
-# hooks = ruff
-# ruff.type = exec
-# ruff.executable = %(here)s/.venv/bin/ruff
-# ruff.options = --fix REVISION_SCRIPT_FILENAME
-
-# Logging configuration
-[loggers]
-keys = root,sqlalchemy,alembic
-
-[handlers]
-keys = console
-
-[formatters]
-keys = generic
-
-[logger_root]
-level = WARN
-handlers = console
-qualname =
-
-[logger_sqlalchemy]
-level = WARN
-handlers =
-qualname = sqlalchemy.engine
-
-[logger_alembic]
-level = INFO
-handlers =
-qualname = alembic
-
-[handler_console]
-class = StreamHandler
-args = (sys.stderr,)
-level = NOTSET
-formatter = generic
-
-[formatter_generic]
-format = %(levelname)-5.5s [%(name)s] %(message)s
-datefmt = %H:%MWARNu
diff --git a/bruno.json b/bruno.json
deleted file mode 100644
index dd16c76..0000000
--- a/bruno.json
+++ /dev/null
@@ -1,20 +0,0 @@
-{
- "version": "1",
- "name": "sophrosyne",
- "type": "collection",
- "ignore": [
- "node_modules",
- ".git",
- ".mypy_cache",
- ".pytest_cache",
- ".ruff_cache",
- ".venv",
- ".vscode",
- "migrations",
- "src",
- "tests/unit",
- "build",
- "dist",
- "configurations"
- ]
-}
diff --git a/cache.go b/cache.go
new file mode 100644
index 0000000..8f35205
--- /dev/null
+++ b/cache.go
@@ -0,0 +1,130 @@
+// Copyright (c) 2024 Mads R. Havmand
+//
+// Part of the codebase in this file is lifted from the go-cache project by Patrick Mylund Nielsen. The original project
+// can be found at https://github.com/patrickmn/go-cache. The go-cache project is licensed under the MIT License, and
+// therefore so is parts of this file.
+//
+// --- License applicable to the go-cache project ---
+//Copyright (c) 2012-2019 Patrick Mylund Nielsen and the go-cache contributors
+//
+//Permission is hereby granted, free of charge, to any person obtaining a copy
+//of this software and associated documentation files (the "Software"), to deal
+//in the Software without restriction, including without limitation the rights
+//to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+//copies of the Software, and to permit persons to whom the Software is
+//furnished to do so, subject to the following conditions:
+//
+//The above copyright notice and this permission notice shall be included in
+//all copies or substantial portions of the Software.
+//
+//THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+//IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+//FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+//AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+//LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+//OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+//THE SOFTWARE.
+//
+// --- End of license applicable to the go-cache project ---
+//
+// The above license is also applicable to the parts of this file that are lifted from the go-cache project. The rest
+// of the file is licensed under the same license as the rest of the sophrosyne project.
+//
+
+package sophrosyne
+
+import (
+ "runtime"
+ "sync"
+ "time"
+)
+
+type CacheItem struct {
+ Value any
+ Expiration int64
+}
+
+type Cache struct {
+ expiration int64
+ items map[string]CacheItem
+ lock sync.RWMutex
+ cleaner *cacheCleaner
+}
+
+func NewCache(expiration int64) *Cache {
+ c := &Cache{
+ expiration: expiration,
+ items: make(map[string]CacheItem),
+ }
+
+ // Doing it this way ensures that the cacheCleaner goroutine does not keep the returned Cache object from being
+ // garbage collected. When garbage collection does occur, the finalizer will stop the cacheCleaner goroutine.
+ runCacheCleaner(c, time.Duration(expiration)*time.Nanosecond)
+ runtime.SetFinalizer(c, stopCacheCleaner)
+
+ return c
+}
+
+func (c *Cache) Get(key string) (any, bool) {
+ c.lock.RLock()
+ item, ok := c.items[key]
+ if !ok {
+ c.lock.RUnlock()
+ return nil, false
+ }
+ c.lock.RUnlock()
+ return item.Value, true
+}
+
+func (c *Cache) Set(key string, value any) {
+ c.lock.Lock()
+ c.items[key] = CacheItem{Value: value, Expiration: time.Now().Add(time.Duration(c.expiration)).UnixNano()}
+ c.lock.Unlock()
+}
+
+func (c *Cache) Delete(key string) {
+ c.lock.Lock()
+ delete(c.items, key)
+ c.lock.Unlock()
+}
+
+func (c *Cache) DeleteExpired() {
+ now := time.Now().UnixNano()
+ c.lock.Lock()
+ for key, item := range c.items {
+ if item.Expiration > 0 && now > item.Expiration {
+ delete(c.items, key)
+ }
+ }
+ c.lock.Unlock()
+}
+
+type cacheCleaner struct {
+ interval time.Duration
+ stop chan bool
+}
+
+func (cleaner *cacheCleaner) Start(c *Cache) {
+ ticker := time.NewTicker(cleaner.interval)
+ for {
+ select {
+ case <-ticker.C:
+ c.DeleteExpired()
+ case <-cleaner.stop:
+ ticker.Stop()
+ return
+ }
+ }
+}
+
+func runCacheCleaner(c *Cache, interval time.Duration) {
+ c.cleaner = &cacheCleaner{
+ interval: interval,
+ stop: make(chan bool),
+ }
+ go c.cleaner.Start(c)
+}
+
+func stopCacheCleaner(c *Cache) {
+ c.cleaner.stop <- true
+}
diff --git a/checks.go b/checks.go
new file mode 100644
index 0000000..9cee873
--- /dev/null
+++ b/checks.go
@@ -0,0 +1,199 @@
+package sophrosyne
+
+import (
+ "context"
+ "net/url"
+ "time"
+)
+
+type Check struct {
+ ID string
+ Name string
+ Profiles []Profile
+ UpstreamServices []url.URL
+ CreatedAt time.Time
+ UpdatedAt time.Time
+ DeletedAt *time.Time
+}
+
+func (c Check) EntityType() string { return "Check" }
+
+func (c Check) EntityID() string { return c.ID }
+
+type CheckService interface {
+ GetCheck(ctx context.Context, id string) (Check, error)
+ GetCheckByName(ctx context.Context, name string) (Check, error)
+ GetChecks(ctx context.Context, cursor *DatabaseCursor) ([]Check, error)
+ CreateCheck(ctx context.Context, check CreateCheckRequest) (Check, error)
+ UpdateCheck(ctx context.Context, check UpdateCheckRequest) (Check, error)
+ DeleteCheck(ctx context.Context, id string) error
+}
+
+type GetCheckRequest struct {
+ ID string `json:"id"`
+ Name string `json:"name" validate:"required_without=ID,excluded_with=ID"`
+}
+
+type GetCheckResponse struct {
+ Name string `json:"name"`
+ Profiles []string `json:"profiles"`
+ UpstreamServices []string `json:"upstream_services"`
+ CreatedAt string `json:"createdAt"`
+ UpdatedAt string `json:"updatedAt"`
+ DeletedAt string `json:"deletedAt,omitempty"`
+}
+
+func (r *GetCheckResponse) FromCheck(c Check) *GetCheckResponse {
+ var p []string
+ for _, entry := range c.Profiles {
+ p = append(p, entry.Name)
+ }
+ var u []string
+ for _, entry := range c.UpstreamServices {
+ u = append(u, entry.String())
+ }
+ r.Name = c.Name
+ r.Profiles = p
+ r.UpstreamServices = u
+ r.CreatedAt = c.CreatedAt.Format(TimeFormatInResponse)
+ r.UpdatedAt = c.UpdatedAt.Format(TimeFormatInResponse)
+ if c.DeletedAt != nil {
+ r.DeletedAt = c.DeletedAt.Format(TimeFormatInResponse)
+ }
+ return r
+}
+
+type GetChecksRequest struct {
+ Cursor string `json:"cursor"`
+}
+
+type GetChecksResponse struct {
+ Checks []GetCheckResponse `json:"checks"`
+ Cursor string `json:"cursor"`
+ Total int `json:"total"`
+}
+
+type CreateCheckRequest struct {
+ Name string `json:"name" validate:"required"`
+ Profiles []string `json:"profiles"`
+ UpstreamServices []string `json:"upstream_services" validate:"dive,url"`
+}
+
+type CreateCheckResponse struct {
+ GetCheckResponse
+}
+
+type UpdateCheckRequest struct {
+ Name string `json:"name" validate:"required"`
+ Profiles []string `json:"profiles"`
+ UpstreamServices []string `json:"upstream_services" validate:"dive,url"`
+}
+
+type UpdateCheckResponse struct {
+ GetCheckResponse
+}
+
+type DeleteCheckRequest struct {
+ Name string `json:"name" validate:"required"`
+}
+
+type CheckServiceCache struct {
+ cache *Cache
+ checkService CheckService
+ tracingService TracingService
+}
+
+func NewCheckServiceCache(config *Config, checkService CheckService, tracingService TracingService) *CheckServiceCache {
+ return &CheckServiceCache{
+ cache: NewCache(config.Services.Profiles.CacheTTL),
+ checkService: checkService,
+ tracingService: tracingService,
+ }
+}
+
+func (c CheckServiceCache) GetCheck(ctx context.Context, id string) (Check, error) {
+ ctx, span := c.tracingService.StartSpan(ctx, "CheckServiceCache.GetCheck")
+ v, ok := c.cache.Get(id)
+ if ok {
+ span.End()
+ return v.(Check), nil
+ }
+
+ profile, err := c.checkService.GetCheck(ctx, id)
+ if err != nil {
+ span.End()
+ return Check{}, err
+ }
+
+ c.cache.Set(id, profile)
+ span.End()
+ return profile, nil
+}
+
+func (c CheckServiceCache) GetCheckByName(ctx context.Context, name string) (Check, error) {
+ ctx, span := c.tracingService.StartSpan(ctx, "CheckServiceCache.GetCheckByName")
+ profile, err := c.checkService.GetCheckByName(ctx, name)
+ if err != nil {
+ span.End()
+ return Check{}, err
+ }
+
+ c.cache.Set(profile.ID, profile)
+ span.End()
+ return profile, nil
+}
+
+func (c CheckServiceCache) GetChecks(ctx context.Context, cursor *DatabaseCursor) ([]Check, error) {
+ ctx, span := c.tracingService.StartSpan(ctx, "CheckServiceCache.GetChecks")
+ profiles, err := c.checkService.GetChecks(ctx, cursor)
+ if err != nil {
+ span.End()
+ return nil, err
+ }
+
+ for _, user := range profiles {
+ c.cache.Set(user.ID, user)
+ }
+
+ span.End()
+ return profiles, nil
+}
+
+func (c CheckServiceCache) CreateCheck(ctx context.Context, check CreateCheckRequest) (Check, error) {
+ ctx, span := c.tracingService.StartSpan(ctx, "CheckServiceCache.CreateCheck")
+ createProfile, err := c.checkService.CreateCheck(ctx, check)
+ if err != nil {
+ span.End()
+ return Check{}, err
+ }
+
+ c.cache.Set(createProfile.ID, createProfile)
+ span.End()
+ return createProfile, nil
+}
+
+func (c CheckServiceCache) UpdateCheck(ctx context.Context, check UpdateCheckRequest) (Check, error) {
+ ctx, span := c.tracingService.StartSpan(ctx, "CheckServiceCache.UpdateCheck")
+ updateProfile, err := c.checkService.UpdateCheck(ctx, check)
+ if err != nil {
+ span.End()
+ return Check{}, err
+ }
+
+ c.cache.Set(updateProfile.ID, updateProfile)
+ span.End()
+ return updateProfile, nil
+}
+
+func (c CheckServiceCache) DeleteCheck(ctx context.Context, id string) error {
+ ctx, span := c.tracingService.StartSpan(ctx, "CheckServiceCache.DeleteCheck")
+ err := c.checkService.DeleteCheck(ctx, id)
+ if err != nil {
+ span.End()
+ return err
+ }
+
+ c.cache.Delete(id)
+ span.End()
+ return nil
+}
diff --git a/cmd/dummycheck/main.go b/cmd/dummycheck/main.go
new file mode 100644
index 0000000..79f5899
--- /dev/null
+++ b/cmd/dummycheck/main.go
@@ -0,0 +1,72 @@
+package main
+
+import (
+ "context"
+ "fmt"
+ "log"
+ "net"
+ "os"
+
+ "github.com/urfave/cli/v2"
+ "google.golang.org/grpc"
+
+ "github.com/madsrc/sophrosyne/internal/grpc/checks"
+)
+
+func main() {
+ app := &cli.App{
+ Flags: []cli.Flag{
+ &cli.IntFlag{
+ Name: "port",
+ Usage: "port to listen on",
+ Value: 11432,
+ },
+ },
+ Action: func(c *cli.Context) error {
+ lis, err := net.Listen("tcp", fmt.Sprintf("localhost:%d", c.Int("port")))
+ if err != nil {
+ log.Fatalf("failed to listen: %v", err)
+ }
+ var opts []grpc.ServerOption
+ grpcServer := grpc.NewServer(opts...)
+ checks.RegisterCheckServiceServer(grpcServer, checkServer{})
+ err = grpcServer.Serve(lis)
+ if err != nil {
+ log.Fatalf("failed to serve: %v", err)
+ }
+
+ return nil
+ },
+ }
+
+ if err := app.Run(os.Args); err != nil {
+ log.Fatal(err)
+ }
+
+}
+
+type checkServer struct {
+ checks.UnimplementedCheckServiceServer
+}
+
+func (c checkServer) Check(ctx context.Context, request *checks.CheckRequest) (*checks.CheckResponse, error) {
+ var cnt string
+ switch request.GetCheck().(type) {
+ case *checks.CheckRequest_Text:
+ cnt = request.GetText()
+ case *checks.CheckRequest_Image:
+ cnt = request.GetImage()
+ default:
+ cnt = ""
+ }
+ if cnt == "false" {
+ return &checks.CheckResponse{
+ Result: false,
+ Details: "this was false",
+ }, nil
+ }
+ return &checks.CheckResponse{
+ Result: true,
+ Details: "this was true",
+ }, nil
+}
diff --git a/cmd/sophrosyne/main.go b/cmd/sophrosyne/main.go
new file mode 100644
index 0000000..415bcf3
--- /dev/null
+++ b/cmd/sophrosyne/main.go
@@ -0,0 +1,373 @@
+package main
+
+import (
+ "context"
+ "crypto/rand"
+ "errors"
+ "fmt"
+ "log/slog"
+ http2 "net/http"
+ "os"
+ "os/signal"
+ "syscall"
+ "time"
+
+ "github.com/urfave/cli/v2"
+ "gopkg.in/yaml.v3"
+
+ "github.com/madsrc/sophrosyne"
+ "github.com/madsrc/sophrosyne/internal/cedar"
+ "github.com/madsrc/sophrosyne/internal/configProvider"
+ "github.com/madsrc/sophrosyne/internal/healthchecker"
+ "github.com/madsrc/sophrosyne/internal/http"
+ "github.com/madsrc/sophrosyne/internal/http/middleware"
+ "github.com/madsrc/sophrosyne/internal/migrate"
+ "github.com/madsrc/sophrosyne/internal/otel"
+ "github.com/madsrc/sophrosyne/internal/pgx"
+ "github.com/madsrc/sophrosyne/internal/rpc"
+ "github.com/madsrc/sophrosyne/internal/rpc/services"
+ "github.com/madsrc/sophrosyne/internal/tls"
+ "github.com/madsrc/sophrosyne/internal/validator"
+)
+
+// main configures and runs the sophrosyne command line interface,
+// exposing the run, version, migrate, config and healthcheck commands.
+func main() {
+	cli.VersionPrinter = func(c *cli.Context) {
+		_, _ = fmt.Fprintf(c.App.Writer, "v%s\n", c.App.Version)
+	}
+	cli.VersionFlag = &cli.BoolFlag{
+		Name:    "version",
+		Aliases: []string{"v"},
+		Usage:   "print the version",
+	}
+	app := &cli.App{
+		Flags: []cli.Flag{
+			&cli.StringFlag{
+				Name:  "config",
+				Usage: "The path to the configuration file",
+				Value: "config.yaml",
+			},
+			&cli.StringSliceFlag{
+				Name:  "secretfiles",
+				Usage: "Files to read individual configuration values from. Multiple files can be specified by separating them with a comma or supply the option multiple times. The name of the file is used to determine what configuration parameter the content of the file will be read in to. For example, a file called 'database.host' will have its content used as the value for 'database.host' in the configuration. This option is recommended to be used for secrets.",
+				Value: nil,
+			},
+		},
+		Version: "0.0.0",
+		Commands: []*cli.Command{
+			{
+				Name:  "run",
+				Usage: "sophrosyne",
+				Action: func(c *cli.Context) error {
+					return run(c)
+				},
+			},
+			{
+				Name:  "version",
+				Usage: "print the version",
+				Action: func(c *cli.Context) error {
+					cli.VersionPrinter(c)
+					return nil
+				},
+			},
+			{
+				Name:  "migrate",
+				Usage: "migrate the database to the latest version",
+				Action: func(c *cli.Context) error {
+					validate := validator.NewValidator()
+
+					config, err := getConfig(c.String("config"), nil, c.StringSlice("secretfiles"), validate)
+					// BUG FIX: this error was previously discarded,
+					// letting an invalid/nil config reach the migrator.
+					if err != nil {
+						return err
+					}
+					migrationService, err := migrate.NewMigrationService(config)
+					if err != nil {
+						return err
+					}
+
+					err = migrationService.Up()
+					if err != nil {
+						if !errors.Is(err, migrate.ErrNoChange) {
+							return err
+						}
+						_, _ = fmt.Fprintln(c.App.Writer, "No migrations to apply")
+						return nil
+					}
+					v, dirty, err := migrationService.Versions()
+					if err != nil {
+						return err
+					}
+					msg := fmt.Sprintf("Migrations applied. Database at version '%d'", v)
+					if dirty {
+						msg += " (dirty)"
+					}
+					// Fprintln avoids treating msg as a format string.
+					_, _ = fmt.Fprintln(c.App.Writer, msg)
+					return nil
+				},
+			},
+			{
+				Name:  "config",
+				Usage: "show the current configuration",
+				Action: func(c *cli.Context) error {
+					validate := validator.NewValidator()
+					config, err := getConfig(c.String("config"), nil, c.StringSlice("secretfiles"), validate)
+					if err != nil {
+						return err
+					}
+
+					dat, err := yaml.Marshal(config)
+					if err != nil {
+						return err
+					}
+
+					_, _ = fmt.Fprintf(c.App.Writer, "%s\n", dat)
+					return nil
+				},
+			},
+			{
+				Name:  "healthcheck",
+				Usage: "check if the server is running",
+				Flags: []cli.Flag{
+					&cli.StringFlag{
+						Name:  "target",
+						Usage: "target server address. Must include scheme and port number",
+						Value: "https://127.0.0.1:8080/healthz",
+					},
+					&cli.BoolFlag{
+						Name:  "insecure-skip-verify",
+						Usage: "Skip TLS certificate verification",
+						Value: false,
+					},
+				},
+				Action: func(c *cli.Context) error {
+					validate := validator.NewValidator()
+					config, err := getConfig(c.String("config"), map[string]interface{}{
+						"security.tls.insecureSkipVerify": c.Bool("insecure-skip-verify"),
+					}, c.StringSlice("secretfiles"), validate)
+					if err != nil {
+						return err
+					}
+
+					tlsConfig, err := tls.NewTLSClientConfig(config)
+					if err != nil {
+						return err
+					}
+					client := http2.Client{
+						Timeout: 5 * time.Second,
+						Transport: &http2.Transport{
+							TLSClientConfig: tlsConfig,
+						},
+					}
+					resp, err := client.Get(c.String("target"))
+					if err != nil {
+						if errors.Is(err, syscall.ECONNREFUSED) {
+							return cli.Exit("unhealthy", 2)
+						}
+						return cli.Exit(err.Error(), 1)
+					}
+					// BUG FIX: the response body was never closed,
+					// leaking the underlying connection.
+					defer resp.Body.Close()
+					if resp.StatusCode == http2.StatusOK {
+						return cli.Exit("healthy", 0)
+					}
+					return cli.Exit("unhealthy", 3)
+				},
+			},
+		},
+	}
+
+	if err := app.Run(os.Args); err != nil {
+		// Report the error and exit non-zero; panicking here would print
+		// a stack trace for what is a user-facing error.
+		_, _ = fmt.Fprintln(os.Stderr, err)
+		os.Exit(1)
+	}
+}
+
+// getConfig builds the application configuration from the given file,
+// optional in-memory overwrites, and secret files, validating the
+// result with validate.
+func getConfig(filepath string, overwrites map[string]interface{}, secretfiles []string, validate *validator.Validator) (*sophrosyne.Config, error) {
+	provider, err := configProvider.NewConfigProvider(filepath, overwrites, secretfiles, validate)
+	if err != nil {
+		return nil, err
+	}
+	return provider.Get(), nil
+}
+
+// run boots the sophrosyne server: it loads configuration, sets up
+// logging and OpenTelemetry, applies database migrations, wires the
+// services and RPC handlers, and serves HTTP until the server fails or
+// SIGINT is received.
+//
+// The error return is named so the deferred otelShutdown join below can
+// actually augment the returned error.
+func run(c *cli.Context) (err error) {
+	// Handle SIGINT (CTRL+C) gracefully.
+	ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt)
+	defer stop()
+
+	validate := validator.NewValidator()
+	config, err := getConfig(c.String("config"), nil, c.StringSlice("secretfiles"), validate)
+	if err != nil {
+		return err
+	}
+
+	otelService, err := otel.NewOtelService()
+	if err != nil {
+		return err
+	}
+
+	logger := slog.New(sophrosyne.NewLogHandler(config, otelService))
+
+	otelShutdown, err := otel.SetupOTelSDK(ctx, config)
+	if err != nil {
+		return err
+	}
+	// Handle shutdown properly so nothing leaks. BUG FIX: with an
+	// unnamed return value this deferred join silently had no effect.
+	defer func() {
+		err = errors.Join(err, otelShutdown(ctx))
+	}()
+
+	migrationService, err := migrate.NewMigrationService(config)
+	if err != nil {
+		return err
+	}
+
+	logger.DebugContext(ctx, "Applying migrations")
+	err = migrationService.Up()
+	if err != nil {
+		if !errors.Is(err, migrate.ErrNoChange) {
+			// BUG FIX: returning (instead of panicking) lets the
+			// deferred otel shutdown above run.
+			return err
+		}
+		logger.DebugContext(ctx, "No migrations to apply")
+	}
+	sourceErr, dbErr := migrationService.Close()
+	if sourceErr != nil {
+		return sourceErr
+	}
+	if dbErr != nil {
+		return dbErr
+	}
+
+	checkServiceDatabase, err := pgx.NewCheckService(ctx, config, logger)
+	if err != nil {
+		return err
+	}
+
+	checkService := sophrosyne.NewCheckServiceCache(config, checkServiceDatabase, otelService)
+
+	profileServiceDatabase, err := pgx.NewProfileService(ctx, config, logger, checkService)
+	if err != nil {
+		return err
+	}
+
+	userServiceDatabase, err := pgx.NewUserService(ctx, config, logger, rand.Reader, profileServiceDatabase)
+	if err != nil {
+		return err
+	}
+
+	// The cache constructors return no error; the stale `if err != nil`
+	// checks that used to follow them were dead code and are removed.
+	userService := sophrosyne.NewUserServiceCache(config, userServiceDatabase, otelService)
+	profileService := sophrosyne.NewProfileServiceCache(config, profileServiceDatabase, otelService)
+
+	// BUG FIX: the error returned here was previously ignored.
+	authzProvider, err := cedar.NewAuthorizationProvider(ctx, logger, userService, otelService, profileService, checkService)
+	if err != nil {
+		return err
+	}
+
+	rpcServer, err := rpc.NewRPCServer(logger)
+	if err != nil {
+		return err
+	}
+
+	rpcUserService, err := services.NewUserService(userService, authzProvider, logger, validate)
+	if err != nil {
+		return err
+	}
+
+	rpcCheckService, err := services.NewCheckService(checkService, authzProvider, logger, validate)
+	if err != nil {
+		return err
+	}
+
+	rpcProfileService, err := services.NewProfileService(profileService, authzProvider, logger, validate)
+	if err != nil {
+		return err
+	}
+
+	rpcScanService, err := services.NewScanService(authzProvider, logger, validate, profileService, checkService)
+	if err != nil {
+		return err
+	}
+
+	rpcServer.Register(rpcUserService.EntityID(), rpcUserService)
+	rpcServer.Register(rpcCheckService.EntityID(), rpcCheckService)
+	rpcServer.Register(rpcProfileService.EntityID(), rpcProfileService)
+	rpcServer.Register(rpcScanService.EntityID(), rpcScanService)
+
+	// BUG FIX: the TLS config error was previously ignored.
+	tlsConfig, err := tls.NewTLSServerConfig(config, rand.Reader)
+	if err != nil {
+		return err
+	}
+
+	// BUG FIX: the healthcheck constructor error was previously ignored.
+	healthcheckService, err := healthchecker.NewHealthcheckService(
+		[]sophrosyne.HealthChecker{
+			userService,
+			userServiceDatabase,
+		},
+	)
+	if err != nil {
+		return err
+	}
+
+	s, err := http.NewServer(ctx, config, validate, logger, otelService, userService, tlsConfig)
+	if err != nil {
+		return err
+	}
+
+	// RPC endpoint: panic catching -> tracing -> request logging ->
+	// authentication -> JSON-RPC dispatch.
+	s.Handle(
+		"/v1/rpc",
+		middleware.PanicCatcher(
+			logger,
+			otelService,
+			middleware.SetupTracing(
+				otelService,
+				middleware.RequestLogging(
+					logger,
+					middleware.Authentication(
+						nil,
+						config,
+						userService,
+						logger,
+						http.RPCHandler(logger, rpcServer),
+					),
+				),
+			),
+		),
+	)
+	// Health endpoint: same middleware chain minus authentication.
+	s.Handle(
+		"/healthz",
+		middleware.PanicCatcher(
+			logger,
+			otelService,
+			middleware.SetupTracing(
+				otelService,
+				middleware.RequestLogging(
+					logger,
+					http.HealthcheckHandler(logger, healthcheckService),
+				),
+			),
+		),
+	)
+
+	srvErr := make(chan error, 1)
+	go func() {
+		srvErr <- s.Start()
+	}()
+
+	// Wait for interruption.
+	select {
+	case err = <-srvErr:
+		// Error when starting HTTP server.
+		return err
+	case <-ctx.Done():
+		// Wait for first CTRL+C.
+		// Stop receiving signal notifications as soon as possible.
+		stop()
+	}
+
+	// When Shutdown is called, ListenAndServe immediately returns ErrServerClosed.
+	err = s.Shutdown(context.Background())
+	return err
+}
diff --git a/collection.bru b/collection.bru
deleted file mode 100644
index 59af699..0000000
--- a/collection.bru
+++ /dev/null
@@ -1,7 +0,0 @@
-auth {
- mode: bearer
-}
-
-auth:bearer {
- token: {{bearer_token}}
-}
diff --git a/config.go b/config.go
new file mode 100644
index 0000000..64f5530
--- /dev/null
+++ b/config.go
@@ -0,0 +1,144 @@
+package sophrosyne
+
+// The ConfigProvider interface is used to retrieve the configuration of the
+// application.
+//
+// Implementations may support reloading the configuration by watching
+// configuration sources for changes. In the event that the configuration is
+// reloaded, the implementation must ensure that the pointer address
+// returned by the Get method remains the same, but is expected to change the
+// object pointed to by the pointer.
+//
+// Additionally, implementations should ensure that the configuration is
+// based off of the DefaultConfig and validated using the information in the
+// Config struct's validate tag.
+//
+// The ConfigProvider interface is expected to be thread-safe.
+//
+// The ConfigProvider interface is expected to be used as a singleton.
+//
+// The ConfigProvider interface is expected to reference the
+// [ConfigEnvironmentPrefix] if reading from the environment.
+//
+// The ConfigProvider interface is expected to use the [ConfigDelimiter] to
+// separate keys in the configuration.
+type ConfigProvider interface {
+	// Get returns the configuration of the application. Multiple calls
+	// to Get must return the same pointer address.
+	Get() *Config
+}
+
+// DefaultConfig is the default configuration for the application.
+// ConfigProvider implementations should use these values as the baseline
+// that user-supplied configuration is layered on top of.
+//
+// Keys without a sensible default are deliberately absent (for example
+// database.password and the security.siteKey/security.salt material) and
+// must be supplied by the user.
+var DefaultConfig = map[string]interface{}{
+	"database.user":                   "postgres",
+	"database.host":                   "localhost",
+	"database.port":                   5432,
+	"database.name":                   "postgres",
+	"server.port":                     8080,
+	"logging.level":                   LogLevelInfo,
+	"logging.format":                  LogFormatJSON,
+	"logging.enabled":                 true,
+	"tracing.enabled":                 true,
+	"tracing.batch.timeout":           5,
+	"tracing.output":                  OtelOutputStdout,
+	"metrics.enabled":                 false,
+	"metrics.interval":                60,
+	"metrics.output":                  OtelOutputStdout,
+	"principals.root.name":            "root",
+	"principals.root.email":           "root@localhost",
+	"principals.root.recreate":        false,
+	"services.users.pageSize":         2,
+	"services.users.cacheTTL":         100,
+	"security.tls.keyType":            "EC-P384",
+	"security.tls.insecureSkipVerify": false,
+	"services.profiles.pageSize":      2,
+	"services.profiles.cacheTTL":      100,
+	"services.checks.pageSize":        2,
+	"services.checks.cacheTTL":        100,
+}
+
+// The Config struct is used to store the configuration of the application.
+//
+// The ConfigProvider interface is used to retrieve the configuration of the
+// application from the environment variables, configuration files, and secret
+// files.
+//
+// The validate tag is used to validate the configuration using
+// https://github.com/go-playground/validator/v10.
+type Config struct {
+	// Principals configures built-in principals; currently only root.
+	Principals struct {
+		Root struct {
+			Name     string `key:"name" validate:"required"`
+			Email    string `key:"email" validate:"required"`
+			Recreate bool   `key:"recreate"`
+		} `key:"root" validate:"required"`
+	} `key:"principals" validate:"required"`
+	// Database holds the database connection settings. Password has no
+	// entry in DefaultConfig and must be provided explicitly (e.g. via a
+	// secret file).
+	Database struct {
+		User     string `key:"user" validate:"required"`
+		Password string `key:"password" validate:"required"`
+		Host     string `key:"host" validate:"required"`
+		Port     int    `key:"port" validate:"required,min=1,max=65535"`
+		Name     string `key:"name" validate:"required"`
+	} `key:"database"`
+	Server struct {
+		Port int `key:"port" validate:"required,min=1,max=65535"`
+	} `key:"server"`
+	// Logging controls the application logger. Note that only the
+	// "debug" and "info" levels pass validation.
+	Logging struct {
+		Enabled bool      `key:"enabled"`
+		Level   LogLevel  `key:"level" validate:"required,oneof=debug info"`
+		Format  LogFormat `key:"format" validate:"required,oneof=text json"`
+	} `key:"logging"`
+	// Tracing configures OpenTelemetry trace export ("stdout" or "http").
+	Tracing struct {
+		Enabled bool `key:"enabled"`
+		Batch   struct {
+			Timeout int `key:"timeout"`
+		} `key:"batch"`
+		Output OtelOutput `key:"output" validate:"required,oneof=stdout http"`
+	} `key:"tracing"`
+	// Metrics configures OpenTelemetry metric export ("stdout" or "http").
+	Metrics struct {
+		Enabled  bool       `key:"enabled"`
+		Interval int        `key:"interval"`
+		Output   OtelOutput `key:"output" validate:"required,oneof=stdout http"`
+	} `key:"metrics"`
+	// Security holds key material and TLS settings. SiteKey must be
+	// exactly 64 bytes and Salt exactly 32 bytes.
+	Security struct {
+		SiteKey []byte `key:"siteKey" validate:"required,min=64,max=64"`
+		Salt    []byte `key:"salt" validate:"required,min=32,max=32"`
+		TLS     struct {
+			KeyType            string `key:"keyType" validate:"required,oneof=RSA-4096 EC-P224 EC-P256 EC-P384 EC-P521 ED25519"`
+			CertificatePath    string `key:"certificatePath"`
+			KeyPath            string `key:"keyPath"`
+			InsecureSkipVerify bool   `key:"insecureSkipVerify"`
+		} `key:"tls" validate:"required"`
+	} `key:"security" validate:"required"`
+	// Services holds per-service pagination and cache settings.
+	Services struct {
+		Users struct {
+			PageSize int   `key:"pageSize" validate:"required,min=2"`
+			CacheTTL int64 `key:"cacheTTL" validate:"required,min=0"`
+		} `key:"users" validate:"required"`
+		Profiles struct {
+			PageSize int   `key:"pageSize" validate:"required,min=2"`
+			CacheTTL int64 `key:"cacheTTL" validate:"required,min=0"`
+		} `key:"profiles" validate:"required"`
+		Checks struct {
+			PageSize int   `key:"pageSize" validate:"required,min=2"`
+			CacheTTL int64 `key:"cacheTTL" validate:"required,min=0"`
+		} `key:"checks" validate:"required"`
+	} `key:"services" validate:"required"`
+	// Development holds options intended for local development only.
+	// NOTE(review): StaticRootToken presumably bypasses normal token
+	// generation — confirm it is never set in production deployments.
+	Development struct {
+		StaticRootToken string `key:"staticRootToken"`
+	} `key:"development"`
+}
+
+// ConfigEnvironmentPrefix is the prefix used to identify the environment
+// variables that are used to configure the application. ConfigProvider
+// implementations that read from the environment are expected to honor it.
+var ConfigEnvironmentPrefix = "SOPH_"
+
+// ConfigDelimiter is the delimiter used to separate the keys in the
+// configuration (e.g. "database.host").
+var ConfigDelimiter = "."
diff --git a/configurations/dev.yaml b/configurations/dev.yaml
index d516a5e..bbb804b 100644
--- a/configurations/dev.yaml
+++ b/configurations/dev.yaml
@@ -1,8 +1,11 @@
+database:
+ user: postgres
+ password: postgres
+ host: localhost
+ port: 5432
+ name: postgres
security:
- site_key: '6fAkguc1RQkgdWwoLdslkQjA/N5ujPvGEmG2l97Gt+4='
- salt: 'KULByt65QDNWy4BhtkOagb3td08q992ZfPWLthFw00s='
- outgoing_tls_verify: False
+ siteKey: !!binary ZD+b96+IBJranGUCEMZzi2yhrA7eRyrqwgCTYhEG7oIyNz3mcvEjGqC/RRF+rLtpfD+Jbb5DzVlmUM1TlL80BQ==
+ salt: !!binary Ag4k628yFI2h+SWoypEO7OYzrFqBrEz8az9c6Du7ons=
development:
- static_root_token: "thisisastaticroottoken"
-logging:
- format: development
+ staticRootToken: staticroottoken
diff --git a/configurations/integration.yaml b/configurations/integration.yaml
deleted file mode 100644
index 3f169ba..0000000
--- a/configurations/integration.yaml
+++ /dev/null
@@ -1,8 +0,0 @@
-security:
- site_key: 'lecGtZB0czpTHnCAKAbK3biwvDb+uFUZZ6yQL4CAb3lC8RpokUfAr0cBp8CNzYXLHOl+8paSnJDifq4F7Rx70A=='
- salt: 'BtNgvcpdVBxV7OQ9hIm4XJGOru0k7GonkIJKBcjQoNY='
- key_path: 'build/server.key'
- certificate_path: 'build/server.crt'
- outgoing_tls_ca_path: 'build/server.crt'
-database:
- host: 'db'
diff --git a/docker-compose.development.yml b/docker-compose.development.yml
index 13a117b..1d81442 100644
--- a/docker-compose.development.yml
+++ b/docker-compose.development.yml
@@ -1,15 +1,26 @@
services:
+
db:
- image: postgres@sha256:5c587078ef86f4b5fd24c9ed4394429e59035ead421873c14135d5b57c117908
+ image: postgres
restart: always
# set shared memory limit when using docker-compose
shm_size: 128mb
ports:
- - "5432:5432"
+ - "5432:5432"
environment:
POSTGRES_PASSWORD: postgres
+ POSTGRES_DB: postgres
+ POSTGRES_USER: postgres
healthcheck:
- test: ["CMD-SHELL", "pg_isready -U postgres"]
+ test: [ "CMD-SHELL", "pg_isready -U postgres" ]
interval: 5s
timeout: 5s
retries: 5
+
+ jaeger:
+ image: jaegertracing/all-in-one:1.56
+ ports:
+ - 5778:5778
+ - 16686:16686
+ - 4317:4317
+ - 4318:4318
diff --git a/docs/docs.md b/docs/docs.md
new file mode 100644
index 0000000..ba79c6c
--- /dev/null
+++ b/docs/docs.md
@@ -0,0 +1,12 @@
+# Loading binary data into the configuration
+
+To load binary data into the configuration, there are three options:
+- Load the binary data from a secrets file
+- Load the binary data from a YAML file. This requires the binary data to be
+ base64 encoded (without newlines and spaces) with a prefix of `!!binary `. For example:
+ ```yaml
+ binary_data: !!binary Ag4k628yFI2h+SWoypEO7OYzrFqBrEz8az9c6Du7ons=
+ ```
+- Load the binary data from an environment variable. This requires the binary
+ data to be hex encoded with the prefix `0x`. If the string fails decoding
+ from hex to binary, the configuration will treat it as a raw string.
diff --git a/environments/integration.bru b/environments/integration.bru
deleted file mode 100644
index 84adbd8..0000000
--- a/environments/integration.bru
+++ /dev/null
@@ -1,8 +0,0 @@
-vars {
- scheme: https://
- host: api
- port: 8000
-}
-vars:secret [
- bearer_token
-]
diff --git a/environments/local.bru b/environments/local.bru
deleted file mode 100644
index 40c7a86..0000000
--- a/environments/local.bru
+++ /dev/null
@@ -1,6 +0,0 @@
-vars {
- scheme: https://
- host: localhost
- port: 8000
- bearer_token: thisisastaticroottoken
-}
diff --git a/errors.go b/errors.go
new file mode 100644
index 0000000..c688924
--- /dev/null
+++ b/errors.go
@@ -0,0 +1,73 @@
+package sophrosyne
+
+import (
+ "errors"
+ "fmt"
+ "log/slog"
+ "runtime/debug"
+)
+
+// UnreachableCodeError signals that a code path believed to be
+// unreachable was executed. It captures the stack trace at construction
+// time so the offending call site can be located.
+type UnreachableCodeError struct {
+	// stack is the goroutine stack captured by debug.Stack when the
+	// error was created.
+	stack []byte
+}
+
+// NewUnreachableCodeError returns an UnreachableCodeError carrying the
+// current goroutine's stack trace.
+func NewUnreachableCodeError() error {
+	stack := debug.Stack()
+	return &UnreachableCodeError{
+		stack: stack,
+	}
+}
+
+// Error implements the error interface, embedding the captured stack in
+// the message.
+func (e UnreachableCodeError) Error() string {
+	return fmt.Sprintf("unreachable code encountered - this is a bug.\nStack:\n%s", e.stack)
+}
+
+// LogValue implements slog.LogValuer so the stack is emitted as a
+// structured attribute instead of being flattened into the message.
+func (e UnreachableCodeError) LogValue() slog.Value {
+	return slog.GroupValue(slog.String("stack", string(e.stack)))
+}
+
+// PanicError wraps a recovered panic, preserving the panic value (as a
+// string) and the stack trace. The fields are unexported; the error is
+// presumably constructed by panic-recovery code elsewhere in the
+// project (e.g. middleware) — no constructor is visible in this file.
+type PanicError struct {
+	reason string
+	stack  []byte
+}
+
+// Error implements the error interface, including the panic reason and
+// the captured stack.
+func (e PanicError) Error() string {
+	return fmt.Sprintf("panic encountered.\nReason: %s\nStack:\n%s", e.reason, e.stack)
+}
+
+// LogValue implements slog.LogValuer, exposing the reason and stack as
+// structured attributes.
+func (e PanicError) LogValue() slog.Value {
+	return slog.GroupValue(slog.String("reason", e.reason), slog.String("stack", string(e.stack)))
+}
+
+// ErrNotFound is the sentinel error for a requested entity that does
+// not exist. Callers should test for it with errors.Is.
+var ErrNotFound = errors.New("not found")
+
+// ConstraintViolationError describes the violation of a database
+// constraint.
+type ConstraintViolationError struct {
+	// UnderlyingError is the original error returned by the datastore.
+	UnderlyingError error
+	// code is the datastore-specific error code; exposed via Code().
+	code           string
+	Detail         string
+	TableName      string
+	ConstraintName string
+}
+
+// DatastoreError is an error that additionally carries a
+// datastore-specific error code.
+type DatastoreError interface {
+	error
+	Code() string
+}
+
+// NewConstraintViolationError constructs a ConstraintViolationError
+// from the underlying datastore error and its associated metadata.
+func NewConstraintViolationError(err error, code, detail, tableName, constraintName string) error {
+	cve := ConstraintViolationError{
+		UnderlyingError: err,
+		code:            code,
+		Detail:          detail,
+		TableName:       tableName,
+		ConstraintName:  constraintName,
+	}
+	return &cve
+}
+
+// Error implements the error interface.
+func (e ConstraintViolationError) Error() string {
+	return fmt.Sprintf("violation of constraint '%s' in table '%s' - code '%s'. Detail: %s", e.ConstraintName, e.TableName, e.code, e.Detail)
+}
+
+// Code returns the datastore-specific error code of the violation,
+// satisfying the DatastoreError interface.
+func (e ConstraintViolationError) Code() string {
+	return e.code
+}
diff --git a/go.mod b/go.mod
new file mode 100644
index 0000000..29cc849
--- /dev/null
+++ b/go.mod
@@ -0,0 +1,222 @@
+module github.com/madsrc/sophrosyne
+
+go 1.22.2
+
+require (
+ github.com/cedar-policy/cedar-go v0.0.0-20240423170804-f3d86202cb43
+ github.com/exaring/otelpgx v0.5.4
+ github.com/go-playground/validator/v10 v10.19.0
+ github.com/golang-migrate/migrate/v4 v4.17.1
+ github.com/jackc/pgx/v5 v5.5.5
+ github.com/knadh/koanf/parsers/yaml v0.1.0
+ github.com/knadh/koanf/providers/confmap v0.1.0
+ github.com/knadh/koanf/providers/env v0.1.0
+ github.com/knadh/koanf/providers/file v0.1.0
+ github.com/knadh/koanf/v2 v2.1.1
+ github.com/stretchr/testify v1.9.0
+ github.com/testcontainers/testcontainers-go/modules/compose v0.30.0
+ github.com/urfave/cli/v2 v2.27.2
+ go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.50.0
+ go.opentelemetry.io/otel v1.25.0
+ go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.25.0
+ go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.25.0
+ go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.25.0
+ go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.25.0
+ go.opentelemetry.io/otel/metric v1.25.0
+ go.opentelemetry.io/otel/sdk v1.25.0
+ go.opentelemetry.io/otel/sdk/metric v1.25.0
+ go.opentelemetry.io/otel/trace v1.25.0
+ gopkg.in/yaml.v3 v3.0.1
+)
+
+require (
+ dario.cat/mergo v1.0.0 // indirect
+ github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24 // indirect
+ github.com/AlecAivazis/survey/v2 v2.3.7 // indirect
+ github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 // indirect
+ github.com/Masterminds/semver/v3 v3.2.1 // indirect
+ github.com/Microsoft/go-winio v0.6.1 // indirect
+ github.com/Microsoft/hcsshim v0.11.4 // indirect
+ github.com/aws/aws-sdk-go-v2 v1.17.6 // indirect
+ github.com/aws/aws-sdk-go-v2/config v1.18.16 // indirect
+ github.com/aws/aws-sdk-go-v2/credentials v1.13.16 // indirect
+ github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.12.24 // indirect
+ github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.30 // indirect
+ github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.24 // indirect
+ github.com/aws/aws-sdk-go-v2/internal/ini v1.3.31 // indirect
+ github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.24 // indirect
+ github.com/aws/aws-sdk-go-v2/service/sso v1.12.5 // indirect
+ github.com/aws/aws-sdk-go-v2/service/ssooidc v1.14.5 // indirect
+ github.com/aws/aws-sdk-go-v2/service/sts v1.18.6 // indirect
+ github.com/aws/smithy-go v1.13.5 // indirect
+ github.com/beorn7/perks v1.0.1 // indirect
+ github.com/buger/goterm v1.0.4 // indirect
+ github.com/cenkalti/backoff/v4 v4.3.0 // indirect
+ github.com/cespare/xxhash/v2 v2.2.0 // indirect
+ github.com/compose-spec/compose-go/v2 v2.0.0-rc.8.0.20240228111658-a0507e98fe60 // indirect
+ github.com/containerd/console v1.0.3 // indirect
+ github.com/containerd/containerd v1.7.12 // indirect
+ github.com/containerd/continuity v0.4.2 // indirect
+ github.com/containerd/log v0.1.0 // indirect
+ github.com/containerd/typeurl/v2 v2.1.1 // indirect
+ github.com/cpuguy83/dockercfg v0.3.1 // indirect
+ github.com/cpuguy83/go-md2man/v2 v2.0.4 // indirect
+ github.com/davecgh/go-spew v1.1.1 // indirect
+ github.com/distribution/reference v0.5.0 // indirect
+ github.com/docker/buildx v0.12.0-rc2.0.20231219140829-617f538cb315 // indirect
+ github.com/docker/cli v25.0.4-0.20240305161310-2bf4225ad269+incompatible // indirect
+ github.com/docker/compose/v2 v2.24.7 // indirect
+ github.com/docker/distribution v2.8.3+incompatible // indirect
+ github.com/docker/docker v25.0.5+incompatible // indirect
+ github.com/docker/docker-credential-helpers v0.8.0 // indirect
+ github.com/docker/go v1.5.1-1.0.20160303222718-d30aec9fd63c // indirect
+ github.com/docker/go-connections v0.5.0 // indirect
+ github.com/docker/go-metrics v0.0.1 // indirect
+ github.com/docker/go-units v0.5.0 // indirect
+ github.com/emicklei/go-restful/v3 v3.10.1 // indirect
+ github.com/felixge/httpsnoop v1.0.4 // indirect
+ github.com/fsnotify/fsevents v0.1.1 // indirect
+ github.com/fsnotify/fsnotify v1.6.0 // indirect
+ github.com/fvbommel/sortorder v1.0.2 // indirect
+ github.com/gabriel-vasile/mimetype v1.4.3 // indirect
+ github.com/go-logr/logr v1.4.1 // indirect
+ github.com/go-logr/stdr v1.2.2 // indirect
+ github.com/go-ole/go-ole v1.2.6 // indirect
+ github.com/go-openapi/jsonpointer v0.19.5 // indirect
+ github.com/go-openapi/jsonreference v0.20.0 // indirect
+ github.com/go-openapi/swag v0.19.14 // indirect
+ github.com/go-playground/locales v0.14.1 // indirect
+ github.com/go-playground/universal-translator v0.18.1 // indirect
+ github.com/go-viper/mapstructure/v2 v2.0.0-alpha.1 // indirect
+ github.com/gofrs/flock v0.8.1 // indirect
+ github.com/gogo/googleapis v1.4.1 // indirect
+ github.com/gogo/protobuf v1.3.2 // indirect
+ github.com/golang/protobuf v1.5.4 // indirect
+ github.com/google/gnostic v0.5.7-v3refs // indirect
+ github.com/google/go-cmp v0.6.0 // indirect
+ github.com/google/gofuzz v1.2.0 // indirect
+ github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 // indirect
+ github.com/google/uuid v1.6.0 // indirect
+ github.com/gorilla/mux v1.8.1 // indirect
+ github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 // indirect
+ github.com/grpc-ecosystem/grpc-gateway/v2 v2.19.0 // indirect
+ github.com/hashicorp/errwrap v1.1.0 // indirect
+ github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
+ github.com/hashicorp/go-multierror v1.1.1 // indirect
+ github.com/hashicorp/go-version v1.6.0 // indirect
+ github.com/imdario/mergo v0.3.16 // indirect
+ github.com/in-toto/in-toto-golang v0.5.0 // indirect
+ github.com/inconshreveable/mousetrap v1.1.0 // indirect
+ github.com/jackc/pgerrcode v0.0.0-20220416144525-469b46aa5efa // indirect
+ github.com/jackc/pgpassfile v1.0.0 // indirect
+ github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a // indirect
+ github.com/jackc/puddle/v2 v2.2.1 // indirect
+ github.com/jonboulle/clockwork v0.4.0 // indirect
+ github.com/josharian/intern v1.0.0 // indirect
+ github.com/json-iterator/go v1.1.12 // indirect
+ github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 // indirect
+ github.com/klauspost/compress v1.17.4 // indirect
+ github.com/knadh/koanf/maps v0.1.1 // indirect
+ github.com/leodido/go-urn v1.4.0 // indirect
+ github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 // indirect
+ github.com/magiconair/properties v1.8.7 // indirect
+ github.com/mailru/easyjson v0.7.6 // indirect
+ github.com/mattn/go-colorable v0.1.13 // indirect
+ github.com/mattn/go-isatty v0.0.17 // indirect
+ github.com/mattn/go-runewidth v0.0.15 // indirect
+ github.com/mattn/go-shellwords v1.0.12 // indirect
+ github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect
+ github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b // indirect
+ github.com/miekg/pkcs11 v1.1.1 // indirect
+ github.com/mitchellh/copystructure v1.2.0 // indirect
+ github.com/mitchellh/mapstructure v1.5.0 // indirect
+ github.com/mitchellh/reflectwalk v1.0.2 // indirect
+ github.com/moby/buildkit v0.13.0-beta1.0.20231219135447-957cb50df991 // indirect
+ github.com/moby/locker v1.0.1 // indirect
+ github.com/moby/patternmatcher v0.6.0 // indirect
+ github.com/moby/spdystream v0.2.0 // indirect
+ github.com/moby/sys/mountinfo v0.7.1 // indirect
+ github.com/moby/sys/sequential v0.5.0 // indirect
+ github.com/moby/sys/signal v0.7.0 // indirect
+ github.com/moby/sys/symlink v0.2.0 // indirect
+ github.com/moby/sys/user v0.1.0 // indirect
+ github.com/moby/term v0.5.0 // indirect
+ github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
+ github.com/modern-go/reflect2 v1.0.2 // indirect
+ github.com/morikuni/aec v1.0.0 // indirect
+ github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
+ github.com/opencontainers/go-digest v1.0.0 // indirect
+ github.com/opencontainers/image-spec v1.1.0 // indirect
+ github.com/pelletier/go-toml v1.9.5 // indirect
+ github.com/pkg/errors v0.9.1 // indirect
+ github.com/pmezard/go-difflib v1.0.0 // indirect
+ github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c // indirect
+ github.com/prometheus/client_golang v1.16.0 // indirect
+ github.com/prometheus/client_model v0.4.0 // indirect
+ github.com/prometheus/common v0.42.0 // indirect
+ github.com/prometheus/procfs v0.10.1 // indirect
+ github.com/rivo/uniseg v0.2.0 // indirect
+ github.com/russross/blackfriday/v2 v2.1.0 // indirect
+ github.com/secure-systems-lab/go-securesystemslib v0.4.0 // indirect
+ github.com/serialx/hashring v0.0.0-20190422032157-8b2912629002 // indirect
+ github.com/shibumi/go-pathspec v1.3.0 // indirect
+ github.com/shirou/gopsutil/v3 v3.23.12 // indirect
+ github.com/shoenig/go-m1cpu v0.1.6 // indirect
+ github.com/sirupsen/logrus v1.9.3 // indirect
+ github.com/spf13/cobra v1.8.0 // indirect
+ github.com/spf13/pflag v1.0.5 // indirect
+ github.com/testcontainers/testcontainers-go v0.30.0 // indirect
+ github.com/testcontainers/testcontainers-go/modules/postgres v0.30.0 // indirect
+ github.com/theupdateframework/notary v0.7.0 // indirect
+ github.com/tilt-dev/fsnotify v1.4.8-0.20220602155310-fff9c274a375 // indirect
+ github.com/tklauser/go-sysconf v0.3.12 // indirect
+ github.com/tklauser/numcpus v0.6.1 // indirect
+ github.com/tonistiigi/fsutil v0.0.0-20230825212630-f09800878302 // indirect
+ github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea // indirect
+ github.com/tonistiigi/vt100 v0.0.0-20230623042737-f9a4f7ef6531 // indirect
+ github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect
+ github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect
+ github.com/xeipuuv/gojsonschema v1.2.0 // indirect
+ github.com/xrash/smetrics v0.0.0-20240312152122-5f08fbb34913 // indirect
+ github.com/yusufpapurcu/wmi v1.2.3 // indirect
+ go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.45.0 // indirect
+ go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.45.0 // indirect
+ go.opentelemetry.io/otel/exporters/otlp/otlpmetric v0.42.0 // indirect
+ go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v0.42.0 // indirect
+ go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.25.0 // indirect
+ go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.19.0 // indirect
+ go.opentelemetry.io/otel/exporters/prometheus v0.42.0 // indirect
+ go.opentelemetry.io/proto/otlp v1.1.0 // indirect
+ go.uber.org/atomic v1.7.0 // indirect
+ go.uber.org/mock v0.4.0 // indirect
+ golang.org/x/crypto v0.21.0 // indirect
+ golang.org/x/exp v0.0.0-20240222234643-814bf88cf225 // indirect
+ golang.org/x/mod v0.16.0 // indirect
+ golang.org/x/net v0.23.0 // indirect
+ golang.org/x/oauth2 v0.17.0 // indirect
+ golang.org/x/sync v0.6.0 // indirect
+ golang.org/x/sys v0.18.0 // indirect
+ golang.org/x/term v0.18.0 // indirect
+ golang.org/x/text v0.14.0 // indirect
+ golang.org/x/time v0.3.0 // indirect
+ golang.org/x/tools v0.18.0 // indirect
+ google.golang.org/appengine v1.6.8 // indirect
+ google.golang.org/genproto v0.0.0-20240227224415-6ceb2ff114de // indirect
+ google.golang.org/genproto/googleapis/api v0.0.0-20240227224415-6ceb2ff114de // indirect
+ google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda // indirect
+ google.golang.org/grpc v1.63.0 // indirect
+ google.golang.org/protobuf v1.33.0 // indirect
+ gopkg.in/inf.v0 v0.9.1 // indirect
+ gopkg.in/yaml.v2 v2.4.0 // indirect
+ k8s.io/api v0.26.7 // indirect
+ k8s.io/apimachinery v0.26.7 // indirect
+ k8s.io/apiserver v0.26.7 // indirect
+ k8s.io/client-go v0.26.7 // indirect
+ k8s.io/klog/v2 v2.90.1 // indirect
+ k8s.io/kube-openapi v0.0.0-20221012153701-172d655c2280 // indirect
+ k8s.io/utils v0.0.0-20230220204549-a5ecb0141aa5 // indirect
+ sigs.k8s.io/json v0.0.0-20220713155537-f223a00ba0e2 // indirect
+ sigs.k8s.io/structured-merge-diff/v4 v4.2.3 // indirect
+ sigs.k8s.io/yaml v1.3.0 // indirect
+ tags.cncf.io/container-device-interface v0.6.2 // indirect
+)
diff --git a/go.sum b/go.sum
new file mode 100644
index 0000000..3697f3c
--- /dev/null
+++ b/go.sum
@@ -0,0 +1,819 @@
+cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
+cloud.google.com/go v0.112.0 h1:tpFCD7hpHFlQ8yPwT3x+QeXqc2T6+n6T+hmABHfDUSM=
+cloud.google.com/go/compute v1.24.0 h1:phWcR2eWzRJaL/kOiJwfFsPs4BaKq1j6vnpZrc1YlVg=
+cloud.google.com/go/compute v1.24.0/go.mod h1:kw1/T+h/+tK2LJK0wiPPx1intgdAM3j/g3hFDlscY40=
+cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY=
+cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA=
+dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk=
+dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk=
+github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24 h1:bvDV9vkmnHYOMsOr4WLk+Vo07yKIzd94sVoIqshQ4bU=
+github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24/go.mod h1:8o94RPi1/7XTJvwPpRSzSUedZrtlirdB3r9Z20bi2f8=
+github.com/AdamKorcz/go-118-fuzz-build v0.0.0-20230306123547-8075edf89bb0 h1:59MxjQVfjXsBpLy+dbd2/ELV5ofnUkUZBvWSC85sheA=
+github.com/AdamKorcz/go-118-fuzz-build v0.0.0-20230306123547-8075edf89bb0/go.mod h1:OahwfttHWG6eJ0clwcfBAHoDI6X/LV/15hx/wlMZSrU=
+github.com/AlecAivazis/survey/v2 v2.3.7 h1:6I/u8FvytdGsgonrYsVn2t8t4QiRnh6QSTqkkhIiSjQ=
+github.com/AlecAivazis/survey/v2 v2.3.7/go.mod h1:xUTIdE4KCOIjsBAE1JYsUPoCqYdZ1reCfTwbto0Fduo=
+github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 h1:L/gRVlceqvL25UVaW/CKtUDjefjrs0SPonmDGUVOYP0=
+github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E=
+github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
+github.com/BurntSushi/toml v1.3.2 h1:o7IhLm0Msx3BaB+n3Ag7L8EVlByGnpq14C4YWiu/gL8=
+github.com/BurntSushi/toml v1.3.2/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ=
+github.com/Masterminds/semver/v3 v3.2.1 h1:RN9w6+7QoMeJVGyfmbcgs28Br8cvmnucEXnY0rYXWg0=
+github.com/Masterminds/semver/v3 v3.2.1/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ=
+github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow=
+github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM=
+github.com/Microsoft/hcsshim v0.11.4 h1:68vKo2VN8DE9AdN4tnkWnmdhqdbpUFM8OF3Airm7fz8=
+github.com/Microsoft/hcsshim v0.11.4/go.mod h1:smjE4dvqPX9Zldna+t5FG3rnoHhaB7QYxPRqGcpAD9w=
+github.com/Netflix/go-expect v0.0.0-20220104043353-73e0943537d2 h1:+vx7roKuyA63nhn5WAunQHLTznkw5W8b1Xc0dNjp83s=
+github.com/Netflix/go-expect v0.0.0-20220104043353-73e0943537d2/go.mod h1:HBCaDeC1lPdgDeDbhX8XFpy1jqjK0IBG8W5K+xYqA0w=
+github.com/Shopify/logrus-bugsnag v0.0.0-20170309145241-6dbc35f2c30d/go.mod h1:HI8ITrYtUY+O+ZhtlqUnD8+KwNPOyugEhfP9fdUIaEQ=
+github.com/Shopify/logrus-bugsnag v0.0.0-20171204204709-577dee27f20d h1:UrqY+r/OJnIp5u0s1SbQ8dVfLCZJsnvazdBP5hS4iRs=
+github.com/Shopify/logrus-bugsnag v0.0.0-20171204204709-577dee27f20d/go.mod h1:HI8ITrYtUY+O+ZhtlqUnD8+KwNPOyugEhfP9fdUIaEQ=
+github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
+github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
+github.com/anchore/go-struct-converter v0.0.0-20221118182256-c68fdcfa2092 h1:aM1rlcoLz8y5B2r4tTLMiVTrMtpfY0O8EScKJxaSaEc=
+github.com/anchore/go-struct-converter v0.0.0-20221118182256-c68fdcfa2092/go.mod h1:rYqSE9HbjzpHTI74vwPvae4ZVYZd1lue2ta6xHPdblA=
+github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio=
+github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs=
+github.com/aws/aws-sdk-go-v2 v1.17.6 h1:Y773UK7OBqhzi5VDXMi1zVGsoj+CVHs2eaC2bDsLwi0=
+github.com/aws/aws-sdk-go-v2 v1.17.6/go.mod h1:uzbQtefpm44goOPmdKyAlXSNcwlRgF3ePWVW6EtJvvw=
+github.com/aws/aws-sdk-go-v2/config v1.18.16 h1:4r7gsCu8Ekwl5iJGE/GmspA2UifqySCCkyyyPFeWs3w=
+github.com/aws/aws-sdk-go-v2/config v1.18.16/go.mod h1:XjM6lVbq7UgELp9NjXBrb1DQY/ownlWsvDhEQksemJc=
+github.com/aws/aws-sdk-go-v2/credentials v1.13.16 h1:GgToSxaENX/1zXIGNFfiVk4hxryYJ5Vt4Mh8XLAL7Lc=
+github.com/aws/aws-sdk-go-v2/credentials v1.13.16/go.mod h1:KP7aFJhfwPFgx9aoVYL2nYHjya5WBD98CWaadpgmnpY=
+github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.12.24 h1:5qyqXASrX2zy5cTnoHHa4N2c3Lc94GH7gjnBP3GwKdU=
+github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.12.24/go.mod h1:neYVaeKr5eT7BzwULuG2YbLhzWZ22lpjKdCybR7AXrQ=
+github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.30 h1:y+8n9AGDjikyXoMBTRaHHHSaFEB8267ykmvyPodJfys=
+github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.30/go.mod h1:LUBAO3zNXQjoONBKn/kR1y0Q4cj/D02Ts0uHYjcCQLM=
+github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.24 h1:r+Kv+SEJquhAZXaJ7G4u44cIwXV3f8K+N482NNAzJZA=
+github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.24/go.mod h1:gAuCezX/gob6BSMbItsSlMb6WZGV7K2+fWOvk8xBSto=
+github.com/aws/aws-sdk-go-v2/internal/ini v1.3.31 h1:hf+Vhp5WtTdcSdE+yEcUz8L73sAzN0R+0jQv+Z51/mI=
+github.com/aws/aws-sdk-go-v2/internal/ini v1.3.31/go.mod h1:5zUjguZfG5qjhG9/wqmuyHRyUftl2B5Cp6NNxNC6kRA=
+github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.24 h1:c5qGfdbCHav6viBwiyDns3OXqhqAbGjfIB4uVu2ayhk=
+github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.24/go.mod h1:HMA4FZG6fyib+NDo5bpIxX1EhYjrAOveZJY2YR0xrNE=
+github.com/aws/aws-sdk-go-v2/service/sso v1.12.5 h1:bdKIX6SVF3nc3xJFw6Nf0igzS6Ff/louGq8Z6VP/3Hs=
+github.com/aws/aws-sdk-go-v2/service/sso v1.12.5/go.mod h1:vuWiaDB30M/QTC+lI3Wj6S/zb7tpUK2MSYgy3Guh2L0=
+github.com/aws/aws-sdk-go-v2/service/ssooidc v1.14.5 h1:xLPZMyuZ4GuqRCIec/zWuIhRFPXh2UOJdLXBSi64ZWQ=
+github.com/aws/aws-sdk-go-v2/service/ssooidc v1.14.5/go.mod h1:QjxpHmCwAg0ESGtPQnLIVp7SedTOBMYy+Slr3IfMKeI=
+github.com/aws/aws-sdk-go-v2/service/sts v1.18.6 h1:rIFn5J3yDoeuKCE9sESXqM5POTAhOP1du3bv/qTL+tE=
+github.com/aws/aws-sdk-go-v2/service/sts v1.18.6/go.mod h1:48WJ9l3dwP0GSHWGc5sFGGlCkuA82Mc2xnw+T6Q8aDw=
+github.com/aws/smithy-go v1.13.5 h1:hgz0X/DX0dGqTYpGALqXJoRKRj5oQ7150i5FdTePzO8=
+github.com/aws/smithy-go v1.13.5/go.mod h1:Tg+OJXh4MB2R/uN61Ko2f6hTZwB/ZYGOtib8J3gBHzA=
+github.com/beorn7/perks v0.0.0-20150223135152-b965b613227f/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
+github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
+github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
+github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
+github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
+github.com/bitly/go-hostpool v0.1.0/go.mod h1:4gOCgp6+NZnVqlKyZ/iBZFTAJKembaVENUpMkpg42fw=
+github.com/bitly/go-simplejson v0.5.0/go.mod h1:cXHtHw4XUPsvGaxgjIAn8PhEWG9NfngEKAMDJEczWVA=
+github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4=
+github.com/buger/goterm v1.0.4 h1:Z9YvGmOih81P0FbVtEYTFF6YsSgxSUKEhf/f9bTMXbY=
+github.com/buger/goterm v1.0.4/go.mod h1:HiFWV3xnkolgrBV3mY8m0X0Pumt4zg4QhbdOzQtB8tE=
+github.com/bugsnag/bugsnag-go v1.0.5-0.20150529004307-13fd6b8acda0 h1:s7+5BfS4WFJoVF9pnB8kBk03S7pZXRdKamnV0FOl5Sc=
+github.com/bugsnag/bugsnag-go v1.0.5-0.20150529004307-13fd6b8acda0/go.mod h1:2oa8nejYd4cQ/b0hMIopN0lCRxU0bueqREvZLWFrtK8=
+github.com/bugsnag/osext v0.0.0-20130617224835-0dd3f918b21b h1:otBG+dV+YK+Soembjv71DPz3uX/V/6MMlSyD9JBQ6kQ=
+github.com/bugsnag/osext v0.0.0-20130617224835-0dd3f918b21b/go.mod h1:obH5gd0BsqsP2LwDJ9aOkm/6J86V6lyAXCoQWGw3K50=
+github.com/bugsnag/panicwrap v0.0.0-20151223152923-e2c28503fcd0 h1:nvj0OLI3YqYXer/kZD8Ri1aaunCxIEsOst1BVJswV0o=
+github.com/bugsnag/panicwrap v0.0.0-20151223152923-e2c28503fcd0/go.mod h1:D/8v3kj0zr8ZAKg1AQ6crr+5VwKN5eIywRkfhyM/+dE=
+github.com/cedar-policy/cedar-go v0.0.0-20240423170804-f3d86202cb43 h1:mCdHcb1NVpAo0L2+bq4HZ3Iz9q7iJ4PPQgHgvfZ1Crc=
+github.com/cedar-policy/cedar-go v0.0.0-20240423170804-f3d86202cb43/go.mod h1:qZuNWmkhx7pxkYvgmNPcBE4NtfGBF6nmI+bjecaQp14=
+github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8=
+github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE=
+github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
+github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44=
+github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
+github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
+github.com/cloudflare/cfssl v0.0.0-20180223231731-4e2dcbde5004 h1:lkAMpLVBDaj17e85keuznYcH5rqI438v41pKcBl4ZxQ=
+github.com/cloudflare/cfssl v0.0.0-20180223231731-4e2dcbde5004/go.mod h1:yMWuSON2oQp+43nFtAV/uvKQIFpSPerB57DCt9t8sSA=
+github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
+github.com/cncf/xds/go v0.0.0-20231128003011-0fa0005c9caa h1:jQCWAUqqlij9Pgj2i/PB79y4KOPYVyFYdROxgaCwdTQ=
+github.com/cncf/xds/go v0.0.0-20231128003011-0fa0005c9caa/go.mod h1:x/1Gn8zydmfq8dk6e9PdstVsDgu9RuyIIJqAaF//0IM=
+github.com/codahale/rfc6979 v0.0.0-20141003034818-6a90f24967eb h1:EDmT6Q9Zs+SbUoc7Ik9EfrFqcylYqgPZ9ANSbTAntnE=
+github.com/codahale/rfc6979 v0.0.0-20141003034818-6a90f24967eb/go.mod h1:ZjrT6AXHbDs86ZSdt/osfBi5qfexBrKUdONk989Wnk4=
+github.com/compose-spec/compose-go/v2 v2.0.0-rc.8.0.20240228111658-a0507e98fe60 h1:NlkpaLBPFr05mNJWVMH7PP4L30gFG6k4z1QpypLUSh8=
+github.com/compose-spec/compose-go/v2 v2.0.0-rc.8.0.20240228111658-a0507e98fe60/go.mod h1:bEPizBkIojlQ20pi2vNluBa58tevvj0Y18oUSHPyfdc=
+github.com/containerd/cgroups v1.1.0 h1:v8rEWFl6EoqHB+swVNjVoCJE8o3jX7e8nqBGPLaDFBM=
+github.com/containerd/cgroups v1.1.0/go.mod h1:6ppBcbh/NOOUU+dMKrykgaBnK9lCIBxHqJDGwsa1mIw=
+github.com/containerd/console v1.0.3 h1:lIr7SlA5PxZyMV30bDW0MGbiOPXwc63yRuCP0ARubLw=
+github.com/containerd/console v1.0.3/go.mod h1:7LqA/THxQ86k76b8c/EMSiaJ3h1eZkMkXar0TQ1gf3U=
+github.com/containerd/containerd v1.7.12 h1:+KQsnv4VnzyxWcfO9mlxxELaoztsDEjOuCMPAuPqgU0=
+github.com/containerd/containerd v1.7.12/go.mod h1:/5OMpE1p0ylxtEUGY8kuCYkDRzJm9NO1TFMWjUpdevk=
+github.com/containerd/continuity v0.4.2 h1:v3y/4Yz5jwnvqPKJJ+7Wf93fyWoCB3F5EclWG023MDM=
+github.com/containerd/continuity v0.4.2/go.mod h1:F6PTNCKepoxEaXLQp3wDAjygEnImnZ/7o4JzpodfroQ=
+github.com/containerd/fifo v1.1.0 h1:4I2mbh5stb1u6ycIABlBw9zgtlK8viPI9QkQNRQEEmY=
+github.com/containerd/fifo v1.1.0/go.mod h1:bmC4NWMbXlt2EZ0Hc7Fx7QzTFxgPID13eH0Qu+MAb2o=
+github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I=
+github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo=
+github.com/containerd/nydus-snapshotter v0.13.1 h1:5XNkCZ9ivLXCcyx3Jbbfh/fntkcls69uBg0x9VE8zlk=
+github.com/containerd/nydus-snapshotter v0.13.1/go.mod h1:XWAz9ytsjBuKPVXDKP3xoMlcSKNsGnjXlEup6DuzUIo=
+github.com/containerd/stargz-snapshotter v0.14.3 h1:OTUVZoPSPs8mGgmQUE1dqw3WX/3nrsmsurW7UPLWl1U=
+github.com/containerd/stargz-snapshotter/estargz v0.14.3 h1:OqlDCK3ZVUO6C3B/5FSkDwbkEETK84kQgEeFwDC+62k=
+github.com/containerd/stargz-snapshotter/estargz v0.14.3/go.mod h1:KY//uOCIkSuNAHhJogcZtrNHdKrA99/FCCRjE3HD36o=
+github.com/containerd/ttrpc v1.2.2 h1:9vqZr0pxwOF5koz6N0N3kJ0zDHokrcPxIR/ZR2YFtOs=
+github.com/containerd/ttrpc v1.2.2/go.mod h1:sIT6l32Ph/H9cvnJsfXM5drIVzTr5A2flTf1G5tYZak=
+github.com/containerd/typeurl/v2 v2.1.1 h1:3Q4Pt7i8nYwy2KmQWIw2+1hTvwTE/6w9FqcttATPO/4=
+github.com/containerd/typeurl/v2 v2.1.1/go.mod h1:IDp2JFvbwZ31H8dQbEIY7sDl2L3o3HZj1hsSQlywkQ0=
+github.com/cpuguy83/dockercfg v0.3.1 h1:/FpZ+JaygUR/lZP2NlFI2DVfrOEMAIKP5wWEJdoYe9E=
+github.com/cpuguy83/dockercfg v0.3.1/go.mod h1:sugsbF4//dDlL/i+S+rtpIWp+5h0BHJHfjj5/jFyUJc=
+github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
+github.com/cpuguy83/go-md2man/v2 v2.0.4 h1:wfIWP927BUkWJb2NmU/kNDYIBTh/ziUX91+lVfRxZq4=
+github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
+github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
+github.com/creack/pty v1.1.17/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4=
+github.com/creack/pty v1.1.18 h1:n56/Zwd5o6whRC5PMGretI4IdRLlmBXYNjScPaBgsbY=
+github.com/creack/pty v1.1.18/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4=
+github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
+github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/denisenkom/go-mssqldb v0.0.0-20191128021309-1d7a30a10f73/go.mod h1:xbL0rPBG9cCiLr28tMa8zpbdarY27NDyej4t/EjAShU=
+github.com/dhui/dktest v0.4.1 h1:/w+IWuDXVymg3IrRJCHHOkMK10m9aNVMOyD0X12YVTg=
+github.com/dhui/dktest v0.4.1/go.mod h1:DdOqcUpL7vgyP4GlF3X3w7HbSlz8cEQzwewPveYEQbA=
+github.com/distribution/reference v0.5.0 h1:/FUIFXtfc/x2gpa5/VGfiGLuOIdYa1t65IKK2OFGvA0=
+github.com/distribution/reference v0.5.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E=
+github.com/docker/buildx v0.12.0-rc2.0.20231219140829-617f538cb315 h1:UZxx9xBADdf/9UmSdEUi+pdJoPKpgcf9QUAY5gEIYmY=
+github.com/docker/buildx v0.12.0-rc2.0.20231219140829-617f538cb315/go.mod h1:X8ZHhuW6ncwtoJ36TlU+gyaROTcBkTE01VHYmTStQCE=
+github.com/docker/cli v25.0.4-0.20240305161310-2bf4225ad269+incompatible h1:xhVCHXq+P5LhT31+RuDuk0xXEbEnd50Fr37J1bGuyWg=
+github.com/docker/cli v25.0.4-0.20240305161310-2bf4225ad269+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8=
+github.com/docker/compose/v2 v2.24.7 h1:1WSo4CVf18tnGJMC6V78jYsAxSDD61ry6L3JwVT+8EI=
+github.com/docker/compose/v2 v2.24.7/go.mod h1:7U3QbXdRJfBylTgkdlrjOg8hWLZqM09mof9DVZ5Fh4E=
+github.com/docker/distribution v2.7.1+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w=
+github.com/docker/distribution v2.8.3+incompatible h1:AtKxIZ36LoNK51+Z6RpzLpddBirtxJnzDrHLEKxTAYk=
+github.com/docker/distribution v2.8.3+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w=
+github.com/docker/docker v25.0.5+incompatible h1:UmQydMduGkrD5nQde1mecF/YnSbTOaPeFIeP5C4W+DE=
+github.com/docker/docker v25.0.5+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
+github.com/docker/docker-credential-helpers v0.8.0 h1:YQFtbBQb4VrpoPxhFuzEBPQ9E16qz5SpHLS+uswaCp8=
+github.com/docker/docker-credential-helpers v0.8.0/go.mod h1:UGFXcuoQ5TxPiB54nHOZ32AWRqQdECoh/Mg0AlEYb40=
+github.com/docker/go v1.5.1-1.0.20160303222718-d30aec9fd63c h1:lzqkGL9b3znc+ZUgi7FlLnqjQhcXxkNM/quxIjBVMD0=
+github.com/docker/go v1.5.1-1.0.20160303222718-d30aec9fd63c/go.mod h1:CADgU4DSXK5QUlFslkQu2yW2TKzFZcXq/leZfM0UH5Q=
+github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec=
+github.com/docker/go-connections v0.5.0 h1:USnMq7hx7gwdVZq1L49hLXaFtUdTADjXGp+uj1Br63c=
+github.com/docker/go-connections v0.5.0/go.mod h1:ov60Kzw0kKElRwhNs9UlUHAE/F9Fe6GLaXnqyDdmEXc=
+github.com/docker/go-events v0.0.0-20190806004212-e31b211e4f1c h1:+pKlWGMw7gf6bQ+oDZB4KHQFypsfjYlq/C4rfL7D3g8=
+github.com/docker/go-events v0.0.0-20190806004212-e31b211e4f1c/go.mod h1:Uw6UezgYA44ePAFQYUehOuCzmy5zmg/+nl2ZfMWGkpA=
+github.com/docker/go-metrics v0.0.0-20180209012529-399ea8c73916/go.mod h1:/u0gXw0Gay3ceNrsHubL3BtdOL2fHf93USgMTe0W5dI=
+github.com/docker/go-metrics v0.0.1 h1:AgB/0SvBxihN0X8OR4SjsblXkbMvalQ8cjmtKQ2rQV8=
+github.com/docker/go-metrics v0.0.1/go.mod h1:cG1hvH2utMXtqgqqYE9plW6lDxS3/5ayHzueweSI3Vw=
+github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4=
+github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
+github.com/docker/libtrust v0.0.0-20160708172513-aabc10ec26b7 h1:UhxFibDNY/bfvqU5CAUmr9zpesgbU6SWc8/B4mflAE4=
+github.com/docker/libtrust v0.0.0-20160708172513-aabc10ec26b7/go.mod h1:cyGadeNEkKy96OOhEzfZl+yxihPEzKnqJwvfuSUqbZE=
+github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE=
+github.com/dvsekhvalnov/jose2go v0.0.0-20170216131308-f21a8cedbbae/go.mod h1:7BvyPhdbLxMXIYTFPLsyJRFMsKmOZnQmzh6Gb+uquuM=
+github.com/elazarl/goproxy v0.0.0-20180725130230-947c36da3153 h1:yUdfgN0XgIJw7foRItutHYUIhlcKzcSf5vDpdhQAKTc=
+github.com/elazarl/goproxy v0.0.0-20180725130230-947c36da3153/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc=
+github.com/emicklei/go-restful/v3 v3.10.1 h1:rc42Y5YTp7Am7CS630D7JmhRjq4UlEUuEKfrDac4bSQ=
+github.com/emicklei/go-restful/v3 v3.10.1/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry+4bhvbpWn3mrbc=
+github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
+github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
+github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
+github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
+github.com/envoyproxy/protoc-gen-validate v1.0.4 h1:gVPz/FMfvh57HdSJQyvBtF00j8JU4zdyUgIUNhlgg0A=
+github.com/envoyproxy/protoc-gen-validate v1.0.4/go.mod h1:qys6tmnRsYrQqIhm2bvKZH4Blx/1gTIZ2UKVY1M+Yew=
+github.com/erikstmartin/go-testdb v0.0.0-20160219214506-8d10e4a1bae5/go.mod h1:a2zkGnVExMxdzMo3M0Hi/3sEU+cWnZpSni0O6/Yb/P0=
+github.com/exaring/otelpgx v0.5.4 h1:uytSs8A9/8tpnJ4J8jsusbRtNgP6Cn5npnffCxE2Unk=
+github.com/exaring/otelpgx v0.5.4/go.mod h1:DuRveXIeRNz6VJrMTj2uCBFqiocMx4msCN1mIMmbZUI=
+github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
+github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
+github.com/fsnotify/fsevents v0.1.1 h1:/125uxJvvoSDDBPen6yUZbil8J9ydKZnnl3TWWmvnkw=
+github.com/fsnotify/fsevents v0.1.1/go.mod h1:+d+hS27T6k5J8CRaPLKFgwKYcpS7GwW3Ule9+SC2ZRc=
+github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
+github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY=
+github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw=
+github.com/fvbommel/sortorder v1.0.2 h1:mV4o8B2hKboCdkJm+a7uX/SIpZob4JzUpc5GGnM45eo=
+github.com/fvbommel/sortorder v1.0.2/go.mod h1:uk88iVf1ovNn1iLfgUVU2F9o5eO30ui720w+kxuqRs0=
+github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0=
+github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk=
+github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
+github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
+github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
+github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
+github.com/go-logr/logr v1.2.0/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
+github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
+github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ=
+github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
+github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
+github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
+github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY=
+github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0=
+github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg=
+github.com/go-openapi/jsonpointer v0.19.5 h1:gZr+CIYByUqjcgeLXnQu2gHYQC9o73G2XUeOFYEICuY=
+github.com/go-openapi/jsonpointer v0.19.5/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg=
+github.com/go-openapi/jsonreference v0.20.0 h1:MYlu0sBgChmCfJxxUKZ8g1cPWFOB37YSZqewK7OKeyA=
+github.com/go-openapi/jsonreference v0.20.0/go.mod h1:Ag74Ico3lPc+zR+qjn4XBUmXymS4zJbYVCZmcgkasdo=
+github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk=
+github.com/go-openapi/swag v0.19.14 h1:gm3vOOXfiuw5i9p5N9xJvfjvuofpyvLA9Wr6QfK5Fng=
+github.com/go-openapi/swag v0.19.14/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ=
+github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
+github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
+github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
+github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
+github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
+github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
+github.com/go-playground/validator/v10 v10.19.0 h1:ol+5Fu+cSq9JD7SoSqe04GMI92cbn0+wvQ3bZ8b/AU4=
+github.com/go-playground/validator/v10 v10.19.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
+github.com/go-sql-driver/mysql v1.3.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
+github.com/go-sql-driver/mysql v1.5.0 h1:ozyZYNQW3x3HtqT1jira07DN2PArx2v7/mN66gGcHOs=
+github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
+github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
+github.com/go-viper/mapstructure/v2 v2.0.0-alpha.1 h1:TQcrn6Wq+sKGkpyPvppOz99zsMBaUOKXq6HSv655U1c=
+github.com/go-viper/mapstructure/v2 v2.0.0-alpha.1/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
+github.com/gofrs/flock v0.8.1 h1:+gYjHKf32LDeiEEFhQaotPbLuUXjY5ZqxKgXy7n59aw=
+github.com/gofrs/flock v0.8.1/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU=
+github.com/gogo/googleapis v1.4.1 h1:1Yx4Myt7BxzvUr5ldGSbwYiZG6t9wGBZ+8/fX3Wvtq0=
+github.com/gogo/googleapis v1.4.1/go.mod h1:2lpHqI5OcWCtVElxXnPt+s8oJvMpySlOyM6xDCrzib4=
+github.com/gogo/protobuf v1.0.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
+github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
+github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q=
+github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=
+github.com/golang-migrate/migrate/v4 v4.17.1 h1:4zQ6iqL6t6AiItphxJctQb3cFqWiSpMnX7wLTPnnYO4=
+github.com/golang-migrate/migrate/v4 v4.17.1/go.mod h1:m8hinFyWBn0SA4QKHuKh175Pm9wjmxj3S2Mia7dbXzM=
+github.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0=
+github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
+github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE=
+github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
+github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
+github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
+github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
+github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
+github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
+github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
+github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8=
+github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA=
+github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs=
+github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w=
+github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0=
+github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8=
+github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
+github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
+github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
+github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
+github.com/google/certificate-transparency-go v1.0.10-0.20180222191210-5ab67e519c93 h1:jc2UWq7CbdszqeH6qu1ougXMIUBfSy8Pbh/anURYbGI=
+github.com/google/certificate-transparency-go v1.0.10-0.20180222191210-5ab67e519c93/go.mod h1:QeJfpSbVSfYc7RgB3gJFj9cbuQMMchQxrWXz8Ruopmg=
+github.com/google/gnostic v0.5.7-v3refs h1:FhTMOKj2VhjpouxvWJAV1TL304uMlb9zcDqkl6cEI54=
+github.com/google/gnostic v0.5.7-v3refs/go.mod h1:73MKFl6jIHelAJNaBGFzt3SPtZULs9dYrGFt8OiIsHQ=
+github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
+github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
+github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
+github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
+github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
+github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
+github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
+github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
+github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0=
+github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
+github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4=
+github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ=
+github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
+github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
+github.com/gorilla/mux v1.7.0/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
+github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
+github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
+github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
+github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 h1:+9834+KizmvFV7pXQGSXQTsaWhq2GjuNUt0aUU0YBYw=
+github.com/grpc-ecosystem/go-grpc-middleware v1.3.0/go.mod h1:z0ButlSOZa5vEBq9m2m2hlwIgKw+rp3sdCBRoJY+30Y=
+github.com/grpc-ecosystem/grpc-gateway/v2 v2.19.0 h1:Wqo399gCIufwto+VfwCSvsnfGpF/w5E9CNxSwbpD6No=
+github.com/grpc-ecosystem/grpc-gateway/v2 v2.19.0/go.mod h1:qmOFXW2epJhM0qSnUUYpldc7gVz2KMQwJ/QYCDIa7XU=
+github.com/hailocab/go-hostpool v0.0.0-20160125115350-e80d13ce29ed h1:5upAirOpQc1Q53c0bnx2ufif5kANL7bfZWcc6VJWJd8=
+github.com/hailocab/go-hostpool v0.0.0-20160125115350-e80d13ce29ed/go.mod h1:tMWxXQ9wFIaZeTI9F+hmhFiGpFmhOHzyShyFUhRm0H4=
+github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
+github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I=
+github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
+github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ=
+github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48=
+github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo=
+github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM=
+github.com/hashicorp/go-version v1.6.0 h1:feTTfFNnjP967rlCxM/I9g701jU+RN74YKx2mOkIeek=
+github.com/hashicorp/go-version v1.6.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
+github.com/hinshun/vt10x v0.0.0-20220119200601-820417d04eec h1:qv2VnGeEQHchGaZ/u7lxST/RaJw+cv273q79D81Xbog=
+github.com/hinshun/vt10x v0.0.0-20220119200601-820417d04eec/go.mod h1:Q48J4R4DvxnHolD5P8pOtXigYlRuPLGl6moFx3ulM68=
+github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
+github.com/imdario/mergo v0.3.16 h1:wwQJbIsHYGMUyLSPrEq1CT16AhnhNJQ51+4fdHUnCl4=
+github.com/imdario/mergo v0.3.16/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY=
+github.com/in-toto/in-toto-golang v0.5.0 h1:hb8bgwr0M2hGdDsLjkJ3ZqJ8JFLL/tgYdAxF/XEFBbY=
+github.com/in-toto/in-toto-golang v0.5.0/go.mod h1:/Rq0IZHLV7Ku5gielPT4wPHJfH1GdHMCq8+WPxw8/BE=
+github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
+github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
+github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
+github.com/jackc/pgerrcode v0.0.0-20220416144525-469b46aa5efa h1:s+4MhCQ6YrzisK6hFJUX53drDT4UsSW3DEhKn0ifuHw=
+github.com/jackc/pgerrcode v0.0.0-20220416144525-469b46aa5efa/go.mod h1:a/s9Lp5W7n/DD0VrVoyJ00FbP2ytTPDVOivvn2bMlds=
+github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
+github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg=
+github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a h1:bbPeKD0xmW/Y25WS6cokEszi5g+S0QxI/d45PkRi7Nk=
+github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM=
+github.com/jackc/pgx/v5 v5.5.5 h1:amBjrZVmksIdNjxGW/IiIMzxMKZFelXbUoPNb+8sjQw=
+github.com/jackc/pgx/v5 v5.5.5/go.mod h1:ez9gk+OAat140fv9ErkZDYFWmXLfV+++K0uAOiwgm1A=
+github.com/jackc/puddle/v2 v2.2.1 h1:RhxXJtFG022u4ibrCSMSiu5aOq1i77R3OHKNJj77OAk=
+github.com/jackc/puddle/v2 v2.2.1/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
+github.com/jinzhu/gorm v0.0.0-20170222002820-5409931a1bb8 h1:CZkYfurY6KGhVtlalI4QwQ6T0Cu6iuY3e0x5RLu96WE=
+github.com/jinzhu/gorm v0.0.0-20170222002820-5409931a1bb8/go.mod h1:Vla75njaFJ8clLU1W44h34PjIkijhjHIYnZxMqCdxqo=
+github.com/jinzhu/inflection v0.0.0-20170102125226-1c35d901db3d h1:jRQLvyVGL+iVtDElaEIDdKwpPqUIZJfzkNLV34htpEc=
+github.com/jinzhu/inflection v0.0.0-20170102125226-1c35d901db3d/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc=
+github.com/jinzhu/now v1.1.1/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
+github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo=
+github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U=
+github.com/jonboulle/clockwork v0.4.0 h1:p4Cf1aMWXnXAUh8lVfewRBx1zaTSYKrKMF2g3ST4RZ4=
+github.com/jonboulle/clockwork v0.4.0/go.mod h1:xgRqUGwRcjKCO1vbZUEtSLrqKoPSsUpK7fnezOII0kc=
+github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
+github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
+github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
+github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
+github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
+github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
+github.com/juju/loggo v0.0.0-20190526231331-6e530bcce5d8/go.mod h1:vgyd7OREkbtVEN/8IXZe5Ooef3LQePvuBm9UWj6ZL8U=
+github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
+github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 h1:Z9n2FFNUXsshfwJMBgNA0RU6/i7WVaAegv3PtuIHPMs=
+github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8=
+github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
+github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
+github.com/klauspost/compress v1.17.4 h1:Ej5ixsIri7BrIjBkRZLTo6ghwrEtHFk7ijlczPW4fZ4=
+github.com/klauspost/compress v1.17.4/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM=
+github.com/knadh/koanf/maps v0.1.1 h1:G5TjmUh2D7G2YWf5SQQqSiHRJEjaicvU0KpypqB3NIs=
+github.com/knadh/koanf/maps v0.1.1/go.mod h1:npD/QZY3V6ghQDdcQzl1W4ICNVTkohC8E73eI2xW4yI=
+github.com/knadh/koanf/parsers/yaml v0.1.0 h1:ZZ8/iGfRLvKSaMEECEBPM1HQslrZADk8fP1XFUxVI5w=
+github.com/knadh/koanf/parsers/yaml v0.1.0/go.mod h1:cvbUDC7AL23pImuQP0oRw/hPuccrNBS2bps8asS0CwY=
+github.com/knadh/koanf/providers/confmap v0.1.0 h1:gOkxhHkemwG4LezxxN8DMOFopOPghxRVp7JbIvdvqzU=
+github.com/knadh/koanf/providers/confmap v0.1.0/go.mod h1:2uLhxQzJnyHKfxG927awZC7+fyHFdQkd697K4MdLnIU=
+github.com/knadh/koanf/providers/env v0.1.0 h1:LqKteXqfOWyx5Ab9VfGHmjY9BvRXi+clwyZozgVRiKg=
+github.com/knadh/koanf/providers/env v0.1.0/go.mod h1:RE8K9GbACJkeEnkl8L/Qcj8p4ZyPXZIQ191HJi44ZaQ=
+github.com/knadh/koanf/providers/file v0.1.0 h1:fs6U7nrV58d3CFAFh8VTde8TM262ObYf3ODrc//Lp+c=
+github.com/knadh/koanf/providers/file v0.1.0/go.mod h1:rjJ/nHQl64iYCtAW2QQnF0eSmDEX/YZ/eNFj5yR6BvA=
+github.com/knadh/koanf/v2 v2.1.1 h1:/R8eXqasSTsmDCsAyYj+81Wteg8AqrV9CP6gvsTsOmM=
+github.com/knadh/koanf/v2 v2.1.1/go.mod h1:4mnTRbZCK+ALuBXHZMjDfG9y714L7TykVnZkXbMU3Es=
+github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
+github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
+github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
+github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
+github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
+github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
+github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
+github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
+github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
+github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
+github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
+github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
+github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI=
+github.com/lib/pq v0.0.0-20150723085316-0dad96c0b94f/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
+github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
+github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
+github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4=
+github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I=
+github.com/magiconair/properties v1.5.3/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
+github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY=
+github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0=
+github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
+github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
+github.com/mailru/easyjson v0.7.6 h1:8yTIVnZgCoiM1TgqoeTl+LfU5Jg6/xL3QhGQnimLYnA=
+github.com/mailru/easyjson v0.7.6/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
+github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
+github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
+github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
+github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
+github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
+github.com/mattn/go-isatty v0.0.17 h1:BTarxUcIeDqL27Mc+vyvdWYSL28zpIhv3RoTdsLMPng=
+github.com/mattn/go-isatty v0.0.17/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
+github.com/mattn/go-runewidth v0.0.15 h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZgg3U=
+github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
+github.com/mattn/go-shellwords v1.0.12 h1:M2zGm7EW6UQJvDeQxo4T51eKPurbeFbe8WtebGE2xrk=
+github.com/mattn/go-shellwords v1.0.12/go.mod h1:EZzvwXDESEeg03EKmM+RmDnNOPKG4lLtQsUlTZDWQ8Y=
+github.com/mattn/go-sqlite3 v1.6.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
+github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
+github.com/matttproud/golang_protobuf_extensions v1.0.4 h1:mmDVorXM7PCGKw94cs5zkfA9PSy5pEvNWRP0ET0TIVo=
+github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4=
+github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b h1:j7+1HpAFS1zy5+Q4qx1fWh90gTKwiN4QCGoY9TWyyO4=
+github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
+github.com/miekg/pkcs11 v1.0.2/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs=
+github.com/miekg/pkcs11 v1.1.1 h1:Ugu9pdy6vAYku5DEpVWVFPYnzV+bxB+iRdbuFSu7TvU=
+github.com/miekg/pkcs11 v1.1.1/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs=
+github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw=
+github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s=
+github.com/mitchellh/mapstructure v0.0.0-20150613213606-2caf8efc9366/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
+github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
+github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
+github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ=
+github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
+github.com/moby/buildkit v0.13.0-beta1.0.20231219135447-957cb50df991 h1:r80LLQ91uOLxU1ElAvrB1o8oBsph51lPzVnr7t2b200=
+github.com/moby/buildkit v0.13.0-beta1.0.20231219135447-957cb50df991/go.mod h1:6MddWPSL5jxy+W8eMMHWDOfZzzRRKWXPZqajw72YHBc=
+github.com/moby/locker v1.0.1 h1:fOXqR41zeveg4fFODix+1Ch4mj/gT0NE1XJbp/epuBg=
+github.com/moby/locker v1.0.1/go.mod h1:S7SDdo5zpBK84bzzVlKr2V0hz+7x9hWbYC/kq7oQppc=
+github.com/moby/patternmatcher v0.6.0 h1:GmP9lR19aU5GqSSFko+5pRqHi+Ohk1O69aFiKkVGiPk=
+github.com/moby/patternmatcher v0.6.0/go.mod h1:hDPoyOpDY7OrrMDLaYoY3hf52gNCR/YOUYxkhApJIxc=
+github.com/moby/spdystream v0.2.0 h1:cjW1zVyyoiM0T7b6UoySUFqzXMoqRckQtXwGPiBhOM8=
+github.com/moby/spdystream v0.2.0/go.mod h1:f7i0iNDQJ059oMTcWxx8MA/zKFIuD/lY+0GqbN2Wy8c=
+github.com/moby/sys/mountinfo v0.7.1 h1:/tTvQaSJRr2FshkhXiIpux6fQ2Zvc4j7tAhMTStAG2g=
+github.com/moby/sys/mountinfo v0.7.1/go.mod h1:IJb6JQeOklcdMU9F5xQ8ZALD+CUr5VlGpwtX+VE0rpI=
+github.com/moby/sys/sequential v0.5.0 h1:OPvI35Lzn9K04PBbCLW0g4LcFAJgHsvXsRyewg5lXtc=
+github.com/moby/sys/sequential v0.5.0/go.mod h1:tH2cOOs5V9MlPiXcQzRC+eEyab644PWKGRYaaV5ZZlo=
+github.com/moby/sys/signal v0.7.0 h1:25RW3d5TnQEoKvRbEKUGay6DCQ46IxAVTT9CUMgmsSI=
+github.com/moby/sys/signal v0.7.0/go.mod h1:GQ6ObYZfqacOwTtlXvcmh9A26dVRul/hbOZn88Kg8Tg=
+github.com/moby/sys/symlink v0.2.0 h1:tk1rOM+Ljp0nFmfOIBtlV3rTDlWOwFRhjEeAhZB0nZc=
+github.com/moby/sys/symlink v0.2.0/go.mod h1:7uZVF2dqJjG/NsClqul95CqKOBRQyYSNnJ6BMgR/gFs=
+github.com/moby/sys/user v0.1.0 h1:WmZ93f5Ux6het5iituh9x2zAG7NFY9Aqi49jjE1PaQg=
+github.com/moby/sys/user v0.1.0/go.mod h1:fKJhFOnsCN6xZ5gSfbM6zaHGgDJMrqt9/reuj4T7MmU=
+github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0=
+github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y=
+github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
+github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
+github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
+github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
+github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
+github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
+github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
+github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A=
+github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc=
+github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA=
+github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
+github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
+github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
+github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
+github.com/onsi/ginkgo v1.12.0/go.mod h1:oUhWkIvk5aDxtKvDDuw8gItl8pKl42LzjC9KZE0HfGg=
+github.com/onsi/ginkgo v1.16.4 h1:29JGrr5oVBm5ulCWet69zQkzWipVXIol6ygQUe/EzNc=
+github.com/onsi/ginkgo/v2 v2.4.0 h1:+Ig9nvqgS5OBSACXNk15PLdp0U9XPYROt9CFzVdFGIs=
+github.com/onsi/ginkgo/v2 v2.4.0/go.mod h1:iHkDK1fKGcBoEHT5W7YBq4RFWaQulw+caOMkAt4OrFo=
+github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY=
+github.com/onsi/gomega v1.9.0/go.mod h1:Ho0h+IUsWyvy1OpqCwxlQ/21gkhVunqlU8fDGcoTdcA=
+github.com/onsi/gomega v1.23.0 h1:/oxKu9c2HVap+F3PfKort2Hw5DEU+HGlW8n+tguWsys=
+github.com/onsi/gomega v1.23.0/go.mod h1:Z/NWtiqwBrwUt4/2loMmHL63EDLnYHmVbuBpDr2vQAg=
+github.com/opencontainers/go-digest v0.0.0-20170106003457-a6d0ee40d420/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s=
+github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
+github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
+github.com/opencontainers/image-spec v1.0.1/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0=
+github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQb2IpWsCzug=
+github.com/opencontainers/image-spec v1.1.0/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM=
+github.com/opencontainers/runtime-spec v1.1.0 h1:HHUyrt9mwHUjtasSbXSMvs4cyFxh+Bll4AjJ9odEGpg=
+github.com/opencontainers/runtime-spec v1.1.0/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0=
+github.com/opencontainers/selinux v1.11.0 h1:+5Zbo97w3Lbmb3PeqQtpmTkMwsW5nRI3YaLpt7tQ7oU=
+github.com/opencontainers/selinux v1.11.0/go.mod h1:E5dMC3VPuVvVHDYmi78qvhJp8+M586T4DlDRYpFkyec=
+github.com/opentracing/opentracing-go v1.1.0 h1:pWlfV3Bxv7k65HYwkikxat0+s3pV4bsqf19k25Ur8rU=
+github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o=
+github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8=
+github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c=
+github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
+github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
+github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
+github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c h1:ncq/mPwQF4JjgDlrVEn3C11VoGHZN7m8qihwgMEtzYw=
+github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE=
+github.com/prometheus/client_golang v0.9.0-pre1.0.20180209125602-c332b6f63c06/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
+github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
+github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo=
+github.com/prometheus/client_golang v1.1.0/go.mod h1:I1FGZT9+L76gKKOs5djB6ezCbFQP1xR9D75/vuwEF3g=
+github.com/prometheus/client_golang v1.16.0 h1:yk/hx9hDbrGHovbci4BY+pRMfSuuat626eFsHb7tmT8=
+github.com/prometheus/client_golang v1.16.0/go.mod h1:Zsulrv/L9oM40tJ7T815tM89lFEugiJ9HzIqaAx4LKc=
+github.com/prometheus/client_model v0.0.0-20171117100541-99fa1f4be8e5/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
+github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
+github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
+github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
+github.com/prometheus/client_model v0.4.0 h1:5lQXD3cAg1OXBf4Wq03gTrXHeaV0TQvGfUooCfx1yqY=
+github.com/prometheus/client_model v0.4.0/go.mod h1:oMQmHW1/JoDwqLtg57MGgP/Fb1CJEYF2imWWhWtMkYU=
+github.com/prometheus/common v0.0.0-20180110214958-89604d197083/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro=
+github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
+github.com/prometheus/common v0.6.0/go.mod h1:eBmuwkDJBwy6iBfxCBob6t6dR6ENT/y+J+Zk0j9GMYc=
+github.com/prometheus/common v0.42.0 h1:EKsfXEYo4JpWMHH5cg+KOUWeuJSov1Id8zGR8eeI1YM=
+github.com/prometheus/common v0.42.0/go.mod h1:xBwqVerjNdUDjgODMpudtOMwlOwf2SaTr1yjz4b7Zbc=
+github.com/prometheus/procfs v0.0.0-20180125133057-cb4147076ac7/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
+github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
+github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
+github.com/prometheus/procfs v0.0.3/go.mod h1:4A/X28fw3Fc593LaREMrKMqOKvUAntwMDaekg4FpcdQ=
+github.com/prometheus/procfs v0.10.1 h1:kYK1Va/YMlutzCGazswoHKo//tZVlFpKYh+PymziUAg=
+github.com/prometheus/procfs v0.10.1/go.mod h1:nwNm2aOCAYw8uTR/9bWRREkZFxAUcWzPHWJq+XBB/FM=
+github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY=
+github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
+github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M=
+github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA=
+github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
+github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
+github.com/secure-systems-lab/go-securesystemslib v0.4.0 h1:b23VGrQhTA8cN2CbBw7/FulN9fTtqYUdS5+Oxzt+DUE=
+github.com/secure-systems-lab/go-securesystemslib v0.4.0/go.mod h1:FGBZgq2tXWICsxWQW1msNf49F0Pf2Op5Htayx335Qbs=
+github.com/serialx/hashring v0.0.0-20190422032157-8b2912629002 h1:ka9QPuQg2u4LGipiZGsgkg3rJCo4iIUCy75FddM0GRQ=
+github.com/serialx/hashring v0.0.0-20190422032157-8b2912629002/go.mod h1:/yeG0My1xr/u+HZrFQ1tOQQQQrOawfyMUH13ai5brBc=
+github.com/shibumi/go-pathspec v1.3.0 h1:QUyMZhFo0Md5B8zV8x2tesohbb5kfbpTi9rBnKh5dkI=
+github.com/shibumi/go-pathspec v1.3.0/go.mod h1:Xutfslp817l2I1cZvgcfeMQJG5QnU2lh5tVaaMCl3jE=
+github.com/shirou/gopsutil/v3 v3.23.12 h1:z90NtUkp3bMtmICZKpC4+WaknU1eXtp5vtbQ11DgpE4=
+github.com/shirou/gopsutil/v3 v3.23.12/go.mod h1:1FrWgea594Jp7qmjHUUPlJDTPgcsb9mGnXDxavtikzM=
+github.com/shoenig/go-m1cpu v0.1.6 h1:nxdKQNcEB6vzgA2E2bvzKIYRuNj7XNJ4S/aRSwKzFtM=
+github.com/shoenig/go-m1cpu v0.1.6/go.mod h1:1JJMcUBvfNwpq05QDQVAnx3gUHr9IYF7GNg9SUEw2VQ=
+github.com/shoenig/test v0.6.4 h1:kVTaSd7WLz5WZ2IaoM0RSzRsUD+m8wRR+5qvntpn4LU=
+github.com/shoenig/test v0.6.4/go.mod h1:byHiCGXqrVaflBLAMq/srcZIHynQPQgeyvkvXnjqq0k=
+github.com/sirupsen/logrus v1.0.6/go.mod h1:pMByvHTf9Beacp5x1UXfOR9xyW/9antXMhjMPG0dEzc=
+github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
+github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q=
+github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
+github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
+github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
+github.com/spdx/tools-golang v0.5.1 h1:fJg3SVOGG+eIva9ZUBm/hvyA7PIPVFjRxUKe6fdAgwE=
+github.com/spdx/tools-golang v0.5.1/go.mod h1:/DRDQuBfB37HctM29YtrX1v+bXiVmT2OpQDalRmX9aU=
+github.com/spf13/cast v0.0.0-20150508191742-4d07383ffe94 h1:JmfC365KywYwHB946TTiQWEb8kqPY+pybPLoGE9GgVk=
+github.com/spf13/cast v0.0.0-20150508191742-4d07383ffe94/go.mod h1:r2rcYCSwa1IExKTDiTfzaxqT2FNHs8hODu4LnUfgKEg=
+github.com/spf13/cobra v0.0.1/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ=
+github.com/spf13/cobra v1.8.0 h1:7aJaZx1B85qltLMc546zn58BxxfZdR/W22ej9CFoEf0=
+github.com/spf13/cobra v1.8.0/go.mod h1:WXLWApfZ71AjXPya3WOlMsY9yMs7YeiHhFVlvLyhcho=
+github.com/spf13/jwalterweatherman v0.0.0-20141219030609-3d60171a6431 h1:XTHrT015sxHyJ5FnQ0AeemSspZWaDq7DoTRW0EVsDCE=
+github.com/spf13/jwalterweatherman v0.0.0-20141219030609-3d60171a6431/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo=
+github.com/spf13/pflag v1.0.0/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
+github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
+github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
+github.com/spf13/viper v0.0.0-20150530192845-be5ff3e4840c h1:2EejZtjFjKJGk71ANb+wtFK5EjUzUkEM3R0xnp559xg=
+github.com/spf13/viper v0.0.0-20150530192845-be5ff3e4840c/go.mod h1:A8kyI5cUJhb8N+3pkfONlcEcZbueH6nhAm0Fq7SrnBM=
+github.com/stoewer/go-strcase v1.2.0/go.mod h1:IBiWB2sKIp3wVVQ3Y035++gc+knqhUQag1KpM8ahLw8=
+github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE=
+github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
+github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
+github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
+github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
+github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
+github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
+github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
+github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
+github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
+github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
+github.com/testcontainers/testcontainers-go v0.30.0 h1:jmn/XS22q4YRrcMwWg0pAwlClzs/abopbsBzrepyc4E=
+github.com/testcontainers/testcontainers-go v0.30.0/go.mod h1:K+kHNGiM5zjklKjgTtcrEetF3uhWbMUyqAQoyoh8Pf0=
+github.com/testcontainers/testcontainers-go/modules/compose v0.30.0 h1:k5ZbLDlO9AGJ5N2GRqVXL3L2gs+ZHXBfTpT2+jFNtgA=
+github.com/testcontainers/testcontainers-go/modules/compose v0.30.0/go.mod h1:+9x1MOKxi1SF+s7iuNxwW0fRQMm4trp6QvZm1fiJdaA=
+github.com/testcontainers/testcontainers-go/modules/postgres v0.30.0 h1:D3HFqpZS90iRGAN7M85DFiuhPfvYvFNnx8urQ6mPAvo=
+github.com/testcontainers/testcontainers-go/modules/postgres v0.30.0/go.mod h1:e1sKxwUOkqzvaqdHl/oV9mUtFmkDPTfBGp0po2tnWQU=
+github.com/theupdateframework/notary v0.7.0 h1:QyagRZ7wlSpjT5N2qQAh/pN+DVqgekv4DzbAiAiEL3c=
+github.com/theupdateframework/notary v0.7.0/go.mod h1:c9DRxcmhHmVLDay4/2fUYdISnHqbFDGRSlXPO0AhYWw=
+github.com/tilt-dev/fsnotify v1.4.8-0.20220602155310-fff9c274a375 h1:QB54BJwA6x8QU9nHY3xJSZR2kX9bgpZekRKGkLTmEXA=
+github.com/tilt-dev/fsnotify v1.4.8-0.20220602155310-fff9c274a375/go.mod h1:xRroudyp5iVtxKqZCrA6n2TLFRBf8bmnjr1UD4x+z7g=
+github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFAEVmqU=
+github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI=
+github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+Fk=
+github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY=
+github.com/tonistiigi/fsutil v0.0.0-20230825212630-f09800878302 h1:ZT8ibgassurSISJ1Pj26NsM3vY2jxFZn63Nd/TpHmRw=
+github.com/tonistiigi/fsutil v0.0.0-20230825212630-f09800878302/go.mod h1:9kMVqMyQ/Sx2df5LtnGG+nbrmiZzCS7V6gjW3oGHsvI=
+github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea h1:SXhTLE6pb6eld/v/cCndK0AMpt1wiVFb/YYmqB3/QG0=
+github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea/go.mod h1:WPnis/6cRcDZSUvVmezrxJPkiO87ThFYsoUiMwWNDJk=
+github.com/tonistiigi/vt100 v0.0.0-20230623042737-f9a4f7ef6531 h1:Y/M5lygoNPKwVNLMPXgVfsRT40CSFKXCxuU8LoHySjs=
+github.com/tonistiigi/vt100 v0.0.0-20230623042737-f9a4f7ef6531/go.mod h1:ulncasL3N9uLrVann0m+CDlJKWsIAP34MPcOJF6VRvc=
+github.com/urfave/cli/v2 v2.27.2 h1:6e0H+AkS+zDckwPCUrZkKX38mRaau4nL2uipkJpbkcI=
+github.com/urfave/cli/v2 v2.27.2/go.mod h1:g0+79LmHHATl7DAcHO99smiR/T7uGLw84w8Y42x+4eM=
+github.com/vbatts/tar-split v0.11.3 h1:hLFqsOLQ1SsppQNTMpkpPXClLDfC2A3Zgy9OUU+RVck=
+github.com/vbatts/tar-split v0.11.3/go.mod h1:9QlHN18E+fEH7RdG+QAJJcuya3rqT7eXSTY7wGrAokY=
+github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
+github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo=
+github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
+github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0=
+github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ=
+github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74=
+github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y=
+github.com/xrash/smetrics v0.0.0-20240312152122-5f08fbb34913 h1:+qGGcbkzsfDQNPPe9UDgpxAWQrhbbBXOYJFQDq/dtJw=
+github.com/xrash/smetrics v0.0.0-20240312152122-5f08fbb34913/go.mod h1:4aEEwZQutDLsQv2Deui4iYQ6DWTxR14g6m8Wv88+Xqk=
+github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
+github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
+github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
+github.com/yusufpapurcu/wmi v1.2.3 h1:E1ctvB7uKFMOJw3fdOW32DwGE9I7t++CRUEMKvFoFiw=
+github.com/yusufpapurcu/wmi v1.2.3/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0=
+go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0=
+go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo=
+go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.45.0 h1:RsQi0qJ2imFfCvZabqzM9cNXBG8k6gXMv1A0cXRmH6A=
+go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.45.0/go.mod h1:vsh3ySueQCiKPxFLvjWC4Z135gIa34TQ/NSqkDTZYUM=
+go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.45.0 h1:2ea0IkZBsWH+HA2GkD+7+hRw2u97jzdFyRtXuO14a1s=
+go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.45.0/go.mod h1:4m3RnBBb+7dB9d21y510oO1pdB1V4J6smNf14WXcBFQ=
+go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.50.0 h1:cEPbyTSEHlQR89XVlyo78gqluF8Y3oMeBkXGWzQsfXY=
+go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.50.0/go.mod h1:DKdbWcT4GH1D0Y3Sqt/PFXt2naRKDWtU+eE6oLdFNA8=
+go.opentelemetry.io/otel v1.25.0 h1:gldB5FfhRl7OJQbUHt/8s0a7cE8fbsPAtdpRaApKy4k=
+go.opentelemetry.io/otel v1.25.0/go.mod h1:Wa2ds5NOXEMkCmUou1WA7ZBfLTHWIsp034OVD7AO+Vg=
+go.opentelemetry.io/otel/exporters/otlp/otlpmetric v0.42.0 h1:ZtfnDL+tUrs1F0Pzfwbg2d59Gru9NCH3bgSHBM6LDwU=
+go.opentelemetry.io/otel/exporters/otlp/otlpmetric v0.42.0/go.mod h1:hG4Fj/y8TR/tlEDREo8tWstl9fO9gcFkn4xrx0Io8xU=
+go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v0.42.0 h1:NmnYCiR0qNufkldjVvyQfZTHSdzeHoZ41zggMsdMcLM=
+go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v0.42.0/go.mod h1:UVAO61+umUsHLtYb8KXXRoHtxUkdOPkYidzW3gipRLQ=
+go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.25.0 h1:Wc4hZuYXhVqq+TfRXLXlmNIL/awOanGx8ssq3ciDQxc=
+go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.25.0/go.mod h1:BydOvapRqVEc0DVz27qWBX2jq45Ca5TI9mhZBDIdweY=
+go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.25.0 h1:dT33yIHtmsqpixFsSQPwNeY5drM9wTcoL8h0FWF4oGM=
+go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.25.0/go.mod h1:h95q0LBGh7hlAC08X2DhSeyIG02YQ0UyioTCVAqRPmc=
+go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.19.0 h1:3d+S281UTjM+AbF31XSOYn1qXn3BgIdWl8HNEpx08Jk=
+go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.19.0/go.mod h1:0+KuTDyKL4gjKCF75pHOX4wuzYDUZYfAQdSu43o+Z2I=
+go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.25.0 h1:Mbi5PKN7u322woPa85d7ebZ+SOvEoPvoiBu+ryHWgfA=
+go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.25.0/go.mod h1:e7ciERRhZaOZXVjx5MiL8TK5+Xv7G5Gv5PA2ZDEJdL8=
+go.opentelemetry.io/otel/exporters/prometheus v0.42.0 h1:jwV9iQdvp38fxXi8ZC+lNpxjK16MRcZlpDYvbuO1FiA=
+go.opentelemetry.io/otel/exporters/prometheus v0.42.0/go.mod h1:f3bYiqNqhoPxkvI2LrXqQVC546K7BuRDL/kKuxkujhA=
+go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.25.0 h1:d7nHbdzU84STOiszaOxQ3kw5IwkSmHsU5Muol5/vL4I=
+go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.25.0/go.mod h1:yiPA1iZbb/EHYnODXOxvtKuB0I2hV8ehfLTEWpl7BJU=
+go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.25.0 h1:0vZZdECYzhTt9MKQZ5qQ0V+J3MFu4MQaQ3COfugF+FQ=
+go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.25.0/go.mod h1:e7iXx3HjaSSBXfy9ykVUlupS2Vp7LBIBuT21ousM2Hk=
+go.opentelemetry.io/otel/metric v1.25.0 h1:LUKbS7ArpFL/I2jJHdJcqMGxkRdxpPHE0VU/D4NuEwA=
+go.opentelemetry.io/otel/metric v1.25.0/go.mod h1:rkDLUSd2lC5lq2dFNrX9LGAbINP5B7WBkC78RXCpH5s=
+go.opentelemetry.io/otel/sdk v1.25.0 h1:PDryEJPC8YJZQSyLY5eqLeafHtG+X7FWnf3aXMtxbqo=
+go.opentelemetry.io/otel/sdk v1.25.0/go.mod h1:oFgzCM2zdsxKzz6zwpTZYLLQsFwc+K0daArPdIhuxkw=
+go.opentelemetry.io/otel/sdk/metric v1.25.0 h1:7CiHOy08LbrxMAp4vWpbiPcklunUshVpAvGBrdDRlGw=
+go.opentelemetry.io/otel/sdk/metric v1.25.0/go.mod h1:LzwoKptdbBBdYfvtGCzGwk6GWMA3aUzBOwtQpR6Nz7o=
+go.opentelemetry.io/otel/trace v1.25.0 h1:tqukZGLwQYRIFtSQM2u2+yfMVTgGVeqRLPUYx1Dq6RM=
+go.opentelemetry.io/otel/trace v1.25.0/go.mod h1:hCCs70XM/ljO+BeQkyFnbK28SBIJ/Emuha+ccrCRT7I=
+go.opentelemetry.io/proto/otlp v1.1.0 h1:2Di21piLrCqJ3U3eXGCTPHE9R8Nh+0uglSnOyxikMeI=
+go.opentelemetry.io/proto/otlp v1.1.0/go.mod h1:GpBHCBWiqvVLDqmHZsoMM3C5ySeKTC7ej/RNTae6MdY=
+go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
+go.uber.org/atomic v1.7.0 h1:ADUqmZGgLDDfbSL9ZmPxKTybcoEYHgpYfELNoN+7hsw=
+go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
+go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
+go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
+go.uber.org/mock v0.4.0 h1:VcM4ZOtdbR4f6VXfiOpwpVJDL6lCReaZ6mw31wqh7KU=
+go.uber.org/mock v0.4.0/go.mod h1:a6FSlNadKUHUa9IP5Vyt1zh4fC7uAwxMutEAscFbkZc=
+go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0=
+go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q=
+golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
+golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
+golang.org/x/crypto v0.0.0-20190325154230-a5d413f7728c/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
+golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
+golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
+golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
+golang.org/x/crypto v0.0.0-20201117144127-c1f2f97bffc9/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I=
+golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
+golang.org/x/crypto v0.21.0 h1:X31++rzVUdKhX5sWmSOFZxx8UW/ldWx55cbf08iNAMA=
+golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs=
+golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
+golang.org/x/exp v0.0.0-20240222234643-814bf88cf225 h1:LfspQV/FYTatPTr/3HzIcmiUFH7PGP+OQ6mgDYo3yuQ=
+golang.org/x/exp v0.0.0-20240222234643-814bf88cf225/go.mod h1:CxmFvTBINI24O/j8iY7H1xHzx2i4OsyguNBmN/uPtqc=
+golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
+golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
+golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
+golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
+golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
+golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
+golang.org/x/mod v0.16.0 h1:QX4fJ0Rr5cPQCF7O9lh9Se4pmwfwskqZfq5moyldzic=
+golang.org/x/mod v0.16.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
+golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
+golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
+golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
+golang.org/x/net v0.23.0 h1:7EYJ93RZ9vYSZAIb2x3lnuvqO5zneoD6IvWjuhfxjTs=
+golang.org/x/net v0.23.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg=
+golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
+golang.org/x/oauth2 v0.17.0 h1:6m3ZPmLEFdVxKKWnKq4VqZ60gutO35zm+zrAHVmHyDQ=
+golang.org/x/oauth2 v0.17.0/go.mod h1:OzPDGQiuQMguemayvdylqddI7qcD9lnSDb+1FiwQ5HA=
+golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ=
+golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
+golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190801041406-cbf593c0f2f3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20201204225414-ed752295db88/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210331175145-43e1dd70ce54/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20211025201205-69cdffdb9359/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/sys v0.18.0 h1:DBdB3niSjOA/O0blCZBqDefyWNYveAYMNF1Wum0DYQ4=
+golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
+golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
+golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
+golang.org/x/term v0.18.0 h1:FcHjZXDMxI8mM3nwhX9HlKop4C0YQvCVCdwYl2wOtE8=
+golang.org/x/term v0.18.0/go.mod h1:ILwASektA3OnRv7amZ1xhE/KTR+u50pbXfZ03+6Nx58=
+golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
+golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
+golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
+golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
+golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
+golang.org/x/time v0.3.0 h1:rg5rLMjNzMS1RkNLzCG38eapWhnYLFYXDXj2gOlr8j4=
+golang.org/x/time v0.3.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
+golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
+golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
+golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
+golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
+golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
+golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
+golang.org/x/tools v0.18.0 h1:k8NLag8AGHnn+PHbl7g43CtqZAwG60vZkLqgyZgIHgQ=
+golang.org/x/tools v0.18.0/go.mod h1:GL7B4CwcLLeo59yx/9UWWuNOW1n3VZ4f5axWfML7Lcg=
+golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
+google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
+google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM=
+google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds=
+google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
+google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
+google.golang.org/genproto v0.0.0-20200423170343-7949de9c1215/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
+google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo=
+google.golang.org/genproto v0.0.0-20201019141844-1ed22bb0c154/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20240227224415-6ceb2ff114de h1:F6qOa9AZTYJXOUEr4jDysRDLrm4PHePlge4v4TGAlxY=
+google.golang.org/genproto v0.0.0-20240227224415-6ceb2ff114de/go.mod h1:VUhTRKeHn9wwcdrk73nvdC9gF178Tzhmt/qyaFcPLSo=
+google.golang.org/genproto/googleapis/api v0.0.0-20240227224415-6ceb2ff114de h1:jFNzHPIeuzhdRwVhbZdiym9q0ory/xY3sA+v2wPg8I0=
+google.golang.org/genproto/googleapis/api v0.0.0-20240227224415-6ceb2ff114de/go.mod h1:5iCWqnniDlqZHrd3neWVTOwvh/v6s3232omMecelax8=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda h1:LI5DOvAxUPMv/50agcLLoo+AdWc1irS9Rzz4vPuD1V4=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda/go.mod h1:WtryC6hu0hhx87FDGxWCDptyssuo68sk10vYjF+T9fY=
+google.golang.org/grpc v1.0.5/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw=
+google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
+google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
+google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY=
+google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
+google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk=
+google.golang.org/grpc v1.63.0 h1:WjKe+dnvABXyPJMD7KDNLxtoGk5tgk+YFWN6cBWjZE8=
+google.golang.org/grpc v1.63.0/go.mod h1:WAX/8DgncnokcFUldAxq7GeB5DXHDbMF+lLvDomNkRA=
+google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
+google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
+google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
+google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE=
+google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo=
+google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
+google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
+google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4=
+google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
+google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
+google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
+google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI=
+google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
+gopkg.in/airbrake/gobrake.v2 v2.0.9/go.mod h1:/h5ZAUhDkGaJfjzjKLSjv6zCL6O0LLBxU4K+aSYdM/U=
+gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
+gopkg.in/cenkalti/backoff.v2 v2.2.1 h1:eJ9UAg01/HIHG987TwxvnzK2MgxXq97YY6rYDpY9aII=
+gopkg.in/cenkalti/backoff.v2 v2.2.1/go.mod h1:S0QdOvT2AlerfSBkp0O+dk+bbIMaNbEmVk876gPCthU=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
+gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
+gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
+gopkg.in/gemnasium/logrus-airbrake-hook.v2 v2.1.2/go.mod h1:Xk6kEKp8OKb+X14hQBKWaSkCsqBpgog8nAV2xsGOxlo=
+gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc=
+gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw=
+gopkg.in/rethinkdb/rethinkdb-go.v6 v6.2.1 h1:d4KQkxAaAiRY2h5Zqis161Pv91A37uZyJOx73duwUwM=
+gopkg.in/rethinkdb/rethinkdb-go.v6 v6.2.1/go.mod h1:WbjuEoo1oadwzQ4apSDU+JTvmllEHtsNHS6y7vFc7iw=
+gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
+gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
+gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
+gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
+gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gotest.tools/v3 v3.5.1 h1:EENdUnS3pdur5nybKYIh2Vfgc8IUNBjxDPSjtiJcOzU=
+gotest.tools/v3 v3.5.1/go.mod h1:isy3WKz7GK6uNw/sbHzfKBLvlvXwUyV06n6brMxxopU=
+honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
+honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
+k8s.io/api v0.26.7 h1:Lf4iEBEJb5OFNmawtBfSZV/UNi9riSJ0t1qdhyZqI40=
+k8s.io/api v0.26.7/go.mod h1:Vk9bMadzA49UHPmHB//lX7VRCQSXGoVwfLd3Sc1SSXI=
+k8s.io/apimachinery v0.26.7 h1:590jSBwaSHCAFCqltaEogY/zybFlhGsnLteLpuF2wig=
+k8s.io/apimachinery v0.26.7/go.mod h1:qYzLkrQ9lhrZRh0jNKo2cfvf/R1/kQONnSiyB7NUJU0=
+k8s.io/apiserver v0.26.7 h1:NX/zBZZn4R+Cq6shwyn8Pn8REd0yJJ16dbtv9WkEVEU=
+k8s.io/apiserver v0.26.7/go.mod h1:r0wDRWHI7VL/KlQLTkJJBVGZ3KeNfv+VetlyRtr86xs=
+k8s.io/client-go v0.26.7 h1:hyU9aKHlwVOykgyxzGYkrDSLCc4+mimZVyUJjPyUn1E=
+k8s.io/client-go v0.26.7/go.mod h1:okYjy0jtq6sdeztALDvCh24tg4opOQS1XNvsJlERDAo=
+k8s.io/klog/v2 v2.90.1 h1:m4bYOKall2MmOiRaR1J+We67Do7vm9KiQVlT96lnHUw=
+k8s.io/klog/v2 v2.90.1/go.mod h1:y1WjHnz7Dj687irZUWR/WLkLc5N1YHtjLdmgWjndZn0=
+k8s.io/kube-openapi v0.0.0-20221012153701-172d655c2280 h1:+70TFaan3hfJzs+7VK2o+OGxg8HsuBr/5f6tVAjDu6E=
+k8s.io/kube-openapi v0.0.0-20221012153701-172d655c2280/go.mod h1:+Axhij7bCpeqhklhUTe3xmOn6bWxolyZEeyaFpjGtl4=
+k8s.io/utils v0.0.0-20230220204549-a5ecb0141aa5 h1:kmDqav+P+/5e1i9tFfHq1qcF3sOrDp+YEkVDAHu7Jwk=
+k8s.io/utils v0.0.0-20230220204549-a5ecb0141aa5/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0=
+sigs.k8s.io/json v0.0.0-20220713155537-f223a00ba0e2 h1:iXTIw73aPyC+oRdyqqvVJuloN1p0AC/kzH07hu3NE+k=
+sigs.k8s.io/json v0.0.0-20220713155537-f223a00ba0e2/go.mod h1:B8JuhiUyNFVKdsE8h686QcCxMaH6HrOAZj4vswFpcB0=
+sigs.k8s.io/structured-merge-diff/v4 v4.2.3 h1:PRbqxJClWWYMNV1dhaG4NsibJbArud9kFxnAMREiWFE=
+sigs.k8s.io/structured-merge-diff/v4 v4.2.3/go.mod h1:qjx8mGObPmV2aSZepjQjbmb2ihdVs8cGKBraizNC69E=
+sigs.k8s.io/yaml v1.3.0 h1:a2VclLzOGrwOHDiV8EfBGhvjHvP46CtW5j6POvhYGGo=
+sigs.k8s.io/yaml v1.3.0/go.mod h1:GeOyir5tyXNByN85N/dRIT9es5UQNerPYEKK56eTBm8=
+tags.cncf.io/container-device-interface v0.6.2 h1:dThE6dtp/93ZDGhqaED2Pu374SOeUkBfuvkLuiTdwzg=
+tags.cncf.io/container-device-interface v0.6.2/go.mod h1:Shusyhjs1A5Na/kqPVLL0KqnHQHuunol9LFeUNkuGVE=
diff --git a/internal/cedar/cedar.go b/internal/cedar/cedar.go
new file mode 100644
index 0000000..8ccd38a
--- /dev/null
+++ b/internal/cedar/cedar.go
@@ -0,0 +1,198 @@
+package cedar
+
+import (
+ "context"
+ _ "embed"
+ "encoding/json"
+ "fmt"
+ "log/slog"
+ "sync"
+
+ "github.com/cedar-policy/cedar-go"
+
+ "github.com/madsrc/sophrosyne"
+)
+
+// Policies holds the embedded contents of policies.cedar. It is the default
+// policy set loaded by NewAuthorizationProvider and can be re-applied later
+// via RefreshPolicies.
+//
+//go:embed policies.cedar
+var Policies []byte
+
+// UserToEntity converts a sophrosyne.User into a cedar.Entity so it can act
+// as a principal or resource in a Cedar authorization request. Timestamps are
+// exposed as Unix seconds.
+func UserToEntity(u sophrosyne.User) cedar.Entity {
+	out := cedar.Entity{
+		UID: cedar.EntityUID{Type: u.EntityType(), ID: u.EntityID()},
+		Attributes: cedar.Record{
+			"id":         cedar.String(u.ID),
+			"name":       cedar.String(u.Name),
+			"email":      cedar.String(u.Email),
+			"is_admin":   cedar.Boolean(u.IsAdmin),
+			"created_at": cedar.Long(u.CreatedAt.Unix()),
+			"updated_at": cedar.Long(u.UpdatedAt.Unix()),
+		},
+	}
+	// deleted_at is optional in the Cedar schema; only set it for
+	// soft-deleted users.
+	if u.DeletedAt != nil {
+		out.Attributes["deleted_at"] = cedar.Long(u.DeletedAt.Unix())
+	}
+	return out
+}
+
+// ProfileToEntity converts a sophrosyne.Profile into a cedar.Entity for use
+// as a resource in a Cedar authorization request. Timestamps are exposed as
+// Unix seconds.
+func ProfileToEntity(u sophrosyne.Profile) cedar.Entity {
+	out := cedar.Entity{
+		UID: cedar.EntityUID{Type: u.EntityType(), ID: u.EntityID()},
+		Attributes: cedar.Record{
+			"id":         cedar.String(u.ID),
+			"name":       cedar.String(u.Name),
+			"created_at": cedar.Long(u.CreatedAt.Unix()),
+			"updated_at": cedar.Long(u.UpdatedAt.Unix()),
+		},
+	}
+	// deleted_at is only present for soft-deleted profiles.
+	if u.DeletedAt != nil {
+		out.Attributes["deleted_at"] = cedar.Long(u.DeletedAt.Unix())
+	}
+	return out
+}
+
+// CheckToEntity converts a sophrosyne.Check into a cedar.Entity for use as a
+// resource in a Cedar authorization request. Timestamps are exposed as Unix
+// seconds.
+func CheckToEntity(u sophrosyne.Check) cedar.Entity {
+	out := cedar.Entity{
+		UID: cedar.EntityUID{Type: u.EntityType(), ID: u.EntityID()},
+		Attributes: cedar.Record{
+			"id":         cedar.String(u.ID),
+			"name":       cedar.String(u.Name),
+			"created_at": cedar.Long(u.CreatedAt.Unix()),
+			"updated_at": cedar.Long(u.UpdatedAt.Unix()),
+		},
+	}
+	// deleted_at is only present for soft-deleted checks.
+	if u.DeletedAt != nil {
+		out.Attributes["deleted_at"] = cedar.Long(u.DeletedAt.Unix())
+	}
+	return out
+}
+
+// AuthorizationProvider makes authorization decisions by evaluating a Cedar
+// policy set against entities resolved on demand from the user, profile and
+// check services.
+type AuthorizationProvider struct {
+	policySet      cedar.PolicySet
+	psMutex        *sync.RWMutex // guards policySet: RefreshPolicies writes, IsAuthorized reads
+	logger         *slog.Logger
+	userService    sophrosyne.UserService
+	profileService sophrosyne.ProfileService
+	checkService   sophrosyne.CheckService
+	tracingService sophrosyne.TracingService
+}
+
+// NewAuthorizationProvider constructs an AuthorizationProvider wired to the
+// given services and loads the embedded default policy set (Policies). It
+// returns an error if the embedded policies fail to parse.
+func NewAuthorizationProvider(ctx context.Context, logger *slog.Logger, userService sophrosyne.UserService, tracingService sophrosyne.TracingService, profileService sophrosyne.ProfileService, checkService sophrosyne.CheckService) (*AuthorizationProvider, error) {
+	ap := AuthorizationProvider{
+		logger:         logger,
+		userService:    userService,
+		profileService: profileService,
+		checkService:   checkService,
+		tracingService: tracingService,
+	}
+	ap.psMutex = &sync.RWMutex{}
+	// Load the embedded default policies; failure here is fatal for
+	// construction so the provider never starts without a policy set.
+	err := ap.RefreshPolicies(ctx, Policies)
+	if err != nil {
+		return nil, err
+	}
+	return &ap, nil
+}
+
+// RefreshPolicies parses b as a Cedar policy set and atomically replaces the
+// active policies. On parse failure the previous policy set is kept and the
+// error is returned.
+func (a *AuthorizationProvider) RefreshPolicies(ctx context.Context, b []byte) error {
+	ps, err := cedar.NewPolicySet("policies.cedar", b)
+	if err != nil {
+		a.logger.DebugContext(ctx, "error refreshing policies", "error", err.Error())
+		return err
+	}
+	// Swap under the write lock so in-flight IsAuthorized calls never see a
+	// partially-updated policy set.
+	a.psMutex.Lock()
+	defer a.psMutex.Unlock()
+	a.policySet = ps
+	return nil
+}
+
+// fetchEntities resolves the principal and (optionally) the resource
+// referenced by req into a cedar.Entities map. The principal is always
+// looked up as a User; the resource is resolved according to its declared
+// type. An unknown resource type or any failed lookup is an error.
+func (a *AuthorizationProvider) fetchEntities(ctx context.Context, req cedar.Request) (cedar.Entities, error) {
+	var principal cedar.Entity
+	var resource cedar.Entity
+
+	pri, err := a.userService.GetUser(ctx, req.Principal.ID)
+	if err != nil {
+		return nil, err
+	}
+
+	// A zero resource UID means the request carries no resource reference.
+	if !req.Resource.IsZero() {
+		switch req.Resource.Type {
+		case "User":
+			res, err := a.userService.GetUser(ctx, req.Resource.ID)
+			if err != nil {
+				return nil, err
+			}
+			resource = UserToEntity(res)
+		case "Profile":
+			res, err := a.profileService.GetProfile(ctx, req.Resource.ID)
+			if err != nil {
+				return nil, err
+			}
+			resource = ProfileToEntity(res)
+		case "Check":
+			res, err := a.checkService.GetCheck(ctx, req.Resource.ID)
+			if err != nil {
+				return nil, err
+			}
+			resource = CheckToEntity(res)
+		default:
+			return nil, fmt.Errorf("unknown resource type: %s", req.Resource.Type)
+		}
+
+	}
+
+	principal = UserToEntity(pri)
+
+	entities := cedar.Entities{
+		principal.UID: principal,
+	}
+	// Only include the resource if one was actually resolved above.
+	if !resource.UID.IsZero() {
+		entities[resource.UID] = resource
+	}
+
+	a.logger.DebugContext(ctx, "fetched entities", "entities", entities)
+
+	return entities, nil
+}
+
+// IsAuthorized evaluates req against the current policy set and reports
+// whether the action is allowed. Any failure while converting the request
+// context or resolving entities is logged and treated as a deny (false).
+func (a *AuthorizationProvider) IsAuthorized(ctx context.Context, req sophrosyne.AuthorizationRequest) bool {
+	ctx, span := a.tracingService.StartSpan(ctx, "AuthorizationProvider.IsAuthorized")
+	defer span.End()
+	reqCtx, err := contextToRecord(req.Context)
+	if err != nil {
+		a.logger.InfoContext(ctx, "error converting context to record", "error", err.Error())
+		return false
+	}
+
+	cReq := cedar.Request{
+		Principal: cedar.NewEntityUID(req.Principal.EntityType(), req.Principal.EntityID()),
+		Action:    cedar.NewEntityUID(req.Action.EntityType(), req.Action.EntityID()),
+		Context:   *reqCtx,
+	}
+	// The resource is optional; leave it as the zero UID when absent.
+	if req.Resource != nil {
+		cReq.Resource = cedar.NewEntityUID(req.Resource.EntityType(), req.Resource.EntityID())
+	}
+	entities, err := a.fetchEntities(ctx, cReq)
+	if err != nil {
+		a.logger.InfoContext(ctx, "error fetching entities", "error", err.Error())
+		return false
+	}
+
+	// Hold the read lock so a concurrent RefreshPolicies cannot swap the
+	// policy set mid-evaluation.
+	a.psMutex.RLock()
+	defer a.psMutex.RUnlock()
+	a.logger.DebugContext(ctx, "checking authorization", "request", cReq)
+	decision, diag := a.policySet.IsAuthorized(entities, cReq)
+	a.logger.InfoContext(ctx, "authorization decision", "decision", decision, "diag", diag)
+	return decision == cedar.Allow
+}
+
+func contextToRecord(in map[string]interface{}) (*cedar.Record, error) {
+ b, err := json.Marshal(in)
+ if err != nil {
+ return nil, err
+ }
+ var out cedar.Record
+ err = json.Unmarshal(b, &out)
+ if err != nil {
+ return nil, err
+ }
+ return &out, nil
+}
diff --git a/internal/cedar/cedarschema.json b/internal/cedar/cedarschema.json
new file mode 100644
index 0000000..ad4daa0
--- /dev/null
+++ b/internal/cedar/cedarschema.json
@@ -0,0 +1,73 @@
+{
+ "Users": {
+ "entityTypes": {
+ "User": {
+ "shape": {
+ "type": "Record",
+ "attributes": {
+ "id": {
+ "type": "String"
+ },
+ "name": {
+ "type": "String"
+ },
+ "email": {
+ "type": "String"
+ },
+ "is_admin": {
+ "type": "Boolean"
+ },
+ "created_at": {
+ "type": "Long"
+ },
+ "updated_at": {
+ "type": "Long"
+ },
+ "deleted_at": {
+ "type": "Long",
+ "required": false
+ }
+ }
+ }
+ }
+ },
+ "actions": {
+ "GetUser": {
+ "appliesTo": {
+ "principalTypes": [ "User"],
+ "resourceTypes": [ "User" ]
+ }
+ },
+ "GetUsers": {
+ "appliesTo": {
+ "principalTypes": [ "User"],
+ "resourceTypes": [ "User" ]
+ }
+ },
+ "CreateUser": {
+ "appliesTo": {
+ "principalTypes": [ "User"],
+ "resourceTypes": [ "User" ]
+ }
+ },
+ "UpdateUser": {
+ "appliesTo": {
+ "principalTypes": [ "User"],
+ "resourceTypes": [ "User" ]
+ }
+ },
+ "DeleteUser": {
+ "appliesTo": {
+ "principalTypes": [ "User"],
+ "resourceTypes": [ "User" ]
+ }
+ },
+ "RotateToken": {
+ "appliesTo": {
+ "principalTypes": [ "User"],
+ "resourceTypes": [ "User" ]
+ }
+ }
+ }
+ }
+}
diff --git a/internal/cedar/policies.cedar b/internal/cedar/policies.cedar
new file mode 100644
index 0000000..49fcc6c
--- /dev/null
+++ b/internal/cedar/policies.cedar
@@ -0,0 +1,34 @@
+// Allow token rotation when the principal is an admin or owns the resource.
+permit (
+    principal,
+    action == Action::"Users::RotateToken",
+    resource
+) when {
+    principal.is_admin == true
+    ||
+    principal.id == resource.id
+};
+// Do not allow rotating tokens unless user is admin or the token is theirs.
+// NOTE(review): Cedar is default-deny and forbid overrides permit, so this
+// forbid restates the negation of the permit above; confirm it is intended
+// as defense-in-depth against broader permits (such as the blanket admin
+// permit below).
+forbid (
+    principal,
+    action == Action::"Users::RotateToken",
+    resource
+) when {
+    principal.is_admin == false
+    &&
+    principal.id != resource.id
+};
+// Admins may perform any action on any resource.
+permit (
+    principal,
+    action,
+    resource
+) when {
+    principal.is_admin == true
+};
+// NOTE(review): Cedar action IDs are opaque strings — "Users::*" is matched
+// literally, not as a wildcard. Confirm an action with that exact ID exists;
+// otherwise this policy never applies. Also verify the "Users::" prefix
+// matches the action IDs produced by the application (the JSON schema
+// declares plain names like "GetUser"/"RotateToken").
+permit (
+    principal,
+    action == Action::"Users::*",
+    resource
+) when {
+    principal.id == resource.id
+};
diff --git a/internal/configProvider/config.go b/internal/configProvider/config.go
new file mode 100644
index 0000000..a395c64
--- /dev/null
+++ b/internal/configProvider/config.go
@@ -0,0 +1,140 @@
+package configProvider
+
+import (
+ "encoding/hex"
+ "path/filepath"
+ "strings"
+ "sync"
+
+ "github.com/knadh/koanf/parsers/yaml"
+ "github.com/knadh/koanf/providers/confmap"
+ "github.com/knadh/koanf/providers/env"
+ "github.com/knadh/koanf/providers/file"
+ "github.com/knadh/koanf/v2"
+
+ "github.com/madsrc/sophrosyne"
+)
+
+// envExtractor maps an environment variable name/value pair onto a koanf
+// configuration key and value. The prefix is stripped, the name lowercased
+// and underscores become the config delimiter. Space-separated values are
+// returned as string slices, and "0x"-prefixed values that decode cleanly as
+// hex are returned as raw bytes; everything else passes through as a string.
+func envExtractor(s string, v string) (string, interface{}) {
+	trimmed := strings.TrimPrefix(s, sophrosyne.ConfigEnvironmentPrefix)
+	key := strings.ReplaceAll(strings.ToLower(trimmed), "_", sophrosyne.ConfigDelimiter)
+
+	if strings.Contains(v, " ") {
+		return key, strings.Split(v, " ")
+	}
+
+	if strings.HasPrefix(v, "0x") {
+		if decoded, err := hex.DecodeString(v[2:]); err == nil {
+			return key, decoded
+		}
+	}
+
+	return key, v
+}
+
+// loadConfig layers configuration into k in increasing order of precedence:
+// built-in defaults, the YAML file, environment variables, explicit
+// overwrites, and finally secret files. Later layers win over earlier ones.
+// The first failing layer aborts the load and its error is returned.
+func loadConfig(k *koanf.Koanf, defaultConfig map[string]interface{}, yamlFile koanf.Provider, overwrites map[string]interface{}, secretFiles []string) error {
+	// Previously these Load errors were silently discarded; propagate them
+	// so a broken layer is surfaced instead of yielding a partial config.
+	if err := k.Load(confmap.Provider(defaultConfig, sophrosyne.ConfigDelimiter), nil); err != nil {
+		return err
+	}
+
+	if err := loadYamlConfig(k, yamlFile); err != nil {
+		return err
+	}
+
+	if err := k.Load(env.ProviderWithValue(sophrosyne.ConfigEnvironmentPrefix, sophrosyne.ConfigDelimiter, envExtractor), nil); err != nil {
+		return err
+	}
+
+	if err := k.Load(confmap.Provider(overwrites, sophrosyne.ConfigDelimiter), nil); err != nil {
+		return err
+	}
+
+	for _, secretFile := range secretFiles {
+		secret, err := secretFromFile(secretFile)
+		if err != nil {
+			return err
+		}
+		if err := k.Load(confmap.Provider(secret, sophrosyne.ConfigDelimiter), nil); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+// loadYamlConfig loads the given provider's contents into k using the YAML
+// parser, returning any load or parse error unchanged.
+func loadYamlConfig(k *koanf.Koanf, yamlFile koanf.Provider) error {
+	return k.Load(yamlFile, yaml.Parser())
+}
+
+// ConfigProvider loads, validates and watches application configuration.
+// The pointer returned by Get stays stable across reloads; its contents are
+// replaced in place when the watched YAML file changes.
+type ConfigProvider struct {
+	k        *koanf.Koanf
+	config   *sophrosyne.Config
+	validate sophrosyne.Validator
+	mu       sync.Mutex // guards the in-place update of *config by the file watcher
+}
+
+// NewConfigProvider builds a ConfigProvider from the YAML file at
+// yamlFilePath, layered with defaults, environment variables, overwrites and
+// secret files (see loadConfig). The YAML file is watched: on change the
+// whole configuration is reloaded, validated, and copied in place over the
+// pointer handed out by Get. Construction fails if the initial load,
+// unmarshal, validation, or installing the watcher fails.
+func NewConfigProvider(yamlFilePath string, overwrites map[string]interface{}, secretFiles []string, validator sophrosyne.Validator) (*ConfigProvider, error) {
+	cfgProv := &ConfigProvider{
+		config:   &sophrosyne.Config{},
+		k:        koanf.New(sophrosyne.ConfigDelimiter),
+		validate: validator,
+	}
+
+	yamlFile := file.Provider(yamlFilePath)
+
+	if err := loadConfig(cfgProv.k, sophrosyne.DefaultConfig, yamlFile, overwrites, secretFiles); err != nil {
+		return nil, err
+	}
+
+	// Previously the Watch error was dropped, so a provider could be
+	// returned that silently never reloads; surface it instead.
+	if err := yamlFile.Watch(func(event interface{}, err error) {
+		if err != nil {
+			// Error occurred when watching the file; keep the old config.
+			return
+		}
+		// We have to reload not just the yaml file, but everything else as
+		// well. If we do not, we risk that values that have been removed
+		// from the yaml file are still present in the config.
+		if err := loadConfig(cfgProv.k, sophrosyne.DefaultConfig, yamlFile, overwrites, secretFiles); err != nil {
+			// Error occurred when reloading; keep the old config.
+			return
+		}
+		newConf := &sophrosyne.Config{}
+		if err := cfgProv.k.UnmarshalWithConf("", newConf, koanf.UnmarshalConf{Tag: "key"}); err != nil {
+			// Unmarshal failed; keep the old config.
+			return
+		}
+		if err := cfgProv.validate.Validate(newConf); err != nil {
+			// New config is invalid; keep the old config.
+			return
+		}
+		// Reuse the existing pointer (as this is what the user is already
+		// using) and just copy the new values over.
+		cfgProv.mu.Lock()
+		defer cfgProv.mu.Unlock()
+		*cfgProv.config = *newConf
+	}); err != nil {
+		return nil, err
+	}
+
+	// Previously this unmarshal error was ignored; a bad mapping now fails
+	// construction rather than yielding a half-populated config.
+	if err := cfgProv.k.UnmarshalWithConf("", cfgProv.config, koanf.UnmarshalConf{Tag: "key"}); err != nil {
+		return nil, err
+	}
+
+	if err := cfgProv.validate.Validate(cfgProv.config); err != nil {
+		return nil, err
+	}
+
+	return cfgProv, nil
+}
+
+// Get returns the current configuration. The returned pointer is stable
+// across reloads; its contents may be overwritten in place by the file
+// watcher installed in NewConfigProvider.
+func (c *ConfigProvider) Get() *sophrosyne.Config {
+	return c.config
+}
+
+// secretFromFile reads the file at filePath and returns a single-entry map
+// whose key is the file's base name and whose value is the raw file bytes.
+func secretFromFile(filePath string) (map[string]interface{}, error) {
+	provider := file.Provider(filePath)
+
+	contents, err := provider.ReadBytes()
+	if err != nil {
+		return nil, err
+	}
+
+	return map[string]interface{}{
+		filepath.Base(filePath): contents,
+	}, nil
+}
diff --git a/internal/configProvider/config_test.go b/internal/configProvider/config_test.go
new file mode 100644
index 0000000..3fe7203
--- /dev/null
+++ b/internal/configProvider/config_test.go
@@ -0,0 +1,337 @@
+package configProvider
+
+import (
+ "os"
+ "strings"
+ "testing"
+ "time"
+
+ "github.com/madsrc/sophrosyne/internal/validator"
+
+ "github.com/knadh/koanf/providers/file"
+ "github.com/knadh/koanf/v2"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+
+ "github.com/madsrc/sophrosyne"
+)
+
+var testYamlFilePath = "testdata/config.yaml"
+var testFilePath = "testdata/test.file"
+var databaseUserKey = "database.user"
+var databasePasswordKey = "database.password"
+var nonExistentYamlFilePath = "testdata/non-existent.yaml"
+var newUserNameString = "new-username"
+var newPasswordString = "new-password 42 c@t"
+var rootConfigYamlPath = "/config.yaml"
+
+func TestSecretFromFile(t *testing.T) {
+ cases := []struct {
+ name string
+ file string
+ keyName string
+ }{
+ {
+ name: "secret file with text",
+ file: testFilePath,
+ keyName: "test.file",
+ },
+ {
+ name: "secret file with binary data",
+ file: "testdata/file.binary",
+ keyName: "file.binary",
+ },
+ {
+ name: "secret file with empty data",
+ file: "testdata/empty.file",
+ keyName: "empty.file",
+ },
+ }
+ for _, tc := range cases {
+ t.Run(tc.name, func(t *testing.T) {
+ // Read the raw file for comparison
+ dat, err := os.ReadFile(tc.file)
+ require.NoError(t, err)
+
+ // Read the secret from the file
+ secret, err := secretFromFile(tc.file)
+ require.NoError(t, err)
+
+ // Compare the raw file data with the secret
+ require.Equal(t, dat, secret[tc.keyName])
+ })
+ }
+
+}
+
+func TestSecretFromFileErrors(t *testing.T) {
+ cases := []struct {
+ name string
+ file string
+ }{
+ {
+ name: "empty filepath",
+ file: "",
+ },
+ {
+ name: "non-existent file",
+ file: "testdata/non-existent.file",
+ },
+ }
+ for _, tc := range cases {
+ t.Run(tc.name, func(t *testing.T) {
+ // Read the secret from the file
+ secret, err := secretFromFile(tc.file)
+ require.Error(t, err)
+ require.Nil(t, secret)
+ })
+ }
+
+}
+
+func TestEnvExtractor(t *testing.T) {
+ cases := []struct {
+ name string
+ key string
+ val string
+ exp interface{}
+ }{
+ {
+ name: "single value",
+ key: "KEY",
+ val: "value",
+ exp: "value",
+ },
+ {
+ name: "multiple values",
+ key: "KEY",
+ val: "value1 value2 value3",
+ exp: []string{"value1", "value2", "value3"},
+ },
+ {
+ name: "empty value",
+ key: "KEY",
+ val: "",
+ exp: "",
+ },
+ }
+ for _, tc := range cases {
+ t.Run(tc.name, func(t *testing.T) {
+ key, val := envExtractor(tc.key, tc.val)
+ require.Equal(t, strings.ToLower(tc.key), key)
+ require.Equal(t, tc.exp, val)
+ })
+ }
+}
+
+func TestLoadYamlConfig(t *testing.T) {
+ k := koanf.New(sophrosyne.ConfigDelimiter)
+ err := loadYamlConfig(k, file.Provider(testYamlFilePath))
+ require.NoError(t, err)
+
+ require.Equal(t, "postgres", k.String(databaseUserKey))
+ require.Equal(t, "postgres", k.String(databasePasswordKey))
+ require.Equal(t, "localhost", k.String("database.host"))
+ require.Equal(t, "5432", k.String("database.port"))
+ require.Equal(t, "postgres", k.String("database.name"))
+}
+
+func TestLoadYamlConfigErrors(t *testing.T) {
+ k := koanf.New(sophrosyne.ConfigDelimiter)
+ err := loadYamlConfig(k, file.Provider(nonExistentYamlFilePath))
+ require.Error(t, err)
+}
+
+func TestLoadConfig(t *testing.T) {
+ defaultConfig := map[string]interface{}{
+ "test.file": "not content",
+ }
+ k := koanf.New(sophrosyne.ConfigDelimiter)
+ err := loadConfig(
+ k,
+ defaultConfig,
+ file.Provider(testYamlFilePath),
+ nil,
+ []string{testFilePath},
+ )
+ require.NoError(t, err)
+
+ require.Equal(t, "postgres", k.String(databaseUserKey))
+ require.Equal(t, "postgres", k.String(databasePasswordKey))
+ require.Equal(t, "localhost", k.String("database.host"))
+ require.Equal(t, "5432", k.String("database.port"))
+ require.Equal(t, "postgres", k.String("database.name"))
+ require.NotEqual(t, "content", k.String("not content"))
+}
+
+func TestLoadConfigErr(t *testing.T) {
+ cases := []struct {
+ name string
+ defaultConf map[string]interface{}
+ yamlFile string
+ secretFiles []string
+ }{
+ {
+ name: "non-existent yaml file",
+ defaultConf: map[string]interface{}{},
+ yamlFile: nonExistentYamlFilePath,
+ secretFiles: []string{},
+ },
+ {
+ name: "non-existent secret file",
+ defaultConf: map[string]interface{}{},
+ yamlFile: testYamlFilePath,
+ secretFiles: []string{"testdata/non-existent.file"},
+ },
+ }
+ for _, tc := range cases {
+ t.Run(tc.name, func(t *testing.T) {
+ k := koanf.New(sophrosyne.ConfigDelimiter)
+ err := loadConfig(
+ k,
+ tc.defaultConf,
+ file.Provider(tc.yamlFile),
+ nil,
+ tc.secretFiles,
+ )
+ require.Error(t, err)
+ })
+ }
+}
+
+func TestNewConfigProvider(t *testing.T) {
+ initialPw := "password"
+ yamlContent := []byte(`database:
+ password: ` + initialPw)
+
+ tempDir := t.TempDir()
+ tempFile := tempDir + rootConfigYamlPath
+ err := os.WriteFile(tempFile, yamlContent, 0644)
+ require.NoError(t, err)
+
+ c, err := NewConfigProvider(tempFile, nil, []string{testFilePath}, validator.NewValidator())
+ require.NoError(t, err)
+
+ require.Equal(t, initialPw, c.k.String(databasePasswordKey))
+
+ newYamlContent := []byte(`database:
+ password: ` + newPasswordString)
+ err = os.WriteFile(tempFile, newYamlContent, 0644)
+ require.NoError(t, err)
+
+ // sleep for a 100 milliseconds to allow the file watcher to pick up the
+ // change and reload the config
+ time.Sleep(100 * time.Millisecond)
+
+ assert.Equal(t, newPasswordString, c.k.String(databasePasswordKey))
+
+ badYamlContent := []byte{0x00, 0x01, 0x02}
+ err = os.WriteFile(tempFile, badYamlContent, 0644)
+ require.NoError(t, err)
+
+ time.Sleep(100 * time.Millisecond)
+
+ // The bad yaml content should not have been loaded and thus the previous
+ // value should still be present.
+ assert.Equal(t, newPasswordString, c.k.String(databasePasswordKey))
+}
+
+func TestNewConfigProviderErrorNoYamlFile(t *testing.T) {
+ c, err := NewConfigProvider(nonExistentYamlFilePath, nil, []string{testFilePath}, nil)
+ require.Error(t, err)
+ require.Nil(t, c)
+}
+
+func TestNewConfigProviderErrorUpdateFailValidate(t *testing.T) {
+ tempDir := t.TempDir()
+ // Copy file at testYamlFilePath to tempDir
+ tempFile := tempDir + rootConfigYamlPath
+ yamlContent, err := os.ReadFile(testYamlFilePath)
+ require.NoError(t, err)
+ err = os.WriteFile(tempFile, yamlContent, 0644)
+ require.NoError(t, err)
+
+ c, err := NewConfigProvider(tempFile, nil, []string{testFilePath}, validator.NewValidator())
+ require.NoError(t, err)
+
+ cfg := c.Get()
+ require.NotNil(t, cfg)
+ require.Equal(t, "postgres", cfg.Database.User)
+ require.Equal(t, "postgres", cfg.Database.Password)
+ require.Equal(t, "localhost", cfg.Database.Host)
+ require.Equal(t, 5432, cfg.Database.Port)
+ require.Equal(t, "postgres", cfg.Database.Name)
+
+ newYamlContent := []byte(`database:
+ port: 65536`)
+ err = os.WriteFile(tempFile, newYamlContent, 0644)
+ require.NoError(t, err)
+
+ time.Sleep(100 * time.Millisecond)
+ require.Equal(t, 5432, cfg.Database.Port)
+}
+
+func TestNewConfigProviderErrorValidateYamlFile(t *testing.T) {
+ tempDir := t.TempDir()
+ tempFile := tempDir + rootConfigYamlPath
+ yamlContent := []byte(`database:
+ port: not-a-number`)
+ err := os.WriteFile(tempFile, yamlContent, 0644)
+ require.NoError(t, err)
+
+ c, err := NewConfigProvider(tempFile, nil, []string{testFilePath}, validator.NewValidator())
+ require.Error(t, err)
+ require.Nil(t, c)
+}
+
+func TestConfigProviderGet(t *testing.T) {
+ tempDir := t.TempDir()
+ // Copy file at testYamlFilePath to tempDir
+ tempFile := tempDir + rootConfigYamlPath
+ yamlContent, err := os.ReadFile(testYamlFilePath)
+ require.NoError(t, err)
+ err = os.WriteFile(tempFile, yamlContent, 0644)
+ require.NoError(t, err)
+
+ c, err := NewConfigProvider(tempFile, nil, []string{testFilePath}, validator.NewValidator())
+ require.NoError(t, err)
+
+ cfg := c.Get()
+ require.NotNil(t, cfg)
+ require.Equal(t, "postgres", cfg.Database.User)
+ require.Equal(t, "postgres", cfg.Database.Password)
+ require.Equal(t, "localhost", cfg.Database.Host)
+ require.Equal(t, 5432, cfg.Database.Port)
+ require.Equal(t, "postgres", cfg.Database.Name)
+
+ newYamlContent := []byte(`database:
+ password: ` + newPasswordString)
+ err = os.WriteFile(tempFile, newYamlContent, 0644)
+ require.NoError(t, err)
+
+ time.Sleep(100 * time.Millisecond)
+
+ require.NotNil(t, cfg)
+ // Ensure that the config has been updated
+ require.Equal(t, newPasswordString, cfg.Database.Password)
+ // Ensure that the other values have not changed
+ require.Equal(t, "postgres", cfg.Database.User)
+ require.Equal(t, "localhost", cfg.Database.Host)
+ require.Equal(t, 5432, cfg.Database.Port)
+ require.Equal(t, "postgres", cfg.Database.Name)
+
+ badYamlContent := []byte{0x00, 0x01, 0x02}
+ err = os.WriteFile(tempFile, badYamlContent, 0644)
+ require.NoError(t, err)
+
+ time.Sleep(100 * time.Millisecond)
+
+ // The bad yaml content should not have been loaded and thus the previous
+ // value should still be present.
+ require.NotNil(t, cfg)
+ require.Equal(t, newPasswordString, cfg.Database.Password)
+ require.Equal(t, "postgres", cfg.Database.User)
+ require.Equal(t, "localhost", cfg.Database.Host)
+ require.Equal(t, 5432, cfg.Database.Port)
+ require.Equal(t, "postgres", cfg.Database.Name)
+}
diff --git a/internal/configProvider/testdata/config.yaml b/internal/configProvider/testdata/config.yaml
new file mode 100644
index 0000000..f019bbf
--- /dev/null
+++ b/internal/configProvider/testdata/config.yaml
@@ -0,0 +1,6 @@
+database:
+ user: postgres
+ password: postgres
+ host: localhost
+ port: 5432
+ name: postgres
diff --git a/src/sophrosyne/py.typed b/internal/configProvider/testdata/empty.file
similarity index 100%
rename from src/sophrosyne/py.typed
rename to internal/configProvider/testdata/empty.file
diff --git a/internal/configProvider/testdata/file.binary b/internal/configProvider/testdata/file.binary
new file mode 100644
index 0000000..8cd6141
Binary files /dev/null and b/internal/configProvider/testdata/file.binary differ
diff --git a/internal/configProvider/testdata/security.salt b/internal/configProvider/testdata/security.salt
new file mode 100644
index 0000000..2368725
Binary files /dev/null and b/internal/configProvider/testdata/security.salt differ
diff --git a/internal/configProvider/testdata/security.sitekey b/internal/configProvider/testdata/security.sitekey
new file mode 100644
index 0000000..e07e5e1
Binary files /dev/null and b/internal/configProvider/testdata/security.sitekey differ
diff --git a/internal/configProvider/testdata/test.file b/internal/configProvider/testdata/test.file
new file mode 100644
index 0000000..d95f3ad
--- /dev/null
+++ b/internal/configProvider/testdata/test.file
@@ -0,0 +1 @@
+content
diff --git a/internal/grpc/checks/checks.pb.go b/internal/grpc/checks/checks.pb.go
new file mode 100644
index 0000000..9068578
--- /dev/null
+++ b/internal/grpc/checks/checks.pb.go
@@ -0,0 +1,265 @@
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// versions:
+// protoc-gen-go v1.28.1
+// protoc v5.26.1
+// source: proto/checks/checks.proto
+
+package checks
+
+import (
+ reflect "reflect"
+ sync "sync"
+
+ protoreflect "google.golang.org/protobuf/reflect/protoreflect"
+ protoimpl "google.golang.org/protobuf/runtime/protoimpl"
+)
+
+const (
+ // Verify that this generated code is sufficiently up-to-date.
+ _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
+ // Verify that runtime/protoimpl is sufficiently up-to-date.
+ _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
+)
+
+type CheckRequest struct {
+ state protoimpl.MessageState
+ sizeCache protoimpl.SizeCache
+ unknownFields protoimpl.UnknownFields
+
+ // Types that are assignable to Check:
+ //
+ // *CheckRequest_Text
+ // *CheckRequest_Image
+ Check isCheckRequest_Check `protobuf_oneof:"check"`
+}
+
+func (x *CheckRequest) Reset() {
+ *x = CheckRequest{}
+ if protoimpl.UnsafeEnabled {
+ mi := &file_proto_checks_checks_proto_msgTypes[0]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
+}
+
+func (x *CheckRequest) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*CheckRequest) ProtoMessage() {}
+
+func (x *CheckRequest) ProtoReflect() protoreflect.Message {
+ mi := &file_proto_checks_checks_proto_msgTypes[0]
+ if protoimpl.UnsafeEnabled && x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use CheckRequest.ProtoReflect.Descriptor instead.
+func (*CheckRequest) Descriptor() ([]byte, []int) {
+ return file_proto_checks_checks_proto_rawDescGZIP(), []int{0}
+}
+
+func (m *CheckRequest) GetCheck() isCheckRequest_Check {
+ if m != nil {
+ return m.Check
+ }
+ return nil
+}
+
+func (x *CheckRequest) GetText() string {
+ if x, ok := x.GetCheck().(*CheckRequest_Text); ok {
+ return x.Text
+ }
+ return ""
+}
+
+func (x *CheckRequest) GetImage() string {
+ if x, ok := x.GetCheck().(*CheckRequest_Image); ok {
+ return x.Image
+ }
+ return ""
+}
+
+type isCheckRequest_Check interface {
+ isCheckRequest_Check()
+}
+
+type CheckRequest_Text struct {
+ Text string `protobuf:"bytes,1,opt,name=text,proto3,oneof"`
+}
+
+type CheckRequest_Image struct {
+ Image string `protobuf:"bytes,2,opt,name=image,proto3,oneof"`
+}
+
+func (*CheckRequest_Text) isCheckRequest_Check() {}
+
+func (*CheckRequest_Image) isCheckRequest_Check() {}
+
+type CheckResponse struct {
+ state protoimpl.MessageState
+ sizeCache protoimpl.SizeCache
+ unknownFields protoimpl.UnknownFields
+
+ Result bool `protobuf:"varint,1,opt,name=result,proto3" json:"result,omitempty"`
+ Details string `protobuf:"bytes,2,opt,name=details,proto3" json:"details,omitempty"`
+}
+
+func (x *CheckResponse) Reset() {
+ *x = CheckResponse{}
+ if protoimpl.UnsafeEnabled {
+ mi := &file_proto_checks_checks_proto_msgTypes[1]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
+}
+
+func (x *CheckResponse) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*CheckResponse) ProtoMessage() {}
+
+func (x *CheckResponse) ProtoReflect() protoreflect.Message {
+ mi := &file_proto_checks_checks_proto_msgTypes[1]
+ if protoimpl.UnsafeEnabled && x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use CheckResponse.ProtoReflect.Descriptor instead.
+func (*CheckResponse) Descriptor() ([]byte, []int) {
+ return file_proto_checks_checks_proto_rawDescGZIP(), []int{1}
+}
+
+func (x *CheckResponse) GetResult() bool {
+ if x != nil {
+ return x.Result
+ }
+ return false
+}
+
+func (x *CheckResponse) GetDetails() string {
+ if x != nil {
+ return x.Details
+ }
+ return ""
+}
+
+var File_proto_checks_checks_proto protoreflect.FileDescriptor
+
+var file_proto_checks_checks_proto_rawDesc = []byte{
+ 0x0a, 0x19, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x73, 0x2f, 0x63,
+ 0x68, 0x65, 0x63, 0x6b, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x09, 0x63, 0x68, 0x65,
+ 0x63, 0x6b, 0x73, 0x2e, 0x76, 0x31, 0x22, 0x45, 0x0a, 0x0c, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x52,
+ 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x04, 0x74, 0x65, 0x78, 0x74, 0x18, 0x01,
+ 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x04, 0x74, 0x65, 0x78, 0x74, 0x12, 0x16, 0x0a, 0x05,
+ 0x69, 0x6d, 0x61, 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, 0x69,
+ 0x6d, 0x61, 0x67, 0x65, 0x42, 0x07, 0x0a, 0x05, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x22, 0x41, 0x0a,
+ 0x0d, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x16,
+ 0x0a, 0x06, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x06,
+ 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c,
+ 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73,
+ 0x32, 0x4c, 0x0a, 0x0c, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65,
+ 0x12, 0x3c, 0x0a, 0x05, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x12, 0x17, 0x2e, 0x63, 0x68, 0x65, 0x63,
+ 0x6b, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x52, 0x65, 0x71, 0x75, 0x65,
+ 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x43,
+ 0x68, 0x65, 0x63, 0x6b, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x2a,
+ 0x5a, 0x28, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6d, 0x61, 0x64,
+ 0x73, 0x72, 0x63, 0x2f, 0x73, 0x6f, 0x70, 0x68, 0x72, 0x6f, 0x73, 0x79, 0x6e, 0x65, 0x2f, 0x67,
+ 0x72, 0x70, 0x63, 0x2f, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x73, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74,
+ 0x6f, 0x33,
+}
+
+var (
+ file_proto_checks_checks_proto_rawDescOnce sync.Once
+ file_proto_checks_checks_proto_rawDescData = file_proto_checks_checks_proto_rawDesc
+)
+
+func file_proto_checks_checks_proto_rawDescGZIP() []byte {
+ file_proto_checks_checks_proto_rawDescOnce.Do(func() {
+ file_proto_checks_checks_proto_rawDescData = protoimpl.X.CompressGZIP(file_proto_checks_checks_proto_rawDescData)
+ })
+ return file_proto_checks_checks_proto_rawDescData
+}
+
+var file_proto_checks_checks_proto_msgTypes = make([]protoimpl.MessageInfo, 2)
+var file_proto_checks_checks_proto_goTypes = []interface{}{
+ (*CheckRequest)(nil), // 0: checks.v1.CheckRequest
+ (*CheckResponse)(nil), // 1: checks.v1.CheckResponse
+}
+var file_proto_checks_checks_proto_depIdxs = []int32{
+ 0, // 0: checks.v1.CheckService.Check:input_type -> checks.v1.CheckRequest
+ 1, // 1: checks.v1.CheckService.Check:output_type -> checks.v1.CheckResponse
+ 1, // [1:2] is the sub-list for method output_type
+ 0, // [0:1] is the sub-list for method input_type
+ 0, // [0:0] is the sub-list for extension type_name
+ 0, // [0:0] is the sub-list for extension extendee
+ 0, // [0:0] is the sub-list for field type_name
+}
+
+func init() { file_proto_checks_checks_proto_init() }
+func file_proto_checks_checks_proto_init() {
+ if File_proto_checks_checks_proto != nil {
+ return
+ }
+ if !protoimpl.UnsafeEnabled {
+ file_proto_checks_checks_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
+ switch v := v.(*CheckRequest); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_proto_checks_checks_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
+ switch v := v.(*CheckResponse); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ }
+ file_proto_checks_checks_proto_msgTypes[0].OneofWrappers = []interface{}{
+ (*CheckRequest_Text)(nil),
+ (*CheckRequest_Image)(nil),
+ }
+ type x struct{}
+ out := protoimpl.TypeBuilder{
+ File: protoimpl.DescBuilder{
+ GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
+ RawDescriptor: file_proto_checks_checks_proto_rawDesc,
+ NumEnums: 0,
+ NumMessages: 2,
+ NumExtensions: 0,
+ NumServices: 1,
+ },
+ GoTypes: file_proto_checks_checks_proto_goTypes,
+ DependencyIndexes: file_proto_checks_checks_proto_depIdxs,
+ MessageInfos: file_proto_checks_checks_proto_msgTypes,
+ }.Build()
+ File_proto_checks_checks_proto = out.File
+ file_proto_checks_checks_proto_rawDesc = nil
+ file_proto_checks_checks_proto_goTypes = nil
+ file_proto_checks_checks_proto_depIdxs = nil
+}
diff --git a/internal/grpc/checks/checks_grpc.pb.go b/internal/grpc/checks/checks_grpc.pb.go
new file mode 100644
index 0000000..47b3735
--- /dev/null
+++ b/internal/grpc/checks/checks_grpc.pb.go
@@ -0,0 +1,106 @@
+// Code generated by protoc-gen-go-grpc. DO NOT EDIT.
+// versions:
+// - protoc-gen-go-grpc v1.2.0
+// - protoc v5.26.1
+// source: proto/checks/checks.proto
+
+package checks
+
+import (
+ context "context"
+
+ grpc "google.golang.org/grpc"
+ codes "google.golang.org/grpc/codes"
+ status "google.golang.org/grpc/status"
+)
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the grpc package it is being compiled against.
+// Requires gRPC-Go v1.32.0 or later.
+const _ = grpc.SupportPackageIsVersion7
+
+// CheckServiceClient is the client API for CheckService service.
+//
+// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream.
+type CheckServiceClient interface {
+ Check(ctx context.Context, in *CheckRequest, opts ...grpc.CallOption) (*CheckResponse, error)
+}
+
+type checkServiceClient struct {
+ cc grpc.ClientConnInterface
+}
+
+func NewCheckServiceClient(cc grpc.ClientConnInterface) CheckServiceClient {
+ return &checkServiceClient{cc}
+}
+
+func (c *checkServiceClient) Check(ctx context.Context, in *CheckRequest, opts ...grpc.CallOption) (*CheckResponse, error) {
+ out := new(CheckResponse)
+ err := c.cc.Invoke(ctx, "/checks.v1.CheckService/Check", in, out, opts...)
+ if err != nil {
+ return nil, err
+ }
+ return out, nil
+}
+
+// CheckServiceServer is the server API for CheckService service.
+// All implementations must embed UnimplementedCheckServiceServer
+// for forward compatibility
+type CheckServiceServer interface {
+ Check(context.Context, *CheckRequest) (*CheckResponse, error)
+ mustEmbedUnimplementedCheckServiceServer()
+}
+
+// UnimplementedCheckServiceServer must be embedded to have forward compatible implementations.
+type UnimplementedCheckServiceServer struct {
+}
+
+func (UnimplementedCheckServiceServer) Check(context.Context, *CheckRequest) (*CheckResponse, error) {
+ return nil, status.Errorf(codes.Unimplemented, "method Check not implemented")
+}
+func (UnimplementedCheckServiceServer) mustEmbedUnimplementedCheckServiceServer() {}
+
+// UnsafeCheckServiceServer may be embedded to opt out of forward compatibility for this service.
+// Use of this interface is not recommended, as added methods to CheckServiceServer will
+// result in compilation errors.
+type UnsafeCheckServiceServer interface {
+ mustEmbedUnimplementedCheckServiceServer()
+}
+
+func RegisterCheckServiceServer(s grpc.ServiceRegistrar, srv CheckServiceServer) {
+ s.RegisterService(&CheckService_ServiceDesc, srv)
+}
+
+func _CheckService_Check_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+ in := new(CheckRequest)
+ if err := dec(in); err != nil {
+ return nil, err
+ }
+ if interceptor == nil {
+ return srv.(CheckServiceServer).Check(ctx, in)
+ }
+ info := &grpc.UnaryServerInfo{
+ Server: srv,
+ FullMethod: "/checks.v1.CheckService/Check",
+ }
+ handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+ return srv.(CheckServiceServer).Check(ctx, req.(*CheckRequest))
+ }
+ return interceptor(ctx, in, info, handler)
+}
+
+// CheckService_ServiceDesc is the grpc.ServiceDesc for CheckService service.
+// It's only intended for direct use with grpc.RegisterService,
+// and not to be introspected or modified (even as a copy)
+var CheckService_ServiceDesc = grpc.ServiceDesc{
+ ServiceName: "checks.v1.CheckService",
+ HandlerType: (*CheckServiceServer)(nil),
+ Methods: []grpc.MethodDesc{
+ {
+ MethodName: "Check",
+ Handler: _CheckService_Check_Handler,
+ },
+ },
+ Streams: []grpc.StreamDesc{},
+ Metadata: "proto/checks/checks.proto",
+}
diff --git a/internal/healthchecker/healthchecker.go b/internal/healthchecker/healthchecker.go
new file mode 100644
index 0000000..9eda8f2
--- /dev/null
+++ b/internal/healthchecker/healthchecker.go
@@ -0,0 +1,32 @@
+package healthchecker
+
+import (
+ "context"
+
+ "github.com/madsrc/sophrosyne"
+)
+
+type HealthCheckService struct {
+ services []sophrosyne.HealthChecker
+}
+
+func NewHealthcheckService(services []sophrosyne.HealthChecker) (*HealthCheckService, error) {
+ return &HealthCheckService{
+ services: services,
+ }, nil
+}
+
+func (h HealthCheckService) UnauthenticatedHealthcheck(ctx context.Context) bool {
+ for _, service := range h.services {
+ ok, _ := service.Health(ctx)
+ if !ok {
+ return false
+ }
+ }
+ return true
+}
+
+func (h HealthCheckService) AuthenticatedHealthcheck(ctx context.Context) ([]byte, error) {
+ //TODO implement me
+ panic("implement me")
+}
diff --git a/internal/http/http.go b/internal/http/http.go
new file mode 100644
index 0000000..9d324f9
--- /dev/null
+++ b/internal/http/http.go
@@ -0,0 +1,142 @@
+// Sophrosyne
+//
+// Copyright (C) 2024 Mads R. Havmand
+//
+// This program is free software: you can redistribute it and/or modify
+//
+// it under the terms of the GNU Affero General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program. If not, see <https://www.gnu.org/licenses/>.
+package http
+
+import (
+ "context"
+ "crypto/tls"
+ "fmt"
+ "io"
+ "log"
+ "log/slog"
+ "net"
+ "net/http"
+ "strings"
+ "time"
+
+ "github.com/madsrc/sophrosyne"
+)
+
+type Server struct {
+ appConfig *sophrosyne.Config `validate:"required"`
+ mux *http.ServeMux `validate:"required"`
+ validator sophrosyne.Validator `validate:"required"`
+ middleware []func(http.Handler) http.Handler
+ logger *slog.Logger `validate:"required"`
+ http *http.Server `validate:"required"`
+ tracingService sophrosyne.TracingService `validate:"required"`
+ userService sophrosyne.UserService `validate:"required"`
+}
+
+func NewServer(ctx context.Context, appConfig *sophrosyne.Config, validator sophrosyne.Validator, logger *slog.Logger, tracingService sophrosyne.TracingService, userService sophrosyne.UserService, tlsConfig *tls.Config) (*Server, error) {
+ mux := http.NewServeMux()
+ s := Server{appConfig: appConfig,
+ validator: validator,
+ logger: logger,
+ http: &http.Server{
+ Addr: fmt.Sprintf(":%d", appConfig.Server.Port),
+ Handler: mux,
+ BaseContext: func(_ net.Listener) context.Context { return ctx },
+ ReadTimeout: time.Second,
+ WriteTimeout: 10 * time.Second,
+ TLSConfig: tlsConfig,
+ ErrorLog: log.New(NewSlogLoggerAdapter(logger), "", 0),
+ },
+ mux: mux,
+ tracingService: tracingService,
+ userService: userService,
+ }
+
+ if err := s.validator.Validate(s); err != nil {
+ return nil, err
+ }
+
+ return &s, nil
+}
+
+func (s *Server) Start() error {
+ s.logger.Info("Starting server", "port", s.appConfig.Server.Port)
+ return s.http.ListenAndServeTLS("", "")
+}
+
+func (s *Server) Shutdown(ctx context.Context) error {
+ s.logger.InfoContext(ctx, "Shutting down server")
+ return s.http.Shutdown(ctx)
+}
+
+func (s *Server) Handle(path string, handler http.Handler) {
+ s.mux.Handle(path, handler)
+}
+
+func RPCHandler(logger *slog.Logger, rpcService sophrosyne.RPCServer) http.Handler {
+	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		if body, err := io.ReadAll(r.Body); err != nil { // TODO: limit body size (http.MaxBytesReader)
+			logger.ErrorContext(r.Context(), "error reading rpc request body", "error", err)
+			WriteInternalServerError(r.Context(), w, logger)
+		} else if b, err := rpcService.HandleRPCRequest(r.Context(), body); err != nil {
+			logger.ErrorContext(r.Context(), "error handling rpc request", "error", err)
+			WriteInternalServerError(r.Context(), w, logger)
+		} else {
+			WriteResponse(r.Context(), w, http.StatusOK, "application/json", b, logger)
+		}
+	})
+}
+
+func HealthcheckHandler(logger *slog.Logger, healthcheckService sophrosyne.HealthCheckService) http.Handler {
+	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		if healthcheckService.UnauthenticatedHealthcheck(r.Context()) {
+			WriteResponse(r.Context(), w, http.StatusOK, "application/json", nil, logger)
+			return
+		}
+		// Unhealthy: advise clients to retry shortly.
+		// (Redundant trailing return removed; staticcheck S1023.)
+		w.Header().Set("Retry-After", "5")
+		WriteResponse(r.Context(), w, http.StatusServiceUnavailable, "application/json", nil, logger)
+	})
+}
+
+func WriteResponse(ctx context.Context, w http.ResponseWriter, status int, contentType string, data []byte, logger *slog.Logger) {
+ w.Header().Set("Content-Type", contentType)
+ w.WriteHeader(status)
+ _, err := w.Write(data)
+ if err != nil {
+ logger.ErrorContext(ctx, "unable to write response", "error", err)
+ }
+}
+
+func WriteInternalServerError(ctx context.Context, w http.ResponseWriter, logger *slog.Logger) {
+ logger.ErrorContext(ctx, "returning internal server error")
+ WriteResponse(ctx, w, http.StatusInternalServerError, "text/plain", []byte("Internal Server Error"), logger)
+}
+
+// SlogLoggerAdapter adapts a *slog.Logger to implement the Log interface.
+type SlogLoggerAdapter struct {
+ slogLogger *slog.Logger
+}
+
+// NewSlogLoggerAdapter creates a new SlogLoggerAdapter.
+func NewSlogLoggerAdapter(logger *slog.Logger) *SlogLoggerAdapter {
+ return &SlogLoggerAdapter{slogLogger: logger}
+}
+
+// Write implements the Write method of the Log interface.
+func (a *SlogLoggerAdapter) Write(p []byte) (n int, err error) {
+ // Use the slog.Logger to log the message.
+ a.slogLogger.Error("server error", "error", strings.TrimRight(string(p), "\n"))
+ return len(p), nil
+}
diff --git a/internal/http/middleware/middleware.go b/internal/http/middleware/middleware.go
new file mode 100644
index 0000000..ab54641
--- /dev/null
+++ b/internal/http/middleware/middleware.go
@@ -0,0 +1,139 @@
+package middleware
+
+import (
+ "context"
+ "encoding/base64"
+ "log/slog"
+ "net/http"
+ "strings"
+ "time"
+
+ "github.com/madsrc/sophrosyne"
+
+ ownHttp "github.com/madsrc/sophrosyne/internal/http"
+)
+
+// PanicCatcher is middleware that recovers from panics raised by downstream
+// handlers.
+//
+// When a panic is recovered it is counted via the metric service, logged at
+// Error level, and an opaque 500 Internal Server Error is written to the
+// client via [ownHttp.WriteInternalServerError].
+//
+// This middleware should be the first middleware in the chain so it also
+// covers panics raised by middleware registered after it.
+func PanicCatcher(logger *slog.Logger, metricService sophrosyne.MetricService, next http.Handler) http.Handler {
+	logger.Debug("Creating PanicCatcher middleware")
+	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		logger.DebugContext(r.Context(), "Entering PanicCatcher middleware")
+		defer func() {
+			logger.DebugContext(r.Context(), "Executing deferred function in PanicCatcher middleware")
+			if err := recover(); err != nil {
+				metricService.RecordPanic(r.Context())
+				logger.ErrorContext(r.Context(), "Panic encountered", "error", err)
+				ownHttp.WriteInternalServerError(r.Context(), w, logger)
+			}
+		}()
+		next.ServeHTTP(w, r)
+		logger.DebugContext(r.Context(), "Exiting PanicCatcher middleware")
+	})
+
+}
+
+// SetupTracing wraps next in the tracing service's HTTP instrumentation so
+// every request is traced under the operation name "incoming HTTP request".
+//
+// The original wrapped next in an extra http.HandlerFunc closure that only
+// forwarded the call; next can be passed directly.
+func SetupTracing(tracingService sophrosyne.TracingService, next http.Handler) http.Handler {
+	return tracingService.NewHTTPHandler("incoming HTTP request", next)
+}
+
+// Authentication enforces bearer-token authentication for every request whose
+// path does not start with one of the entries in exceptions.
+//
+// The Authorization header must carry "Bearer <base64 token>". The decoded
+// token is hashed with sophrosyne.ProtectToken and resolved via the user
+// service; on success the matched user — with its token field blanked — is
+// stored in the request context under sophrosyne.UserContextKey. All failure
+// paths respond 401 with no body.
+func Authentication(exceptions []string, config *sophrosyne.Config, userService sophrosyne.UserService, logger *slog.Logger, next http.Handler) http.Handler {
+	logger.Debug("Creating Authentication middleware")
+	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		logger.DebugContext(r.Context(), "Entering Authentication middleware")
+		defer logger.DebugContext(r.Context(), "Exiting Authentication middleware")
+
+		// Check if the request path is in the exceptions list
+		for _, path := range exceptions {
+			if strings.HasPrefix(r.URL.Path, path) {
+				logger.DebugContext(r.Context(), "request path is in authentication exceptions list", "matched_exception_entry", path, "request_path", r.URL.Path)
+				next.ServeHTTP(w, r)
+				return
+			}
+		}
+
+		// Extract authentication header
+		authHeader := r.Header.Get("Authorization")
+		if !strings.HasPrefix(authHeader, "Bearer ") {
+			logger.DebugContext(r.Context(), "unable to extract token from Authorization header", "header", authHeader)
+			logger.InfoContext(r.Context(), "authentication", "result", "failed")
+			ownHttp.WriteResponse(r.Context(), w, http.StatusUnauthorized, "text/plain", nil, logger)
+			return
+		}
+
+		// Extract token
+		token, err := base64.StdEncoding.DecodeString(strings.TrimPrefix(authHeader, "Bearer "))
+		if err != nil {
+			// NOTE(review): this debug log includes the (partially) decoded
+			// token bytes; confirm that leaking candidate credentials into
+			// logs is acceptable here.
+			logger.DebugContext(r.Context(), "unable to decode token", "token", token, "error", err)
+			logger.InfoContext(r.Context(), "authentication", "result", "failed")
+			ownHttp.WriteResponse(r.Context(), w, http.StatusUnauthorized, "text/plain", nil, logger)
+			return
+		}
+
+		// Hash the token using ProtectToken
+		hashedToken := sophrosyne.ProtectToken(token, config)
+
+		// Validate token
+		user, err := userService.GetUserByToken(r.Context(), hashedToken)
+		if err != nil {
+			logger.DebugContext(r.Context(), "unable to validate token", "error", err)
+			logger.InfoContext(r.Context(), "authentication", "result", "failed")
+			ownHttp.WriteResponse(r.Context(), w, http.StatusUnauthorized, "text/plain", nil, logger)
+			return
+		}
+		user.Token = []byte{} // Overwrite the token, so we don't leak it into the context
+		ctx := r.Context()
+		ctx = context.WithValue(ctx, sophrosyne.UserContextKey{}, &user)
+		r = r.WithContext(ctx)
+		// NOTE(review): success uses message "authenticated" while failures
+		// use "authentication"; confirm whether the inconsistency is intended.
+		logger.InfoContext(r.Context(), "authenticated", "result", "success")
+
+		next.ServeHTTP(w, r)
+	})
+}
+
+// responseWrapper decorates an http.ResponseWriter so the status code of the
+// response can be inspected after the handler has run.
+type responseWrapper struct {
+	http.ResponseWriter
+	status int
+	wroteHeader bool
+}
+
+// wrapResponseWriter wraps w with no status recorded yet.
+func wrapResponseWriter(w http.ResponseWriter) *responseWrapper {
+	return &responseWrapper{ResponseWriter: w}
+}
+
+// WriteHeader records the first status code written and forwards it to the
+// underlying ResponseWriter. Later calls are ignored, mirroring the net/http
+// contract that only the first WriteHeader takes effect.
+// (The original ended with a redundant bare return, flagged by staticcheck.)
+func (w *responseWrapper) WriteHeader(status int) {
+	if w.wroteHeader {
+		return
+	}
+	w.status = status
+	w.ResponseWriter.WriteHeader(status)
+	w.wroteHeader = true
+}
+
+// Status reports the recorded status code; zero if WriteHeader was never
+// called explicitly.
+func (w *responseWrapper) Status() int {
+	return w.status
+}
+
+// RequestLogging logs one line per request served: remote address, method,
+// path, user agent, response status and wall-clock duration in milliseconds.
+func RequestLogging(logger *slog.Logger, next http.Handler) http.Handler {
+	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		begin := time.Now()
+		wrapped := wrapResponseWriter(w)
+		defer func() {
+			// BUG FIX: the original logged time.Since(begin)+time.Millisecond,
+			// which ADDS one millisecond to the duration instead of converting
+			// it to milliseconds as the "duration_ms" key promises. The status
+			// recorded by the wrapper was also never logged.
+			logger.InfoContext(r.Context(), "request served", "remote", r.RemoteAddr, "method", r.Method, "path", r.URL.Path, "user_agent", r.UserAgent(), "status", wrapped.Status(), "duration_ms", time.Since(begin).Milliseconds())
+		}()
+		next.ServeHTTP(wrapped, r)
+	})
+}
diff --git a/internal/migrate/migrate.go b/internal/migrate/migrate.go
new file mode 100644
index 0000000..6f7ce8d
--- /dev/null
+++ b/internal/migrate/migrate.go
@@ -0,0 +1,52 @@
+package migrate
+
+import (
+ "embed"
+ "fmt"
+
+ "github.com/golang-migrate/migrate/v4"
+ _ "github.com/golang-migrate/migrate/v4/database/pgx/v5"
+ "github.com/golang-migrate/migrate/v4/source/iofs"
+
+ "github.com/madsrc/sophrosyne"
+)
+
+var ErrNoChange = migrate.ErrNoChange
+
+//go:embed migrations
+var fs embed.FS
+
+// MigrationService wraps golang-migrate over the embedded migrations
+// directory.
+type MigrationService struct {
+	migrate *migrate.Migrate
+}
+
+// NewMigrationService builds a migrator using the embedded iofs source and
+// the pgx/v5 database driver (the "pgx5" URL scheme registered by the
+// golang-migrate pgx/v5 package).
+//
+// NOTE(review): the DSN is assembled with fmt.Sprintf and does not URL-escape
+// the user or password; credentials containing reserved characters would
+// yield an invalid URL — confirm and consider net/url escaping.
+func NewMigrationService(config *sophrosyne.Config) (*MigrationService, error) {
+	d, err := iofs.New(fs, "migrations")
+	if err != nil {
+		return nil, err
+	}
+
+	m, err := migrate.NewWithSourceInstance("iofs", d, fmt.Sprintf("pgx5://%s:%s@%s:%d/%s", config.Database.User, config.Database.Password, config.Database.Host, config.Database.Port, config.Database.Name))
+	if err != nil {
+		return nil, err
+	}
+	return &MigrationService{
+		migrate: m,
+	}, nil
+}
+
+// Up applies all pending migrations. It returns migrate.ErrNoChange
+// (re-exported as ErrNoChange) when the database is already up to date.
+func (m *MigrationService) Up() error {
+	return m.migrate.Up()
+}
+
+// Down rolls back all applied migrations.
+func (m *MigrationService) Down() error {
+	return m.migrate.Down()
+}
+
+// Close releases the migration source and database connections, returning
+// their errors separately.
+func (m *MigrationService) Close() (source error, database error) {
+	return m.migrate.Close()
+}
+
+// Versions reports the currently applied migration version and whether the
+// database is in a dirty (partially migrated) state.
+func (m *MigrationService) Versions() (version uint, dirty bool, err error) {
+	return m.migrate.Version()
+}
diff --git a/internal/migrate/migrations/000001_create_xid.down.sql b/internal/migrate/migrations/000001_create_xid.down.sql
new file mode 100644
index 0000000..303ea45
--- /dev/null
+++ b/internal/migrate/migrations/000001_create_xid.down.sql
@@ -0,0 +1,19 @@
+-- Reverses 000001_create_xid.up.sql: drop the helper functions first (they
+-- depend on the xid domain), then the sequence, then the domain itself.
+DROP FUNCTION public.xid_counter(_xid public.xid);
+
+DROP FUNCTION public.xid_pid(_xid public.xid);
+
+DROP FUNCTION public.xid_machine(_xid public.xid);
+
+DROP FUNCTION public.xid_time(_xid public.xid);
+
+-- BUG FIX: DROP FUNCTION takes only argument types, not DEFAULT clauses.
+DROP FUNCTION xid(_at TIMESTAMPTZ);
+
+DROP FUNCTION public.xid_decode(_xid public.xid);
+
+DROP FUNCTION public.xid_encode(_id int[]);
+
+DROP FUNCTION public._xid_machine_id();
+
+-- BUG FIX: DROP SEQUENCE accepts no sequence options; the original statement
+-- carried MINVALUE/MAXVALUE/CYCLE clauses, which are a syntax error.
+DROP SEQUENCE public.xid_serial;
+
+DROP DOMAIN public.xid;
diff --git a/internal/migrate/migrations/000001_create_xid.up.sql b/internal/migrate/migrations/000001_create_xid.up.sql
new file mode 100644
index 0000000..c0a9bbe
--- /dev/null
+++ b/internal/migrate/migrations/000001_create_xid.up.sql
@@ -0,0 +1,165 @@
+--Copyright 2022 Rasmus Holm
+--
+--Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+--
+--The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+--
+--THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+--ref: https://stackoverflow.com/a/48382296
+DO $$ BEGIN
+ CREATE DOMAIN public.xid AS CHAR(20) CHECK (VALUE ~ '^[a-v0-9]{20}$');
+EXCEPTION
+ WHEN duplicate_object THEN null;
+END $$;
+
+CREATE SEQUENCE IF NOT EXISTS public.xid_serial MINVALUE 0 MAXVALUE 16777215 CYCLE; -- ((255<<16) + (255<<8) + 255))
+
+SELECT setval('xid_serial', (random() * 16777215)::INT); -- ((255<<16) + (255<<8) + 255))
+
+CREATE OR REPLACE FUNCTION public._xid_machine_id()
+ RETURNS INT
+ LANGUAGE plpgsql
+ IMMUTABLE
+AS
+$$
+DECLARE
+BEGIN
+ RETURN (SELECT system_identifier & 16777215 FROM pg_control_system());
+END
+$$;
+
+CREATE OR REPLACE FUNCTION public.xid_encode(_id int[])
+ RETURNS public.xid
+ LANGUAGE plpgsql
+AS
+$$
+DECLARE
+ _encoding CHAR(1)[] = '{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t, u, v}';
+BEGIN
+ RETURN _encoding[1 + (_id[1] >> 3)]
+ || _encoding[1 + ((_id[2] >> 6) & 31 | (_id[1] << 2) & 31)]
+ || _encoding[1 + ((_id[2] >> 1) & 31)]
+ || _encoding[1 + ((_id[3] >> 4) & 31 | (_id[2] << 4) & 31)]
+ || _encoding[1 + (_id[4] >> 7 | (_id[3] << 1) & 31)]
+ || _encoding[1 + ((_id[4] >> 2) & 31)]
+ || _encoding[1 + (_id[5] >> 5 | (_id[4] << 3) & 31)]
+ || _encoding[1 + (_id[5] & 31)]
+ || _encoding[1 + (_id[6] >> 3)]
+ || _encoding[1 + ((_id[7] >> 6) & 31 | (_id[6] << 2) & 31)]
+ || _encoding[1 + ((_id[7] >> 1) & 31)]
+ || _encoding[1 + ((_id[8] >> 4) & 31 | (_id[7] << 4) & 31)]
+ || _encoding[1 + (_id[9] >> 7 | (_id[8] << 1) & 31)]
+ || _encoding[1 + ((_id[9] >> 2) & 31)]
+ || _encoding[1 + ((_id[10] >> 5) | (_id[9] << 3) & 31)]
+ || _encoding[1 + (_id[10] & 31)]
+ || _encoding[1 + (_id[11] >> 3)]
+ || _encoding[1 + ((_id[12] >> 6) & 31 | (_id[11] << 2) & 31)]
+ || _encoding[1 + ((_id[12] >> 1) & 31)]
+ || _encoding[1 + ((_id[12] << 4) & 31)];
+END;
+$$;
+
+CREATE OR REPLACE FUNCTION public.xid_decode(_xid public.xid)
+ RETURNS int[]
+ LANGUAGE plpgsql
+AS
+$$
+DECLARE
+ _dec int[] = '{255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255}';
+ _b bytea;
+BEGIN
+ _b := _xid::BYTEA;
+ return ARRAY [
+ ((_dec[get_byte(_b, 0)] << 3) | (_dec[get_byte(_b, 1)] >> 2)) & 255,
+ ((_dec[get_byte(_b, 1)] << 6) | (_dec[get_byte(_b, 2)] << 1) | (_dec[get_byte(_b, 3)] >> 4)) & 255,
+ ((_dec[get_byte(_b, 3)] << 4) | (_dec[get_byte(_b, 4)] >> 1)) & 255,
+ ((_dec[get_byte(_b, 4)] << 7) | (_dec[get_byte(_b, 5)] << 2) | (_dec[get_byte(_b, 6)] >> 3)) & 255,
+ ((_dec[get_byte(_b, 6)] << 5) | (_dec[get_byte(_b, 7)])) & 255,
+ ((_dec[get_byte(_b, 8)] << 3) | (_dec[get_byte(_b, 9)] >> 2)) & 255,
+ ((_dec[get_byte(_b, 9)] << 6) | (_dec[get_byte(_b, 10)] << 1) | (_dec[get_byte(_b, 11)] >> 4)) & 255,
+ ((_dec[get_byte(_b, 11)] << 4) | (_dec[get_byte(_b, 12)] >> 1)) & 255,
+ ((_dec[get_byte(_b, 12)] << 7) | (_dec[get_byte(_b, 13)] << 2) | (_dec[get_byte(_b, 14)] >> 3)) & 255,
+ ((_dec[get_byte(_b, 14)] << 5) | (_dec[get_byte(_b, 15)])) & 255,
+ ((_dec[get_byte(_b, 16)] << 3) | (_dec[get_byte(_b, 17)] >> 2)) & 255,
+ ((_dec[get_byte(_b, 17)] << 6) | (_dec[get_byte(_b, 18)] << 1) | (_dec[get_byte(_b, 19)] >> 4)) & 255
+ ];
+END;
+$$;
+
+CREATE OR REPLACE FUNCTION xid(_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP)
+ RETURNS public.xid
+ LANGUAGE plpgsql
+AS
+$$
+DECLARE
+ _t INT;
+ _m INT;
+ _p INT;
+ _c INT;
+BEGIN
+ _t := floor(EXTRACT(epoch FROM _at));
+ _m := _xid_machine_id();
+ _p := pg_backend_pid();
+ _c := nextval('xid_serial')::INT;
+
+ return public.xid_encode(ARRAY [
+ (_t >> 24) & 255, (_t >> 16) & 255, (_t >> 8) & 255 , _t & 255,
+ (_m >> 16) & 255, (_m >> 8) & 255 , _m & 255,
+ (_p >> 8) & 255, _p & 255,
+ (_c >> 16) & 255, (_c >> 8) & 255 , _c & 255
+ ]);
+END;
+$$;
+
+CREATE OR REPLACE FUNCTION public.xid_time(_xid public.xid)
+ RETURNS TIMESTAMPTZ
+ LANGUAGE plpgsql
+AS
+$$
+DECLARE
+ _id int[];
+BEGIN
+ _id := public.xid_decode(_xid);
+ return to_timestamp((_id[1] << 24)::BIGINT + (_id[2] << 16) + (_id[3] << 8) + (_id[4]));
+END;
+$$;
+
+CREATE OR REPLACE FUNCTION public.xid_machine(_xid public.xid)
+ RETURNS INT[]
+ LANGUAGE plpgsql
+AS
+$$
+DECLARE
+ _id int[];
+BEGIN
+ _id := public.xid_decode(_xid);
+ return ARRAY [_id[5], _id[6], _id[7]];
+END;
+$$;
+
+CREATE OR REPLACE FUNCTION public.xid_pid(_xid public.xid)
+ RETURNS INT
+ LANGUAGE plpgsql
+AS
+$$
+DECLARE
+ _id int[];
+BEGIN
+ _id := public.xid_decode(_xid);
+ return (_id[8] << 8) + (_id[9]);
+END;
+$$;
+
+CREATE OR REPLACE FUNCTION public.xid_counter(_xid public.xid)
+ RETURNS INT
+ LANGUAGE plpgsql
+AS
+$$
+DECLARE
+ _id int[];
+BEGIN
+ _id := public.xid_decode(_xid);
+ return (_id[10] << 16) + (_id[11] << 8) + (_id[12]);
+END;
+$$;
diff --git a/internal/migrate/migrations/000002_create_users_table.down.sql b/internal/migrate/migrations/000002_create_users_table.down.sql
new file mode 100644
index 0000000..c99ddcd
--- /dev/null
+++ b/internal/migrate/migrations/000002_create_users_table.down.sql
@@ -0,0 +1 @@
+DROP TABLE IF EXISTS users;
diff --git a/internal/migrate/migrations/000002_create_users_table.up.sql b/internal/migrate/migrations/000002_create_users_table.up.sql
new file mode 100644
index 0000000..090cebe
--- /dev/null
+++ b/internal/migrate/migrations/000002_create_users_table.up.sql
@@ -0,0 +1,10 @@
+-- Users of the system. Rows are soft-deleted by stamping deleted_at rather
+-- than being removed.
+CREATE TABLE IF NOT EXISTS users(
+	id public.xid PRIMARY KEY DEFAULT xid(),
+	name VARCHAR (50) UNIQUE NOT NULL,
+	email VARCHAR (300) UNIQUE NOT NULL,
+	token BYTEA NOT NULL, -- NOTE(review): presumably a hashed API token; confirm against the application code.
+	is_admin BOOLEAN NOT NULL DEFAULT FALSE,
+	created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+	updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+	deleted_at TIMESTAMPTZ
+);
diff --git a/internal/migrate/migrations/000003_checks_and_profiles.down.sql b/internal/migrate/migrations/000003_checks_and_profiles.down.sql
new file mode 100644
index 0000000..7eb9ac4
--- /dev/null
+++ b/internal/migrate/migrations/000003_checks_and_profiles.down.sql
@@ -0,0 +1,3 @@
+-- Reverses 000003_checks_and_profiles.up.sql. The join table must be dropped
+-- first because it references both parent tables.
+DROP TABLE IF EXISTS profiles_checks;
+DROP TABLE IF EXISTS checks;
+DROP TABLE IF EXISTS profiles;
diff --git a/internal/migrate/migrations/000003_checks_and_profiles.up.sql b/internal/migrate/migrations/000003_checks_and_profiles.up.sql
new file mode 100644
index 0000000..50c9557
--- /dev/null
+++ b/internal/migrate/migrations/000003_checks_and_profiles.up.sql
@@ -0,0 +1,23 @@
+-- Profiles group checks together. Soft-deleted via deleted_at.
+CREATE TABLE IF NOT EXISTS profiles(
+	id public.xid PRIMARY KEY DEFAULT xid(),
+	name VARCHAR (50) UNIQUE NOT NULL,
+	created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+	updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+	deleted_at TIMESTAMPTZ
+);
+
+-- Checks and the upstream service URLs they call. Soft-deleted via deleted_at.
+CREATE TABLE IF NOT EXISTS checks(
+	id public.xid PRIMARY KEY DEFAULT xid(),
+	name VARCHAR (50) UNIQUE NOT NULL,
+	upstream_services TEXT[],
+	created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+	updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+	deleted_at TIMESTAMPTZ
+);
+
+
+-- Many-to-many association between profiles and checks.
+-- NOTE(review): profile_id cascades on delete but check_id does not — confirm
+-- whether blocking deletion of checks that are still referenced is intended.
+CREATE TABLE IF NOT EXISTS profiles_checks(
+	profile_id public.xid REFERENCES profiles (id) ON UPDATE CASCADE ON DELETE CASCADE,
+	check_id public.xid REFERENCES checks (id) ON UPDATE CASCADE,
+	CONSTRAINT profiles_checks_pkey PRIMARY KEY (profile_id, check_id)
+);
diff --git a/internal/migrate/migrations/000004_add_default_profile_to_users.down.sql b/internal/migrate/migrations/000004_add_default_profile_to_users.down.sql
new file mode 100644
index 0000000..4eeeabd
--- /dev/null
+++ b/internal/migrate/migrations/000004_add_default_profile_to_users.down.sql
@@ -0,0 +1,7 @@
+-- Reverses 000004_add_default_profile_to_users.up.sql.
+-- Drop the foreign key constraint
+ALTER TABLE users
+	DROP CONSTRAINT IF EXISTS fk_default_profile;
+
+-- Drop the default_profile column
+ALTER TABLE users
+	DROP COLUMN IF EXISTS default_profile;
diff --git a/internal/migrate/migrations/000004_add_default_profile_to_users.up.sql b/internal/migrate/migrations/000004_add_default_profile_to_users.up.sql
new file mode 100644
index 0000000..b9b8a76
--- /dev/null
+++ b/internal/migrate/migrations/000004_add_default_profile_to_users.up.sql
@@ -0,0 +1,7 @@
+-- Step 1: Add the default_profile column. It is nullable, so existing rows
+-- remain valid without a backfill.
+ALTER TABLE users
+	ADD COLUMN default_profile public.xid;
+
+-- Step 2: Add a foreign key constraint to the default_profile column
+ALTER TABLE users
+	ADD CONSTRAINT fk_default_profile FOREIGN KEY (default_profile) REFERENCES profiles (id);
diff --git a/internal/otel/otel.go b/internal/otel/otel.go
new file mode 100644
index 0000000..a6fe5c5
--- /dev/null
+++ b/internal/otel/otel.go
@@ -0,0 +1,198 @@
+package otel
+
+import (
+ "context"
+ "errors"
+ "net/http"
+ "time"
+
+ "go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp"
+ "go.opentelemetry.io/otel"
+ "go.opentelemetry.io/otel/attribute"
+ "go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp"
+ "go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp"
+ "go.opentelemetry.io/otel/exporters/stdout/stdoutmetric"
+ "go.opentelemetry.io/otel/exporters/stdout/stdouttrace"
+ "go.opentelemetry.io/otel/metric"
+ "go.opentelemetry.io/otel/propagation"
+ sdkMetric "go.opentelemetry.io/otel/sdk/metric"
+ "go.opentelemetry.io/otel/sdk/resource"
+ sdkTrace "go.opentelemetry.io/otel/sdk/trace"
+ semconv "go.opentelemetry.io/otel/semconv/v1.4.0"
+ "go.opentelemetry.io/otel/trace"
+
+ "github.com/madsrc/sophrosyne"
+)
+
+// SetupOTelSDK bootstraps the OpenTelemetry pipeline.
+// If it does not return an error, make sure to call shutdown for proper cleanup.
+//
+// Tracing and metrics providers are only installed when enabled in config;
+// the propagator is always installed. On a provider setup error, both the
+// partial shutdown function and the error are returned so already-registered
+// cleanups can still run.
+func SetupOTelSDK(ctx context.Context, config *sophrosyne.Config) (shutdown func(context.Context) error, err error) {
+	var shutdownFuncs []func(context.Context) error
+
+	// shutdown calls cleanup functions registered via shutdownFuncs.
+	// The errors from the calls are joined.
+	// Each registered cleanup will be invoked once.
+	shutdown = func(ctx context.Context) error {
+		var err error
+		for _, fn := range shutdownFuncs {
+			err = errors.Join(err, fn(ctx))
+		}
+		shutdownFuncs = nil
+		return err
+	}
+
+	// handleErr calls shutdown for cleanup and makes sure that all errors are returned.
+	handleErr := func(inErr error) {
+		err = errors.Join(inErr, shutdown(ctx))
+	}
+
+	// Resource describing this service; attached to all emitted telemetry.
+	res, err := resource.New(ctx, resource.WithAttributes(
+		semconv.ServiceNameKey.String("sophrosyne"),
+		semconv.ServiceVersionKey.String("0.0.0"),
+	),
+	)
+	if err != nil {
+		return nil, err
+	}
+
+	// Set up propagator.
+	prop := newPropagator()
+	otel.SetTextMapPropagator(prop)
+
+	if config.Tracing.Enabled {
+		// Set up trace provider.
+		tracerProvider, err := newTraceProvider(ctx, config, res)
+		if err != nil {
+			handleErr(err)
+			return shutdown, err
+		}
+		shutdownFuncs = append(shutdownFuncs, tracerProvider.Shutdown)
+		otel.SetTracerProvider(tracerProvider)
+	}
+
+	if config.Metrics.Enabled {
+		// Set up meter provider.
+		meterProvider, err := newMeterProvider(ctx, config, res)
+		if err != nil {
+			handleErr(err)
+			return shutdown, err
+		}
+		shutdownFuncs = append(shutdownFuncs, meterProvider.Shutdown)
+		otel.SetMeterProvider(meterProvider)
+	}
+
+	// Naked return: yields the shutdown closure and a nil error.
+	return
+}
+
+// newPropagator returns the composite propagator used for context
+// propagation: W3C TraceContext plus W3C Baggage.
+func newPropagator() propagation.TextMapPropagator {
+	return propagation.NewCompositeTextMapPropagator(
+		propagation.TraceContext{},
+		propagation.Baggage{},
+	)
+}
+
+// newTraceProvider builds a batching trace provider. Spans are exported over
+// OTLP/HTTP when configured, otherwise to stdout; the batch timeout comes
+// from config (seconds).
+func newTraceProvider(ctx context.Context, config *sophrosyne.Config, res *resource.Resource) (*sdkTrace.TracerProvider, error) {
+	var traceExporter sdkTrace.SpanExporter
+	var err error
+	if config.Tracing.Output == sophrosyne.OtelOutputHTTP {
+		traceExporter, err = otlptracehttp.New(ctx)
+	} else {
+		traceExporter, err = stdouttrace.New()
+	}
+	if err != nil {
+		return nil, err
+	}
+
+	traceProvider := sdkTrace.NewTracerProvider(
+		sdkTrace.WithBatcher(traceExporter,
+			sdkTrace.WithBatchTimeout(time.Duration(config.Tracing.Batch.Timeout)*time.Second)),
+		sdkTrace.WithResource(res),
+	)
+	return traceProvider, nil
+}
+
+// newMeterProvider builds a periodic-reader meter provider. Metrics are
+// exported over OTLP/HTTP when configured, otherwise to stdout; the export
+// interval comes from config (seconds).
+func newMeterProvider(ctx context.Context, config *sophrosyne.Config, res *resource.Resource) (*sdkMetric.MeterProvider, error) {
+	var metricExporter sdkMetric.Exporter
+	var err error
+	if config.Metrics.Output == sophrosyne.OtelOutputHTTP {
+		metricExporter, err = otlpmetrichttp.New(ctx)
+	} else {
+		metricExporter, err = stdoutmetric.New()
+	}
+	if err != nil {
+		return nil, err
+	}
+
+	meterProvider := sdkMetric.NewMeterProvider(
+		sdkMetric.WithReader(sdkMetric.NewPeriodicReader(metricExporter,
+			sdkMetric.WithInterval(time.Duration(config.Metrics.Interval)*time.Second))),
+		sdkMetric.WithResource(res),
+	)
+	return meterProvider, nil
+}
+
+// AttrString is a convenient wrapper around attribute.String.
+//
+// It should be used to set attribute strings on spans.
+//
+// Example:
+//
+//	ctx, span := tracer.Start(r.Context(), "internal/v1/users/get-user")
+//	defer span.End()
+//
+//	span.SetAttributes(otel.AttrString("custom", "value"))
+//
+// This sets the attribute "custom" with the value "value" on the span.
+// It is a pure pass-through and adds no behavior of its own.
+func AttrString(key, value string) attribute.KeyValue {
+	return attribute.String(key, value)
+}
+
+// Span wraps an OpenTelemetry trace.Span behind the project's span
+// abstraction.
+type Span struct {
+	span trace.Span
+}
+
+// End completes the underlying span.
+func (s *Span) End() {
+	s.span.End()
+}
+
+// OtelService exposes OpenTelemetry tracing and metric helpers to the rest of
+// the application.
+type OtelService struct {
+	panicMeter metric.Meter
+	panicCnt metric.Int64Counter
+}
+
+// NewOtelService constructs the service and registers the panic counter on
+// the global meter provider.
+func NewOtelService() (*OtelService, error) {
+	panicMeter := otel.Meter("panics")
+	// UCUM annotation units use single braces; the original "{{total}}" is
+	// not a valid annotation.
+	panicCnt, err := panicMeter.Int64Counter("panics",
+		metric.WithDescription("Number of panics"),
+		metric.WithUnit("{total}"))
+	if err != nil {
+		return nil, err
+	}
+	return &OtelService{panicMeter: panicMeter, panicCnt: panicCnt}, nil
+}
+
+// RecordPanic increments the panic counter metric by one.
+func (o *OtelService) RecordPanic(ctx context.Context) {
+	o.panicCnt.Add(ctx, 1)
+}
+
+// StartSpan begins a new span named name and returns the derived context
+// together with the span wrapped in the project's Span type.
+func (o *OtelService) StartSpan(ctx context.Context, name string) (context.Context, sophrosyne.Span) {
+	ctx, span := otel.Tracer("internal/otel").Start(ctx, name)
+	return ctx, &Span{span: span}
+}
+
+// GetTraceID returns the hex-encoded trace ID of the span in ctx, or the
+// empty string when ctx carries no valid trace.
+func (o *OtelService) GetTraceID(ctx context.Context) string {
+	spanCtx := trace.SpanContextFromContext(ctx)
+	if spanCtx.HasTraceID() {
+		traceID := spanCtx.TraceID()
+		return traceID.String()
+	}
+	return ""
+}
+
+// NewHTTPHandler wraps handler in otelhttp instrumentation under the given
+// operation name.
+func (o *OtelService) NewHTTPHandler(operation string, handler http.Handler) http.Handler {
+	return otelhttp.NewHandler(handler, operation)
+}
+
+// WithRouteTag annotates spans produced by handler with the route pattern.
+func (o *OtelService) WithRouteTag(pattern string, handler http.Handler) http.Handler {
+	return otelhttp.WithRouteTag(pattern, handler)
+}
diff --git a/internal/pgx/checks.go b/internal/pgx/checks.go
new file mode 100644
index 0000000..c95221b
--- /dev/null
+++ b/internal/pgx/checks.go
@@ -0,0 +1,261 @@
+package pgx
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "log/slog"
+ "net/url"
+ "time"
+
+ "github.com/jackc/pgx/v5"
+ "github.com/jackc/pgx/v5/pgxpool"
+
+ "github.com/madsrc/sophrosyne"
+)
+
+// checkDbEntry is the database row shape scanned from the checks table,
+// including the aggregated profile names produced by the GetCheck query.
+type checkDbEntry struct {
+	ID string `db:"id"`
+	Name string `db:"name"`
+	UpstreamServices []string `db:"upstream_services"`
+	CreatedAt time.Time `db:"created_at"`
+	UpdatedAt time.Time `db:"updated_at"`
+	DeletedAt *time.Time `db:"deleted_at"`
+	Profiles []string `db:"profiles"`
+}
+
+// CheckService provides CRUD operations for checks backed by PostgreSQL.
+type CheckService struct {
+	config *sophrosyne.Config
+	pool *pgxpool.Pool
+	logger *slog.Logger
+}
+
+// NewCheckService creates a CheckService with its own pgx connection pool.
+func NewCheckService(ctx context.Context, config *sophrosyne.Config, logger *slog.Logger) (*CheckService, error) {
+	pool, err := newPool(ctx, config, logger)
+	if err != nil {
+		return nil, err
+	}
+	ps := &CheckService{
+		config: config,
+		pool: pool,
+		logger: logger,
+	}
+
+	return ps, nil
+}
+
+// nameToID resolves a check name to its ID.
+//
+// NOTE(review): the lookup does not filter on deleted_at, so a soft-deleted
+// check still resolves to an ID (GetCheck will then report ErrNotFound) —
+// confirm this is intended.
+func (p *CheckService) nameToID(ctx context.Context, name string) (string, error) {
+	row := p.pool.QueryRow(ctx, `SELECT id FROM checks WHERE name = $1 LIMIT 1`, name)
+	var id string
+	err := row.Scan(&id)
+	if err != nil {
+		return "", err
+	}
+	return id, nil
+}
+
+// GetCheck fetches a non-deleted check by ID, including the names of the
+// non-deleted profiles associated with it. Returns sophrosyne.ErrNotFound
+// when no matching row exists.
+func (p *CheckService) GetCheck(ctx context.Context, id string) (sophrosyne.Check, error) {
+	p.logger.DebugContext(ctx, "GetCheck", "id", id)
+	var rows pgx.Rows
+	// Query error is deliberately ignored: CollectOneRow surfaces any error
+	// carried by rows.
+	rows, _ = p.pool.Query(ctx, `SELECT p.*,
+	CASE WHEN array_agg(c.name) IS NOT NULL
+		THEN array_remove(array_agg(c.name), NULL)
+		ELSE '{}'::text[]
+	END AS profiles
+FROM checks p
+LEFT JOIN profiles_checks pc ON p.id = pc.check_id
+LEFT JOIN profiles c ON pc.profile_id = c.id AND c.deleted_at IS NULL
+WHERE p.id = $1 AND p.deleted_at IS NULL
+GROUP BY p.id, p.name
+LIMIT 1;`, id)
+	check, err := pgx.CollectOneRow(rows, pgx.RowToStructByName[checkDbEntry])
+	if err != nil {
+		if errors.Is(err, pgx.ErrNoRows) {
+			return sophrosyne.Check{}, sophrosyne.ErrNotFound
+		}
+		return sophrosyne.Check{}, err
+	}
+
+	// Parse the stored upstream service strings back into URLs.
+	var uss []url.URL
+	for _, entry := range check.UpstreamServices {
+		us, err := url.Parse(entry)
+		if err != nil {
+			p.logger.ErrorContext(ctx, "unable to parse upstream service", "entry", entry, "error", err)
+			return sophrosyne.Check{}, err
+		}
+		uss = append(uss, *us)
+	}
+
+	ret := sophrosyne.Check{
+		ID: check.ID,
+		Name: check.Name,
+		UpstreamServices: uss,
+		CreatedAt: check.CreatedAt,
+		UpdatedAt: check.UpdatedAt,
+		DeletedAt: check.DeletedAt,
+		Profiles: make([]sophrosyne.Profile, 0, len(check.Profiles)),
+	}
+	// NOTE(review): the loop variable shadows the outer "check" and holds a
+	// profile name, not a check — consider renaming for clarity.
+	for _, check := range check.Profiles {
+		ret.Profiles = append(ret.Profiles, sophrosyne.Profile{
+			Name: check,
+		})
+	}
+	return ret, nil
+}
+
+// GetCheckByName resolves a check name to its ID and delegates to GetCheck.
+func (p *CheckService) GetCheckByName(ctx context.Context, name string) (sophrosyne.Check, error) {
+	id, err := p.nameToID(ctx, name)
+	if err != nil {
+		return sophrosyne.Check{}, err
+	}
+	return p.GetCheck(ctx, id)
+}
+
+// GetChecks returns a page of non-deleted checks ordered by ID, using cursor
+// based pagination. One extra row is fetched to detect whether a further page
+// exists; when it does, the cursor advances to the last returned check and
+// the extra row is trimmed, otherwise the cursor is reset.
+func (p *CheckService) GetChecks(ctx context.Context, cursor *sophrosyne.DatabaseCursor) ([]sophrosyne.Check, error) {
+	if cursor == nil {
+		cursor = &sophrosyne.DatabaseCursor{}
+	}
+	p.logger.DebugContext(ctx, "getting checks", "cursor", cursor)
+	// BUG FIX: the Query error was previously assigned and then silently
+	// shadowed by the CollectRows assignment without ever being checked.
+	rows, err := p.pool.Query(ctx, `SELECT * FROM checks WHERE id > $1 AND deleted_at IS NULL ORDER BY id ASC LIMIT $2`, cursor.Position, p.config.Services.Checks.PageSize+1)
+	if err != nil {
+		return []sophrosyne.Check{}, err
+	}
+	checks, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[sophrosyne.Check])
+	if err != nil {
+		return []sophrosyne.Check{}, err
+	}
+	// BUG FIX: pagination previously compared against the Profiles page size
+	// while the query limit used the Checks page size.
+	if len(checks) <= p.config.Services.Checks.PageSize {
+		cursor.Reset()
+	} else {
+		cursor.Advance(checks[len(checks)-2].ID)
+		checks = checks[:len(checks)-1]
+	}
+
+	return checks, nil
+}
+
+// CreateCheck inserts a new check and, when profile names are supplied,
+// associates it with those profiles inside a single transaction. All supplied
+// profile names must resolve to live profiles or the whole operation fails.
+//
+// NOTE(review): the returned Check always carries an empty Profiles slice
+// even when associations were created — confirm whether callers expect the
+// profiles to be populated here.
+func (p *CheckService) CreateCheck(ctx context.Context, check sophrosyne.CreateCheckRequest) (sophrosyne.Check, error) {
+	tx, err := p.pool.Begin(ctx)
+	if err != nil {
+		return sophrosyne.Check{}, err
+	}
+	// Rollback is a no-op once Commit has succeeded; its error is ignored.
+	defer tx.Rollback(ctx)
+
+	rows, _ := tx.Query(ctx, `INSERT INTO checks (name, upstream_services) VALUES ($1, $2) RETURNING *`, check.Name, check.UpstreamServices)
+	retP, err := pgx.CollectOneRow(rows, pgx.RowToAddrOfStructByNameLax[checkDbEntry])
+	if err != nil {
+		return sophrosyne.Check{}, err
+	}
+
+	// Parse the supplied upstream service strings into URLs for the return
+	// value.
+	var uss []url.URL
+	for _, entry := range check.UpstreamServices {
+		us, err := url.Parse(entry)
+		if err != nil {
+			p.logger.ErrorContext(ctx, "unable to parse upstream service", "entry", entry, "error", err)
+			return sophrosyne.Check{}, err
+		}
+		uss = append(uss, *us)
+	}
+
+	p.logger.DebugContext(ctx, "checking profiles", "profiles", check.Profiles, "count", len(check.Profiles))
+	if len(check.Profiles) > 0 {
+		// translate the list of profile names into check ID's.
+		rows, _ := tx.Query(ctx, `SELECT id from profiles WHERE name = ANY($1) AND deleted_at IS NULL`, check.Profiles)
+		profileIDs, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[sophrosyne.Profile])
+		if err != nil {
+			return sophrosyne.Check{}, err
+		}
+		p.logger.DebugContext(ctx, "profiles", "profiles", profileIDs)
+		// Every requested profile name must have resolved to a live profile.
+		if len(profileIDs) != len(check.Profiles) {
+			return sophrosyne.Check{}, fmt.Errorf("profiles mismatch")
+		}
+
+		var ids []string
+		for _, profileID := range profileIDs {
+			ids = append(ids, profileID.ID)
+		}
+
+		// Insert into the profiles_checks table
+		_, err = tx.Exec(ctx, `INSERT INTO profiles_checks (check_id, profile_id)
+SELECT $1, unnest($2::TEXT[])`, retP.ID, ids)
+		if err != nil {
+			return sophrosyne.Check{}, err
+		}
+	}
+
+	err = tx.Commit(ctx)
+	if err != nil {
+		return sophrosyne.Check{}, err
+	}
+
+	return sophrosyne.Check{
+		ID: retP.ID,
+		Name: retP.Name,
+		Profiles: make([]sophrosyne.Profile, 0, len(check.Profiles)),
+		UpstreamServices: uss,
+		CreatedAt: retP.CreatedAt,
+		UpdatedAt: retP.UpdatedAt,
+		DeletedAt: retP.DeletedAt,
+	}, nil
+
+}
+
+// UpdateCheck reconciles the profile associations of the named check inside a
+// transaction: associations not in the request are removed, missing ones are
+// inserted, and the resulting profile rows are returned.
+//
+// NOTE(review): both reconciliation statements compare profiles_checks
+// .profile_id (an xid) against unnest of check.Profiles, which the final
+// SELECT treats as profile NAMES — this looks like it deletes/inserts by the
+// wrong key. Verify against the schema and the request type before relying
+// on this.
+func (p *CheckService) UpdateCheck(ctx context.Context, check sophrosyne.UpdateCheckRequest) (sophrosyne.Check, error) {
+	tx, err := p.pool.Begin(ctx)
+	if err != nil {
+		return sophrosyne.Check{}, err
+	}
+	// Rollback is a no-op once Commit has succeeded; its error is ignored.
+	defer tx.Rollback(ctx)
+
+	rows, _ := tx.Query(ctx, `SELECT id FROM checks WHERE name = $1 AND deleted_at IS NULL`, check.Name)
+	pp, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[sophrosyne.Check])
+	if err != nil {
+		return sophrosyne.Check{}, err
+	}
+
+	// Remove associations that are no longer requested.
+	_, err = tx.Exec(ctx, `DELETE FROM profiles_checks
+WHERE check_id = $1 AND profile_id NOT IN (SELECT unnest($2));`, pp.ID, check.Profiles)
+	if err != nil {
+		return sophrosyne.Check{}, err
+	}
+
+	// Insert requested associations that are not yet present.
+	_, err = tx.Exec(ctx, `INSERT INTO profiles_checks (profile_id, check_id)
+SELECT $1, c.profile_id
+FROM unnest($2) AS c(profile_id)
+ON CONFLICT (profile_id, check_id) DO NOTHING;`, pp.ID, check.Profiles)
+	if err != nil {
+		return sophrosyne.Check{}, err
+	}
+
+	// Read back the profiles now associated with the check.
+	rows, _ = tx.Query(ctx, `SELECT c.*
+FROM profiles c
+JOIN profiles_checks pc ON c.id = pc.profile_id
+JOIN checks p ON pc.check_id = p.id
+WHERE p.id = $1
+AND c.name = ANY($2);`, pp.ID, check.Profiles)
+	profiles, err := pgx.CollectRows(rows, pgx.RowToStructByName[sophrosyne.Profile])
+	if err != nil {
+		return sophrosyne.Check{}, err
+	}
+
+	err = tx.Commit(ctx)
+	if err != nil {
+		return sophrosyne.Check{}, err
+	}
+
+	return sophrosyne.Check{
+		ID: pp.ID,
+		Name: check.Name,
+		Profiles: profiles,
+	}, nil
+}
+
+// DeleteCheck soft-deletes the named check by stamping deleted_at. It
+// returns sophrosyne.ErrNotFound when no live check matches the name.
+func (p *CheckService) DeleteCheck(ctx context.Context, name string) error {
+	cmdTag, err := p.pool.Exec(ctx, `UPDATE checks SET deleted_at = NOW() WHERE name = $1 AND deleted_at IS NULL`, name)
+	if err != nil {
+		return err
+	}
+	if cmdTag.RowsAffected() == 0 {
+		return sophrosyne.ErrNotFound
+	}
+	return nil
+}
diff --git a/internal/pgx/pgx.go b/internal/pgx/pgx.go
new file mode 100644
index 0000000..c4d72bf
--- /dev/null
+++ b/internal/pgx/pgx.go
@@ -0,0 +1,278 @@
+package pgx
+
+import (
+ "context"
+ "encoding/base64"
+ "errors"
+ "fmt"
+ "io"
+ "log/slog"
+ "time"
+
+ "github.com/jackc/pgx/v5/pgtype"
+
+ "github.com/exaring/otelpgx"
+ "github.com/jackc/pgx/v5"
+ "github.com/jackc/pgx/v5/pgconn"
+ "github.com/jackc/pgx/v5/pgxpool"
+
+ "github.com/madsrc/sophrosyne"
+)
+
+// newPool builds a pgxpool.Pool from the database settings in config. The
+// pool is instrumented with OpenTelemetry tracing, and every new connection
+// is logged at debug level via the supplied logger.
+//
+// NOTE(review): the DSN is assembled with fmt.Sprintf, so credentials that
+// contain URL-reserved characters may yield an invalid connection string —
+// confirm inputs are constrained or escape them.
+func newPool(ctx context.Context, config *sophrosyne.Config, logger *slog.Logger) (*pgxpool.Pool, error) {
+	pgxconfig, err := pgxpool.ParseConfig(fmt.Sprintf("postgres://%s:%s@%s:%d/%s", config.Database.User, config.Database.Password, config.Database.Host, config.Database.Port, config.Database.Name))
+	if err != nil {
+		return nil, err
+	}
+	pgxconfig.ConnConfig.Tracer = otelpgx.NewTracer()
+	pgxconfig.AfterConnect = func(ctx context.Context, conn *pgx.Conn) error {
+		logger.DebugContext(ctx, "database connection established")
+		return nil
+	}
+	return pgxpool.NewWithConfig(ctx, pgxconfig)
+}
+
+// UserService provides user persistence backed by a PostgreSQL pool. It
+// depends on a ProfileService to resolve each user's default profile and on
+// randomSource for API-token generation.
+type UserService struct {
+	config         *sophrosyne.Config
+	pool           *pgxpool.Pool
+	logger         *slog.Logger
+	randomSource   io.Reader
+	profileService sophrosyne.ProfileService
+}
+
+// NewUserService connects to the database described by config and returns a
+// ready-to-use UserService. The root user is created (or recreated) as part
+// of construction; any failure there aborts construction.
+func NewUserService(ctx context.Context, config *sophrosyne.Config, logger *slog.Logger, randomSource io.Reader, profileService sophrosyne.ProfileService) (*UserService, error) {
+	pool, err := newPool(ctx, config, logger)
+	if err != nil {
+		return nil, err
+	}
+
+	svc := &UserService{
+		config:         config,
+		pool:           pool,
+		logger:         logger,
+		randomSource:   randomSource,
+		profileService: profileService,
+	}
+
+	if err := svc.createRootUser(ctx); err != nil {
+		return nil, err
+	}
+
+	return svc, nil
+}
+
+// getUser fetches a single live (non-deleted) user by one of the supported
+// columns ("id", "name", "email" or "token") and resolves the user's default
+// profile via the profile service.
+//
+// Returns sophrosyne.ErrNotFound when no matching user exists, and the
+// unreachable-code error for an unsupported column.
+func (s *UserService) getUser(ctx context.Context, column, input any) (sophrosyne.User, error) {
+	// dbret mirrors the users table; default_profile is nullable.
+	type dbret struct {
+		ID             string      `db:"id"`
+		Name           string      `db:"name"`
+		Email          string      `db:"email"`
+		Token          []byte      `db:"token"`
+		IsAdmin        bool        `db:"is_admin"`
+		DefaultProfile pgtype.Text `db:"default_profile"`
+		CreatedAt      time.Time   `db:"created_at"`
+		UpdatedAt      time.Time   `db:"updated_at"`
+		DeletedAt      *time.Time  `db:"deleted_at"`
+	}
+	// The column selects one of a fixed set of statements (never string
+	// concatenation), so no identifier can be injected.
+	var rows pgx.Rows
+	switch column {
+	case "email":
+		rows, _ = s.pool.Query(ctx, "SELECT * FROM users WHERE email = $1 AND deleted_at IS NULL LIMIT 1", input)
+	case "name":
+		rows, _ = s.pool.Query(ctx, "SELECT * FROM users WHERE name = $1 AND deleted_at IS NULL LIMIT 1", input)
+	case "id":
+		rows, _ = s.pool.Query(ctx, "SELECT * FROM users WHERE id = $1 AND deleted_at IS NULL LIMIT 1", input)
+	case "token":
+		rows, _ = s.pool.Query(ctx, "SELECT * FROM users WHERE token = $1 AND deleted_at IS NULL LIMIT 1", input)
+	default:
+		return sophrosyne.User{}, sophrosyne.NewUnreachableCodeError()
+	}
+	user, err := pgx.CollectOneRow(rows, pgx.RowToAddrOfStructByName[dbret])
+	if err != nil {
+		if errors.Is(err, pgx.ErrNoRows) {
+			return sophrosyne.User{}, sophrosyne.ErrNotFound
+		}
+		return sophrosyne.User{}, err
+	}
+
+	ret := sophrosyne.User{
+		ID:        user.ID,
+		Name:      user.Name,
+		Email:     user.Email,
+		Token:     user.Token,
+		IsAdmin:   user.IsAdmin,
+		CreatedAt: user.CreatedAt,
+		UpdatedAt: user.UpdatedAt,
+		DeletedAt: user.DeletedAt,
+	}
+
+	// Fall back to the profile named "default" when the user has no default
+	// profile set. (Collapses the previous two duplicated branches.)
+	profileName := user.DefaultProfile.String
+	if profileName == "" {
+		profileName = "default"
+	}
+	prof, err := s.profileService.GetProfileByName(ctx, profileName)
+	if err != nil {
+		return sophrosyne.User{}, err
+	}
+	ret.DefaultProfile = prof
+
+	return ret, nil
+}
+
+// GetUser returns the live user with the given ID.
+func (s *UserService) GetUser(ctx context.Context, id string) (sophrosyne.User, error) {
+	return s.getUser(ctx, "id", id)
+}
+// GetUserByEmail returns the live user with the given email address.
+func (s *UserService) GetUserByEmail(ctx context.Context, email string) (sophrosyne.User, error) {
+	return s.getUser(ctx, "email", email)
+}
+// GetUserByName returns the live user with the given name.
+func (s *UserService) GetUserByName(ctx context.Context, name string) (sophrosyne.User, error) {
+	return s.getUser(ctx, "name", name)
+}
+// GetUserByToken returns the live user whose stored (hashed) token matches
+// the supplied value.
+func (s *UserService) GetUserByToken(ctx context.Context, token []byte) (sophrosyne.User, error) {
+	return s.getUser(ctx, "token", token)
+}
+// GetUsers returns one page of live users ordered by ID, resuming from
+// cursor.Position (a nil cursor starts from the beginning). One extra row is
+// fetched to detect whether a following page exists: if it is present the
+// cursor is advanced and the sentinel row is dropped; otherwise the cursor
+// is reset.
+func (s *UserService) GetUsers(ctx context.Context, cursor *sophrosyne.DatabaseCursor) ([]sophrosyne.User, error) {
+	if cursor == nil {
+		cursor = &sophrosyne.DatabaseCursor{}
+	}
+	s.logger.DebugContext(ctx, "getting users", "cursor", cursor)
+	rows, _ := s.pool.Query(ctx, "SELECT * FROM users WHERE id > $1 AND deleted_at IS NULL ORDER BY id ASC LIMIT $2", cursor.Position, s.config.Services.Users.PageSize+1)
+	users, err := pgx.CollectRows(rows, pgx.RowToStructByName[sophrosyne.User])
+	if err != nil {
+		return []sophrosyne.User{}, err
+	}
+	// Advance the cursor. (The previous three-way chain contained a
+	// redundant `len(users) > 0` condition; the logic collapses to two
+	// cases.)
+	if len(users) > s.config.Services.Users.PageSize {
+		// We read one extra user, so there is at least one more page: point
+		// the cursor at the last user of this page and drop the sentinel.
+		cursor.Advance(users[len(users)-2].ID)
+		users = users[:len(users)-1]
+	} else {
+		// Zero rows or a final partial/full page: the result set is
+		// exhausted, so reset the cursor.
+		cursor.Reset()
+	}
+	return users, nil
+}
+// CreateUser inserts a new user with a freshly generated API token. The
+// hashed token is stored in the database, while the returned user carries
+// the raw token so it can be shown to the caller exactly once.
+//
+// A unique-constraint violation (SQLSTATE 23505) is translated into a
+// sophrosyne constraint-violation error.
+func (s *UserService) CreateUser(ctx context.Context, user sophrosyne.CreateUserRequest) (sophrosyne.User, error) {
+	token, err := sophrosyne.NewToken(s.randomSource)
+	if err != nil {
+		return sophrosyne.User{}, err
+	}
+	tokenHash := sophrosyne.ProtectToken(token, s.config)
+
+	// Query errors are surfaced by CollectOneRow, hence the discarded error.
+	rows, _ := s.pool.Query(ctx, "INSERT INTO users (name, email, token, is_admin) VALUES ($1, $2, $3, $4) RETURNING *", user.Name, user.Email, tokenHash, user.IsAdmin)
+	newUser, err := pgx.CollectOneRow(rows, pgx.RowToAddrOfStructByName[sophrosyne.User])
+	if err != nil {
+		s.logger.DebugContext(ctx, "database returned error", "error", err)
+		var pgErr *pgconn.PgError
+		if errors.As(err, &pgErr) {
+			// 23505 = unique_violation.
+			if pgErr.Code == "23505" {
+				return sophrosyne.User{}, sophrosyne.NewConstraintViolationError(pgErr, pgErr.Code, pgErr.Detail, pgErr.TableName, pgErr.ConstraintName)
+			}
+		}
+		return sophrosyne.User{}, err
+	}
+	newUser.Token = token // ensure returned token is the raw token, not the hashed token
+	return *newUser, nil
+}
+// UpdateUser updates the email and admin flag of the live user with the
+// given name and returns the updated row. A unique-constraint violation
+// (SQLSTATE 23505) is translated into a sophrosyne constraint-violation
+// error.
+//
+// NOTE(review): a missing user surfaces as pgx.ErrNoRows rather than
+// sophrosyne.ErrNotFound — confirm callers expect that.
+func (s *UserService) UpdateUser(ctx context.Context, user sophrosyne.UpdateUserRequest) (sophrosyne.User, error) {
+	rows, _ := s.pool.Query(ctx, "UPDATE users SET email = $1, is_admin = $2 WHERE name = $3 AND deleted_at IS NULL RETURNING *", user.Email, user.IsAdmin, user.Name)
+	updatedUser, err := pgx.CollectOneRow(rows, pgx.RowToAddrOfStructByName[sophrosyne.User])
+	if err != nil {
+		s.logger.DebugContext(ctx, "database returned error", "error", err)
+		var pgErr *pgconn.PgError
+		if errors.As(err, &pgErr) {
+			// 23505 = unique_violation.
+			if pgErr.Code == "23505" {
+				return sophrosyne.User{}, sophrosyne.NewConstraintViolationError(pgErr, pgErr.Code, pgErr.Detail, pgErr.TableName, pgErr.ConstraintName)
+			}
+		}
+		return sophrosyne.User{}, err
+	}
+	return *updatedUser, nil
+}
+// DeleteUser soft-deletes the user with the given name by stamping its
+// deleted_at column. It returns sophrosyne.ErrNotFound when no live user
+// matches the name.
+func (s *UserService) DeleteUser(ctx context.Context, name string) error {
+	tag, err := s.pool.Exec(ctx, "UPDATE users SET deleted_at = NOW() WHERE name = $1 AND deleted_at IS NULL", name)
+	switch {
+	case err != nil:
+		return err
+	case tag.RowsAffected() == 0:
+		return sophrosyne.ErrNotFound
+	default:
+		return nil
+	}
+}
+
+// RotateToken generates a fresh API token for the named live user, stores
+// its hash, and returns the raw token to the caller. Returns
+// sophrosyne.ErrNotFound when no live user matches the name.
+func (s *UserService) RotateToken(ctx context.Context, name string) ([]byte, error) {
+	token, err := sophrosyne.NewToken(s.randomSource)
+	if err != nil {
+		return nil, err
+	}
+	tokenHash := sophrosyne.ProtectToken(token, s.config)
+
+	cmdTag, err := s.pool.Exec(ctx, "UPDATE users SET token = $1 WHERE name = $2 AND deleted_at IS NULL", tokenHash, name)
+	if err != nil {
+		return nil, err
+	}
+	if cmdTag.RowsAffected() == 0 {
+		return nil, sophrosyne.ErrNotFound
+	}
+	return token, nil
+}
+
+// Health reports whether the users database is reachable by executing a
+// trivial query. The second return value is a JSON fragment describing the
+// component's health.
+func (s *UserService) Health(ctx context.Context) (bool, []byte) {
+	if _, err := s.pool.Exec(ctx, "SELECT 1"); err != nil {
+		s.logger.DebugContext(ctx, "healthcheck database error", "error", err)
+		return false, []byte(`{"users":{"healthy":false}}`)
+	}
+	return true, []byte(`{"users":{"healthy":true}}`)
+}
+
+// createRootUser ensures the configured root (admin) user exists. If the
+// user already exists and Recreate is false nothing is done; otherwise the
+// user is inserted or upserted with a new token. The raw root token is
+// logged at info level so the operator can retrieve it — by design it is
+// never readable from the database afterwards (only the hash is stored).
+func (s *UserService) createRootUser(ctx context.Context) error {
+	// Begin transaction
+	tx, err := s.pool.Begin(ctx)
+	if err != nil {
+		return err
+	}
+	defer func() {
+		s.logger.DebugContext(ctx, "rolling back transaction")
+		// Rollback after a successful Commit is a harmless no-op; its error
+		// is intentionally ignored.
+		tx.Rollback(ctx)
+	}()
+	// Check if root user exists and exit early if it does
+	var exists bool
+	err = tx.QueryRow(ctx, "SELECT EXISTS (SELECT 1 FROM users WHERE name = $1 AND email = $2 AND is_admin = true)", s.config.Principals.Root.Name, s.config.Principals.Root.Email).Scan(&exists)
+	if err != nil {
+		return err
+	}
+	s.logger.DebugContext(ctx, "root user existence", "exists", exists)
+	if exists {
+		if !s.config.Principals.Root.Recreate {
+			s.logger.DebugContext(ctx, "root user exists and recreate is false")
+			return nil
+		}
+	}
+	// Use the static development token when configured; otherwise generate a
+	// random one (err is only set on the generation path).
+	var token []byte
+	if s.config.Development.StaticRootToken == "" {
+		token, err = sophrosyne.NewToken(s.randomSource)
+	} else {
+		token = []byte(s.config.Development.StaticRootToken)
+	}
+
+	if err != nil {
+		return err
+	}
+	s.logger.InfoContext(ctx, "root token", "token", base64.StdEncoding.EncodeToString(token))
+	tokenHash := sophrosyne.ProtectToken(token, s.config)
+	// Upsert so Recreate=true refreshes email/token/admin flag in place.
+	_, err = tx.Exec(ctx, "INSERT INTO users (name, email, token, is_admin) VALUES ($1, $2, $3, true) ON CONFLICT (name) DO UPDATE SET email = $2, token = $3, is_admin = true", s.config.Principals.Root.Name, s.config.Principals.Root.Email, tokenHash)
+	if err != nil {
+		return err
+	}
+	// Commit transaction
+	err = tx.Commit(ctx)
+	if err != nil {
+		return err
+	}
+	return nil
+
+}
diff --git a/internal/pgx/profiles.go b/internal/pgx/profiles.go
new file mode 100644
index 0000000..8544123
--- /dev/null
+++ b/internal/pgx/profiles.go
@@ -0,0 +1,251 @@
+package pgx
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "log/slog"
+ "time"
+
+ "github.com/jackc/pgx/v5"
+ "github.com/jackc/pgx/v5/pgxpool"
+
+ "github.com/madsrc/sophrosyne"
+)
+
+// ProfileService provides profile persistence backed by a PostgreSQL pool.
+// It depends on a CheckService to resolve the checks attached to a profile.
+type ProfileService struct {
+	config       *sophrosyne.Config
+	pool         *pgxpool.Pool
+	logger       *slog.Logger
+	checkService sophrosyne.CheckService
+}
+
+// NewProfileService connects to the database described by config and returns
+// a ready-to-use ProfileService. The "default" profile is created as part of
+// construction; any failure there aborts construction.
+func NewProfileService(ctx context.Context, config *sophrosyne.Config, logger *slog.Logger, checkService sophrosyne.CheckService) (*ProfileService, error) {
+	pool, err := newPool(ctx, config, logger)
+	if err != nil {
+		return nil, err
+	}
+	svc := &ProfileService{
+		config:       config,
+		pool:         pool,
+		logger:       logger,
+		checkService: checkService,
+	}
+
+	if err := svc.createDefaultProfile(ctx); err != nil {
+		return nil, err
+	}
+
+	return svc, nil
+}
+
+// nameToID resolves a profile name to its primary-key ID. Note that this
+// query does not filter on deleted_at, so soft-deleted profiles also
+// resolve.
+func (p *ProfileService) nameToID(ctx context.Context, name string) (string, error) {
+	var id string
+	if err := p.pool.QueryRow(ctx, `SELECT id FROM profiles WHERE name = $1 LIMIT 1`, name).Scan(&id); err != nil {
+		return "", err
+	}
+	return id, nil
+}
+
+// GetProfile returns the live profile with the given ID, including the full
+// Check objects for every non-deleted check associated with it (each check
+// is resolved through the check service). Returns sophrosyne.ErrNotFound
+// when no live profile matches the ID.
+func (p *ProfileService) GetProfile(ctx context.Context, id string) (sophrosyne.Profile, error) {
+	// dbret mirrors the profiles row plus an aggregated array of associated
+	// check names.
+	type dbret struct {
+		ID        string     `db:"id"`
+		Name      string     `db:"name"`
+		CreatedAt time.Time  `db:"created_at"`
+		UpdatedAt time.Time  `db:"updated_at"`
+		DeletedAt *time.Time `db:"deleted_at"`
+		Checks    []string   `db:"checks"`
+	}
+	p.logger.DebugContext(ctx, "GetProfile", "id", id)
+	var rows pgx.Rows
+	// LEFT JOINs keep profiles with zero checks; array_remove drops the NULL
+	// produced by the join in that case.
+	rows, _ = p.pool.Query(ctx, `SELECT p.*,
+       CASE WHEN array_agg(c.name) IS NOT NULL
+            THEN array_remove(array_agg(c.name), NULL)
+            ELSE '{}'::text[]
+       END AS checks
+FROM profiles p
+LEFT JOIN profiles_checks pc ON p.id = pc.profile_id
+LEFT JOIN checks c ON pc.check_id = c.id AND c.deleted_at IS NULL
+WHERE p.id = $1 AND p.deleted_at IS NULL
+GROUP BY p.id, p.name
+LIMIT 1;`, id)
+	profile, err := pgx.CollectOneRow(rows, pgx.RowToStructByName[dbret])
+	if err != nil {
+		if errors.Is(err, pgx.ErrNoRows) {
+			return sophrosyne.Profile{}, sophrosyne.ErrNotFound
+		}
+		return sophrosyne.Profile{}, err
+	}
+
+	ret := sophrosyne.Profile{
+		ID:        profile.ID,
+		Name:      profile.Name,
+		CreatedAt: profile.CreatedAt,
+		UpdatedAt: profile.UpdatedAt,
+		DeletedAt: profile.DeletedAt,
+		Checks:    make([]sophrosyne.Check, 0, len(profile.Checks)),
+	}
+	// Hydrate each check name into a full Check via the check service (one
+	// lookup per check).
+	for _, check := range profile.Checks {
+		c, err := p.checkService.GetCheckByName(ctx, check)
+		if err != nil {
+			return sophrosyne.Profile{}, err
+		}
+		ret.Checks = append(ret.Checks, c)
+	}
+	return ret, nil
+}
+
+// GetProfileByName returns the profile with the given name by resolving the
+// name to an ID and delegating to GetProfile. Because nameToID does not
+// filter on deleted_at while GetProfile does, a soft-deleted profile
+// resolves to an ID but then yields sophrosyne.ErrNotFound.
+func (p *ProfileService) GetProfileByName(ctx context.Context, name string) (sophrosyne.Profile, error) {
+	id, err := p.nameToID(ctx, name)
+	if err != nil {
+		return sophrosyne.Profile{}, err
+	}
+	return p.GetProfile(ctx, id)
+}
+
+// GetProfiles returns one page of live profiles ordered by ID, resuming from
+// cursor.Position (a nil cursor starts from the beginning). One extra row is
+// fetched to detect whether a following page exists; see GetUsers for the
+// same pattern.
+func (p *ProfileService) GetProfiles(ctx context.Context, cursor *sophrosyne.DatabaseCursor) ([]sophrosyne.Profile, error) {
+	if cursor == nil {
+		cursor = &sophrosyne.DatabaseCursor{}
+	}
+	p.logger.DebugContext(ctx, "getting profiles", "cursor", cursor)
+	// Query errors are surfaced by CollectRows, so the error return is
+	// deliberately discarded (previously it was assigned but never checked).
+	rows, _ := p.pool.Query(ctx, `SELECT * FROM profiles WHERE id > $1 AND deleted_at IS NULL ORDER BY id ASC LIMIT $2`, cursor.Position, p.config.Services.Profiles.PageSize+1)
+	profiles, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[sophrosyne.Profile])
+	if err != nil {
+		return []sophrosyne.Profile{}, err
+	}
+	if len(profiles) > p.config.Services.Profiles.PageSize {
+		// A sentinel row exists: advance the cursor to the last profile of
+		// this page and drop the sentinel.
+		cursor.Advance(profiles[len(profiles)-2].ID)
+		profiles = profiles[:len(profiles)-1]
+	} else {
+		// Result set exhausted; reset the cursor.
+		cursor.Reset()
+	}
+
+	return profiles, nil
+}
+
+// CreateProfile inserts a new profile and, when profile.Checks is non-empty,
+// associates it with the named checks. Everything runs in one transaction so
+// a partially-created profile is never visible.
+//
+// If any named check does not exist (or is soft-deleted) the transaction is
+// rolled back and an error is returned.
+func (p *ProfileService) CreateProfile(ctx context.Context, profile sophrosyne.CreateProfileRequest) (sophrosyne.Profile, error) {
+	tx, err := p.pool.Begin(ctx)
+	if err != nil {
+		return sophrosyne.Profile{}, err
+	}
+	// Rollback after a successful Commit is a harmless no-op.
+	defer tx.Rollback(ctx)
+
+	rows, _ := tx.Query(ctx, `INSERT INTO profiles (name) VALUES ($1) RETURNING *`, profile.Name)
+	retP, err := pgx.CollectOneRow(rows, pgx.RowToAddrOfStructByNameLax[sophrosyne.Profile])
+	if err != nil {
+		return sophrosyne.Profile{}, err
+	}
+
+	if len(profile.Checks) > 0 {
+		// Translate the list of check names into check IDs. `name IN $1` is
+		// not valid PostgreSQL for an array parameter; `name = ANY($1)` is.
+		// The IDs are collected as plain strings so they can be fed to
+		// unnest() below.
+		rows, _ := tx.Query(ctx, `SELECT id from checks WHERE name = ANY($1) AND deleted_at IS NULL`, profile.Checks)
+		checkIDs, err := pgx.CollectRows(rows, pgx.RowTo[string])
+		if err != nil {
+			return sophrosyne.Profile{}, err
+		}
+		// Every requested check name must have resolved to exactly one ID.
+		if len(checkIDs) != len(profile.Checks) {
+			return sophrosyne.Profile{}, fmt.Errorf("checks mismatch")
+		}
+
+		// Insert into the profiles_checks table
+		_, err = tx.Exec(ctx, `INSERT INTO profiles_checks (profile_id, check_id)
+SELECT $1, unnest($2);`, retP.ID, checkIDs)
+		if err != nil {
+			return sophrosyne.Profile{}, err
+		}
+	}
+
+	err = tx.Commit(ctx)
+	if err != nil {
+		return sophrosyne.Profile{}, err
+	}
+
+	return *retP, nil
+
+}
+
+// UpdateProfile updates which checks are associated with the named profile.
+// All statements run in a single transaction.
+//
+// NOTE(review): profile.Checks appears to hold check *names* (the final
+// SELECT matches c.name = ANY($2)), but the DELETE and INSERT statements use
+// the same slice as check_id values — confirm whether names or IDs are
+// expected here (mirrors the same pattern in CheckService.UpdateCheck).
+func (p *ProfileService) UpdateProfile(ctx context.Context, profile sophrosyne.UpdateProfileRequest) (sophrosyne.Profile, error) {
+	tx, err := p.pool.Begin(ctx)
+	if err != nil {
+		return sophrosyne.Profile{}, err
+	}
+	// Rollback after a successful Commit is a harmless no-op.
+	defer tx.Rollback(ctx)
+
+	// Resolve the profile name to its ID; soft-deleted profiles excluded.
+	rows, _ := tx.Query(ctx, `SELECT id FROM profiles WHERE name = $1 AND deleted_at IS NULL`, profile.Name)
+	pp, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[sophrosyne.Profile])
+	if err != nil {
+		return sophrosyne.Profile{}, err
+	}
+
+	// Remove associations to checks no longer listed in the request.
+	_, err = tx.Exec(ctx, `DELETE FROM profiles_checks
+WHERE profile_id = $1 AND check_id NOT IN (SELECT unnest($2));`, pp.ID, profile.Checks)
+	if err != nil {
+		return sophrosyne.Profile{}, err
+	}
+
+	// Add any missing associations; existing rows are left untouched.
+	_, err = tx.Exec(ctx, `INSERT INTO profiles_checks (profile_id, check_id)
+SELECT $1, c.check_id
+FROM unnest($2) AS c(check_id)
+ON CONFLICT (profile_id, check_id) DO NOTHING;`, pp.ID, profile.Checks)
+	if err != nil {
+		return sophrosyne.Profile{}, err
+	}
+
+	// Read back the full check rows now associated with the profile.
+	rows, _ = tx.Query(ctx, `SELECT c.*
+FROM checks c
+JOIN profiles_checks pc ON c.id = pc.check_id
+JOIN profiles p ON pc.profile_id = p.id
+WHERE p.id = $1
+AND c.name = ANY($2);`, pp.ID, profile.Checks)
+	checks, err := pgx.CollectRows(rows, pgx.RowToStructByName[sophrosyne.Check])
+	if err != nil {
+		return sophrosyne.Profile{}, err
+	}
+
+	err = tx.Commit(ctx)
+	if err != nil {
+		return sophrosyne.Profile{}, err
+	}
+
+	return sophrosyne.Profile{
+		ID:     pp.ID,
+		Name:   profile.Name,
+		Checks: checks,
+	}, nil
+}
+
+// DeleteProfile soft-deletes the profile with the given name by stamping its
+// deleted_at column. It returns sophrosyne.ErrNotFound when no live profile
+// matches the name.
+func (p *ProfileService) DeleteProfile(ctx context.Context, name string) error {
+	tag, err := p.pool.Exec(ctx, `UPDATE profiles SET deleted_at = NOW() WHERE name = $1 AND deleted_at IS NULL`, name)
+	switch {
+	case err != nil:
+		return err
+	case tag.RowsAffected() == 0:
+		return sophrosyne.ErrNotFound
+	default:
+		return nil
+	}
+}
+
+// createDefaultProfile ensures a profile named "default" exists, creating it
+// with no checks when missing. It is invoked once from NewProfileService.
+func (p *ProfileService) createDefaultProfile(ctx context.Context) error {
+	p.logger.DebugContext(ctx, "creating default profile")
+	defaultProfile := sophrosyne.CreateProfileRequest{
+		Name: "default",
+	}
+	// Check if the default profile exists and exit early if it does.
+	var exists bool
+	err := p.pool.QueryRow(ctx, "SELECT EXISTS (SELECT 1 FROM profiles WHERE name = $1)", "default").Scan(&exists)
+	if err != nil {
+		return err
+	}
+	p.logger.DebugContext(ctx, "default profile existence", "exists", exists)
+	if exists {
+		return nil
+	}
+
+	_, err = p.CreateProfile(ctx, defaultProfile)
+	return err
+}
diff --git a/internal/rpc/internal/jsonrpc/jsonrpc.go b/internal/rpc/internal/jsonrpc/jsonrpc.go
new file mode 100644
index 0000000..3dfc189
--- /dev/null
+++ b/internal/rpc/internal/jsonrpc/jsonrpc.go
@@ -0,0 +1,852 @@
+package jsonrpc
+
+import (
+ "encoding/json"
+ "errors"
+ "fmt"
+ "reflect"
+ "strings"
+)
+
+// optional is a generic type that represents an optional value in JSON. It is used to represent fields that are
+// optional in the JSON-RPC 2.0 specification, such as the "id" field of a [Request].
+//
+// Implementation is from https://stackoverflow.com/questions/36601367/json-field-set-to-null-vs-field-not-there
+type optional[T any] struct {
+	// Defined is true when the field was present in the JSON payload, even
+	// if its value was null.
+	Defined bool
+	// Value remains nil when the field was absent or explicitly null.
+	Value *T
+}
+
+// UnmarshalJSON is implemented by deferring to the wrapped type (T).
+// It will be called only if the value is defined in the JSON payload.
+func (o *optional[T]) UnmarshalJSON(data []byte) error {
+	o.Defined = true
+	return json.Unmarshal(data, &o.Value)
+}
+
+// JSONRPC represents the jsonrpc field of a [Request], [Notification], or [Response]. For the JSON-RPC 2.0
+// specification, this field MUST be exactly "2.0" and the use of the [JSONRPC2_0] constant is thus recommended.
+// Both [Request.UnmarshalJSON] and [Response.UnmarshalJSON] reject any other value.
+type JSONRPC string
+
+const (
+	// JSONRPC2_0 is the identifier for the JSON-RPC 2.0 specification.
+	JSONRPC2_0 JSONRPC = "2.0"
+)
+
+// Method represents the method field of a [Request] or [Notification] as per the JSON-RPC 2.0 specification.
+//
+// A String containing the name of the method to be invoked. Method names that begin with the word rpc followed by a
+// period character (U+002E or ASCII 46) are reserved for rpc-internal methods and extensions and MUST NOT be used for
+// anything else.
+type Method string
+
+// UnmarshalJSON unmarshals a JSON object into a [Method]. If the value is prefixed with "rpc.", it is considered
+// invalid and an error is returned.
+func (m *Method) UnmarshalJSON(data []byte) error {
+	var value string
+	// Ignoring error since this shouldn't error out. Non-string input leaves
+	// value empty, which Request.UnmarshalJSON later rejects as a missing
+	// method.
+	_ = json.Unmarshal(data, &value)
+
+	if strings.HasPrefix(value, "rpc.") {
+		return fmt.Errorf("method names that begin with 'rpc.' are reserved for rpc-internal methods and extensions")
+	}
+
+	*m = Method(value)
+	return nil
+}
+
+// Params represents the Params field of a [Request] or [Notification] as per the JSON-RPC 2.0 specification section
+// 4.2.
+//
+// If present, parameters for the rpc call MUST be provided as a Structured value. Either by-position through an Array
+// or by-name through an Object.
+//
+// by-position: Params MUST be an Array, containing the values in the Server expected order.
+// by-name: Params MUST be an Object, with member names that match the Server expected parameter names. The absence of
+// expected names MAY result in an error being generated. The names MUST match exactly, including case, to the method's
+// expected parameters.
+//
+// Implementations include [ParamsObject] and [ParamsArray] to represent the two types of Params.
+type Params interface {
+	// isParams is an unexported marker method restricting Params
+	// implementations to this package.
+	isParams()
+}
+
+// ParamsObject represents a by-name Params object as per the JSON-RPC 2.0 specification section 4.2.
+//
+// It implements the private [Params] interface, and as such can be used as a value for the Params field of a [Request]
+// or [Notification].
+type ParamsObject map[string]interface{}
+
+// UnmarshalJSON unmarshalls a JSON object into a [ParamsObject]. This is necessary because the JSON-RPC 2.0
+// specification allows for the Params field to be either an object or an array, and the Go JSON unmarshaller cannot
+// unmarshal into an interface{}.
+//
+// If an element is a JSON number, its mathematical value is referenced in order to determine if it is an integer or a
+// float. If it is an integer, it is converted into an int, otherwise it is converted into a float64. This is in
+// contrast to the Go JSON unmarshaller, which unmarshals all numbers into float64.
+func (p *ParamsObject) UnmarshalJSON(data []byte) error {
+	var obj map[string]*json.RawMessage
+	err := json.Unmarshal(data, &obj)
+	if err != nil {
+		return err
+	}
+
+	*p = make(ParamsObject)
+
+	for key, raw := range obj {
+		var value interface{}
+		// Skipping error check since this shouldn't error out.
+		_ = json.Unmarshal(*raw, &value)
+
+		switch value := value.(type) {
+		case float64:
+			// Whole-valued numbers are stored as int rather than float64.
+			if value == float64(int(value)) {
+				(*p)[key] = int(value)
+			} else {
+				(*p)[key] = value
+			}
+		default:
+			(*p)[key] = value
+		}
+	}
+
+	return nil
+}
+
+// isParams marks ParamsObject as a valid [Params] implementation.
+func (*ParamsObject) isParams() {}
+
+// ParamsArray represents a by-position Params array as per the JSON-RPC 2.0 specification section 4.2.
+//
+// It implements the private [Params] interface, and as such can be used as a value for the Params field of a [Request]
+// or [Notification].
+type ParamsArray []interface{}
+
+// isParams marks ParamsArray as a valid [Params] implementation.
+func (*ParamsArray) isParams() {}
+
+// UnmarshalJSON unmarshals a JSON object into a [ParamsArray]. This is necessary because the JSON-RPC 2.0 specification
+// allows for the Params field to be either an object or an array, and the Go JSON unmarshaller cannot unmarshal into an
+// interface{}.
+//
+// If an element is a JSON number, its mathematical value is referenced in order to determine if it is an integer or a
+// float. If it is an integer, it is converted into an int, otherwise it is converted into a float64. This is in
+// contrast to the Go JSON unmarshaller, which unmarshals all numbers into float64.
+func (p *ParamsArray) UnmarshalJSON(data []byte) error {
+	var arr []json.RawMessage
+	err := json.Unmarshal(data, &arr)
+	if err != nil {
+		return err
+	}
+
+	for _, raw := range arr {
+		var value interface{}
+		// Skipping error check since this shouldn't error out.
+		_ = json.Unmarshal(raw, &value)
+
+		switch value := value.(type) {
+		case float64:
+			// Whole-valued numbers are stored as int rather than float64.
+			if value == float64(int(value)) {
+				*p = append(*p, int(value))
+			} else {
+				*p = append(*p, value)
+			}
+		default:
+			*p = append(*p, value)
+		}
+	}
+
+	return nil
+}
+
+// ID represents the id field of a [Request] or [Response] as per the JSON-RPC 2.0 specification.
+//
+// The specification mandates that the id field MUST contain a String, Number, or Null value. The use of a Null is
+// discouraged, as it is used for Responses with an unknown id and thus can cause confusion. Number should not contain
+// fractions to avoid issues with binary fractions.
+//
+// To simplify the implementation, this library uses a string type for the id field. When marshalling, the value is
+// always marshalled into a string.
+//
+// See [ID.UnmarshalJSON] for how numbers and null are coerced to strings.
+type ID string
+
+// UnmarshalJSON unmarshals a JSON value into an [ID]. A JSON null becomes
+// the empty string. A JSON number is converted to its (truncated) integer
+// representation as a string. Any other non-string value — a bool, object,
+// or array — is rejected with an error. (Previously a bool such as `true`
+// silently became the id "0" because the number-unmarshal error was only
+// surfaced for objects and arrays.)
+func (id *ID) UnmarshalJSON(data []byte) error {
+	if string(data) == "null" {
+		*id = ""
+		return nil
+	}
+
+	var value string
+	if err := json.Unmarshal(data, &value); err == nil {
+		*id = ID(value)
+		return nil
+	}
+
+	var number float64
+	if err := json.Unmarshal(data, &number); err != nil {
+		// Not a string, number, or null.
+		return fmt.Errorf("id must be a string, number, or null")
+	}
+
+	// NOTE: fractional parts are truncated; the spec says ids SHOULD NOT
+	// contain fractions.
+	*id = ID(fmt.Sprintf("%v", int(number)))
+	return nil
+}
+
+// Request represents a Request object as per the JSON-RPC 2.0 specification.
+//
+// A rpc call is represented by sending a Request object to a Server. The Request object has the following members:
+//
+// jsonrpc
+//
+// A String specifying the version of the JSON-RPC protocol. MUST be exactly "2.0".
+//
+// method
+//
+// A String containing the name of the method to be invoked. Method names that begin with the word rpc followed by a
+// period character (U+002E or ASCII 46) are reserved for rpc-internal methods and extensions and MUST NOT be used for
+// anything else.
+//
+// Params
+//
+// A Structured value that holds the parameter values to be used during the invocation of the method. This member MAY
+// be omitted.
+//
+// id
+//
+// An identifier established by the Client that MUST contain a String, Number, or NULL value if included. If it is not
+// included it is assumed to be a notification. The value SHOULD normally not be Null [1] and Numbers SHOULD NOT
+// contain fractional parts [2]
+//
+// The Server MUST reply with the same value in the [Response] object if included. This member is used to correlate the
+// context between the two objects.
+//
+// [1] The use of Null as a value for the id member in a Request object is discouraged, because this specification uses
+// a value of Null for Responses with an unknown id. Also, because JSON-RPC 1.0 uses an id value of Null for
+// Notifications this could cause confusion in handling.
+//
+// [2] Fractional parts may be problematic, since many decimal fractions cannot be represented exactly as binary
+// fractions.
+type Request struct {
+	// isNotification records that the request arrived (or was marked) without
+	// an "id" member; such requests are marshalled without one.
+	isNotification bool
+	Method         Method `json:"method" validate:"required"`
+	Params         Params `json:"params,omitempty"`
+	ID             ID     `json:"id"`
+}
+
+// IsNotification reports whether the request is a notification, i.e. it
+// carries no id and expects no response.
+func (r Request) IsNotification() bool {
+	return r.isNotification
+}
+
+// AsNotification marks the request as a notification and returns it for
+// chaining; marshalling it afterwards omits the "id" member.
+func (r *Request) AsNotification() *Request {
+	r.isNotification = true
+	return r
+}
+
+// MarshalJSON marshals a [Request] object into a JSON object. The field "jsonrpc" is added and set to the value of
+// [JSONRPC2_0]. Notifications are marshalled without the "id" member.
+func (r Request) MarshalJSON() ([]byte, error) {
+	// Notifications omit the "id" member entirely.
+	if r.isNotification {
+		return json.Marshal(&struct {
+			JSONRPC JSONRPC `json:"jsonrpc"`
+			Method  Method  `json:"method" validate:"required"`
+			Params  Params  `json:"params,omitempty"`
+		}{
+			JSONRPC: JSONRPC2_0,
+			Method:  r.Method,
+			Params:  r.Params,
+		})
+	}
+	return json.Marshal(&struct {
+		JSONRPC JSONRPC `json:"jsonrpc"`
+		Method  Method  `json:"method" validate:"required"`
+		Params  Params  `json:"params,omitempty"`
+		ID      ID      `json:"id"`
+	}{
+		JSONRPC: JSONRPC2_0,
+		Method:  r.Method,
+		Params:  r.Params,
+		ID:      r.ID,
+	})
+}
+
+// UnmarshalJSON unmarshals a JSON object into a [Request]. It requires the
+// "jsonrpc" member to be exactly "2.0" and "method" to be present and
+// non-empty. A request without an "id" member is flagged as a notification.
+// Params are decoded as an object first, falling back to an array.
+//
+// NOTE(review): if "params" is present but is neither a valid object nor a
+// valid array, the unmarshal error is silently dropped and r.Params stays
+// nil — confirm this lenient behaviour is intended.
+func (r *Request) UnmarshalJSON(data []byte) error {
+	var dat map[string]*json.RawMessage
+	err := json.Unmarshal(data, &dat)
+	if err != nil {
+		return err
+	}
+
+	v, ok := dat["jsonrpc"]
+	if ok {
+		var version JSONRPC
+		err = json.Unmarshal(*v, &version)
+		if err != nil {
+			return err
+		}
+		if version != JSONRPC2_0 {
+			return fmt.Errorf("invalid JSON-RPC version: %s", version)
+		}
+	} else {
+		return fmt.Errorf("jsonrpc field is required")
+	}
+
+	// An absent "id" member marks the request as a notification.
+	if _, ok := dat["id"]; ok {
+		err = json.Unmarshal(*dat["id"], &r.ID)
+		if err != nil {
+			return err
+		}
+	} else {
+		r.isNotification = true
+	}
+
+	if _, ok := dat["method"]; ok {
+		err = json.Unmarshal(*dat["method"], &r.Method)
+		if err != nil {
+			return err
+		}
+	}
+
+	// Covers both an absent "method" member and a non-string value (which
+	// Method.UnmarshalJSON leaves empty).
+	if r.Method == "" {
+		return fmt.Errorf("method is required")
+	}
+
+	// decode Params into a ParamsObject if it is an object, otherwise decode it into a ParamsArray.
+	if _, ok := dat["params"]; ok {
+		if dat["params"] != nil {
+			var obj ParamsObject
+			err = json.Unmarshal(*dat["params"], &obj)
+			if err == nil {
+				r.Params = &obj
+			} else {
+				var arr ParamsArray
+				err = json.Unmarshal(*dat["params"], &arr)
+				if err == nil {
+					r.Params = &arr
+				}
+			}
+		}
+	}
+
+	return nil
+}
+
+// Response represents a Response object as per the JSON-RPC 2.0 specification.
+//
+// When a rpc call is made, the Server MUST reply with a Response, except for in the case of Notifications. The Response
+// is expressed as a single JSON Object, with the following members:
+//
+// jsonrpc
+//
+// A String specifying the version of the JSON-RPC protocol. MUST be exactly "2.0".
+//
+// result
+//
+// This member is REQUIRED on success.
+// This member MUST NOT exist if there was an error invoking the method.
+// The value of this member is determined by the method invoked on the Server.
+//
+// error
+//
+// This member is REQUIRED on error.
+// This member MUST NOT exist if there was no error triggered during invocation.
+// The value for this member MUST be an Object as defined in section 5.1.
+//
+// id
+//
+// This member is REQUIRED.
+// It MUST be the same as the value of the id member in the Request Object.
+// If there was an error in detecting the id in the Request object (e.g. Parse error/Invalid Request), it MUST be Null.
+//
+// Either the result member or error member MUST be included, but both members MUST NOT be included.
+type Response struct {
+	// Result holds the call's result on success; dropped when Error is set.
+	Result interface{} `json:"result,omitempty"`
+	// Error holds the error object on failure; takes precedence over Result.
+	Error *Error `json:"error,omitempty" validate:"required_without=Result,excluded_with=Result"`
+	// ID echoes the id of the request being answered.
+	ID ID `json:"id"`
+}
+
+// MarshalJSON marshals a [Response] object into a JSON object. The field "jsonrpc" is added and set to the value of
+// [JSONRPC2_0].
+func (r Response) MarshalJSON() ([]byte, error) {
+	if r.Error != nil {
+		r.Result = nil // Error takes precedence over Result
+	}
+
+	// NOTE(review): when both Result and Error are nil this emits
+	// "result": null, even though the spec requires exactly one of the two
+	// members — confirm this fallback is intentional.
+	if r.Result == nil && r.Error == nil {
+		r.Result = json.RawMessage("null")
+	}
+
+	// Alias strips the MarshalJSON method to avoid infinite recursion.
+	type Alias Response
+	return json.Marshal(&struct {
+		JSONRPC JSONRPC `json:"jsonrpc"`
+		*Alias
+	}{
+		JSONRPC: JSONRPC2_0,
+		Alias:   (*Alias)(&r),
+	})
+}
+
+// UnmarshalJSON unmarshals a JSON object into a [Response]. It enforces that
+// the "jsonrpc" member equals "2.0", that "id" is present, and that at least
+// one of "result"/"error" is present with a non-null value.
+func (r *Response) UnmarshalJSON(data []byte) error {
+	// temp wraps each member in optional so an explicitly-null member can be
+	// told apart from an absent one.
+	type temp struct {
+		ID     optional[ID]          `json:"id"`
+		Result optional[interface{}] `json:"result"`
+		Error  optional[Error]       `json:"error"`
+	}
+	var dat map[string]*json.RawMessage
+	err := json.Unmarshal(data, &dat)
+	if err != nil {
+		return err
+	}
+
+	v, ok := dat["jsonrpc"]
+	if ok {
+		var version JSONRPC
+		err = json.Unmarshal(*v, &version)
+		if err != nil {
+			return err
+		}
+		if version != JSONRPC2_0 {
+			return fmt.Errorf("invalid JSON-RPC version: %s", version)
+		}
+	} else {
+		return fmt.Errorf("jsonrpc field is required")
+	}
+
+	var tmp temp
+	err = json.Unmarshal(data, &tmp)
+	if err != nil {
+		return err
+	}
+
+	// Both members absent is invalid.
+	if !tmp.Result.Defined && !tmp.Error.Defined {
+		return fmt.Errorf("either result or error is required")
+	}
+	// Both members null (or one null, the other absent) is also invalid.
+	if tmp.Result.Value == nil && tmp.Error.Value == nil {
+		return fmt.Errorf("either result or error is required")
+	}
+	if tmp.Result.Defined {
+		r.Result = tmp.Result.Value
+	}
+	if tmp.Error.Defined {
+		r.Error = tmp.Error.Value
+
+	}
+
+	// A null id is normalised to the empty string.
+	if tmp.ID.Defined {
+		if tmp.ID.Value == nil {
+			r.ID = ""
+		} else {
+			r.ID = *tmp.ID.Value
+		}
+	} else {
+		return fmt.Errorf("id is required")
+	}
+
+	return nil
+}
+
+// Error represents an Error object as per the JSON-RPC 2.0 specification section 5.1.
+//
+// When a rpc call encounters an error, the [Response] Object MUST contain the error member with a value that is a
+// Object with the following members:
+//
+// code
+//
+// A Number that indicates the error type that occurred.
+// This MUST be an integer.
+//
+// message
+//
+// A String providing a short description of the error.
+// The message SHOULD be limited to a concise single sentence.
+//
+// data
+//
+// A Primitive or Structured value that contains additional information about the error.
+// This may be omitted.
+// The value of this member is defined by the Server (e.g. detailed error information, nested errors etc.).
+//
+// The error codes from and including -32768 to -32000 are reserved for pre-defined errors. Any code within this range,
+// but not defined explicitly below is reserved for future use. The error codes are nearly the same as those suggested
+// for XML-RPC at the following url: http://xmlrpc-epi.sourceforge.net/specs/rfc.fault_codes.php
+//
+// code	message	meaning
+// -32700	Parse error	Invalid JSON was received by the server. An error occurred on the server while parsing the JSON text.
+// -32600	Invalid Request	The JSON sent is not a valid Request object.
+// -32601	Method not found	The method does not exist / is not available.
+// -32602	Invalid Params	Invalid method parameter(s).
+// -32603	Internal error	Internal JSON-RPC error.
+// -32000 to -32099	Server error	Reserved for implementation-defined server-errors.
+//
+// The remainder of the space is available for application defined errors.
+type Error struct {
+	// Code is the integer error code; see the table above for reserved
+	// ranges.
+	Code RPCErrorCode `json:"code" validate:"required"`
+	// Message is a concise, single-sentence description of the error.
+	Message string `json:"message" validate:"required"`
+	// Data carries optional server-defined detail and may be omitted.
+	Data interface{} `json:"data,omitempty"`
+}
+
// BatchRequest represents a Batch Request as per the JSON-RPC 2.0 specification.
//
// To send several [Request] objects at the same time, the Client MAY send an Array filled with [Request] objects.
//
// The Server should respond with an Array containing the corresponding [Response] objects, after all of the batch
// [Request] objects have been processed. A [Response] object SHOULD exist for each [Request] object, except that there
// SHOULD NOT be any [Response] objects for notifications. The Server MAY process a batch rpc call as a set of
// concurrent tasks, processing them in any order and with any width of parallelism.
//
// The [Response] objects being returned from a batch call MAY be returned in any order within the Array. The Client
// SHOULD match contexts between the set of [Request] objects and the resulting set of [Response] objects based on the
// id member within each Object.
//
// If the batch rpc call itself fails to be recognized as an valid JSON or as an Array with at least one value, the
// response from the Server MUST be a single [Response] object. If there are no [Response] objects contained within the
// [Response] array as it is to be sent to the client, the server MUST NOT return an empty Array and should return
// nothing at all.
type BatchRequest []Request
+
+func (b *BatchRequest) UnmarshalJSON(data []byte) error {
+ var arr []json.RawMessage
+ err := json.Unmarshal(data, &arr)
+ if err != nil {
+ return err
+ }
+
+ type O struct {
+ ID optional[ID] `json:"id"`
+ }
+
+ if arr == nil {
+ return fmt.Errorf("batch request must be an array")
+ }
+
+ var me error
+
+ for i, raw := range arr {
+ var obj O
+ err = json.Unmarshal(raw, &obj)
+ if err != nil {
+ me = errors.Join(fmt.Errorf("error unmarshalling object at index %d: %v", i, err))
+ continue
+ }
+ var req Request
+ if !obj.ID.Defined {
+ // It is a notification
+ req.isNotification = true
+ }
+ err = json.Unmarshal(raw, &req)
+ if err != nil {
+ me = errors.Join(fmt.Errorf("error unmarshalling object at index %d into Request: %v", i, err))
+ } else {
+ *b = append(*b, req)
+ }
+ }
+
+ return me
+}
+
// BatchResponse represents a Batch Response as per the JSON-RPC 2.0 specification.
//
// To send several [Request] objects at the same time, the Client MAY send an Array filled with [Request] objects.
//
// The Server should respond with an Array containing the corresponding [Response] objects, after all of the batch
// [Request] objects have been processed. A [Response] object SHOULD exist for each [Request] object, except that there
// SHOULD NOT be any [Response] objects for notifications. The Server MAY process a batch rpc call as a set of
// concurrent tasks, processing them in any order and with any width of parallelism.
//
// The [Response] objects being returned from a batch call MAY be returned in any order within the Array. The Client
// SHOULD match contexts between the set of [Request] objects and the resulting set of [Response] objects based on the
// id member within each Object.
//
// If the batch rpc call itself fails to be recognized as an valid JSON or as an Array with at least one value, the
// response from the Server MUST be a single [Response] object. If there are no [Response] objects contained within the
// [Response] array as it is to be sent to the client, the server MUST NOT return an empty Array and should return
// nothing at all.
type BatchResponse []Response
+
// RPCErrorCode represents an error code as per the JSON-RPC 2.0 specification section 5.1.
// Codes -32768 to -32000 inclusive are reserved; see the constants below.
type RPCErrorCode int
+
// Pre-defined error codes from the JSON-RPC 2.0 specification section 5.1.
const (
	// ParseError signals that an invalid JSON was received by the server and that an error occurred on the server while
	// parsing the JSON text.
	ParseError RPCErrorCode = -32700
	// InvalidRequest signals that the JSON sent is not a valid Request object.
	InvalidRequest RPCErrorCode = -32600
	// MethodNotFound signals that the method does not exist / is not available.
	MethodNotFound RPCErrorCode = -32601
	// InvalidParams signals that invalid method parameter(s) was given.
	InvalidParams RPCErrorCode = -32602
	// InternalError signals an internal JSON-RPC error.
	InternalError RPCErrorCode = -32603
	// ServerError0 to ServerError99 are reserved for implementation-defined server-errors (-32000 to -32099).
	ServerError0  RPCErrorCode = -32000
	ServerError1  RPCErrorCode = -32001
	ServerError2  RPCErrorCode = -32002
	ServerError3  RPCErrorCode = -32003
	ServerError4  RPCErrorCode = -32004
	ServerError5  RPCErrorCode = -32005
	ServerError6  RPCErrorCode = -32006
	ServerError7  RPCErrorCode = -32007
	ServerError8  RPCErrorCode = -32008
	ServerError9  RPCErrorCode = -32009
	ServerError10 RPCErrorCode = -32010
	ServerError11 RPCErrorCode = -32011
	ServerError12 RPCErrorCode = -32012
	ServerError13 RPCErrorCode = -32013
	ServerError14 RPCErrorCode = -32014
	ServerError15 RPCErrorCode = -32015
	ServerError16 RPCErrorCode = -32016
	ServerError17 RPCErrorCode = -32017
	ServerError18 RPCErrorCode = -32018
	ServerError19 RPCErrorCode = -32019
	ServerError20 RPCErrorCode = -32020
	ServerError21 RPCErrorCode = -32021
	ServerError22 RPCErrorCode = -32022
	ServerError23 RPCErrorCode = -32023
	ServerError24 RPCErrorCode = -32024
	ServerError25 RPCErrorCode = -32025
	ServerError26 RPCErrorCode = -32026
	ServerError27 RPCErrorCode = -32027
	ServerError28 RPCErrorCode = -32028
	ServerError29 RPCErrorCode = -32029
	ServerError30 RPCErrorCode = -32030
	ServerError31 RPCErrorCode = -32031
	ServerError32 RPCErrorCode = -32032
	ServerError33 RPCErrorCode = -32033
	ServerError34 RPCErrorCode = -32034
	ServerError35 RPCErrorCode = -32035
	ServerError36 RPCErrorCode = -32036
	ServerError37 RPCErrorCode = -32037
	ServerError38 RPCErrorCode = -32038
	ServerError39 RPCErrorCode = -32039
	ServerError40 RPCErrorCode = -32040
	ServerError41 RPCErrorCode = -32041
	ServerError42 RPCErrorCode = -32042
	ServerError43 RPCErrorCode = -32043
	ServerError44 RPCErrorCode = -32044
	ServerError45 RPCErrorCode = -32045
	ServerError46 RPCErrorCode = -32046
	ServerError47 RPCErrorCode = -32047
	ServerError48 RPCErrorCode = -32048
	ServerError49 RPCErrorCode = -32049
	ServerError50 RPCErrorCode = -32050
	ServerError51 RPCErrorCode = -32051
	ServerError52 RPCErrorCode = -32052
	ServerError53 RPCErrorCode = -32053
	ServerError54 RPCErrorCode = -32054
	ServerError55 RPCErrorCode = -32055
	ServerError56 RPCErrorCode = -32056
	ServerError57 RPCErrorCode = -32057
	ServerError58 RPCErrorCode = -32058
	ServerError59 RPCErrorCode = -32059
	ServerError60 RPCErrorCode = -32060
	ServerError61 RPCErrorCode = -32061
	ServerError62 RPCErrorCode = -32062
	ServerError63 RPCErrorCode = -32063
	ServerError64 RPCErrorCode = -32064
	ServerError65 RPCErrorCode = -32065
	ServerError66 RPCErrorCode = -32066
	ServerError67 RPCErrorCode = -32067
	ServerError68 RPCErrorCode = -32068
	ServerError69 RPCErrorCode = -32069
	ServerError70 RPCErrorCode = -32070
	ServerError71 RPCErrorCode = -32071
	ServerError72 RPCErrorCode = -32072
	ServerError73 RPCErrorCode = -32073
	ServerError74 RPCErrorCode = -32074
	ServerError75 RPCErrorCode = -32075
	ServerError76 RPCErrorCode = -32076
	ServerError77 RPCErrorCode = -32077
	ServerError78 RPCErrorCode = -32078
	ServerError79 RPCErrorCode = -32079
	ServerError80 RPCErrorCode = -32080
	ServerError81 RPCErrorCode = -32081
	ServerError82 RPCErrorCode = -32082
	ServerError83 RPCErrorCode = -32083
	ServerError84 RPCErrorCode = -32084
	ServerError85 RPCErrorCode = -32085
	ServerError86 RPCErrorCode = -32086
	ServerError87 RPCErrorCode = -32087
	ServerError88 RPCErrorCode = -32088
	ServerError89 RPCErrorCode = -32089
	ServerError90 RPCErrorCode = -32090
	ServerError91 RPCErrorCode = -32091
	ServerError92 RPCErrorCode = -32092
	ServerError93 RPCErrorCode = -32093
	ServerError94 RPCErrorCode = -32094
	ServerError95 RPCErrorCode = -32095
	ServerError96 RPCErrorCode = -32096
	ServerError97 RPCErrorCode = -32097
	ServerError98 RPCErrorCode = -32098
	ServerError99 RPCErrorCode = -32099
)
+
// RPCErrorMessage represents an error message as per the JSON-RPC 2.0 specification section 5.1.
// It is the human-readable counterpart of an [RPCErrorCode].
type RPCErrorMessage string
+
// Canonical messages for the pre-defined error codes. All server-error
// messages share the spec-suggested text "Server error".
const (
	// ParseErrorMessage is the message for [ParseError].
	ParseErrorMessage RPCErrorMessage = "Parse error"
	// InvalidRequestMessage is the message for [InvalidRequest].
	InvalidRequestMessage RPCErrorMessage = "Invalid Request"
	// MethodNotFoundMessage is the message for [MethodNotFound].
	MethodNotFoundMessage RPCErrorMessage = "Method not found"
	// InvalidParamsMessage is the message for [InvalidParams].
	InvalidParamsMessage RPCErrorMessage = "Invalid Params"
	// InternalErrorMessage is the message for [InternalError].
	InternalErrorMessage RPCErrorMessage = "Internal error"
	// ServerErrorMessage0 to ServerErrorMessage99 are reserved for implementation-defined server-errors.
	ServerErrorMessage0  RPCErrorMessage = "Server error"
	ServerErrorMessage1  RPCErrorMessage = "Server error"
	ServerErrorMessage2  RPCErrorMessage = "Server error"
	ServerErrorMessage3  RPCErrorMessage = "Server error"
	ServerErrorMessage4  RPCErrorMessage = "Server error"
	ServerErrorMessage5  RPCErrorMessage = "Server error"
	ServerErrorMessage6  RPCErrorMessage = "Server error"
	ServerErrorMessage7  RPCErrorMessage = "Server error"
	ServerErrorMessage8  RPCErrorMessage = "Server error"
	ServerErrorMessage9  RPCErrorMessage = "Server error"
	ServerErrorMessage10 RPCErrorMessage = "Server error"
	ServerErrorMessage11 RPCErrorMessage = "Server error"
	ServerErrorMessage12 RPCErrorMessage = "Server error"
	ServerErrorMessage13 RPCErrorMessage = "Server error"
	ServerErrorMessage14 RPCErrorMessage = "Server error"
	ServerErrorMessage15 RPCErrorMessage = "Server error"
	ServerErrorMessage16 RPCErrorMessage = "Server error"
	ServerErrorMessage17 RPCErrorMessage = "Server error"
	ServerErrorMessage18 RPCErrorMessage = "Server error"
	ServerErrorMessage19 RPCErrorMessage = "Server error"
	ServerErrorMessage20 RPCErrorMessage = "Server error"
	ServerErrorMessage21 RPCErrorMessage = "Server error"
	ServerErrorMessage22 RPCErrorMessage = "Server error"
	ServerErrorMessage23 RPCErrorMessage = "Server error"
	ServerErrorMessage24 RPCErrorMessage = "Server error"
	ServerErrorMessage25 RPCErrorMessage = "Server error"
	ServerErrorMessage26 RPCErrorMessage = "Server error"
	ServerErrorMessage27 RPCErrorMessage = "Server error"
	ServerErrorMessage28 RPCErrorMessage = "Server error"
	ServerErrorMessage29 RPCErrorMessage = "Server error"
	ServerErrorMessage30 RPCErrorMessage = "Server error"
	ServerErrorMessage31 RPCErrorMessage = "Server error"
	ServerErrorMessage32 RPCErrorMessage = "Server error"
	ServerErrorMessage33 RPCErrorMessage = "Server error"
	ServerErrorMessage34 RPCErrorMessage = "Server error"
	ServerErrorMessage35 RPCErrorMessage = "Server error"
	ServerErrorMessage36 RPCErrorMessage = "Server error"
	ServerErrorMessage37 RPCErrorMessage = "Server error"
	ServerErrorMessage38 RPCErrorMessage = "Server error"
	ServerErrorMessage39 RPCErrorMessage = "Server error"
	ServerErrorMessage40 RPCErrorMessage = "Server error"
	ServerErrorMessage41 RPCErrorMessage = "Server error"
	ServerErrorMessage42 RPCErrorMessage = "Server error"
	ServerErrorMessage43 RPCErrorMessage = "Server error"
	ServerErrorMessage44 RPCErrorMessage = "Server error"
	ServerErrorMessage45 RPCErrorMessage = "Server error"
	ServerErrorMessage46 RPCErrorMessage = "Server error"
	ServerErrorMessage47 RPCErrorMessage = "Server error"
	ServerErrorMessage48 RPCErrorMessage = "Server error"
	ServerErrorMessage49 RPCErrorMessage = "Server error"
	ServerErrorMessage50 RPCErrorMessage = "Server error"
	ServerErrorMessage51 RPCErrorMessage = "Server error"
	ServerErrorMessage52 RPCErrorMessage = "Server error"
	ServerErrorMessage53 RPCErrorMessage = "Server error"
	ServerErrorMessage54 RPCErrorMessage = "Server error"
	ServerErrorMessage55 RPCErrorMessage = "Server error"
	ServerErrorMessage56 RPCErrorMessage = "Server error"
	ServerErrorMessage57 RPCErrorMessage = "Server error"
	ServerErrorMessage58 RPCErrorMessage = "Server error"
	ServerErrorMessage59 RPCErrorMessage = "Server error"
	ServerErrorMessage60 RPCErrorMessage = "Server error"
	ServerErrorMessage61 RPCErrorMessage = "Server error"
	ServerErrorMessage62 RPCErrorMessage = "Server error"
	ServerErrorMessage63 RPCErrorMessage = "Server error"
	ServerErrorMessage64 RPCErrorMessage = "Server error"
	ServerErrorMessage65 RPCErrorMessage = "Server error"
	ServerErrorMessage66 RPCErrorMessage = "Server error"
	ServerErrorMessage67 RPCErrorMessage = "Server error"
	ServerErrorMessage68 RPCErrorMessage = "Server error"
	ServerErrorMessage69 RPCErrorMessage = "Server error"
	ServerErrorMessage70 RPCErrorMessage = "Server error"
	ServerErrorMessage71 RPCErrorMessage = "Server error"
	ServerErrorMessage72 RPCErrorMessage = "Server error"
	ServerErrorMessage73 RPCErrorMessage = "Server error"
	ServerErrorMessage74 RPCErrorMessage = "Server error"
	ServerErrorMessage75 RPCErrorMessage = "Server error"
	ServerErrorMessage76 RPCErrorMessage = "Server error"
	ServerErrorMessage77 RPCErrorMessage = "Server error"
	ServerErrorMessage78 RPCErrorMessage = "Server error"
	ServerErrorMessage79 RPCErrorMessage = "Server error"
	ServerErrorMessage80 RPCErrorMessage = "Server error"
	ServerErrorMessage81 RPCErrorMessage = "Server error"
	ServerErrorMessage82 RPCErrorMessage = "Server error"
	ServerErrorMessage83 RPCErrorMessage = "Server error"
	ServerErrorMessage84 RPCErrorMessage = "Server error"
	ServerErrorMessage85 RPCErrorMessage = "Server error"
	ServerErrorMessage86 RPCErrorMessage = "Server error"
	ServerErrorMessage87 RPCErrorMessage = "Server error"
	ServerErrorMessage88 RPCErrorMessage = "Server error"
	ServerErrorMessage89 RPCErrorMessage = "Server error"
	ServerErrorMessage90 RPCErrorMessage = "Server error"
	ServerErrorMessage91 RPCErrorMessage = "Server error"
	ServerErrorMessage92 RPCErrorMessage = "Server error"
	ServerErrorMessage93 RPCErrorMessage = "Server error"
	ServerErrorMessage94 RPCErrorMessage = "Server error"
	ServerErrorMessage95 RPCErrorMessage = "Server error"
	ServerErrorMessage96 RPCErrorMessage = "Server error"
	ServerErrorMessage97 RPCErrorMessage = "Server error"
	ServerErrorMessage98 RPCErrorMessage = "Server error"
	ServerErrorMessage99 RPCErrorMessage = "Server error"
)
+
+// ValidateMethod validates the method field of a [Request], [Notification], or [Response].
+//
+// If [github.com/go-playground/validator] is used, this function can be used as a custom validation function.
+//
+// Example:
+//
+// validate = validator.New()
+// validate.RegisterValidation(jsonrpc.ValidateMethod, jsonrpc.Method)
+func ValidateMethod(field reflect.Value) interface{} {
+ if field.Kind() != reflect.String {
+ return "method must be a string"
+ }
+ if field.String() == "" {
+ return "method cannot be empty"
+ }
+
+ if strings.HasPrefix(field.String(), "rpc.") {
+ return "methods beginning with rpc. are reserved"
+ }
+
+ return nil
+}
+
+// ValidateErrorCode validates an [RPCErrorCode].
+//
+// If [github.com/go-playground/validator] is used, this function can be used as a custom validation function.
+//
+// Example:
+//
+// validate = validator.New()
+// validate.RegisterValidation(jsonrpc.ValidateErrorCode, jsonrpc.RPCErrorCode)
+func ValidateErrorCode(field reflect.Value) interface{} {
+ if field.Kind() != reflect.Int {
+ return "error code must be an integer"
+ }
+
+ switch RPCErrorCode(field.Int()) {
+ case ParseError, InvalidRequest, MethodNotFound, InvalidParams, InternalError:
+ // These codes are reserved, but has their purpose defined in the JSON-RPC 2.0 specification.
+ return nil
+ }
+
+ if field.Int() <= -32000 && field.Int() >= -32768 {
+ // This code is reserved for future use.
+ return fmt.Sprintf("invalid error code - code %d is reserved for future use", field.Int())
+ }
+
+ return nil
+}
diff --git a/internal/rpc/internal/jsonrpc/jsonrpc_test.go b/internal/rpc/internal/jsonrpc/jsonrpc_test.go
new file mode 100644
index 0000000..33945ad
--- /dev/null
+++ b/internal/rpc/internal/jsonrpc/jsonrpc_test.go
@@ -0,0 +1,1032 @@
+package jsonrpc
+
+import (
+ "encoding/json"
+ "reflect"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
// TestID_MarshalJSON verifies that ID values marshal to JSON strings,
// including numeric-looking and empty IDs.
func TestID_MarshalJSON(t *testing.T) {
	tests := []struct {
		name    string
		id      ID
		want    []byte
		wantErr bool
	}{
		{
			name: "id is string",
			id:   ID("test"),
			want: []byte(`"test"`),
		},
		{
			// Numeric content still marshals as a JSON string.
			name: "id is int",
			id:   ID("1"),
			want: []byte(`"1"`),
		},
		{
			name: "id is float",
			id:   ID("1.1"),
			want: []byte(`"1.1"`),
		},
		{
			name: "id is empty",
			id:   ID(""),
			want: []byte(`""`),
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := json.Marshal(tt.id)
			if (err != nil) != tt.wantErr {
				t.Errorf("MarshalJSON() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if !reflect.DeepEqual(got, tt.want) {
				t.Errorf("MarshalJSON() got = %v, want %v", got, tt.want)
			}
		})
	}
}
+
// TestID_UnmarshalJSON verifies that ID accepts JSON strings, numbers,
// and null. Note: only the error outcome is asserted; the resulting ID
// value (the tt.id field) is not compared against the unmarshal result.
func TestID_UnmarshalJSON(t *testing.T) {
	type args struct {
		data []byte
	}
	tests := []struct {
		name    string
		id      ID
		args    args
		wantErr bool
	}{
		{
			name: "id is string",
			id:   ID("test"),
			args: args{data: []byte(`"test"`)},
		},
		{
			name: "id is int",
			id:   ID("1"),
			args: args{data: []byte(`1`)},
		},
		{
			name: "id is float",
			id:   ID("1.1"),
			args: args{data: []byte(`"1.1"`)},
		},
		{
			name: "id is null",
			id:   ID(""),
			args: args{data: []byte(`null`)},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			if err := tt.id.UnmarshalJSON(tt.args.data); (err != nil) != tt.wantErr {
				t.Errorf("UnmarshalJSON() error = %v, wantErr %v", err, tt.wantErr)
			}
		})
	}
}
+
// TestParamsArray_UnmarshalJSON verifies that ParamsArray accepts JSON
// arrays (including empty, mixed-type, and null payloads) and rejects
// objects, bare strings, and empty input.
func TestParamsArray_UnmarshalJSON(t *testing.T) {
	type args struct {
		data []byte
	}
	tests := []struct {
		name    string
		pa      ParamsArray
		args    args
		wantErr bool
	}{
		{
			name: "Params is array",
			pa:   ParamsArray{},
			args: args{data: []byte(`[1,2,3]`)},
		},
		{
			name:    "Params is object",
			pa:      ParamsArray{},
			args:    args{data: []byte(`{"test":1}`)},
			wantErr: true,
		},
		{
			name:    "Params is string",
			pa:      ParamsArray{},
			args:    args{data: []byte(`"test"`)},
			wantErr: true,
		},
		{
			name: "Params are floats",
			pa:   ParamsArray{},
			args: args{data: []byte(`[1.1,2.2,3.3]`)},
		},
		{
			name: "Params are mixed",
			pa:   ParamsArray{},
			args: args{data: []byte(`[1,"test",3.3]`)},
		},
		{
			name: "Params are empty",
			pa:   ParamsArray{},
			args: args{data: []byte(`[]`)},
		},
		{
			name: "Params are null",
			pa:   ParamsArray{},
			args: args{data: []byte(`null`)},
		},
		{
			name:    "empty bytes",
			pa:      ParamsArray{},
			args:    args{data: []byte(``)},
			wantErr: true,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			if err := tt.pa.UnmarshalJSON(tt.args.data); (err != nil) != tt.wantErr {
				t.Errorf("UnmarshalJSON() error = %v, wantErr %v", err, tt.wantErr)
			}
		})
	}
}
+
// TestParamsArray_isParams exercises the isParams marker method purely
// for coverage; the method has no observable behavior to assert.
func TestParamsArray_isParams(t *testing.T) {
	tests := []struct {
		name string
		pa   ParamsArray
	}{
		{
			name: "Params is array",
			pa:   ParamsArray{},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			tt.pa.isParams()
		})
	}
}
+
// TestParamsObject_UnmarshalJSON verifies that ParamsObject accepts
// JSON objects (including empty, mixed-value, and null payloads) and
// rejects arrays, bare strings, and empty input.
func TestParamsObject_UnmarshalJSON(t *testing.T) {
	type args struct {
		data []byte
	}
	tests := []struct {
		name    string
		pa      ParamsObject
		args    args
		wantErr bool
	}{
		{
			name: "Params is object",
			pa:   ParamsObject{},
			args: args{data: []byte(`{"test":1}`)},
		},
		{
			name:    "Params is array",
			pa:      ParamsObject{},
			args:    args{data: []byte(`[1,2,3]`)},
			wantErr: true,
		},
		{
			name:    "Params is string",
			pa:      ParamsObject{},
			args:    args{data: []byte(`"test"`)},
			wantErr: true,
		},
		{
			name: "Params are floats",
			pa:   ParamsObject{},
			args: args{data: []byte(`{"test":1.1}`)},
		},
		{
			name: "Params are mixed",
			pa:   ParamsObject{},
			args: args{data: []byte(`{"test":1,"test2":"test","test3":3.3}`)},
		},
		{
			name: "Params are empty",
			pa:   ParamsObject{},
			args: args{data: []byte(`{}`)},
		},
		{
			name: "Params are null",
			pa:   ParamsObject{},
			args: args{data: []byte(`null`)},
		},
		{
			name:    "empty bytes",
			pa:      ParamsObject{},
			args:    args{data: []byte(``)},
			wantErr: true,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			if err := tt.pa.UnmarshalJSON(tt.args.data); (err != nil) != tt.wantErr {
				t.Errorf("UnmarshalJSON() error = %v, wantErr %v", err, tt.wantErr)
			}
		})
	}
}
+
// TestParamsObject_isParams exercises the isParams marker method purely
// for coverage; the method has no observable behavior to assert.
func TestParamsObject_isParams(t *testing.T) {
	tests := []struct {
		name string
		pa   ParamsObject
	}{
		{
			name: "Params is object",
			pa:   ParamsObject{},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			tt.pa.isParams()
		})
	}
}
+
// TestRequest_UnmarshalJSON verifies Request decoding: required jsonrpc
// version and method members, id shape restrictions (no objects or
// arrays), and notification detection when id is absent.
//
// NOTE(review): "method is empty" duplicates "request has empty method"
// — one of the two could be removed.
func TestRequest_UnmarshalJSON(t *testing.T) {
	type args struct {
		data []byte
	}
	tests := []struct {
		name           string
		r              Request
		args           args
		wantErr        bool
		isNotification bool
	}{
		{
			name: "request is object",
			r:    Request{},
			args: args{data: []byte(`{"jsonrpc":"2.0","method":"test","params":[1,2,3],"id":1}`)},
		},
		{
			name:    "request is array",
			r:       Request{},
			args:    args{data: []byte(`[1,2,3]`)},
			wantErr: true,
		},
		{
			name:    "request is string",
			r:       Request{},
			args:    args{data: []byte(`"test"`)},
			wantErr: true,
		},
		{
			name:    "request is null",
			r:       Request{},
			args:    args{data: []byte(`null`)},
			wantErr: true,
		},
		{
			name:    "empty bytes",
			r:       Request{},
			args:    args{data: []byte(``)},
			wantErr: true,
		},
		{
			name:    "request is missing jsonrpc",
			r:       Request{},
			args:    args{data: []byte(`{"method":"test","params":[1,2,3],"id":1}`)},
			wantErr: true,
		},
		{
			name:    "request has bad jsonrpc version",
			r:       Request{},
			args:    args{data: []byte(`{"jsonrpc":"1.0","method":"test","params":[1,2,3],"id":1}`)},
			wantErr: true,
		},
		{
			name:    "request is missing method",
			r:       Request{},
			args:    args{data: []byte(`{"jsonrpc":"2.0","params":[1,2,3],"id":1}`)},
			wantErr: true,
		},
		{
			name:    "request has empty method",
			r:       Request{},
			args:    args{data: []byte(`{"jsonrpc":"2.0","method":"","params":[1,2,3],"id":1}`)},
			wantErr: true,
		},
		{
			// The version must be the string "2.0", not the number 2.0.
			name:    "request version is a number",
			r:       Request{},
			args:    args{data: []byte(`{"jsonrpc":2.0,"method":"test","params":[1,2,3],"id":1}`)},
			wantErr: true,
		},
		{
			name: "request ID is zero",
			r:    Request{},
			args: args{data: []byte(`{"jsonrpc":"2.0","method":"test","params":[1,2,3],"id":0}`)},
		},
		{
			name: "request ID is zero string",
			r:    Request{},
			args: args{data: []byte(`{"jsonrpc":"2.0","method":"test","params":{"one":"two"},"id":"0"}`)},
		},
		{
			name:    "request ID is an empty object",
			r:       Request{},
			args:    args{data: []byte(`{"jsonrpc":"2.0","method":"test","params":[1,2,3],"id":{}}`)},
			wantErr: true,
		},
		{
			name:    "request ID is an object",
			r:       Request{},
			args:    args{data: []byte(`{"jsonrpc":"2.0","method":"test","params":[1,2,3],"id":{"test":1}}`)},
			wantErr: true,
		},
		{
			name:    "request ID is an array",
			r:       Request{},
			args:    args{data: []byte(`{"jsonrpc":"2.0","method":"test","params":[1,2,3],"id":[1,2,3]}`)},
			wantErr: true,
		},
		{
			name:    "request ID is an empty array",
			r:       Request{},
			args:    args{data: []byte(`{"jsonrpc":"2.0","method":"test","params":[1,2,3],"id":[]}`)},
			wantErr: true,
		},
		{
			name: "request ID is an empty string",
			r:    Request{},
			args: args{data: []byte(`{"jsonrpc":"2.0","method":"test","params":[1,2,3],"id":""}`)},
		},
		{
			name:    "method is rpc.",
			r:       Request{},
			args:    args{data: []byte(`{"jsonrpc":"2.0","method":"rpc.test","params":[1,2,3],"id":1}`)},
			wantErr: true,
		},
		{
			name:    "method is empty",
			r:       Request{},
			args:    args{data: []byte(`{"jsonrpc":"2.0","method":"","params":[1,2,3],"id":1}`)},
			wantErr: true,
		},
		{
			name: "request has null Params",
			r:    Request{},
			args: args{data: []byte(`{"jsonrpc":"2.0","method":"test","params":null,"id":1}`)},
		},
		{
			// A request without an id member is a notification.
			name:           "request is notification",
			r:              Request{},
			args:           args{data: []byte(`{"jsonrpc":"2.0","method":"test","params":[1,2,3]}`)},
			isNotification: true,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			err := tt.r.UnmarshalJSON(tt.args.data)
			if tt.wantErr {
				require.Error(t, err, "unmarshalled object: %+v", tt.r)
			} else {
				require.NoError(t, err, "unmarshalled object: %+v", tt.r)
				if tt.isNotification {
					require.True(t, tt.r.IsNotification(), "unmarshalled object: %+v", tt.r)
				}
			}
		})
	}
}
+
// TestRequest_isNotification verifies that IsNotification reports the
// unexported isNotification flag.
// NOTE(review): the test name uses the unexported spelling; the method
// under test is the exported IsNotification.
func TestRequest_isNotification(t *testing.T) {
	tests := []struct {
		name string
		r    Request
		want bool
	}{
		{
			name: "request is notification",
			r: Request{
				isNotification: true,
			},
			want: true,
		},
		{
			name: "request is not notification",
			r: Request{
				isNotification: false,
			},
			want: false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			if got := tt.r.IsNotification(); got != tt.want {
				t.Errorf("IsNotification() = %v, want %v", got, tt.want)
			}
		})
	}
}
+
// TestRequest_AsNotification verifies that AsNotification sets the
// isNotification flag in place, regardless of its prior value.
func TestRequest_AsNotification(t *testing.T) {
	tests := []struct {
		name string
		r    Request
		want Request
	}{
		{
			name: "request is notification",
			r: Request{
				isNotification: true,
			},
			want: Request{
				isNotification: true,
			},
		},
		{
			name: "request is not notification",
			r: Request{
				isNotification: false,
			},
			want: Request{
				isNotification: true,
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			tt.r.AsNotification()
			require.Equal(t, tt.want, tt.r)
		})
	}
}
+
// TestRequest_MarshalJSON verifies Request encoding: the jsonrpc member
// is always emitted, nil params are omitted, and notifications omit the
// id member. Output is compared with JSONEq, so member order is free.
func TestRequest_MarshalJSON(t *testing.T) {
	tests := []struct {
		name    string
		r       Request
		want    []byte
		wantErr bool
	}{
		{
			name: "request is object",
			r: Request{
				Method: "test",
				Params: &ParamsArray{1, 2, 3},
				ID:     ID("1"),
			},
			want: []byte(`{"jsonrpc":"2.0","method":"test","params":[1,2,3],"id":"1"}`),
		},
		{
			// nil Params are omitted entirely from the output.
			name: "request is object with null params",
			r: Request{
				Method: "test",
				Params: nil,
				ID:     ID("1"),
			},
			want: []byte(`{"jsonrpc":"2.0","method":"test","id":"1"}`),
		},
		{
			name: "request is object with empty params",
			r: Request{
				Method: "test",
				Params: &ParamsObject{},
				ID:     ID("1"),
			},
			want: []byte(`{"jsonrpc":"2.0","method":"test","params":{},"id":"1"}`),
		},
		{
			name: "request is object with empty id",
			r: Request{
				Method: "test",
				Params: &ParamsArray{1, 2, 3},
				ID:     ID(""),
			},
			want: []byte(`{"jsonrpc":"2.0","method":"test","params":[1,2,3],"id":""}`),
		},
		{
			name: "request is object with zero id",
			r: Request{
				Method: "test",
				Params: &ParamsArray{1, 2, 3},
				ID:     ID("0"),
			},
			want: []byte(`{"jsonrpc":"2.0","method":"test","params":[1,2,3],"id":"0"}`),
		},
		{
			// Notifications are serialized without an id member.
			name: "request is a notification",
			r: Request{
				Method:         "test",
				Params:         &ParamsArray{1, 2, 3},
				isNotification: true,
			},
			want: []byte(`{"jsonrpc":"2.0","method":"test","params":[1,2,3]}`),
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := tt.r.MarshalJSON()
			if tt.wantErr {
				require.Error(t, err, "marshalled object: %+v", tt.r)
			} else {
				require.NoError(t, err, "marshalled object: %+v", tt.r)
				require.JSONEq(t, string(tt.want), string(got), "marshalled object: %+v", tt.r)
			}
		})
	}
}
+
// TestValidateErrorCode verifies that the five spec-defined codes and
// codes outside -32768..-32000 pass validation, that every other code
// in the reserved range fails (including -32000, i.e. the server-error
// boundary), and that non-integer values are rejected.
func TestValidateErrorCode(t *testing.T) {
	type args struct {
		field reflect.Value
	}
	tests := []struct {
		name string
		args args
		want interface{}
	}{
		{
			name: "ParseError passes validation",
			args: args{field: reflect.ValueOf(ParseError)},
			want: nil,
		},
		{
			name: "InvalidRequest passes validation",
			args: args{field: reflect.ValueOf(InvalidRequest)},
			want: nil,
		},
		{
			name: "MethodNotFound passes validation",
			args: args{field: reflect.ValueOf(MethodNotFound)},
			want: nil,
		},
		{
			name: "InvalidParams passes validation",
			args: args{field: reflect.ValueOf(InvalidParams)},
			want: nil,
		},
		{
			name: "InternalError passes validation",
			args: args{field: reflect.ValueOf(InternalError)},
			want: nil,
		},
		{
			// Just below the reserved range.
			name: "-32769 passes validation",
			args: args{field: reflect.ValueOf(-32769)},
			want: nil,
		},
		{
			name: "-32768 fails validation",
			args: args{field: reflect.ValueOf(-32768)},
			want: "invalid error code - code -32768 is reserved for future use",
		},
		{
			name: "-32767 fails validation",
			args: args{field: reflect.ValueOf(-32767)},
			want: "invalid error code - code -32767 is reserved for future use",
		},

		{
			name: "-32000 fails validation",
			args: args{field: reflect.ValueOf(-32000)},
			want: "invalid error code - code -32000 is reserved for future use",
		},
		{
			// Just above the reserved range.
			name: "-31999 passes validation",
			args: args{field: reflect.ValueOf(-31999)},
			want: nil,
		},
		{
			name: "string fails validation",
			args: args{field: reflect.ValueOf("test")},
			want: "error code must be an integer",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			if got := ValidateErrorCode(tt.args.field); !reflect.DeepEqual(got, tt.want) {
				// Fallback: accept an error value whose message matches.
				if v, ok := got.(error); ok {
					if v.Error() == tt.want {
						return
					}
				}
				t.Errorf("ValidateErrorCode() = %v, want %v", got, tt.want)
			}
		})
	}
}
+
// TestResponse_UnmarshalJSON verifies Response decoding: the jsonrpc
// member is mandatory, exactly one non-null result/error member must be
// present, id must be present (null is accepted, as it is used for
// responses to requests with an undeterminable id), and id may not be
// an object or array.
//
// NOTE(review): "id is null" duplicates "response has empty id - null
// used for unknown id" — one of the two could be removed.
func TestResponse_UnmarshalJSON(t *testing.T) {
	tests := []struct {
		name    string
		r       Response
		args    []byte
		wantErr bool
	}{
		{
			name: "response is object",
			r:    Response{},
			args: []byte(`{"jsonrpc":"2.0","result":1,"id":1}`),
		},
		{
			name:    "response is array",
			r:       Response{},
			args:    []byte(`[1,2,3]`),
			wantErr: true,
		},
		{
			name:    "response is string",
			r:       Response{},
			args:    []byte(`"test"`),
			wantErr: true,
		},
		{
			name:    "response is null",
			r:       Response{},
			args:    []byte(`null`),
			wantErr: true,
		},
		{
			name:    "empty bytes",
			r:       Response{},
			args:    []byte(``),
			wantErr: true,
		},
		{
			name:    "response is missing jsonrpc",
			r:       Response{},
			args:    []byte(`{"result":1,"id":1}`),
			wantErr: true,
		},
		{
			name:    "response has bad jsonrpc version",
			r:       Response{},
			args:    []byte(`{"jsonrpc":"1.0","result":1,"id":1}`),
			wantErr: true,
		},
		{
			name:    "response jsonrpc is bad array",
			r:       Response{},
			args:    []byte(`{"jsonrpc":[1,2,3],"result":1,"id":1}`),
			wantErr: true,
		},
		{
			name:    "response is missing result and error",
			r:       Response{},
			args:    []byte(`{"jsonrpc":"2.0","id":1}`),
			wantErr: true,
		},
		{
			name:    "response has nil result and error",
			r:       Response{},
			args:    []byte(`{"jsonrpc":"2.0","result":null,"error":null,"id":1}`),
			wantErr: true,
		},
		{
			// result and error are mutually exclusive per the spec.
			name:    "response has both result and error",
			r:       Response{},
			args:    []byte(`{"jsonrpc":"2.0","result":1,"error":1,"id":1}`),
			wantErr: true,
		},
		{
			name:    "response has empty result",
			r:       Response{},
			args:    []byte(`{"jsonrpc":"2.0","result":null,"id":1}`),
			wantErr: true,
		},
		{
			name:    "response has empty error",
			r:       Response{},
			args:    []byte(`{"jsonrpc":"2.0","error":null,"id":1}`),
			wantErr: true,
		},
		{
			name: "response has empty id - null used for unknown id",
			r:    Response{},
			args: []byte(`{"jsonrpc":"2.0","result":1,"id":null}`),
		},
		{
			name: "id is zero",
			r:    Response{},
			args: []byte(`{"jsonrpc":"2.0","result":1,"id":0}`),
		},
		{
			name: "id is zero string",
			r:    Response{},
			args: []byte(`{"jsonrpc":"2.0","result":1,"id":"0"}`),
		},
		{
			name:    "id is an empty object",
			r:       Response{},
			args:    []byte(`{"jsonrpc":"2.0","result":1,"id":{}}`),
			wantErr: true,
		},
		{
			name:    "id is an object",
			r:       Response{},
			args:    []byte(`{"jsonrpc":"2.0","result":1,"id":{"test":1}}`),
			wantErr: true,
		},
		{
			name:    "id is an array",
			r:       Response{},
			args:    []byte(`{"jsonrpc":"2.0","result":1,"id":[1,2,3]}`),
			wantErr: true,
		},
		{
			name:    "id is an empty array",
			r:       Response{},
			args:    []byte(`{"jsonrpc":"2.0","result":1,"id":[]}`),
			wantErr: true,
		},
		{
			name: "id is an empty string",
			r:    Response{},
			args: []byte(`{"jsonrpc":"2.0","result":1,"id":""}`),
		},
		{
			name: "id is null",
			r:    Response{},
			args: []byte(`{"jsonrpc":"2.0","result":1,"id":null}`),
		},
		{
			name:    "id is missing",
			r:       Response{},
			args:    []byte(`{"jsonrpc":"2.0","result":1}`),
			wantErr: true,
		},
		{
			name: "response has error",
			r:    Response{},
			args: []byte(`{"jsonrpc":"2.0","error":{"code":1,"message":"test"},"id":1}`),
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			err := tt.r.UnmarshalJSON(tt.args)
			if tt.wantErr {
				require.Error(t, err, "unmarshalled object: %+v", tt.r)
			} else {
				require.NoError(t, err, "unmarshalled object: %+v", tt.r)
			}
		})

	}
}
+
+// TestValidateMethod checks ValidateMethod's rules: the field must be a
+// non-empty string and must not use the reserved "rpc." prefix.
+func TestValidateMethod(t *testing.T) {
+	type args struct {
+		field reflect.Value
+	}
+	tests := []struct {
+		name string
+		args args
+		want interface{}
+	}{
+		{
+			name: "integer fails validation",
+			args: args{field: reflect.ValueOf(1)},
+			want: "method must be a string",
+		},
+		{
+			name: "empty string fails validation",
+			args: args{field: reflect.ValueOf("")},
+			want: "method cannot be empty",
+		},
+		{
+			name: "reserved method fails validation",
+			args: args{field: reflect.ValueOf("rpc.")},
+			want: "methods beginning with rpc. are reserved",
+		},
+		{
+			name: "other reserved method fails validation",
+			args: args{field: reflect.ValueOf("rpc.someMethod")},
+			want: "methods beginning with rpc. are reserved",
+		},
+		{
+			name: "valid method passes validation",
+			args: args{field: reflect.ValueOf("someMethod")},
+			want: nil,
+		},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			if got := ValidateMethod(tt.args.field); !reflect.DeepEqual(got, tt.want) {
+				t.Errorf("ValidateMethod() = %v, want %v", got, tt.want)
+			}
+		})
+	}
+}
+
+// Test_optional_UnmarshalJSON exercises optional[T].UnmarshalJSON for a
+// well-formed value and for malformed JSON input.
+func Test_optional_UnmarshalJSON(t *testing.T) {
+	type args struct {
+		data []byte
+	}
+	type testCase[T any] struct {
+		name    string
+		o       optional[T]
+		args    args
+		wantErr bool
+	}
+	tests := []testCase[string]{
+		{
+			name: "value is string",
+			o:    optional[string]{},
+			args: args{data: []byte(`"test"`)},
+		},
+		{
+			// Malformed JSON must surface an error rather than being
+			// silently accepted.
+			name:    "value is malformed JSON",
+			o:       optional[string]{},
+			args:    args{data: []byte(`{`)},
+			wantErr: true,
+		},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			if err := tt.o.UnmarshalJSON(tt.args.data); (err != nil) != tt.wantErr {
+				t.Errorf("UnmarshalJSON() error = %v, wantErr %v", err, tt.wantErr)
+			}
+		})
+	}
+}
+
+// Test_BatchRequest_UnmarshalJSON verifies that a batch must be a JSON array
+// whose members are valid Requests or Notifications; any other envelope or
+// member type is rejected.
+func Test_BatchRequest_UnmarshalJSON(t *testing.T) {
+	tests := []struct {
+		name    string
+		br      BatchRequest
+		args    []byte
+		wantErr bool
+	}{
+		{
+			name: "batch request is array",
+			br:   BatchRequest{},
+			args: []byte(`[{"jsonrpc":"2.0","method":"test","params":[1,2,3],"id":1}]`),
+		},
+		{
+			name:    "batch request is object",
+			br:      BatchRequest{},
+			args:    []byte(`{"jsonrpc":"2.0","method":"test","params":[1,2,3],"id":1}`),
+			wantErr: true,
+		},
+		{
+			name:    "batch request is string",
+			br:      BatchRequest{},
+			args:    []byte(`"test"`),
+			wantErr: true,
+		},
+		{
+			name:    "batch request is null",
+			br:      BatchRequest{},
+			args:    []byte(`null`),
+			wantErr: true,
+		},
+		{
+			name:    "empty bytes",
+			br:      BatchRequest{},
+			args:    []byte(``),
+			wantErr: true,
+		},
+		{
+			name: "batch request contains both Request and Notification",
+			br:   BatchRequest{},
+			args: []byte(`[{"jsonrpc":"2.0","method":"test","params":[1,2,3],"id":1},{"jsonrpc":"2.0","method":"test","params":[1,2,3]}]`),
+		},
+		{
+			name:    "batch request contains a string",
+			br:      BatchRequest{},
+			args:    []byte(`[{"jsonrpc":"2.0","method":"test","params":[1,2,3],"id":1},"test"]`),
+			wantErr: true,
+		},
+		{
+			name:    "batch contains bad Request and bad Notification",
+			br:      BatchRequest{},
+			args:    []byte(`[{"method":"test","params":[1,2,3],"id":1},{"method":"test","params":[1,2,3]}]`),
+			wantErr: true,
+		},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			err := tt.br.UnmarshalJSON(tt.args)
+			if tt.wantErr {
+				require.Error(t, err, "unmarshalled object: %+v", tt.br)
+			} else {
+				require.NoError(t, err, "unmarshalled object: %+v", tt.br)
+			}
+		})
+	}
+}
+
+// Test_optional_Defined verifies the Defined flag of optional: set whenever
+// the key is present in the JSON (even as null), clear when the key is
+// absent.
+func Test_optional_Defined(t *testing.T) {
+	type O struct {
+		ID optional[ID] `json:"id"`
+	}
+	// Key present with a value: Defined is true.
+	b := []byte(`{"id":"test"}`)
+	o := O{}
+	err := json.Unmarshal(b, &o)
+	require.NoError(t, err)
+	require.True(t, o.ID.Defined)
+
+	// Key present but explicitly null: still Defined — null is a deliberate
+	// value, distinct from omission.
+	b = []byte(`{"id":null}`)
+	o = O{}
+	err = json.Unmarshal(b, &o)
+	require.NoError(t, err)
+	require.True(t, o.ID.Defined)
+
+	// Key absent entirely: not Defined.
+	b = []byte(`{}`)
+	o = O{}
+	err = json.Unmarshal(b, &o)
+	require.NoError(t, err)
+	require.False(t, o.ID.Defined)
+}
+
+func Test_Request_with_ParamsArray(t *testing.T) {
+ r := Request{}
+ err := json.Unmarshal([]byte(`{"jsonrpc":"2.0","method":"test","params":[1,2,3],"id":1}`), &r)
+ require.NoError(t, err)
+ require.Equal(t, ID("1"), r.ID)
+ require.Equal(t, Method("test"), r.Method)
+ require.Equal(t, &ParamsArray{1, 2, 3}, r.Params)
+
+}
+
+func Test_Notification_with_ParamsArray(t *testing.T) {
+ n := Request{}
+ err := json.Unmarshal([]byte(`{"jsonrpc":"2.0","method":"test","params":[1,2,3]}`), &n)
+ require.NoError(t, err)
+ require.Equal(t, Method("test"), n.Method)
+ require.Equal(t, &ParamsArray{1, 2, 3}, n.Params)
+
+}
+
+// Test_BatchRequest_with_Notification verifies a single-element batch whose
+// member has no "id" is decoded with isNotification set.
+func Test_BatchRequest_with_Notification(t *testing.T) {
+	b := []byte(`[{"jsonrpc":"2.0","method":"test","params":[1,2,3]}]`)
+	br := BatchRequest{}
+	err := json.Unmarshal(b, &br)
+	require.NoError(t, err)
+	require.True(t, br[0].isNotification)
+	require.Equal(t, Method("test"), br[0].Method)
+	require.Equal(t, &ParamsArray{1, 2, 3}, br[0].Params)
+}
+
+// Test_BatchRequest_with_Request verifies a single-element batch whose member
+// has an "id" is decoded as a full request (isNotification false).
+func Test_BatchRequest_with_Request(t *testing.T) {
+	b := []byte(`[{"jsonrpc":"2.0","method":"test","params":[1,2,3],"id":1}]`)
+	br := BatchRequest{}
+	err := json.Unmarshal(b, &br)
+	require.NoError(t, err)
+	require.False(t, br[0].isNotification)
+	require.Equal(t, ID("1"), br[0].ID)
+	require.Equal(t, Method("test"), br[0].Method)
+	require.Equal(t, &ParamsArray{1, 2, 3}, br[0].Params)
+}
+
+// Test_BatchRequest_with_Mixed verifies that requests and notifications can
+// coexist in one batch, each keeping its own isNotification flag.
+func Test_BatchRequest_with_Mixed(t *testing.T) {
+	b := []byte(`[{"jsonrpc":"2.0","method":"test","params":[1,2,3],"id":1},{"jsonrpc":"2.0","method":"test","params":[1,2,3]}]`)
+	br := BatchRequest{}
+	err := json.Unmarshal(b, &br)
+	require.NoError(t, err)
+	require.False(t, br[0].isNotification)
+	require.True(t, br[1].isNotification)
+}
+
+// Test_Request_EndToEnd round-trips a request through Unmarshal then Marshal.
+// Note the numeric wire id 1 is normalized to the string "1" on re-encode.
+func Test_Request_EndToEnd(t *testing.T) {
+	r := Request{}
+	err := json.Unmarshal([]byte(`{"jsonrpc":"2.0","method":"test","params":[1,2,3],"id":1}`), &r)
+	require.NoError(t, err)
+	b, err := json.Marshal(r)
+	require.NoError(t, err)
+	require.JSONEq(t, `{"jsonrpc":"2.0","method":"test","params":[1,2,3],"id":"1"}`, string(b))
+}
+
+// Test_Notification_EndToEnd round-trips a notification; the re-encoded form
+// must not gain an "id" field.
+func Test_Notification_EndToEnd(t *testing.T) {
+	n := Request{}
+	err := json.Unmarshal([]byte(`{"jsonrpc":"2.0","method":"test","params":[1,2,3]}`), &n)
+	require.NoError(t, err)
+	b, err := json.Marshal(n)
+	require.NoError(t, err)
+	require.JSONEq(t, `{"jsonrpc":"2.0","method":"test","params":[1,2,3]}`, string(b))
+}
+
+// Test_Response_EndToEnd round-trips a response; like requests, a numeric
+// wire id is normalized to a string on re-encode.
+func Test_Response_EndToEnd(t *testing.T) {
+	r := Response{}
+	err := json.Unmarshal([]byte(`{"jsonrpc":"2.0","result":1,"id":1}`), &r)
+	require.NoError(t, err)
+	b, err := json.Marshal(r)
+	require.NoError(t, err)
+	require.JSONEq(t, `{"jsonrpc":"2.0","result":1,"id":"1"}`, string(b))
+}
+
+// TestResponse_without_result_result_not_null pins that marshalling an
+// error-only Response omits the "result" member entirely rather than
+// emitting "result":null alongside the error.
+func TestResponse_without_result_result_not_null(t *testing.T) {
+	r := Response{
+		ID: ID("1234"),
+		Error: &Error{
+			Code:    12345,
+			Message: "test",
+		},
+	}
+	b, err := r.MarshalJSON()
+	require.NoError(t, err)
+	require.JSONEq(t, `{"jsonrpc":"2.0","error":{"code":12345,"message":"test"},"id":"1234"}`, string(b))
+}
+
+// TestResponse_without_result_or_error pins that a Response with neither
+// result nor error marshals with an explicit "result":null.
+// NOTE(review): Response.UnmarshalJSON in this package rejects exactly this
+// shape (null result without error) — confirm the asymmetry is intended.
+func TestResponse_without_result_or_error(t *testing.T) {
+	r := Response{
+		ID: ID("1234"),
+	}
+	b, err := r.MarshalJSON()
+	require.NoError(t, err)
+	require.JSONEq(t, `{"jsonrpc":"2.0","result":null,"id":"1234"}`, string(b))
+}
diff --git a/internal/rpc/rpc.go b/internal/rpc/rpc.go
new file mode 100644
index 0000000..abcb7a0
--- /dev/null
+++ b/internal/rpc/rpc.go
@@ -0,0 +1,136 @@
+package rpc
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ "log/slog"
+ "strings"
+
+ "github.com/madsrc/sophrosyne"
+ "github.com/madsrc/sophrosyne/internal/rpc/internal/jsonrpc"
+)
+
+// Server dispatches JSON-RPC requests to registered services. The segment of
+// the method name before "::" selects the target service.
+type Server struct {
+	services map[string]Service
+	logger   *slog.Logger
+}
+
+// NewRPCServer returns a Server with an empty service registry. The error
+// return is always nil; it is kept for constructor-signature symmetry.
+func NewRPCServer(logger *slog.Logger) (*Server, error) {
+	return &Server{
+		services: make(map[string]Service),
+		logger:   logger,
+	}, nil
+}
+
+// HandleRPCRequest unmarshals a single JSON-RPC request, routes it to the
+// service named by its method prefix, and returns the serialized response.
+//
+// An unknown service produces a MethodNotFound error response. A malformed
+// request or a service failure is returned as a plain Go error.
+// NOTE(review): returning the unmarshal error raw means the client never
+// receives a JSON-RPC parse-error response for malformed input — confirm the
+// transport layer translates that case.
+func (s *Server) HandleRPCRequest(ctx context.Context, req []byte) ([]byte, error) {
+	pReq := jsonrpc.Request{}
+	err := pReq.UnmarshalJSON(req)
+	if err != nil {
+		return nil, err
+	}
+
+	// Method names are "Service::Method"; everything before the first "::"
+	// is the service registry key.
+	svcName := strings.Split(string(pReq.Method), "::")[0]
+
+	service, ok := s.services[svcName]
+	if !ok {
+		s.logger.InfoContext(ctx, "rpc service not found", "service", svcName, "method", pReq.Method)
+		return ErrorFromRequest(&pReq, jsonrpc.MethodNotFound, string(jsonrpc.MethodNotFoundMessage))
+	}
+	data, err := service.InvokeMethod(ctx, pReq)
+	if err != nil {
+		s.logger.ErrorContext(ctx, "error handling rpc request", "error", err)
+		return nil, err
+	}
+
+	return data, nil
+}
+
+// Register makes service available under name, silently replacing any
+// earlier registration. The services map is unsynchronized, so register
+// everything before the server starts handling requests.
+func (s *Server) Register(name string, service Service) {
+	s.services[name] = service
+}
+
+// Service is an RPC service that can be registered with a Server. It is also
+// an authorization entity, so access decisions can target the service itself.
+type Service interface {
+	sophrosyne.AuthorizationEntity
+	InvokeMethod(ctx context.Context, req jsonrpc.Request) ([]byte, error)
+}
+
+// Method is a single invocable RPC method belonging to a Service.
+type Method interface {
+	sophrosyne.AuthorizationEntity
+	GetService() Service
+	Invoke(ctx context.Context, req jsonrpc.Request) ([]byte, error)
+}
+
+// ErrorFromRequest builds and marshals a JSON-RPC error response that echoes
+// the request's ID and carries the given code and message.
+func ErrorFromRequest(req *jsonrpc.Request, code jsonrpc.RPCErrorCode, message string) ([]byte, error) {
+	return jsonrpc.Response{
+		ID: req.ID,
+		Error: &jsonrpc.Error{
+			Code:    code,
+			Message: message,
+		},
+	}.MarshalJSON()
+}
+
+// ResponseToRequest marshals a success response for req containing result.
+// Notifications expect no reply, so (nil, nil) is returned for them.
+func ResponseToRequest(req *jsonrpc.Request, result interface{}) ([]byte, error) {
+	if req.IsNotification() {
+		return nil, nil
+	}
+	return jsonrpc.Response{
+		ID:     req.ID,
+		Result: result,
+	}.MarshalJSON()
+}
+
+// GetParams extracts the params of req as either a params object or a params
+// array. ok reports whether req.Params held one of the two recognized types;
+// at most one of the returned pointers is meaningful.
+func GetParams(req *jsonrpc.Request) (*jsonrpc.ParamsObject, *jsonrpc.ParamsArray, bool) {
+	po, okObj := req.Params.(*jsonrpc.ParamsObject)
+	pa, okArr := req.Params.(*jsonrpc.ParamsArray)
+	return po, pa, okObj || okArr
+}
+
+// NoParamsError is returned by ParamsIntoAny when the request carries no
+// recognizable params. Callers test for it with errors.Is.
+// NOTE(review): Go convention would name this ErrNoParams; the name is kept
+// to avoid breaking existing callers.
+var NoParamsError = fmt.Errorf("no params found")
+
+// ParamsIntoAny re-marshals the params of req to JSON and unmarshals them
+// into target (which should be a pointer). If validate is non-nil, target is
+// validated after decoding. If target itself implements
+// sophrosyne.Validator, its Validate(nil) result is returned as a final
+// self-validation step.
+//
+// Returns NoParamsError when the request has no params object or array.
+func ParamsIntoAny(req *jsonrpc.Request, target any, validate sophrosyne.Validator) error {
+	// Fixed: the previous version assigned GetParams' ParamsObject result
+	// to a variable named "pa" and the ParamsArray to "po". Behavior was
+	// unchanged (both branches just marshal), but the names are now
+	// aligned with GetParams' return order.
+	po, pa, ok := GetParams(req)
+	if !ok {
+		return NoParamsError
+	}
+
+	var b []byte
+	var err error
+	if po != nil {
+		b, err = json.Marshal(po)
+	} else if pa != nil {
+		b, err = json.Marshal(pa)
+	}
+	if err != nil {
+		return err
+	}
+
+	if err := json.Unmarshal(b, &target); err != nil {
+		return err
+	}
+
+	if validate != nil {
+		if err := validate.Validate(target); err != nil {
+			return err
+		}
+	}
+
+	if vd, ok := target.(sophrosyne.Validator); ok {
+		return vd.Validate(nil)
+	}
+
+	return nil
+}
diff --git a/internal/rpc/rpc_test.go b/internal/rpc/rpc_test.go
new file mode 100644
index 0000000..6db03ab
--- /dev/null
+++ b/internal/rpc/rpc_test.go
@@ -0,0 +1,81 @@
+package rpc
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/require"
+
+ "github.com/madsrc/sophrosyne"
+ "github.com/madsrc/sophrosyne/internal/rpc/internal/jsonrpc"
+ "github.com/madsrc/sophrosyne/internal/validator"
+)
+
+// TestParamsIntoAny exercises rpc.ParamsIntoAny: decoding request params
+// into a struct and running the supplied validator against the result.
+func TestParamsIntoAny(t *testing.T) {
+	type testTarget struct {
+		ID        string `json:"id" validate:"required_without=Name Something"`
+		Name      string `json:"name" validate:"required_without=ID Something"`
+		Something string `json:"something" validate:"required_without=ID Name"`
+	}
+	type args struct {
+		req      *jsonrpc.Request
+		target   any
+		validate sophrosyne.Validator
+	}
+	tests := []struct {
+		name    string
+		args    args
+		want    any
+		wantErr bool
+	}{
+		{
+			name: "ParamsIntoAny_success",
+			args: args{
+				req: &jsonrpc.Request{
+					Params: &jsonrpc.ParamsObject{
+						"id": "1",
+					},
+				},
+				target:   &testTarget{},
+				validate: validator.NewValidator(),
+			},
+			want: &testTarget{
+				ID: "1",
+			},
+		},
+		{
+			name: "ParamsIntoAny_validate_error",
+			args: args{
+				req: &jsonrpc.Request{
+					Params: &jsonrpc.ParamsObject{
+						"id":   "1",
+						"Name": "name",
+					},
+				},
+				target:   &testTarget{},
+				validate: validator.NewValidator(),
+			},
+			wantErr: true,
+		},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			err := ParamsIntoAny(tt.args.req, tt.args.target, tt.args.validate)
+			if tt.wantErr {
+				require.Error(t, err)
+			} else {
+				// The success path previously never asserted err was nil,
+				// so a decode failure surfaced as a confusing Equal
+				// mismatch instead of the underlying error.
+				require.NoError(t, err)
+				require.Equal(t, tt.want, tt.args.target)
+			}
+		})
+	}
+}
+
+// Test_Request_UnmarshalJSON_with_ParamsObject verifies that a request whose
+// params field is a JSON object decodes with non-nil Params.
+// (Renamed from TestSomething, which did not describe the behavior under
+// test; test names are not part of any caller-facing interface.)
+func Test_Request_UnmarshalJSON_with_ParamsObject(t *testing.T) {
+	b := []byte(`{"jsonrpc":"2.0","method":"Users::GetUser","id":"1234","params":{"id":"coo1tog2e0g00gf27t70"}}`)
+	req := &jsonrpc.Request{}
+	err := req.UnmarshalJSON(b)
+	require.NoError(t, err)
+
+	require.NotNil(t, req)
+	require.NotNil(t, req.Params)
+}
diff --git a/internal/rpc/services/checkservice.go b/internal/rpc/services/checkservice.go
new file mode 100644
index 0000000..c60e3b3
--- /dev/null
+++ b/internal/rpc/services/checkservice.go
@@ -0,0 +1,328 @@
+package services
+
+import (
+ "context"
+ "errors"
+ "log/slog"
+
+ "github.com/madsrc/sophrosyne"
+ "github.com/madsrc/sophrosyne/internal/rpc"
+ "github.com/madsrc/sophrosyne/internal/rpc/internal/jsonrpc"
+)
+
+// CheckService exposes CRUD operations for checks over JSON-RPC. Methods are
+// looked up by fully-qualified name, e.g. "Checks::GetCheck".
+type CheckService struct {
+	methods      map[jsonrpc.Method]rpc.Method
+	checkService sophrosyne.CheckService
+	authz        sophrosyne.AuthorizationProvider
+	logger       *slog.Logger
+	validator    sophrosyne.Validator
+}
+
+// NewCheckService wires up the RPC method table for check management.
+// The error return is always nil; it is kept for constructor symmetry.
+func NewCheckService(checkService sophrosyne.CheckService, authz sophrosyne.AuthorizationProvider, logger *slog.Logger, validator sophrosyne.Validator) (*CheckService, error) {
+	u := &CheckService{
+		methods:      make(map[jsonrpc.Method]rpc.Method),
+		checkService: checkService,
+		authz:        authz,
+		logger:       logger,
+		validator:    validator,
+	}
+
+	u.methods["Checks::GetCheck"] = getCheck{service: u}
+	u.methods["Checks::GetChecks"] = getChecks{service: u}
+	u.methods["Checks::CreateCheck"] = createCheck{service: u}
+	u.methods["Checks::UpdateCheck"] = updateCheck{service: u}
+	u.methods["Checks::DeleteCheck"] = deleteCheck{service: u}
+
+	return u, nil
+}
+
+// EntityType marks this service as an authorization entity of type "Service".
+func (u CheckService) EntityType() string {
+	return "Service"
+}
+
+// EntityID is the authorization identity of this service.
+func (u CheckService) EntityID() string {
+	return "Checks"
+}
+
+// InvokeMethod dispatches req to the matching entry in the method table.
+func (u CheckService) InvokeMethod(ctx context.Context, req jsonrpc.Request) ([]byte, error) {
+	return invokeMethod(ctx, u.logger, u.methods, req)
+}
+
+// getCheck implements the Checks::GetCheck RPC method.
+type getCheck struct {
+	service *CheckService
+}
+
+// GetService returns the owning service.
+func (u getCheck) GetService() rpc.Service {
+	return u.service
+}
+
+// EntityType marks this method as an authorization entity under "Checks".
+func (u getCheck) EntityType() string {
+	return "Checks"
+}
+
+// EntityID identifies this method for authorization decisions.
+func (u getCheck) EntityID() string {
+	return "GetCheck"
+}
+
+// Invoke handles Checks::GetCheck. It resolves a name lookup to an ID if a
+// name was supplied, authorizes the caller against the resolved check, and
+// returns the check data.
+func (u getCheck) Invoke(ctx context.Context, req jsonrpc.Request) ([]byte, error) {
+	var params sophrosyne.GetCheckRequest
+	err := rpc.ParamsIntoAny(&req, &params, u.service.validator)
+	if err != nil {
+		u.service.logger.ErrorContext(ctx, "error extracting params from request", "error", err)
+		return rpc.ErrorFromRequest(&req, jsonrpc.InvalidParams, string(jsonrpc.InvalidParamsMessage))
+	}
+
+	// Fixed: the previous version discarded the lookup error (and shadowed
+	// the method receiver "u"); a failed name lookup now returns "check
+	// not found" instead of continuing with an empty ID.
+	if params.Name != "" {
+		named, err := u.service.checkService.GetCheckByName(ctx, params.Name)
+		if err != nil {
+			u.service.logger.ErrorContext(ctx, "unable to get check", "error", err)
+			return rpc.ErrorFromRequest(&req, 12346, "check not found")
+		}
+		params.ID = named.ID
+	}
+
+	curUser := sophrosyne.ExtractUser(ctx)
+	if curUser == nil {
+		return rpc.ErrorFromRequest(&req, jsonrpc.InternalError, string(jsonrpc.InternalErrorMessage))
+	}
+
+	if !u.service.authz.IsAuthorized(ctx, sophrosyne.AuthorizationRequest{
+		Principal: curUser,
+		Action:    u,
+		Resource:  sophrosyne.Check{ID: params.ID},
+	}) {
+		return rpc.ErrorFromRequest(&req, 12345, "unauthorized")
+	}
+
+	check, err := u.service.checkService.GetCheck(ctx, params.ID)
+	if err != nil {
+		u.service.logger.ErrorContext(ctx, "unable to get check", "error", err)
+		return rpc.ErrorFromRequest(&req, 12346, "check not found")
+	}
+
+	resp := sophrosyne.GetCheckResponse{}
+
+	return rpc.ResponseToRequest(&req, resp.FromCheck(check))
+}
+
+// getChecks implements Checks::GetChecks, a cursor-paginated listing that
+// filters each result through a per-check authorization decision.
+type getChecks struct {
+	service *CheckService
+}
+
+// GetService returns the owning service.
+func (u getChecks) GetService() rpc.Service {
+	return u.service
+}
+
+// EntityType marks this method as an authorization entity under "Checks".
+func (u getChecks) EntityType() string {
+	return "Checks"
+}
+
+// EntityID identifies this method for authorization decisions.
+func (u getChecks) EntityID() string {
+	return "GetChecks"
+}
+
+// Invoke handles Checks::GetChecks. Params are optional; a request without
+// params lists from the first page.
+func (u getChecks) Invoke(ctx context.Context, req jsonrpc.Request) ([]byte, error) {
+	var params sophrosyne.GetChecksRequest
+	err := rpc.ParamsIntoAny(&req, &params, u.service.validator)
+	if err != nil {
+		// A missing params object is acceptable for a listing call; fall
+		// back to the zero-value request. Any other extraction error is a
+		// client fault.
+		if errors.Is(err, rpc.NoParamsError) {
+			params = sophrosyne.GetChecksRequest{}
+		} else {
+			u.service.logger.ErrorContext(ctx, "error extracting params from request", "error", err)
+			return rpc.ErrorFromRequest(&req, jsonrpc.InvalidParams, string(jsonrpc.InvalidParamsMessage))
+		}
+	}
+
+	// NOTE(review): despite the name, this is the calling user (see
+	// ExtractUser), not a check.
+	curCheck := sophrosyne.ExtractUser(ctx)
+	if curCheck == nil {
+		return rpc.ErrorFromRequest(&req, jsonrpc.InternalError, string(jsonrpc.InternalErrorMessage))
+	}
+
+	// Resume from the supplied cursor if present; cursors are bound to the
+	// requesting user's ID, otherwise start a fresh cursor.
+	var cursor *sophrosyne.DatabaseCursor
+	if params.Cursor != "" {
+		cursor, err = sophrosyne.DecodeDatabaseCursorWithOwner(params.Cursor, curCheck.ID)
+		if err != nil {
+			u.service.logger.ErrorContext(ctx, "unable to decode cursor", "error", err)
+			return rpc.ErrorFromRequest(&req, 12347, "invalid cursor")
+		}
+	} else {
+		cursor = sophrosyne.NewDatabaseCursor(curCheck.ID, "")
+	}
+
+	checks, err := u.service.checkService.GetChecks(ctx, cursor)
+	if err != nil {
+		u.service.logger.ErrorContext(ctx, "unable to get checks", "error", err)
+		return rpc.ErrorFromRequest(&req, 12346, "checks not found")
+	}
+
+	// Authorization filtering: unauthorized entries are silently dropped
+	// from the page rather than failing the whole request.
+	var checksResponse []sophrosyne.GetCheckResponse
+	for _, uu := range checks {
+		ok := u.service.authz.IsAuthorized(ctx, sophrosyne.AuthorizationRequest{
+			Principal: curCheck,
+			Action:    u,
+			Resource:  sophrosyne.Check{ID: uu.ID},
+		})
+		if ok {
+			ent := &sophrosyne.GetCheckResponse{}
+			checksResponse = append(checksResponse, *ent.FromCheck(uu))
+		}
+	}
+
+	// NOTE(review): this logs full check definitions at debug level —
+	// confirm none of the check configuration is sensitive.
+	u.service.logger.DebugContext(ctx, "returning checks", "total", len(checksResponse), "checks", checksResponse)
+	return rpc.ResponseToRequest(&req, sophrosyne.GetChecksResponse{
+		Checks: checksResponse,
+		Cursor: cursor.String(),
+		Total:  len(checksResponse),
+	})
+}
+
+// createCheck implements the Checks::CreateCheck RPC method.
+type createCheck struct {
+	service *CheckService
+}
+
+// GetService returns the owning service.
+func (u createCheck) GetService() rpc.Service {
+	return u.service
+}
+
+// EntityType marks this method as an authorization entity under "Checks".
+func (u createCheck) EntityType() string {
+	return "Checks"
+}
+
+// EntityID identifies this method for authorization decisions.
+func (u createCheck) EntityID() string {
+	return "CreateCheck"
+}
+
+// Invoke handles Checks::CreateCheck: validates params, authorizes the
+// caller (no resource — creation is service-level), and creates the check.
+func (u createCheck) Invoke(ctx context.Context, req jsonrpc.Request) ([]byte, error) {
+	var params sophrosyne.CreateCheckRequest
+	err := rpc.ParamsIntoAny(&req, &params, u.service.validator)
+	if err != nil {
+		u.service.logger.ErrorContext(ctx, "error extracting params from request", "error", err)
+		return rpc.ErrorFromRequest(&req, jsonrpc.InvalidParams, string(jsonrpc.InvalidParamsMessage))
+	}
+
+	// NOTE(review): despite the name, this is the calling user.
+	curCheck := sophrosyne.ExtractUser(ctx)
+	if curCheck == nil {
+		return rpc.ErrorFromRequest(&req, jsonrpc.InternalError, string(jsonrpc.InternalErrorMessage))
+	}
+
+	ok := u.service.authz.IsAuthorized(ctx, sophrosyne.AuthorizationRequest{
+		Principal: curCheck,
+		Action:    u,
+	})
+
+	if !ok {
+		return rpc.ErrorFromRequest(&req, 12345, "unauthorized")
+	}
+
+	check, err := u.service.checkService.CreateCheck(ctx, params)
+	if err != nil {
+		u.service.logger.ErrorContext(ctx, "unable to create check", "error", err)
+		return rpc.ErrorFromRequest(&req, 12346, "unable to create check")
+	}
+
+	resp := sophrosyne.CreateCheckResponse{}
+	return rpc.ResponseToRequest(&req, resp.FromCheck(check))
+}
+
+type updateCheck struct {
+ service *CheckService
+}
+
+func (u updateCheck) GetService() rpc.Service {
+ return u.service
+}
+
+func (u updateCheck) EntityType() string {
+ return "Checks"
+}
+
+// EntityID identifies this method for authorization decisions.
+// Fixed: previously returned "CreateCheck" (copy-paste), so update
+// authorization was decided against the create method's identity. Any
+// policy written against the old value must be updated.
+func (u updateCheck) EntityID() string {
+	return "UpdateCheck"
+}
+
+func (u updateCheck) Invoke(ctx context.Context, req jsonrpc.Request) ([]byte, error) {
+ var params sophrosyne.UpdateCheckRequest
+ err := rpc.ParamsIntoAny(&req, ¶ms, u.service.validator)
+ if err != nil {
+ u.service.logger.ErrorContext(ctx, "error extracting params from request", "error", err)
+ return rpc.ErrorFromRequest(&req, jsonrpc.InvalidParams, string(jsonrpc.InvalidParamsMessage))
+ }
+
+ curCheck := sophrosyne.ExtractUser(ctx)
+ if curCheck == nil {
+ return rpc.ErrorFromRequest(&req, jsonrpc.InternalError, string(jsonrpc.InternalErrorMessage))
+ }
+
+ checkToUpdate, err := u.service.checkService.GetCheckByName(ctx, params.Name)
+ if err != nil {
+ return rpc.ErrorFromRequest(&req, 12346, "check not found")
+ }
+
+ ok := u.service.authz.IsAuthorized(ctx, sophrosyne.AuthorizationRequest{
+ Principal: curCheck,
+ Action: u,
+ Resource: sophrosyne.Check{ID: checkToUpdate.ID},
+ })
+
+ if !ok {
+ return rpc.ErrorFromRequest(&req, 12345, "unauthorized")
+ }
+
+ check, err := u.service.checkService.UpdateCheck(ctx, params)
+ if err != nil {
+ u.service.logger.ErrorContext(ctx, "unable to update check", "error", err)
+ return rpc.ErrorFromRequest(&req, 12346, "unable to update check")
+ }
+
+ resp := &sophrosyne.UpdateCheckResponse{}
+ return rpc.ResponseToRequest(&req, resp.FromCheck(check))
+}
+
+type deleteCheck struct {
+ service *CheckService
+}
+
+func (u deleteCheck) GetService() rpc.Service {
+ return u.service
+}
+
+func (u deleteCheck) EntityType() string {
+ return "Checks"
+}
+
+// EntityID identifies this method for authorization decisions.
+// Fixed: previously returned "CreateCheck" (copy-paste), so delete
+// authorization was decided against the create method's identity. Any
+// policy written against the old value must be updated.
+func (u deleteCheck) EntityID() string {
+	return "DeleteCheck"
+}
+
+func (u deleteCheck) Invoke(ctx context.Context, req jsonrpc.Request) ([]byte, error) {
+ var params sophrosyne.DeleteCheckRequest
+ err := rpc.ParamsIntoAny(&req, ¶ms, u.service.validator)
+ if err != nil {
+ u.service.logger.ErrorContext(ctx, "error extracting params from request", "error", err)
+ return rpc.ErrorFromRequest(&req, jsonrpc.InvalidParams, string(jsonrpc.InvalidParamsMessage))
+ }
+
+ curCheck := sophrosyne.ExtractUser(ctx)
+ if curCheck == nil {
+ return rpc.ErrorFromRequest(&req, jsonrpc.InternalError, string(jsonrpc.InternalErrorMessage))
+ }
+
+ checkToDelete, err := u.service.checkService.GetCheckByName(ctx, params.Name)
+ if err != nil {
+ return rpc.ErrorFromRequest(&req, 12346, "check not found")
+ }
+
+ ok := u.service.authz.IsAuthorized(ctx, sophrosyne.AuthorizationRequest{
+ Principal: curCheck,
+ Action: u,
+ Resource: sophrosyne.Check{ID: checkToDelete.ID},
+ })
+
+ if !ok {
+ return rpc.ErrorFromRequest(&req, 12345, "unauthorized")
+ }
+
+ err = u.service.checkService.DeleteCheck(ctx, checkToDelete.Name)
+ if err != nil {
+ u.service.logger.ErrorContext(ctx, "unable to delete check", "error", err)
+ return rpc.ErrorFromRequest(&req, 12346, "unable to delete check")
+ }
+
+ return rpc.ResponseToRequest(&req, "ok")
+}
diff --git a/internal/rpc/services/profileservice.go b/internal/rpc/services/profileservice.go
new file mode 100644
index 0000000..4927e43
--- /dev/null
+++ b/internal/rpc/services/profileservice.go
@@ -0,0 +1,328 @@
+package services
+
+import (
+ "context"
+ "errors"
+ "log/slog"
+
+ "github.com/madsrc/sophrosyne"
+ "github.com/madsrc/sophrosyne/internal/rpc"
+ "github.com/madsrc/sophrosyne/internal/rpc/internal/jsonrpc"
+)
+
+type ProfileService struct {
+ methods map[jsonrpc.Method]rpc.Method
+ profileService sophrosyne.ProfileService
+ authz sophrosyne.AuthorizationProvider
+ logger *slog.Logger
+ validator sophrosyne.Validator
+}
+
+func NewProfileService(profileService sophrosyne.ProfileService, authz sophrosyne.AuthorizationProvider, logger *slog.Logger, validator sophrosyne.Validator) (*ProfileService, error) {
+ u := &ProfileService{
+ methods: make(map[jsonrpc.Method]rpc.Method),
+ profileService: profileService,
+ authz: authz,
+ logger: logger,
+ validator: validator,
+ }
+
+ u.methods["Profiles::GetProfile"] = getProfile{service: u}
+ u.methods["Profiles::GetProfiles"] = getProfiles{service: u}
+ u.methods["Profiles::CreateProfile"] = createProfile{service: u}
+ u.methods["Profiles::UpdateProfile"] = updateProfile{service: u}
+ u.methods["Profiles::DeleteProfile"] = deleteProfile{service: u}
+
+ return u, nil
+}
+
+func (u ProfileService) EntityType() string {
+ return "Service"
+}
+
+func (u ProfileService) EntityID() string {
+ return "Profiles"
+}
+
+func (u ProfileService) InvokeMethod(ctx context.Context, req jsonrpc.Request) ([]byte, error) {
+ return invokeMethod(ctx, u.logger, u.methods, req)
+}
+
+type getProfile struct {
+ service *ProfileService
+}
+
+func (u getProfile) GetService() rpc.Service {
+ return u.service
+}
+
+func (u getProfile) EntityType() string {
+ return "Profiles"
+}
+
+func (u getProfile) EntityID() string {
+ return "GetProfile"
+}
+
+// Invoke handles Profiles::GetProfile. It resolves a name lookup to an ID if
+// a name was supplied, authorizes the caller against the resolved profile,
+// and returns the profile data.
+func (u getProfile) Invoke(ctx context.Context, req jsonrpc.Request) ([]byte, error) {
+	var params sophrosyne.GetProfileRequest
+	err := rpc.ParamsIntoAny(&req, &params, u.service.validator)
+	if err != nil {
+		u.service.logger.ErrorContext(ctx, "error extracting params from request", "error", err)
+		return rpc.ErrorFromRequest(&req, jsonrpc.InvalidParams, string(jsonrpc.InvalidParamsMessage))
+	}
+
+	// Fixed: the previous version discarded the lookup error (and shadowed
+	// the method receiver "u"); a failed name lookup now returns "Profile
+	// not found" instead of continuing with an empty ID.
+	if params.Name != "" {
+		named, err := u.service.profileService.GetProfileByName(ctx, params.Name)
+		if err != nil {
+			u.service.logger.ErrorContext(ctx, "unable to get Profile", "error", err)
+			return rpc.ErrorFromRequest(&req, 12346, "Profile not found")
+		}
+		params.ID = named.ID
+	}
+
+	curUser := sophrosyne.ExtractUser(ctx)
+	if curUser == nil {
+		return rpc.ErrorFromRequest(&req, jsonrpc.InternalError, string(jsonrpc.InternalErrorMessage))
+	}
+
+	if !u.service.authz.IsAuthorized(ctx, sophrosyne.AuthorizationRequest{
+		Principal: curUser,
+		Action:    u,
+		Resource:  sophrosyne.Profile{ID: params.ID},
+	}) {
+		return rpc.ErrorFromRequest(&req, 12345, "unauthorized")
+	}
+
+	Profile, err := u.service.profileService.GetProfile(ctx, params.ID)
+	if err != nil {
+		u.service.logger.ErrorContext(ctx, "unable to get Profile", "error", err)
+		return rpc.ErrorFromRequest(&req, 12346, "Profile not found")
+	}
+
+	resp := sophrosyne.GetProfileResponse{}
+
+	return rpc.ResponseToRequest(&req, resp.FromProfile(Profile))
+}
+
+type getProfiles struct {
+ service *ProfileService
+}
+
+func (u getProfiles) GetService() rpc.Service {
+ return u.service
+}
+
+func (u getProfiles) EntityType() string {
+ return "Profiles"
+}
+
+func (u getProfiles) EntityID() string {
+ return "GetProfiles"
+}
+
+func (u getProfiles) Invoke(ctx context.Context, req jsonrpc.Request) ([]byte, error) {
+ var params sophrosyne.GetProfilesRequest
+ err := rpc.ParamsIntoAny(&req, ¶ms, u.service.validator)
+ if err != nil {
+ if errors.Is(err, rpc.NoParamsError) {
+ params = sophrosyne.GetProfilesRequest{}
+ } else {
+ u.service.logger.ErrorContext(ctx, "error extracting params from request", "error", err)
+ return rpc.ErrorFromRequest(&req, jsonrpc.InvalidParams, string(jsonrpc.InvalidParamsMessage))
+ }
+ }
+
+ curProfile := sophrosyne.ExtractUser(ctx)
+ if curProfile == nil {
+ return rpc.ErrorFromRequest(&req, jsonrpc.InternalError, string(jsonrpc.InternalErrorMessage))
+ }
+
+ var cursor *sophrosyne.DatabaseCursor
+ if params.Cursor != "" {
+ cursor, err = sophrosyne.DecodeDatabaseCursorWithOwner(params.Cursor, curProfile.ID)
+ if err != nil {
+ u.service.logger.ErrorContext(ctx, "unable to decode cursor", "error", err)
+ return rpc.ErrorFromRequest(&req, 12347, "invalid cursor")
+ }
+ } else {
+ cursor = sophrosyne.NewDatabaseCursor(curProfile.ID, "")
+ }
+
+ Profiles, err := u.service.profileService.GetProfiles(ctx, cursor)
+ if err != nil {
+ u.service.logger.ErrorContext(ctx, "unable to get Profiles", "error", err)
+ return rpc.ErrorFromRequest(&req, 12346, "Profiles not found")
+ }
+
+ var ProfilesResponse []sophrosyne.GetProfileResponse
+ for _, uu := range Profiles {
+ ok := u.service.authz.IsAuthorized(ctx, sophrosyne.AuthorizationRequest{
+ Principal: curProfile,
+ Action: u,
+ Resource: sophrosyne.Profile{ID: uu.ID},
+ })
+ if ok {
+ ent := &sophrosyne.GetProfileResponse{}
+ ProfilesResponse = append(ProfilesResponse, *ent.FromProfile(uu))
+ }
+ }
+
+ u.service.logger.DebugContext(ctx, "returning Profiles", "total", len(ProfilesResponse), "Profiles", ProfilesResponse)
+ return rpc.ResponseToRequest(&req, sophrosyne.GetProfilesResponse{
+ Profiles: ProfilesResponse,
+ Cursor: cursor.String(),
+ Total: len(ProfilesResponse),
+ })
+}
+
+type createProfile struct {
+ service *ProfileService
+}
+
+func (u createProfile) GetService() rpc.Service {
+ return u.service
+}
+
+func (u createProfile) EntityType() string {
+ return "Profiles"
+}
+
+func (u createProfile) EntityID() string {
+ return "CreateProfile"
+}
+
+func (u createProfile) Invoke(ctx context.Context, req jsonrpc.Request) ([]byte, error) {
+ var params sophrosyne.CreateProfileRequest
+ err := rpc.ParamsIntoAny(&req, ¶ms, u.service.validator)
+ if err != nil {
+ u.service.logger.ErrorContext(ctx, "error extracting params from request", "error", err)
+ return rpc.ErrorFromRequest(&req, jsonrpc.InvalidParams, string(jsonrpc.InvalidParamsMessage))
+ }
+
+ curProfile := sophrosyne.ExtractUser(ctx)
+ if curProfile == nil {
+ return rpc.ErrorFromRequest(&req, jsonrpc.InternalError, string(jsonrpc.InternalErrorMessage))
+ }
+
+ ok := u.service.authz.IsAuthorized(ctx, sophrosyne.AuthorizationRequest{
+ Principal: curProfile,
+ Action: u,
+ })
+
+ if !ok {
+ return rpc.ErrorFromRequest(&req, 12345, "unauthorized")
+ }
+
+ Profile, err := u.service.profileService.CreateProfile(ctx, params)
+ if err != nil {
+ u.service.logger.ErrorContext(ctx, "unable to create Profile", "error", err)
+ return rpc.ErrorFromRequest(&req, 12346, "unable to create Profile")
+ }
+
+ resp := sophrosyne.CreateProfileResponse{}
+ return rpc.ResponseToRequest(&req, resp.FromProfile(Profile))
+}
+
+type updateProfile struct {
+ service *ProfileService
+}
+
+func (u updateProfile) GetService() rpc.Service {
+ return u.service
+}
+
+func (u updateProfile) EntityType() string {
+ return "Profiles"
+}
+
+// EntityID identifies this method for authorization decisions.
+// Fixed: previously returned "CreateProfile" (copy-paste), so update
+// authorization was decided against the create method's identity. Any
+// policy written against the old value must be updated.
+func (u updateProfile) EntityID() string {
+	return "UpdateProfile"
+}
+
+func (u updateProfile) Invoke(ctx context.Context, req jsonrpc.Request) ([]byte, error) {
+ var params sophrosyne.UpdateProfileRequest
+ err := rpc.ParamsIntoAny(&req, ¶ms, u.service.validator)
+ if err != nil {
+ u.service.logger.ErrorContext(ctx, "error extracting params from request", "error", err)
+ return rpc.ErrorFromRequest(&req, jsonrpc.InvalidParams, string(jsonrpc.InvalidParamsMessage))
+ }
+
+ curProfile := sophrosyne.ExtractUser(ctx)
+ if curProfile == nil {
+ return rpc.ErrorFromRequest(&req, jsonrpc.InternalError, string(jsonrpc.InternalErrorMessage))
+ }
+
+ ProfileToUpdate, err := u.service.profileService.GetProfileByName(ctx, params.Name)
+ if err != nil {
+ return rpc.ErrorFromRequest(&req, 12346, "Profile not found")
+ }
+
+ ok := u.service.authz.IsAuthorized(ctx, sophrosyne.AuthorizationRequest{
+ Principal: curProfile,
+ Action: u,
+ Resource: sophrosyne.Profile{ID: ProfileToUpdate.ID},
+ })
+
+ if !ok {
+ return rpc.ErrorFromRequest(&req, 12345, "unauthorized")
+ }
+
+ Profile, err := u.service.profileService.UpdateProfile(ctx, params)
+ if err != nil {
+ u.service.logger.ErrorContext(ctx, "unable to update Profile", "error", err)
+ return rpc.ErrorFromRequest(&req, 12346, "unable to update Profile")
+ }
+
+ resp := &sophrosyne.UpdateProfileResponse{}
+ return rpc.ResponseToRequest(&req, resp.FromProfile(Profile))
+}
+
+type deleteProfile struct {
+ service *ProfileService
+}
+
+func (u deleteProfile) GetService() rpc.Service {
+ return u.service
+}
+
+func (u deleteProfile) EntityType() string {
+ return "Profiles"
+}
+
+// EntityID identifies this method for authorization decisions.
+// Fixed: previously returned "CreateProfile" (copy-paste), so delete
+// authorization was decided against the create method's identity. Any
+// policy written against the old value must be updated.
+func (u deleteProfile) EntityID() string {
+	return "DeleteProfile"
+}
+
+func (u deleteProfile) Invoke(ctx context.Context, req jsonrpc.Request) ([]byte, error) {
+ var params sophrosyne.DeleteProfileRequest
+ err := rpc.ParamsIntoAny(&req, ¶ms, u.service.validator)
+ if err != nil {
+ u.service.logger.ErrorContext(ctx, "error extracting params from request", "error", err)
+ return rpc.ErrorFromRequest(&req, jsonrpc.InvalidParams, string(jsonrpc.InvalidParamsMessage))
+ }
+
+ curProfile := sophrosyne.ExtractUser(ctx)
+ if curProfile == nil {
+ return rpc.ErrorFromRequest(&req, jsonrpc.InternalError, string(jsonrpc.InternalErrorMessage))
+ }
+
+ ProfileToDelete, err := u.service.profileService.GetProfileByName(ctx, params.Name)
+ if err != nil {
+ return rpc.ErrorFromRequest(&req, 12346, "Profile not found")
+ }
+
+ ok := u.service.authz.IsAuthorized(ctx, sophrosyne.AuthorizationRequest{
+ Principal: curProfile,
+ Action: u,
+ Resource: sophrosyne.Profile{ID: ProfileToDelete.ID},
+ })
+
+ if !ok {
+ return rpc.ErrorFromRequest(&req, 12345, "unauthorized")
+ }
+
+ err = u.service.profileService.DeleteProfile(ctx, ProfileToDelete.Name)
+ if err != nil {
+ u.service.logger.ErrorContext(ctx, "unable to delete Profile", "error", err)
+ return rpc.ErrorFromRequest(&req, 12346, "unable to delete Profile")
+ }
+
+ return rpc.ResponseToRequest(&req, "ok")
+}
diff --git a/internal/rpc/services/scanservice.go b/internal/rpc/services/scanservice.go
new file mode 100644
index 0000000..aea881d
--- /dev/null
+++ b/internal/rpc/services/scanservice.go
@@ -0,0 +1,158 @@
+package services
+
+import (
+ "context"
+ "fmt"
+ "log/slog"
+
+ "google.golang.org/grpc"
+ "google.golang.org/grpc/credentials/insecure"
+
+ "github.com/madsrc/sophrosyne"
+ "github.com/madsrc/sophrosyne/internal/grpc/checks"
+ "github.com/madsrc/sophrosyne/internal/rpc"
+ "github.com/madsrc/sophrosyne/internal/rpc/internal/jsonrpc"
+)
+
// ScanService exposes content scanning over JSON-RPC. A scan runs a payload
// through the checks attached to a profile.
type ScanService struct {
	methods        map[jsonrpc.Method]rpc.Method // JSON-RPC method registry
	authz          sophrosyne.AuthorizationProvider
	logger         *slog.Logger
	validator      sophrosyne.Validator
	profileService sophrosyne.ProfileService
	checkService   sophrosyne.CheckService
}
+
+func NewScanService(authz sophrosyne.AuthorizationProvider, logger *slog.Logger, validator sophrosyne.Validator, profileService sophrosyne.ProfileService, checkService sophrosyne.CheckService) (*ScanService, error) {
+ s := &ScanService{
+ methods: make(map[jsonrpc.Method]rpc.Method),
+ authz: authz,
+ logger: logger,
+ validator: validator,
+ profileService: profileService,
+ checkService: checkService,
+ }
+
+ s.methods["Scans::PerformScan"] = performScan{service: s}
+
+ return s, nil
+}
+
// EntityType identifies the service for authorization purposes.
func (s ScanService) EntityType() string { return "Service" }

// EntityID names this service for authorization purposes.
func (s ScanService) EntityID() string { return "Scans" }

// InvokeMethod dispatches a JSON-RPC request to the registered handler.
func (s ScanService) InvokeMethod(ctx context.Context, req jsonrpc.Request) ([]byte, error) {
	return invokeMethod(ctx, s.logger, s.methods, req)
}

// performScan is the RPC method handler for Scans::PerformScan.
type performScan struct {
	service *ScanService
}

// GetService returns the ScanService this method belongs to.
func (p performScan) GetService() rpc.Service { return p.service }

// EntityType identifies this method's entity group for authorization.
func (p performScan) EntityType() string { return "Scans" }

// EntityID identifies this method for authorization.
func (p performScan) EntityID() string { return "PerformScan" }
+
+func (p performScan) Invoke(ctx context.Context, req jsonrpc.Request) ([]byte, error) {
+ curUser := sophrosyne.ExtractUser(ctx)
+ if curUser == nil {
+ return rpc.ErrorFromRequest(&req, jsonrpc.InternalError, string(jsonrpc.InternalErrorMessage))
+ }
+
+ var params sophrosyne.PerformScanRequest
+ err := rpc.ParamsIntoAny(&req, ¶ms, p.service.validator)
+ if err != nil {
+ p.service.logger.ErrorContext(ctx, "error extracting params from request", "error", err)
+ return rpc.ErrorFromRequest(&req, jsonrpc.InvalidParams, string(jsonrpc.InvalidParamsMessage))
+ }
+
+ var profile *sophrosyne.Profile
+ if params.Profile != "" {
+ dbp, err := p.service.profileService.GetProfileByName(ctx, params.Profile)
+ if err != nil {
+ p.service.logger.ErrorContext(ctx, "error getting profile by name", "profile", params.Profile, "error", err)
+ return rpc.ErrorFromRequest(&req, jsonrpc.InternalError, string(jsonrpc.InternalErrorMessage))
+ }
+ p.service.logger.DebugContext(ctx, "using profile from params for scan", "profile", params.Profile)
+ profile = &dbp
+ } else {
+ if curUser.DefaultProfile.Name == "" {
+ dbp, err := p.service.profileService.GetProfileByName(ctx, "default")
+ if err != nil {
+ p.service.logger.ErrorContext(ctx, "error getting default profile", "error", err)
+ return rpc.ErrorFromRequest(&req, jsonrpc.InternalError, string(jsonrpc.InternalErrorMessage))
+ }
+ p.service.logger.DebugContext(ctx, "using service-wide default profile for scan", "profile", dbp.Name)
+ profile = &dbp
+ } else {
+ p.service.logger.DebugContext(ctx, "using default profile for scan", "profile", curUser.DefaultProfile.Name)
+ profile = &curUser.DefaultProfile
+ }
+ }
+
+ checkResults := make(map[string]checkResult)
+ var success bool
+
+ for _, check := range profile.Checks {
+ p.service.logger.DebugContext(ctx, "running check from profile", "profile", profile.Name, "check", check.Name)
+ res, err := doCheck(ctx, p.service.logger, check)
+ if err != nil {
+ p.service.logger.ErrorContext(ctx, "error running check", "check", check.Name, "error", err)
+ return rpc.ErrorFromRequest(&req, jsonrpc.InternalError, string(jsonrpc.InternalErrorMessage))
+ }
+ checkResults[check.Name] = res
+ if res.Status {
+ success = true
+ } else {
+ success = false
+ }
+ }
+
+ resp := struct {
+ Result bool `json:"result"`
+ Checks map[string]checkResult `json:"checks"`
+ }{
+ Result: success,
+ Checks: checkResults,
+ }
+
+ return rpc.ResponseToRequest(&req, resp)
+}
+
// checkResult is the per-check outcome reported in a scan response.
type checkResult struct {
	Status bool   `json:"status"` // true when the check passed
	Detail string `json:"detail"` // detail message returned by the upstream check
}
+
// doCheck dials the check's first upstream service over gRPC, invokes the
// check, and returns its status and detail.
//
// NOTE(review): only UpstreamServices[0] is ever contacted — confirm
// whether multiple upstreams should be tried or load-balanced.
// NOTE(review): the connection uses insecure (plaintext) credentials, and
// the scanned payload is the hard-coded placeholder "something" rather
// than data from the request — both look like work-in-progress.
func doCheck(ctx context.Context, logger *slog.Logger, check sophrosyne.Check) (checkResult, error) {
	if len(check.UpstreamServices) == 0 {
		logger.ErrorContext(ctx, "no upstream services for check", "check", check.Name)
		return checkResult{}, fmt.Errorf("missing upstream services")
	}
	var opts []grpc.DialOption
	opts = append(opts, grpc.WithTransportCredentials(insecure.NewCredentials()))
	conn, err := grpc.NewClient(check.UpstreamServices[0].Host, opts...)
	if err != nil {
		logger.ErrorContext(ctx, "error connecting to check", "check", check.Name, "error", err)
		return checkResult{}, err
	}
	// Best-effort close: a failure to close is logged, not returned.
	defer func() {
		err := conn.Close()
		if err != nil {
			logger.ErrorContext(ctx, "error closing grpc connection", "check", check.Name, "error", err)
		}
	}()
	client := checks.NewCheckServiceClient(conn)
	resp, err := client.Check(ctx, &checks.CheckRequest{Check: &checks.CheckRequest_Text{Text: "something"}})
	if err != nil {
		logger.ErrorContext(ctx, "error calling check", "check", check.Name, "error", err)
		return checkResult{}, err
	}
	return checkResult{
		Status: resp.Result,
		Detail: resp.Details,
	}, nil
}
diff --git a/internal/rpc/services/services.go b/internal/rpc/services/services.go
new file mode 100644
index 0000000..6889306
--- /dev/null
+++ b/internal/rpc/services/services.go
@@ -0,0 +1,18 @@
+package services
+
+import (
+ "context"
+ "log/slog"
+
+ "github.com/madsrc/sophrosyne/internal/rpc"
+ "github.com/madsrc/sophrosyne/internal/rpc/internal/jsonrpc"
+)
+
+func invokeMethod(ctx context.Context, logger *slog.Logger, methods map[jsonrpc.Method]rpc.Method, req jsonrpc.Request) ([]byte, error) {
+ if methods[req.Method] == nil {
+ logger.DebugContext(ctx, "cannot invoke method", "method", req.Method)
+ return rpc.ErrorFromRequest(&req, jsonrpc.MethodNotFound, string(jsonrpc.MethodNotFoundMessage))
+ }
+
+ return methods[req.Method].Invoke(ctx, req)
+}
diff --git a/internal/rpc/services/userservice.go b/internal/rpc/services/userservice.go
new file mode 100644
index 0000000..06cb71a
--- /dev/null
+++ b/internal/rpc/services/userservice.go
@@ -0,0 +1,387 @@
+package services
+
+import (
+ "context"
+ "errors"
+ "log/slog"
+
+ "github.com/madsrc/sophrosyne"
+ "github.com/madsrc/sophrosyne/internal/rpc"
+ "github.com/madsrc/sophrosyne/internal/rpc/internal/jsonrpc"
+)
+
// UserService exposes user management (CRUD and token rotation) over
// JSON-RPC, delegating persistence to the domain-level user service.
type UserService struct {
	methods     map[jsonrpc.Method]rpc.Method // JSON-RPC method registry
	userService sophrosyne.UserService
	authz       sophrosyne.AuthorizationProvider
	logger      *slog.Logger
	validator   sophrosyne.Validator
}
+
+func NewUserService(userService sophrosyne.UserService, authz sophrosyne.AuthorizationProvider, logger *slog.Logger, validator sophrosyne.Validator) (*UserService, error) {
+ u := &UserService{
+ methods: make(map[jsonrpc.Method]rpc.Method),
+ userService: userService,
+ authz: authz,
+ logger: logger,
+ validator: validator,
+ }
+
+ u.methods["Users::GetUser"] = getUser{service: u}
+ u.methods["Users::GetUsers"] = getUsers{service: u}
+ u.methods["Users::CreateUser"] = createUser{service: u}
+ u.methods["Users::UpdateUser"] = updateUser{service: u}
+ u.methods["Users::DeleteUser"] = deleteUser{service: u}
+ u.methods["Users::RotateToken"] = rotateToken{service: u}
+
+ return u, nil
+}
+
// EntityType identifies the service for authorization purposes.
func (u UserService) EntityType() string {
	return "Service"
}

// EntityID names this service for authorization purposes.
func (u UserService) EntityID() string {
	return "Users"
}

// InvokeMethod dispatches a JSON-RPC request to the registered handler.
func (u UserService) InvokeMethod(ctx context.Context, req jsonrpc.Request) ([]byte, error) {
	return invokeMethod(ctx, u.logger, u.methods, req)
}

// getUser is the RPC method handler for Users::GetUser.
type getUser struct {
	service *UserService
}

// GetService returns the UserService this method belongs to.
func (u getUser) GetService() rpc.Service {
	return u.service
}

// EntityType identifies this method's entity group for authorization.
func (u getUser) EntityType() string {
	return "Users"
}

// EntityID identifies this method for authorization.
func (u getUser) EntityID() string {
	return "GetUser"
}
+
+func (u getUser) Invoke(ctx context.Context, req jsonrpc.Request) ([]byte, error) {
+ var params sophrosyne.GetUserRequest
+ err := rpc.ParamsIntoAny(&req, ¶ms, u.service.validator)
+ if err != nil {
+ u.service.logger.ErrorContext(ctx, "error extracting params from request", "error", err)
+ return rpc.ErrorFromRequest(&req, jsonrpc.InvalidParams, string(jsonrpc.InvalidParamsMessage))
+ }
+
+ if params.Email != "" {
+ u, _ := u.service.userService.GetUserByEmail(ctx, params.Email)
+ params.ID = u.ID
+ }
+ if params.Name != "" {
+ u, _ := u.service.userService.GetUserByName(ctx, params.Name)
+ params.ID = u.ID
+ }
+
+ curUser := sophrosyne.ExtractUser(ctx)
+ if curUser == nil {
+ return rpc.ErrorFromRequest(&req, jsonrpc.InternalError, string(jsonrpc.InternalErrorMessage))
+ }
+
+ if !u.service.authz.IsAuthorized(ctx, sophrosyne.AuthorizationRequest{
+ Principal: curUser,
+ Action: u,
+ Resource: sophrosyne.User{ID: params.ID},
+ }) {
+ return rpc.ErrorFromRequest(&req, 12345, "unauthorized")
+ }
+
+ user, err := u.service.userService.GetUser(ctx, params.ID)
+ if err != nil {
+ u.service.logger.ErrorContext(ctx, "unable to get user", "error", err)
+ return rpc.ErrorFromRequest(&req, 12346, "user not found")
+ }
+
+ resp := sophrosyne.GetUserResponse{}
+
+ return rpc.ResponseToRequest(&req, resp.FromUser(user))
+}
+
// getUsers is the RPC method handler for Users::GetUsers (paginated list).
type getUsers struct {
	service *UserService
}

// GetService returns the UserService this method belongs to.
func (u getUsers) GetService() rpc.Service {
	return u.service
}

// EntityType identifies this method's entity group for authorization.
func (u getUsers) EntityType() string {
	return "Users"
}

// EntityID identifies this method for authorization.
func (u getUsers) EntityID() string {
	return "GetUsers"
}

// Invoke handles Users::GetUsers. Absent params mean "first page". Results
// are filtered to entries the caller is authorized to read, and the cursor
// for the next page is returned alongside the filtered count.
func (u getUsers) Invoke(ctx context.Context, req jsonrpc.Request) ([]byte, error) {
	var params sophrosyne.GetUsersRequest
	err := rpc.ParamsIntoAny(&req, &params, u.service.validator)
	if err != nil {
		// Missing params are legal for a list call; anything else is invalid.
		if errors.Is(err, rpc.NoParamsError) {
			params = sophrosyne.GetUsersRequest{}
		} else {
			u.service.logger.ErrorContext(ctx, "error extracting params from request", "error", err)
			return rpc.ErrorFromRequest(&req, jsonrpc.InvalidParams, string(jsonrpc.InvalidParamsMessage))
		}
	}

	curUser := sophrosyne.ExtractUser(ctx)
	if curUser == nil {
		return rpc.ErrorFromRequest(&req, jsonrpc.InternalError, string(jsonrpc.InternalErrorMessage))
	}

	// Cursors are owner-bound: decoding fails if the supplied cursor was
	// issued to a different user.
	var cursor *sophrosyne.DatabaseCursor
	if params.Cursor != "" {
		cursor, err = sophrosyne.DecodeDatabaseCursorWithOwner(params.Cursor, curUser.ID)
		if err != nil {
			u.service.logger.ErrorContext(ctx, "unable to decode cursor", "error", err)
			return rpc.ErrorFromRequest(&req, 12347, "invalid cursor")
		}
	} else {
		cursor = sophrosyne.NewDatabaseCursor(curUser.ID, "")
	}

	users, err := u.service.userService.GetUsers(ctx, cursor)
	if err != nil {
		u.service.logger.ErrorContext(ctx, "unable to get users", "error", err)
		return rpc.ErrorFromRequest(&req, 12346, "users not found")
	}

	// Per-entry authorization filter: silently omit users the caller may
	// not read.
	var usersResponse []sophrosyne.GetUserResponse
	for _, uu := range users {
		ok := u.service.authz.IsAuthorized(ctx, sophrosyne.AuthorizationRequest{
			Principal: curUser,
			Action:    u,
			Resource:  sophrosyne.User{ID: uu.ID},
		})
		if ok {
			ent := &sophrosyne.GetUserResponse{}
			usersResponse = append(usersResponse, *ent.FromUser(uu))
		}
	}

	u.service.logger.DebugContext(ctx, "returning users", "total", len(usersResponse), "users", usersResponse)
	return rpc.ResponseToRequest(&req, sophrosyne.GetUsersResponse{
		Users:  usersResponse,
		Cursor: cursor.String(),
		Total:  len(usersResponse),
	})
}
+
// createUser is the RPC method handler for Users::CreateUser.
type createUser struct {
	service *UserService
}

// GetService returns the UserService this method belongs to.
func (u createUser) GetService() rpc.Service {
	return u.service
}

// EntityType identifies this method's entity group for authorization.
func (u createUser) EntityType() string {
	return "Users"
}

// EntityID identifies this method for authorization.
func (u createUser) EntityID() string {
	return "CreateUser"
}

// Invoke handles Users::CreateUser: validate params, authorize the caller
// (no target resource — creation is a service-level permission), create.
func (u createUser) Invoke(ctx context.Context, req jsonrpc.Request) ([]byte, error) {
	var params sophrosyne.CreateUserRequest
	err := rpc.ParamsIntoAny(&req, &params, u.service.validator)
	if err != nil {
		u.service.logger.ErrorContext(ctx, "error extracting params from request", "error", err)
		return rpc.ErrorFromRequest(&req, jsonrpc.InvalidParams, string(jsonrpc.InvalidParamsMessage))
	}

	curUser := sophrosyne.ExtractUser(ctx)
	if curUser == nil {
		return rpc.ErrorFromRequest(&req, jsonrpc.InternalError, string(jsonrpc.InternalErrorMessage))
	}

	ok := u.service.authz.IsAuthorized(ctx, sophrosyne.AuthorizationRequest{
		Principal: curUser,
		Action:    u,
	})

	if !ok {
		return rpc.ErrorFromRequest(&req, 12345, "unauthorized")
	}

	user, err := u.service.userService.CreateUser(ctx, params)
	if err != nil {
		u.service.logger.ErrorContext(ctx, "unable to create user", "error", err)
		return rpc.ErrorFromRequest(&req, 12346, "unable to create user")
	}

	resp := sophrosyne.CreateUserResponse{}
	return rpc.ResponseToRequest(&req, resp.FromUser(user))
}
+
+type updateUser struct {
+ service *UserService
+}
+
+func (u updateUser) GetService() rpc.Service {
+ return u.service
+}
+
+func (u updateUser) EntityType() string {
+ return "Users"
+}
+
+func (u updateUser) EntityID() string {
+ return "CreateUser"
+}
+
// Invoke handles Users::UpdateUser: validate params, resolve the target by
// name, authorize against the resolved user's ID, then apply the update.
func (u updateUser) Invoke(ctx context.Context, req jsonrpc.Request) ([]byte, error) {
	var params sophrosyne.UpdateUserRequest
	err := rpc.ParamsIntoAny(&req, &params, u.service.validator)
	if err != nil {
		u.service.logger.ErrorContext(ctx, "error extracting params from request", "error", err)
		return rpc.ErrorFromRequest(&req, jsonrpc.InvalidParams, string(jsonrpc.InvalidParamsMessage))
	}

	curUser := sophrosyne.ExtractUser(ctx)
	if curUser == nil {
		return rpc.ErrorFromRequest(&req, jsonrpc.InternalError, string(jsonrpc.InternalErrorMessage))
	}

	// NOTE(review): existence is checked before authorization, letting an
	// unauthorized caller probe which user names exist.
	userToUpdate, err := u.service.userService.GetUserByName(ctx, params.Name)
	if err != nil {
		return rpc.ErrorFromRequest(&req, 12346, "user not found")
	}

	ok := u.service.authz.IsAuthorized(ctx, sophrosyne.AuthorizationRequest{
		Principal: curUser,
		Action:    u,
		Resource:  sophrosyne.User{ID: userToUpdate.ID},
	})

	if !ok {
		return rpc.ErrorFromRequest(&req, 12345, "unauthorized")
	}

	user, err := u.service.userService.UpdateUser(ctx, params)
	if err != nil {
		u.service.logger.ErrorContext(ctx, "unable to update user", "error", err)
		return rpc.ErrorFromRequest(&req, 12346, "unable to update user")
	}

	resp := &sophrosyne.UpdateUserResponse{}
	return rpc.ResponseToRequest(&req, resp.FromUser(user))
}
+
+type deleteUser struct {
+ service *UserService
+}
+
+func (u deleteUser) GetService() rpc.Service {
+ return u.service
+}
+
+func (u deleteUser) EntityType() string {
+ return "Users"
+}
+
+func (u deleteUser) EntityID() string {
+ return "CreateUser"
+}
+
// Invoke handles Users::DeleteUser: validate params, resolve the target by
// name, authorize against the resolved user's ID, then delete by name.
func (u deleteUser) Invoke(ctx context.Context, req jsonrpc.Request) ([]byte, error) {
	var params sophrosyne.DeleteUserRequest
	err := rpc.ParamsIntoAny(&req, &params, u.service.validator)
	if err != nil {
		u.service.logger.ErrorContext(ctx, "error extracting params from request", "error", err)
		return rpc.ErrorFromRequest(&req, jsonrpc.InvalidParams, string(jsonrpc.InvalidParamsMessage))
	}

	curUser := sophrosyne.ExtractUser(ctx)
	if curUser == nil {
		return rpc.ErrorFromRequest(&req, jsonrpc.InternalError, string(jsonrpc.InternalErrorMessage))
	}

	// NOTE(review): existence is checked before authorization, letting an
	// unauthorized caller probe which user names exist.
	userToDelete, err := u.service.userService.GetUserByName(ctx, params.Name)
	if err != nil {
		return rpc.ErrorFromRequest(&req, 12346, "user not found")
	}

	ok := u.service.authz.IsAuthorized(ctx, sophrosyne.AuthorizationRequest{
		Principal: curUser,
		Action:    u,
		Resource:  sophrosyne.User{ID: userToDelete.ID},
	})

	if !ok {
		return rpc.ErrorFromRequest(&req, 12345, "unauthorized")
	}

	err = u.service.userService.DeleteUser(ctx, userToDelete.Name)
	if err != nil {
		u.service.logger.ErrorContext(ctx, "unable to delete user", "error", err)
		return rpc.ErrorFromRequest(&req, 12346, "unable to delete user")
	}

	return rpc.ResponseToRequest(&req, "ok")
}
+
+type rotateToken struct {
+ service *UserService
+}
+
+func (u rotateToken) GetService() rpc.Service {
+ return u.service
+}
+
+func (u rotateToken) EntityType() string {
+ return "Users"
+}
+
+func (u rotateToken) EntityID() string {
+ return "CreateUser"
+}
+
// Invoke handles Users::RotateToken: validate params, resolve the target by
// name, authorize, rotate the token, and return the new token value.
func (u rotateToken) Invoke(ctx context.Context, req jsonrpc.Request) ([]byte, error) {
	var params sophrosyne.RotateTokenRequest
	err := rpc.ParamsIntoAny(&req, &params, u.service.validator)
	if err != nil {
		u.service.logger.ErrorContext(ctx, "error extracting params from request", "error", err)
		return rpc.ErrorFromRequest(&req, jsonrpc.InvalidParams, string(jsonrpc.InvalidParamsMessage))
	}

	curUser := sophrosyne.ExtractUser(ctx)
	if curUser == nil {
		return rpc.ErrorFromRequest(&req, jsonrpc.InternalError, string(jsonrpc.InternalErrorMessage))
	}

	// NOTE(review): existence is checked before authorization, letting an
	// unauthorized caller probe which user names exist.
	userToRotate, err := u.service.userService.GetUserByName(ctx, params.Name)
	if err != nil {
		return rpc.ErrorFromRequest(&req, 12346, "user not found")
	}

	ok := u.service.authz.IsAuthorized(ctx, sophrosyne.AuthorizationRequest{
		Principal: curUser,
		Action:    u,
		Resource:  sophrosyne.User{ID: userToRotate.ID},
	})

	if !ok {
		return rpc.ErrorFromRequest(&req, 12345, "unauthorized")
	}

	token, err := u.service.userService.RotateToken(ctx, userToRotate.Name)
	if err != nil {
		u.service.logger.ErrorContext(ctx, "unable to rotate token", "error", err)
		return rpc.ErrorFromRequest(&req, 12346, "unable to rotate token")
	}

	// Only the new token is echoed back; other user fields are left zero.
	resp := &sophrosyne.RotateTokenResponse{}
	return rpc.ResponseToRequest(&req, resp.FromUser(sophrosyne.User{Token: token}))
}
diff --git a/internal/tls/tls.go b/internal/tls/tls.go
new file mode 100644
index 0000000..4bc5d8f
--- /dev/null
+++ b/internal/tls/tls.go
@@ -0,0 +1,224 @@
+package tls
+
+import (
+ "bufio"
+ "crypto/ecdsa"
+ "crypto/ed25519"
+ "crypto/elliptic"
+ "crypto/rand"
+ "crypto/rsa"
+ "crypto/tls"
+ "crypto/x509"
+ "crypto/x509/pkix"
+ "encoding/pem"
+ "flag"
+ "fmt"
+ "io"
+ "log"
+ "math/big"
+ "net"
+ "os"
+ "strings"
+ "time"
+
+ "github.com/madsrc/sophrosyne"
+)
+
// Package-level flags controlling self-signed certificate generation.
//
// NOTE(review): registering flags at package scope in a library wires CLI
// parsing into every importer; consider passing these values as parameters
// to generateCert instead.
var (
	host      = flag.String("host", "", "Comma-separated hostnames and IPs to generate a certificate for")
	validFrom = flag.String("start-date", "", "Creation date formatted as Jan 1 15:04:05 2011")
	validFor  = flag.Duration("duration", 365*24*time.Hour, "Duration that certificate is valid for")
	isCA      = flag.Bool("ca", false, "whether this cert should be its own Certificate Authority")
)
+
+type KeyType string
+
+const (
+ KeyTypeRSA4096 KeyType = "RSA-4096"
+ KeyTypeECP224 KeyType = "EC-P224"
+ KeyTypeECP256 KeyType = "EC-P256"
+ KeyTypeECP384 KeyType = "EC-P384"
+ KeyTypeECP521 KeyType = "EC-P521"
+ KeyTypeED25519 KeyType = "ED25519"
+)
+
+func publicKey(priv interface{}) interface{} {
+ switch k := priv.(type) {
+ case *rsa.PrivateKey:
+ return &k.PublicKey
+ case *ecdsa.PrivateKey:
+ return &k.PublicKey
+ case ed25519.PrivateKey:
+ return k.Public().(ed25519.PublicKey)
+ default:
+ return nil
+ }
+}
+
+func generateKey(keytype KeyType, randSource io.Reader) (interface{}, error) {
+ var priv interface{}
+ var err error
+ switch keytype {
+ case KeyTypeRSA4096:
+ priv, err = rsa.GenerateKey(randSource, 4096)
+ case KeyTypeED25519:
+ _, priv, err = ed25519.GenerateKey(randSource)
+ case KeyTypeECP224:
+ priv, err = ecdsa.GenerateKey(elliptic.P224(), randSource)
+ case KeyTypeECP256:
+ priv, err = ecdsa.GenerateKey(elliptic.P256(), randSource)
+ case KeyTypeECP384:
+ priv, err = ecdsa.GenerateKey(elliptic.P384(), randSource)
+ case KeyTypeECP521:
+ priv, err = ecdsa.GenerateKey(elliptic.P521(), randSource)
+ default:
+ return nil, fmt.Errorf("unsupported key type: %s", keytype)
+ }
+
+ if err != nil {
+ return nil, err
+ }
+
+ return priv, nil
+}
+
+func generateCert(priv interface{}, randSource io.Reader) ([]byte, error) {
+ var err error
+ keyUsage := x509.KeyUsageDigitalSignature
+ if _, isRSA := priv.(*rsa.PrivateKey); isRSA {
+ keyUsage |= x509.KeyUsageKeyEncipherment
+ }
+ var notBefore time.Time
+ if len(*validFrom) == 0 {
+ notBefore = time.Now()
+ } else {
+ notBefore, err = time.Parse("Jan 2 15:04:05 2006", *validFrom)
+ if err != nil {
+ log.Fatalf("Failed to parse creation date: %v", err)
+ }
+ }
+ notAfter := notBefore.Add(*validFor)
+ serialNumberLimit := new(big.Int).Lsh(big.NewInt(1), 128)
+ serialNumber, err := rand.Int(randSource, serialNumberLimit)
+ if err != nil {
+ log.Fatalf("Failed to generate serial number: %v", err)
+ }
+ template := x509.Certificate{
+ SerialNumber: serialNumber,
+ Subject: pkix.Name{
+ Organization: []string{"Acme Co"},
+ },
+ NotBefore: notBefore,
+ NotAfter: notAfter,
+ KeyUsage: keyUsage,
+ ExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth},
+ BasicConstraintsValid: true,
+ }
+ hosts := strings.Split(*host, ",")
+ for _, h := range hosts {
+ if ip := net.ParseIP(h); ip != nil {
+ template.IPAddresses = append(template.IPAddresses, ip)
+ } else {
+ template.DNSNames = append(template.DNSNames, h)
+ }
+ }
+ if *isCA {
+ template.IsCA = true
+ template.KeyUsage |= x509.KeyUsageCertSign
+ }
+ derBytes, err := x509.CreateCertificate(randSource, &template, &template, publicKey(priv), priv)
+ if err != nil {
+ log.Fatalf("Failed to create certificate: %v", err)
+ }
+ return derBytes, nil
+}
+
+func readPEMFile(path string) (*pem.Block, error) {
+ file, err := os.Open(path)
+ if err != nil {
+ return nil, err
+ }
+ defer func() {
+ _ = file.Close()
+ }()
+
+ pemfileinfo, _ := file.Stat()
+ var size int64 = pemfileinfo.Size()
+ pembytes := make([]byte, size)
+ buffer := bufio.NewReader(file)
+ _, err = buffer.Read(pembytes)
+ if err != nil {
+ return nil, err
+ }
+ data, _ := pem.Decode([]byte(pembytes))
+
+ return data, nil
+}
+
+func readCertificate(path string) ([]byte, error) {
+ data, err := readPEMFile(path)
+ if err != nil {
+ return nil, err
+ }
+
+ if !strings.Contains(data.Type, "CERTIFICATE") {
+ return nil, fmt.Errorf("PEM data does not contain a certificate. Type is %s", data.Type)
+ }
+
+ return data.Bytes, nil
+}
+
+// Has to be PKCS8.
+func readPrivateKeyPath(path string) (interface{}, error) {
+ data, err := readPEMFile(path)
+ if err != nil {
+ return nil, err
+ }
+
+ if !strings.Contains(data.Type, "PRIVATE KEY") {
+ return nil, fmt.Errorf("decoded PEM file not as expected. Type is %s", data.Type)
+ }
+
+ return x509.ParsePKCS8PrivateKey(data.Bytes)
+}
+
// NewTLSServerConfig builds a server-side TLS configuration. The private
// key and certificate are loaded from the configured paths; when a path is
// empty the corresponding item is generated on the fly (self-signed,
// driven by the package-level flag defaults).
func NewTLSServerConfig(config *sophrosyne.Config, randSource io.Reader) (*tls.Config, error) {
	var priv interface{}
	var err error
	var certBytes []byte
	if config.Security.TLS.KeyPath == "" {
		priv, err = generateKey(KeyType(config.Security.TLS.KeyType), randSource)
	} else {
		// Key files must be PKCS#8-encoded PEM (see readPrivateKeyPath).
		priv, err = readPrivateKeyPath(config.Security.TLS.KeyPath)
	}
	if err != nil {
		return nil, err
	}

	if config.Security.TLS.CertificatePath == "" {
		certBytes, err = generateCert(priv, randSource)
	} else {
		certBytes, err = readCertificate(config.Security.TLS.CertificatePath)
	}
	if err != nil {
		return nil, err
	}

	cert := tls.Certificate{
		Certificate: [][]byte{certBytes},
		PrivateKey:  priv,
	}

	return &tls.Config{
		Certificates: []tls.Certificate{cert},
	}, nil
}
+
+func NewTLSClientConfig(config *sophrosyne.Config) (*tls.Config, error) {
+ c := &tls.Config{}
+ if config.Security.TLS.InsecureSkipVerify {
+ c.InsecureSkipVerify = true
+ }
+
+ return c, nil
+}
diff --git a/internal/validator/validator.go b/internal/validator/validator.go
new file mode 100644
index 0000000..a7d50dc
--- /dev/null
+++ b/internal/validator/validator.go
@@ -0,0 +1,17 @@
+package validator
+
+import (
+ "github.com/go-playground/validator/v10"
+)
+
// Validator wraps a go-playground validator instance and satisfies the
// project's Validator interface.
type Validator struct {
	v *validator.Validate
}

// NewValidator returns a Validator with required-struct validation enabled.
func NewValidator() *Validator {
	return &Validator{v: validator.New(validator.WithRequiredStructEnabled())}
}

// Validate checks the struct tags on i, returning
// validator.ValidationErrors on failure. Note: the underlying library
// panics on unknown validation tags (exercised by the tests).
func (v *Validator) Validate(i interface{}) error {
	return v.v.Struct(i)
}
diff --git a/internal/validator/validator_test.go b/internal/validator/validator_test.go
new file mode 100644
index 0000000..b9bbb02
--- /dev/null
+++ b/internal/validator/validator_test.go
@@ -0,0 +1,182 @@
+package validator
+
+import (
+ "testing"
+
+ "github.com/go-playground/validator/v10"
+ "github.com/stretchr/testify/require"
+)
+
+func TestNewValidator(t *testing.T) {
+ tests := []struct {
+ name string
+ }{
+ {
+ name: "NewValidator",
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ got := NewValidator()
+ require.NotNil(t, got)
+ require.NotNil(t, got.v)
+ })
+ }
+}
+
// stupid is a trivial error implementation intended as a stand-in error
// value for tests.
// NOTE(review): it appears unused in the tests visible in this file —
// confirm it is referenced elsewhere or remove it.
type stupid struct{}

func (s stupid) Error() string {
	return "stupid"
}
+
// TestValidator_Validate exercises Validate against a passing struct, a
// failing struct, and an unknown validation tag — the latter panics, which
// is documented behavior of the underlying go-playground library.
func TestValidator_Validate(t *testing.T) {

	type fields struct {
		v *validator.Validate
	}
	type args struct {
		i interface{}
	}
	tests := []struct {
		name    string
		fields  fields
		args    args
		wantErr bool
		panics  bool // expect Validate to panic instead of returning an error
	}{
		{
			name: "Validate_success",
			fields: fields{
				v: validator.New(validator.WithRequiredStructEnabled()),
			},
			args: args{
				i: struct {
					ID   string `validate:"required"`
					Name string `validate:"required"`
				}{
					ID:   "1",
					Name: "name",
				},
			},
		},
		{
			name: "Validate_bad_tag_cause_panic",
			fields: fields{
				v: validator.New(validator.WithRequiredStructEnabled()),
			},
			args: args{
				i: struct {
					ID string `validate:"somethingVeryCustom"`
				}{
					ID: "1",
				},
			},
			wantErr: true,
			panics:  true,
		},
		{
			name: "Validate_error",
			fields: fields{
				v: validator.New(validator.WithRequiredStructEnabled()),
			},
			args: args{
				i: struct {
					ID   string `validate:"required"`
					Name string `validate:"required"`
				}{
					ID: "1",
				},
			},
			wantErr: true,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			v := &Validator{
				v: tt.fields.v,
			}
			// Unknown tags make the library panic rather than return an error.
			if tt.panics {
				require.Panics(t, func() {
					_ = v.Validate(tt.args.i)
				})
				return
			}
			err := v.Validate(tt.args.i)
			if tt.wantErr {
				require.Error(t, err)
				require.ErrorAs(t, err, &validator.ValidationErrors{})
			} else {
				require.NoError(t, err)
			}
		})
	}
}
+
// TestMutualExclusivity_Two_Fields verifies that the tag combination
// `required_without` + `excluded_with` makes two fields mutually exclusive:
// exactly one of A or B must be set, and the failing tag identifies which
// rule was violated.
func TestMutualExclusivity_Two_Fields(t *testing.T) {
	type obj struct {
		A string
		B string `validate:"required_without=A,excluded_with=A"`
	}
	type args struct {
		i interface{}
	}
	tests := []struct {
		name      string
		args      args
		wantErr   bool
		failedTag string // tag expected on the single validation error
	}{
		{
			name: "only A set",
			args: args{
				i: obj{
					A: "a",
				},
			},
		},
		{
			name: "A and B set",
			args: args{
				i: obj{
					A: "a",
					B: "b",
				},
			},
			wantErr:   true,
			failedTag: "excluded_with",
		},
		{
			name: "only B set",
			args: args{
				i: obj{
					B: "b",
				},
			},
		},
		{
			name: "none is set",
			args: args{
				i: obj{},
			},
			wantErr:   true,
			failedTag: "required_without",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			v := NewValidator()
			err := v.Validate(tt.args.i)
			if tt.wantErr {
				require.Error(t, err)
				var ve validator.ValidationErrors
				require.ErrorAs(t, err, &ve)
				// Both rules hang off field B, so B is always the reported field.
				require.Len(t, ve, 1)
				require.Equal(t, "B", ve[0].Field())
				require.Equal(t, tt.failedTag, ve[0].Tag())
			} else {
				require.NoError(t, err)
			}
		})
	}
}
diff --git a/logging.go b/logging.go
new file mode 100644
index 0000000..bad3583
--- /dev/null
+++ b/logging.go
@@ -0,0 +1,90 @@
+package sophrosyne
+
+import (
+ "context"
+ "log/slog"
+ "os"
+)
+
+type LogLevel string
+
+const (
+ LogLevelDebug LogLevel = "debug"
+ LogLevelInfo LogLevel = "info"
+)
+
+type LogFormat string
+
+const (
+ LogFormatText LogFormat = "text"
+ LogFormatJSON LogFormat = "json"
+)
+
+func LogLevelToSlogLevel(level LogLevel) slog.Level {
+ switch level {
+ case LogLevelDebug:
+ return slog.LevelDebug
+ case LogLevelInfo:
+ return slog.LevelInfo
+ default:
+ return slog.LevelInfo
+ }
+}
+
// LogHandler is a slog.Handler that enriches records with request context
// (trace ID and authenticated user ID) before delegating to an underlying
// handler.
//
// NOTE(review): `validate` tags on the unexported fields below are
// invisible to reflection-based validators — confirm they are enforced
// anywhere, or drop them.
type LogHandler struct {
	Handler slog.Handler `validate:"required"`
	config *Config `validate:"required"`
	tracingService TracingService `validate:"required"`
}
+
// NewLogHandler builds a LogHandler writing to stdout, using the format
// (JSON or text) and minimum level taken from the configuration.
func NewLogHandler(config *Config, tracingService TracingService) *LogHandler {
	h := LogHandler{
		config:         config,
		tracingService: tracingService,
	}
	handlerOpts := slog.HandlerOptions{
		Level: LogLevelToSlogLevel(config.Logging.Level),
	}

	// JSON output is opt-in; any other configured format falls back to text.
	if config.Logging.Format == LogFormatJSON {
		h.Handler = slog.NewJSONHandler(os.Stdout, &handlerOpts)
	} else {
		h.Handler = slog.NewTextHandler(os.Stdout, &handlerOpts)
	}

	return &h
}
+
// Enabled returns true if the log level is enabled for the handler and false
// otherwise.
//
// The log level is enabled if the level of the record is greater than or equal
// to the level defined in config.Logging.Level.
//
// This is called early in the logging process to determine if the handler
// should be called. Because the handler reads the configuration on every
// call, the log level can be changed without restarting the application,
// provided that the part of the configuration we change allows for hot
// reloading.
func (h LogHandler) Enabled(ctx context.Context, Level slog.Level) bool {
	return Level >= LogLevelToSlogLevel(h.config.Logging.Level)
}
+
+// Handle adds contextual attributes to the Record before calling the underlying
+// handler.
+func (h LogHandler) Handle(ctx context.Context, r slog.Record) error {
+ if h.tracingService.GetTraceID(ctx) != "" {
+ r.AddAttrs(slog.String("trace_id", h.tracingService.GetTraceID(ctx)))
+ }
+ if ExtractUser(ctx) != nil {
+ r.AddAttrs(slog.String("user_id", ExtractUser(ctx).ID))
+ }
+
+ return h.Handler.Handle(ctx, r)
+}
+func (h LogHandler) WithAttrs(attrs []slog.Attr) slog.Handler {
+ return h.Handler.WithAttrs(attrs)
+}
+func (h LogHandler) WithGroup(name string) slog.Handler {
+ return h.Handler.WithGroup(name)
+}
diff --git a/main.go b/main.go
new file mode 100644
index 0000000..3930937
--- /dev/null
+++ b/main.go
@@ -0,0 +1,191 @@
+package sophrosyne
+
+import (
+ "context"
+ "crypto/hmac"
+ "crypto/sha256"
+ "encoding/base64"
+ "errors"
+ "fmt"
+ "io"
+ "log/slog"
+ "net/http"
+ "regexp"
+ "strings"
+ "time"
+)
+
// OtelOutput selects where OpenTelemetry data is exported.
type OtelOutput string

// Supported OpenTelemetry export targets.
const (
	OtelOutputStdout OtelOutput = "stdout"
	OtelOutputHTTP   OtelOutput = "http"
)

// HttpService is a long-running HTTP server component.
// NOTE(review): idiomatic Go naming would be HTTPService.
type HttpService interface {
	Start() error
}

// Validator validates a value (typically via struct tags), returning an
// error describing any violations.
type Validator interface {
	Validate(interface{}) error
}
+
+func ExtractUser(ctx context.Context) *User {
+ v := ctx.Value(UserContextKey{})
+ u, ok := v.(*User)
+ if ok {
+ return u
+ }
+ return nil
+}
+
// MetricService records application metrics.
type MetricService interface {
	RecordPanic(ctx context.Context)
}

// Span is a minimal handle to an in-flight tracing span.
type Span interface {
	End()
}

// TracingService abstracts distributed tracing: span creation, trace-ID
// extraction from a context, and HTTP handler instrumentation.
type TracingService interface {
	StartSpan(ctx context.Context, name string) (context.Context, Span)
	GetTraceID(ctx context.Context) string
	NewHTTPHandler(route string, h http.Handler) http.Handler
	WithRouteTag(route string, h http.Handler) http.Handler
}
+
+func NewToken(source io.Reader) ([]byte, error) {
+ b := make([]byte, 64)
+ _, err := source.Read(b)
+ if err != nil {
+ return nil, err
+ }
+ return b, nil
+}
+
// ProtectToken applies a Keyed-Hash Message Authentication Code (HMAC) to the
// token using the site key, salt and SHA-256, and returns the MAC bytes.
//
// If, for any reason, the HMAC fails, the function will panic. (hash.Hash
// Write never returns an error per its contract, so the panics guard
// invariants rather than expected failures.)
func ProtectToken(token []byte, config *Config) []byte {
	h := hmac.New(sha256.New, config.Security.SiteKey)
	n, err := h.Write(token)
	if err != nil {
		panic(err)
	}
	if n != len(token) {
		panic(fmt.Errorf("failed to write all bytes (token) to HMAC"))
	}
	// The salt is mixed into the MAC input after the token.
	n, err = h.Write(config.Security.Salt)
	if err != nil {
		panic(err)
	}
	if n != len(config.Security.Salt) {
		panic(fmt.Errorf("failed to write all bytes (salt) to HMAC"))
	}

	var out []byte
	out = h.Sum(out)
	return out
}
+
// TimeFormatInResponse is the timestamp layout used in API responses.
var TimeFormatInResponse = time.RFC3339

// xidRegex matches a 20-character xid string (base32hex alphabet: digits
// 0-9 and letters a-v).
var xidRegex *regexp.Regexp = regexp.MustCompile("^[0-9a-v]{20}$")

// IsValidXID reports whether s is a well-formed xid string.
func IsValidXID(s string) bool {
	return xidRegex.MatchString(s)
}
+
+const DatabaseCursorSeparator = "::"
+
+type DatabaseCursor struct {
+ OwnerID string
+ Position string
+}
+
+func NewDatabaseCursor(ownerID, position string) *DatabaseCursor {
+ return &DatabaseCursor{
+ OwnerID: ownerID,
+ Position: position,
+ }
+}
+
+func (c DatabaseCursor) String() string {
+ if c.OwnerID == "" || c.Position == "" {
+ return ""
+ }
+ return base64.StdEncoding.EncodeToString([]byte(fmt.Sprintf("%s%s%s", c.OwnerID, DatabaseCursorSeparator, c.Position)))
+}
+
+func (c *DatabaseCursor) Reset() {
+ c.Position = ""
+}
+
+func (c *DatabaseCursor) Advance(position string) {
+ c.Position = position
+}
+
+func (c *DatabaseCursor) LogValue() slog.Value {
+ return slog.GroupValue(slog.String("owner_id", c.OwnerID), slog.String("last_read", c.Position))
+}
+
+func DecodeDatabaseCursorWithOwner(s string, ownerID string) (*DatabaseCursor, error) {
+ cursor, err := DecodeDatabaseCursor(s)
+ if err != nil {
+ return nil, err
+ }
+ if cursor.OwnerID != ownerID {
+ return nil, errors.New("invalid cursor")
+ }
+ return cursor, nil
+}
+
+func DecodeDatabaseCursor(s string) (*DatabaseCursor, error) {
+ b, err := base64.StdEncoding.DecodeString(s)
+ if err != nil {
+ return nil, err
+ }
+ parts := strings.Split(string(b), DatabaseCursorSeparator)
+ if len(parts) != 2 {
+ return nil, errors.New("invalid cursor")
+ }
+
+ if !IsValidXID(parts[0]) || !IsValidXID(parts[1]) {
+ return nil, errors.New("invalid cursor")
+ }
+
+ return &DatabaseCursor{
+ OwnerID: parts[0],
+ Position: parts[1],
+ }, nil
+}
+
// AuthorizationProvider decides whether a principal may perform an action
// on a resource.
type AuthorizationProvider interface {
	IsAuthorized(ctx context.Context, req AuthorizationRequest) bool
}

// AuthorizationEntity is anything — user, service, RPC method, resource —
// that can take part in an authorization request.
type AuthorizationEntity interface {
	EntityType() string
	EntityID() string
}

// AuthorizationRequest describes a single access-control question: may
// Principal perform Action on Resource, given the additional Context?
type AuthorizationRequest struct {
	Principal AuthorizationEntity
	Action    AuthorizationEntity
	Resource  AuthorizationEntity
	Context   map[string]interface{}
}

// RPCServer handles a raw JSON-RPC request payload.
type RPCServer interface {
	HandleRPCRequest(ctx context.Context, req []byte) ([]byte, error)
}

// HealthCheckService reports overall service health, with and without
// authentication.
type HealthCheckService interface {
	UnauthenticatedHealthcheck(ctx context.Context) bool
	AuthenticatedHealthcheck(ctx context.Context) ([]byte, error)
}

// HealthChecker is implemented by components that can report their own
// health status plus detail bytes.
type HealthChecker interface {
	Health(ctx context.Context) (bool, []byte)
}
diff --git a/poetry.lock b/poetry.lock
deleted file mode 100644
index 3eaa8f5..0000000
--- a/poetry.lock
+++ /dev/null
@@ -1,2145 +0,0 @@
-# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
-
-[[package]]
-name = "alembic"
-version = "1.13.1"
-description = "A database migration tool for SQLAlchemy."
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "alembic-1.13.1-py3-none-any.whl", hash = "sha256:2edcc97bed0bd3272611ce3a98d98279e9c209e7186e43e75bbb1b2bdfdbcc43"},
- {file = "alembic-1.13.1.tar.gz", hash = "sha256:4932c8558bf68f2ee92b9bbcb8218671c627064d5b08939437af6d77dc05e595"},
-]
-
-[package.dependencies]
-Mako = "*"
-SQLAlchemy = ">=1.3.0"
-typing-extensions = ">=4"
-
-[package.extras]
-tz = ["backports.zoneinfo"]
-
-[[package]]
-name = "annotated-types"
-version = "0.6.0"
-description = "Reusable constraint types to use with typing.Annotated"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"},
- {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"},
-]
-
-[[package]]
-name = "anyio"
-version = "4.3.0"
-description = "High level compatibility layer for multiple asynchronous event loop implementations"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"},
- {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"},
-]
-
-[package.dependencies]
-idna = ">=2.8"
-sniffio = ">=1.1"
-
-[package.extras]
-doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"]
-test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"]
-trio = ["trio (>=0.23)"]
-
-[[package]]
-name = "asyncpg"
-version = "0.29.0"
-description = "An asyncio PostgreSQL driver"
-optional = false
-python-versions = ">=3.8.0"
-files = [
- {file = "asyncpg-0.29.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72fd0ef9f00aeed37179c62282a3d14262dbbafb74ec0ba16e1b1864d8a12169"},
- {file = "asyncpg-0.29.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:52e8f8f9ff6e21f9b39ca9f8e3e33a5fcdceaf5667a8c5c32bee158e313be385"},
- {file = "asyncpg-0.29.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9e6823a7012be8b68301342ba33b4740e5a166f6bbda0aee32bc01638491a22"},
- {file = "asyncpg-0.29.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:746e80d83ad5d5464cfbf94315eb6744222ab00aa4e522b704322fb182b83610"},
- {file = "asyncpg-0.29.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ff8e8109cd6a46ff852a5e6bab8b0a047d7ea42fcb7ca5ae6eaae97d8eacf397"},
- {file = "asyncpg-0.29.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:97eb024685b1d7e72b1972863de527c11ff87960837919dac6e34754768098eb"},
- {file = "asyncpg-0.29.0-cp310-cp310-win32.whl", hash = "sha256:5bbb7f2cafd8d1fa3e65431833de2642f4b2124be61a449fa064e1a08d27e449"},
- {file = "asyncpg-0.29.0-cp310-cp310-win_amd64.whl", hash = "sha256:76c3ac6530904838a4b650b2880f8e7af938ee049e769ec2fba7cd66469d7772"},
- {file = "asyncpg-0.29.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4900ee08e85af01adb207519bb4e14b1cae8fd21e0ccf80fac6aa60b6da37b4"},
- {file = "asyncpg-0.29.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a65c1dcd820d5aea7c7d82a3fdcb70e096f8f70d1a8bf93eb458e49bfad036ac"},
- {file = "asyncpg-0.29.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b52e46f165585fd6af4863f268566668407c76b2c72d366bb8b522fa66f1870"},
- {file = "asyncpg-0.29.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc600ee8ef3dd38b8d67421359779f8ccec30b463e7aec7ed481c8346decf99f"},
- {file = "asyncpg-0.29.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:039a261af4f38f949095e1e780bae84a25ffe3e370175193174eb08d3cecab23"},
- {file = "asyncpg-0.29.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6feaf2d8f9138d190e5ec4390c1715c3e87b37715cd69b2c3dfca616134efd2b"},
- {file = "asyncpg-0.29.0-cp311-cp311-win32.whl", hash = "sha256:1e186427c88225ef730555f5fdda6c1812daa884064bfe6bc462fd3a71c4b675"},
- {file = "asyncpg-0.29.0-cp311-cp311-win_amd64.whl", hash = "sha256:cfe73ffae35f518cfd6e4e5f5abb2618ceb5ef02a2365ce64f132601000587d3"},
- {file = "asyncpg-0.29.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6011b0dc29886ab424dc042bf9eeb507670a3b40aece3439944006aafe023178"},
- {file = "asyncpg-0.29.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b544ffc66b039d5ec5a7454667f855f7fec08e0dfaf5a5490dfafbb7abbd2cfb"},
- {file = "asyncpg-0.29.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d84156d5fb530b06c493f9e7635aa18f518fa1d1395ef240d211cb563c4e2364"},
- {file = "asyncpg-0.29.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54858bc25b49d1114178d65a88e48ad50cb2b6f3e475caa0f0c092d5f527c106"},
- {file = "asyncpg-0.29.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bde17a1861cf10d5afce80a36fca736a86769ab3579532c03e45f83ba8a09c59"},
- {file = "asyncpg-0.29.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:37a2ec1b9ff88d8773d3eb6d3784dc7e3fee7756a5317b67f923172a4748a175"},
- {file = "asyncpg-0.29.0-cp312-cp312-win32.whl", hash = "sha256:bb1292d9fad43112a85e98ecdc2e051602bce97c199920586be83254d9dafc02"},
- {file = "asyncpg-0.29.0-cp312-cp312-win_amd64.whl", hash = "sha256:2245be8ec5047a605e0b454c894e54bf2ec787ac04b1cb7e0d3c67aa1e32f0fe"},
- {file = "asyncpg-0.29.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0009a300cae37b8c525e5b449233d59cd9868fd35431abc470a3e364d2b85cb9"},
- {file = "asyncpg-0.29.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cad1324dbb33f3ca0cd2074d5114354ed3be2b94d48ddfd88af75ebda7c43cc"},
- {file = "asyncpg-0.29.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:012d01df61e009015944ac7543d6ee30c2dc1eb2f6b10b62a3f598beb6531548"},
- {file = "asyncpg-0.29.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:000c996c53c04770798053e1730d34e30cb645ad95a63265aec82da9093d88e7"},
- {file = "asyncpg-0.29.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e0bfe9c4d3429706cf70d3249089de14d6a01192d617e9093a8e941fea8ee775"},
- {file = "asyncpg-0.29.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:642a36eb41b6313ffa328e8a5c5c2b5bea6ee138546c9c3cf1bffaad8ee36dd9"},
- {file = "asyncpg-0.29.0-cp38-cp38-win32.whl", hash = "sha256:a921372bbd0aa3a5822dd0409da61b4cd50df89ae85150149f8c119f23e8c408"},
- {file = "asyncpg-0.29.0-cp38-cp38-win_amd64.whl", hash = "sha256:103aad2b92d1506700cbf51cd8bb5441e7e72e87a7b3a2ca4e32c840f051a6a3"},
- {file = "asyncpg-0.29.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5340dd515d7e52f4c11ada32171d87c05570479dc01dc66d03ee3e150fb695da"},
- {file = "asyncpg-0.29.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e17b52c6cf83e170d3d865571ba574577ab8e533e7361a2b8ce6157d02c665d3"},
- {file = "asyncpg-0.29.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f100d23f273555f4b19b74a96840aa27b85e99ba4b1f18d4ebff0734e78dc090"},
- {file = "asyncpg-0.29.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48e7c58b516057126b363cec8ca02b804644fd012ef8e6c7e23386b7d5e6ce83"},
- {file = "asyncpg-0.29.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f9ea3f24eb4c49a615573724d88a48bd1b7821c890c2effe04f05382ed9e8810"},
- {file = "asyncpg-0.29.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8d36c7f14a22ec9e928f15f92a48207546ffe68bc412f3be718eedccdf10dc5c"},
- {file = "asyncpg-0.29.0-cp39-cp39-win32.whl", hash = "sha256:797ab8123ebaed304a1fad4d7576d5376c3a006a4100380fb9d517f0b59c1ab2"},
- {file = "asyncpg-0.29.0-cp39-cp39-win_amd64.whl", hash = "sha256:cce08a178858b426ae1aa8409b5cc171def45d4293626e7aa6510696d46decd8"},
- {file = "asyncpg-0.29.0.tar.gz", hash = "sha256:d1c49e1f44fffafd9a55e1a9b101590859d881d639ea2922516f5d9c512d354e"},
-]
-
-[package.extras]
-docs = ["Sphinx (>=5.3.0,<5.4.0)", "sphinx-rtd-theme (>=1.2.2)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"]
-test = ["flake8 (>=6.1,<7.0)", "uvloop (>=0.15.3)"]
-
-[[package]]
-name = "better-exceptions"
-version = "0.3.3"
-description = "Pretty and helpful exceptions, automatically"
-optional = false
-python-versions = "*"
-files = [
- {file = "better_exceptions-0.3.3-py3-none-any.whl", hash = "sha256:9c70b1c61d5a179b84cd2c9d62c3324b667d74286207343645ed4306fdaad976"},
- {file = "better_exceptions-0.3.3.tar.gz", hash = "sha256:e4e6bc18444d5f04e6e894b10381e5e921d3d544240418162c7db57e9eb3453b"},
-]
-
-[package.dependencies]
-colorama = {version = "*", markers = "sys_platform == \"win32\""}
-
-[[package]]
-name = "black"
-version = "23.12.1"
-description = "The uncompromising code formatter."
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"},
- {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"},
- {file = "black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"},
- {file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"},
- {file = "black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"},
- {file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"},
- {file = "black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"},
- {file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"},
- {file = "black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"},
- {file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"},
- {file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"},
- {file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"},
- {file = "black-23.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f"},
- {file = "black-23.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d"},
- {file = "black-23.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a"},
- {file = "black-23.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e"},
- {file = "black-23.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055"},
- {file = "black-23.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54"},
- {file = "black-23.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea"},
- {file = "black-23.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2"},
- {file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"},
- {file = "black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"},
-]
-
-[package.dependencies]
-click = ">=8.0.0"
-mypy-extensions = ">=0.4.3"
-packaging = ">=22.0"
-pathspec = ">=0.9.0"
-platformdirs = ">=2"
-
-[package.extras]
-colorama = ["colorama (>=0.4.3)"]
-d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"]
-jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
-uvloop = ["uvloop (>=0.15.2)"]
-
-[[package]]
-name = "certifi"
-version = "2024.2.2"
-description = "Python package for providing Mozilla's CA Bundle."
-optional = false
-python-versions = ">=3.6"
-files = [
- {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"},
- {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"},
-]
-
-[[package]]
-name = "cffi"
-version = "1.16.0"
-description = "Foreign Function Interface for Python calling C code."
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"},
- {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"},
- {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"},
- {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"},
- {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"},
- {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"},
- {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"},
- {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"},
- {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"},
- {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"},
- {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"},
- {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"},
- {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"},
- {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"},
- {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"},
- {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"},
- {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"},
- {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"},
- {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"},
- {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"},
- {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"},
- {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"},
- {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"},
- {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"},
- {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"},
- {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"},
- {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"},
- {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"},
- {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"},
- {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"},
- {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"},
- {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"},
- {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"},
- {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"},
- {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"},
- {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"},
- {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"},
- {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"},
- {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"},
- {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"},
- {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"},
- {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"},
- {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"},
- {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"},
- {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"},
- {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"},
- {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"},
- {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"},
- {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"},
- {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"},
- {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"},
- {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"},
-]
-
-[package.dependencies]
-pycparser = "*"
-
-[[package]]
-name = "charset-normalizer"
-version = "3.3.2"
-description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
-optional = false
-python-versions = ">=3.7.0"
-files = [
- {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"},
- {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"},
- {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"},
- {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"},
- {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"},
- {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"},
- {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"},
- {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"},
- {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"},
- {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"},
- {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"},
- {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"},
- {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"},
- {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"},
- {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"},
- {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"},
- {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"},
- {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"},
- {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"},
- {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"},
- {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"},
- {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"},
- {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"},
- {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"},
- {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"},
- {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"},
- {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"},
- {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"},
- {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"},
- {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"},
- {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"},
- {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"},
- {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"},
- {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"},
- {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"},
- {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"},
- {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"},
- {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"},
- {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"},
- {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"},
- {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"},
- {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"},
- {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"},
- {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"},
- {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"},
- {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"},
- {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"},
- {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"},
- {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"},
- {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"},
- {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"},
- {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"},
- {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"},
- {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"},
- {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"},
- {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"},
- {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"},
- {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"},
- {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"},
- {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"},
- {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"},
- {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"},
- {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"},
- {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"},
- {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"},
- {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"},
- {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"},
- {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"},
- {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"},
- {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"},
- {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"},
- {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"},
- {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"},
- {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"},
- {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"},
- {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"},
- {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"},
- {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"},
- {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"},
- {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"},
- {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"},
- {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"},
- {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"},
- {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"},
- {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"},
- {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"},
- {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"},
- {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"},
- {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"},
- {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"},
-]
-
-[[package]]
-name = "click"
-version = "8.1.7"
-description = "Composable command line interface toolkit"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"},
- {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"},
-]
-
-[package.dependencies]
-colorama = {version = "*", markers = "platform_system == \"Windows\""}
-
-[[package]]
-name = "colorama"
-version = "0.4.6"
-description = "Cross-platform colored terminal text."
-optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
-files = [
- {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
- {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
-]
-
-[[package]]
-name = "coverage"
-version = "7.4.4"
-description = "Code coverage measurement for Python"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "coverage-7.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0be5efd5127542ef31f165de269f77560d6cdef525fffa446de6f7e9186cfb2"},
- {file = "coverage-7.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ccd341521be3d1b3daeb41960ae94a5e87abe2f46f17224ba5d6f2b8398016cf"},
- {file = "coverage-7.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fa497a8ab37784fbb20ab699c246053ac294d13fc7eb40ec007a5043ec91f8"},
- {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b1a93009cb80730c9bca5d6d4665494b725b6e8e157c1cb7f2db5b4b122ea562"},
- {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:690db6517f09336559dc0b5f55342df62370a48f5469fabf502db2c6d1cffcd2"},
- {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:09c3255458533cb76ef55da8cc49ffab9e33f083739c8bd4f58e79fecfe288f7"},
- {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8ce1415194b4a6bd0cdcc3a1dfbf58b63f910dcb7330fe15bdff542c56949f87"},
- {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b91cbc4b195444e7e258ba27ac33769c41b94967919f10037e6355e998af255c"},
- {file = "coverage-7.4.4-cp310-cp310-win32.whl", hash = "sha256:598825b51b81c808cb6f078dcb972f96af96b078faa47af7dfcdf282835baa8d"},
- {file = "coverage-7.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:09ef9199ed6653989ebbcaacc9b62b514bb63ea2f90256e71fea3ed74bd8ff6f"},
- {file = "coverage-7.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0f9f50e7ef2a71e2fae92774c99170eb8304e3fdf9c8c3c7ae9bab3e7229c5cf"},
- {file = "coverage-7.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:623512f8ba53c422fcfb2ce68362c97945095b864cda94a92edbaf5994201083"},
- {file = "coverage-7.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0513b9508b93da4e1716744ef6ebc507aff016ba115ffe8ecff744d1322a7b63"},
- {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40209e141059b9370a2657c9b15607815359ab3ef9918f0196b6fccce8d3230f"},
- {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a2b2b78c78293782fd3767d53e6474582f62443d0504b1554370bde86cc8227"},
- {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:73bfb9c09951125d06ee473bed216e2c3742f530fc5acc1383883125de76d9cd"},
- {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f384c3cc76aeedce208643697fb3e8437604b512255de6d18dae3f27655a384"},
- {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:54eb8d1bf7cacfbf2a3186019bcf01d11c666bd495ed18717162f7eb1e9dd00b"},
- {file = "coverage-7.4.4-cp311-cp311-win32.whl", hash = "sha256:cac99918c7bba15302a2d81f0312c08054a3359eaa1929c7e4b26ebe41e9b286"},
- {file = "coverage-7.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:b14706df8b2de49869ae03a5ccbc211f4041750cd4a66f698df89d44f4bd30ec"},
- {file = "coverage-7.4.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:201bef2eea65e0e9c56343115ba3814e896afe6d36ffd37bab783261db430f76"},
- {file = "coverage-7.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:41c9c5f3de16b903b610d09650e5e27adbfa7f500302718c9ffd1c12cf9d6818"},
- {file = "coverage-7.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d898fe162d26929b5960e4e138651f7427048e72c853607f2b200909794ed978"},
- {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ea79bb50e805cd6ac058dfa3b5c8f6c040cb87fe83de10845857f5535d1db70"},
- {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce4b94265ca988c3f8e479e741693d143026632672e3ff924f25fab50518dd51"},
- {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:00838a35b882694afda09f85e469c96367daa3f3f2b097d846a7216993d37f4c"},
- {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fdfafb32984684eb03c2d83e1e51f64f0906b11e64482df3c5db936ce3839d48"},
- {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:69eb372f7e2ece89f14751fbcbe470295d73ed41ecd37ca36ed2eb47512a6ab9"},
- {file = "coverage-7.4.4-cp312-cp312-win32.whl", hash = "sha256:137eb07173141545e07403cca94ab625cc1cc6bc4c1e97b6e3846270e7e1fea0"},
- {file = "coverage-7.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:d71eec7d83298f1af3326ce0ff1d0ea83c7cb98f72b577097f9083b20bdaf05e"},
- {file = "coverage-7.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d5ae728ff3b5401cc320d792866987e7e7e880e6ebd24433b70a33b643bb0384"},
- {file = "coverage-7.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cc4f1358cb0c78edef3ed237ef2c86056206bb8d9140e73b6b89fbcfcbdd40e1"},
- {file = "coverage-7.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8130a2aa2acb8788e0b56938786c33c7c98562697bf9f4c7d6e8e5e3a0501e4a"},
- {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf271892d13e43bc2b51e6908ec9a6a5094a4df1d8af0bfc360088ee6c684409"},
- {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4cdc86d54b5da0df6d3d3a2f0b710949286094c3a6700c21e9015932b81447e"},
- {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ae71e7ddb7a413dd60052e90528f2f65270aad4b509563af6d03d53e979feafd"},
- {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:38dd60d7bf242c4ed5b38e094baf6401faa114fc09e9e6632374388a404f98e7"},
- {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa5b1c1bfc28384f1f53b69a023d789f72b2e0ab1b3787aae16992a7ca21056c"},
- {file = "coverage-7.4.4-cp38-cp38-win32.whl", hash = "sha256:dfa8fe35a0bb90382837b238fff375de15f0dcdb9ae68ff85f7a63649c98527e"},
- {file = "coverage-7.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:b2991665420a803495e0b90a79233c1433d6ed77ef282e8e152a324bbbc5e0c8"},
- {file = "coverage-7.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b799445b9f7ee8bf299cfaed6f5b226c0037b74886a4e11515e569b36fe310d"},
- {file = "coverage-7.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b4d33f418f46362995f1e9d4f3a35a1b6322cb959c31d88ae56b0298e1c22357"},
- {file = "coverage-7.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aadacf9a2f407a4688d700e4ebab33a7e2e408f2ca04dbf4aef17585389eff3e"},
- {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c95949560050d04d46b919301826525597f07b33beba6187d04fa64d47ac82e"},
- {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff7687ca3d7028d8a5f0ebae95a6e4827c5616b31a4ee1192bdfde697db110d4"},
- {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5fc1de20b2d4a061b3df27ab9b7c7111e9a710f10dc2b84d33a4ab25065994ec"},
- {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c74880fc64d4958159fbd537a091d2a585448a8f8508bf248d72112723974cbd"},
- {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:742a76a12aa45b44d236815d282b03cfb1de3b4323f3e4ec933acfae08e54ade"},
- {file = "coverage-7.4.4-cp39-cp39-win32.whl", hash = "sha256:d89d7b2974cae412400e88f35d86af72208e1ede1a541954af5d944a8ba46c57"},
- {file = "coverage-7.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:9ca28a302acb19b6af89e90f33ee3e1906961f94b54ea37de6737b7ca9d8827c"},
- {file = "coverage-7.4.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:b2c5edc4ac10a7ef6605a966c58929ec6c1bd0917fb8c15cb3363f65aa40e677"},
- {file = "coverage-7.4.4.tar.gz", hash = "sha256:c901df83d097649e257e803be22592aedfd5182f07b3cc87d640bbb9afd50f49"},
-]
-
-[package.extras]
-toml = ["tomli"]
-
-[[package]]
-name = "cryptography"
-version = "42.0.5"
-description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"},
- {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"},
- {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"},
- {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"},
- {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"},
- {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"},
- {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"},
- {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"},
- {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"},
- {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"},
- {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"},
- {file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"},
- {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"},
- {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"},
- {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"},
- {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"},
- {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"},
- {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"},
- {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"},
- {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"},
- {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"},
- {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"},
- {file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = "sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"},
- {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"},
- {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"},
- {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"},
- {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"},
- {file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"},
- {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"},
- {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"},
- {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"},
- {file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"},
-]
-
-[package.dependencies]
-cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""}
-
-[package.extras]
-docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"]
-docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"]
-nox = ["nox"]
-pep8test = ["check-sdist", "click", "mypy", "ruff"]
-sdist = ["build"]
-ssh = ["bcrypt (>=3.1.5)"]
-test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"]
-test-randomorder = ["pytest-randomly"]
-
-[[package]]
-name = "databind"
-version = "4.5.1"
-description = "Databind is a library inspired by jackson-databind to de-/serialize Python dataclasses. The `databind` package will install the full suite of databind packages. Compatible with Python 3.8 and newer."
-optional = false
-python-versions = "<4.0.0,>=3.8.0"
-files = [
- {file = "databind-4.5.1-py3-none-any.whl", hash = "sha256:e8eae14d2bbf41dfbe598883deda5c8088dfcb3ce55344b216925ea4ad48d5cf"},
- {file = "databind-4.5.1.tar.gz", hash = "sha256:66f912f100d81acbbe8b9b2c5280a04341bca5d3925669371a551cc354350962"},
-]
-
-[package.dependencies]
-Deprecated = ">=1.2.12,<2.0.0"
-nr-date = ">=2.0.0,<3.0.0"
-nr-stream = ">=1.0.0,<2.0.0"
-typeapi = ">=2.0.1,<3"
-typing-extensions = ">=3.10.0,<5"
-
-[[package]]
-name = "databind-core"
-version = "4.5.1"
-description = "Databind is a library inspired by jackson-databind to de-/serialize Python dataclasses. Compatible with Python 3.8 and newer. Deprecated, use `databind` package."
-optional = false
-python-versions = "<4.0.0,>=3.8.0"
-files = [
- {file = "databind.core-4.5.1-py3-none-any.whl", hash = "sha256:df782c1bd2e416e268796918c4bdcdc2b9e948e2c263423ae9af1f5c50087973"},
- {file = "databind.core-4.5.1.tar.gz", hash = "sha256:d938777ab612188bef2070f12150e1cf07d0659459dd858a12edce80bdad64fa"},
-]
-
-[package.dependencies]
-databind = ">=4.5.1,<5.0.0"
-
-[[package]]
-name = "databind-json"
-version = "4.5.1"
-description = "De-/serialize Python dataclasses to or from JSON payloads. Compatible with Python 3.8 and newer. Deprecated, use `databind` module instead."
-optional = false
-python-versions = "<4.0.0,>=3.8.0"
-files = [
- {file = "databind.json-4.5.1-py3-none-any.whl", hash = "sha256:d05854afbc0e398f427b6bd6172560eee41ab36ca4953c3b92fd8aa4c81f7502"},
- {file = "databind.json-4.5.1.tar.gz", hash = "sha256:40363b1ae4322e877a02e4b7b5dc2c9ca8be4176e2b567a3f9718ee52543c1d1"},
-]
-
-[package.dependencies]
-databind = ">=4.5.1,<5.0.0"
-
-[[package]]
-name = "deprecated"
-version = "1.2.14"
-description = "Python @deprecated decorator to deprecate old python classes, functions or methods."
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-files = [
- {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"},
- {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"},
-]
-
-[package.dependencies]
-wrapt = ">=1.10,<2"
-
-[package.extras]
-dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"]
-
-[[package]]
-name = "dnspython"
-version = "2.6.1"
-description = "DNS toolkit"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "dnspython-2.6.1-py3-none-any.whl", hash = "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50"},
- {file = "dnspython-2.6.1.tar.gz", hash = "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc"},
-]
-
-[package.extras]
-dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "sphinx (>=7.2.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"]
-dnssec = ["cryptography (>=41)"]
-doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"]
-doq = ["aioquic (>=0.9.25)"]
-idna = ["idna (>=3.6)"]
-trio = ["trio (>=0.23)"]
-wmi = ["wmi (>=1.5.1)"]
-
-[[package]]
-name = "docspec"
-version = "2.2.1"
-description = "Docspec is a JSON object specification for representing API documentation of programming languages."
-optional = false
-python-versions = ">=3.7,<4.0"
-files = [
- {file = "docspec-2.2.1-py3-none-any.whl", hash = "sha256:7538f750095a9688c6980ff9a4e029a823a500f64bd00b6b4bdb27951feb31cb"},
- {file = "docspec-2.2.1.tar.gz", hash = "sha256:4854e77edc0e2de40e785e57e95880f7095a05fe978f8b54cef7a269586e15ff"},
-]
-
-[package.dependencies]
-"databind.core" = ">=4.2.6,<5.0.0"
-"databind.json" = ">=4.2.6,<5.0.0"
-Deprecated = ">=1.2.12,<2.0.0"
-
-[[package]]
-name = "docspec-python"
-version = "2.2.1"
-description = "A parser based on lib2to3 producing docspec data from Python source code."
-optional = false
-python-versions = ">=3.7,<4.0"
-files = [
- {file = "docspec_python-2.2.1-py3-none-any.whl", hash = "sha256:76ac41d35a8face35b2d766c2e8a416fb8832359785d396f0d53bcb00f178e54"},
- {file = "docspec_python-2.2.1.tar.gz", hash = "sha256:c41b850b4d6f4de30999ea6f82c9cdb9183d9bcba45559ee9173d3dab7281559"},
-]
-
-[package.dependencies]
-black = ">=23.1.0,<24.0.0"
-docspec = ">=2.2.1,<3.0.0"
-"nr.util" = ">=0.7.0"
-
-[[package]]
-name = "docstring-parser"
-version = "0.11"
-description = "\"Parse Python docstrings in reST, Google and Numpydoc format\""
-optional = false
-python-versions = ">=3.6"
-files = [
- {file = "docstring_parser-0.11.tar.gz", hash = "sha256:93b3f8f481c7d24e37c5d9f30293c89e2933fa209421c8abd731dd3ef0715ecb"},
-]
-
-[package.extras]
-test = ["black", "pytest"]
-
-[[package]]
-name = "email-validator"
-version = "2.1.1"
-description = "A robust email address syntax and deliverability validation library."
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "email_validator-2.1.1-py3-none-any.whl", hash = "sha256:97d882d174e2a65732fb43bfce81a3a834cbc1bde8bf419e30ef5ea976370a05"},
- {file = "email_validator-2.1.1.tar.gz", hash = "sha256:200a70680ba08904be6d1eef729205cc0d687634399a5924d842533efb824b84"},
-]
-
-[package.dependencies]
-dnspython = ">=2.0.0"
-idna = ">=2.0.0"
-
-[[package]]
-name = "fastapi"
-version = "0.110.1"
-description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "fastapi-0.110.1-py3-none-any.whl", hash = "sha256:5df913203c482f820d31f48e635e022f8cbfe7350e4830ef05a3163925b1addc"},
- {file = "fastapi-0.110.1.tar.gz", hash = "sha256:6feac43ec359dfe4f45b2c18ec8c94edb8dc2dfc461d417d9e626590c071baad"},
-]
-
-[package.dependencies]
-pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0"
-starlette = ">=0.37.2,<0.38.0"
-typing-extensions = ">=4.8.0"
-
-[package.extras]
-all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"]
-
-[[package]]
-name = "greenlet"
-version = "3.0.3"
-description = "Lightweight in-process concurrent programming"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"},
- {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"},
- {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"},
- {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"},
- {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"},
- {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"},
- {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"},
- {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"},
- {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"},
- {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"},
- {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"},
- {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"},
- {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"},
- {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"},
- {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"},
- {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"},
- {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"},
- {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"},
- {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"},
- {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"},
- {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"},
- {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"},
- {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"},
- {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"},
- {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"},
- {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"},
- {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"},
- {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"},
- {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"},
- {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"},
- {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"},
- {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"},
- {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"},
- {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"},
- {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"},
- {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"},
- {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"},
- {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"},
- {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"},
- {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"},
- {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"},
- {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"},
- {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"},
- {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"},
- {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"},
- {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"},
- {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"},
- {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"},
- {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"},
- {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"},
- {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"},
- {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"},
- {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"},
- {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"},
- {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"},
- {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"},
- {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"},
- {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"},
-]
-
-[package.extras]
-docs = ["Sphinx", "furo"]
-test = ["objgraph", "psutil"]
-
-[[package]]
-name = "grpc-stubs"
-version = "1.53.0.5"
-description = "Mypy stubs for gRPC"
-optional = false
-python-versions = ">=3.6"
-files = [
- {file = "grpc-stubs-1.53.0.5.tar.gz", hash = "sha256:3e1b642775cbc3e0c6332cfcedfccb022176db87e518757bef3a1241397be406"},
- {file = "grpc_stubs-1.53.0.5-py3-none-any.whl", hash = "sha256:04183fb65a1b166a1febb9627e3d9647d3926ccc2dfe049fe7b6af243428dbe1"},
-]
-
-[package.dependencies]
-grpcio = "*"
-
-[[package]]
-name = "grpcio"
-version = "1.62.1"
-description = "HTTP/2-based RPC framework"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "grpcio-1.62.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:179bee6f5ed7b5f618844f760b6acf7e910988de77a4f75b95bbfaa8106f3c1e"},
- {file = "grpcio-1.62.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:48611e4fa010e823ba2de8fd3f77c1322dd60cb0d180dc6630a7e157b205f7ea"},
- {file = "grpcio-1.62.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:b2a0e71b0a2158aa4bce48be9f8f9eb45cbd17c78c7443616d00abbe2a509f6d"},
- {file = "grpcio-1.62.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fbe80577c7880911d3ad65e5ecc997416c98f354efeba2f8d0f9112a67ed65a5"},
- {file = "grpcio-1.62.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58f6c693d446964e3292425e1d16e21a97a48ba9172f2d0df9d7b640acb99243"},
- {file = "grpcio-1.62.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:77c339403db5a20ef4fed02e4d1a9a3d9866bf9c0afc77a42234677313ea22f3"},
- {file = "grpcio-1.62.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b5a4ea906db7dec694098435d84bf2854fe158eb3cd51e1107e571246d4d1d70"},
- {file = "grpcio-1.62.1-cp310-cp310-win32.whl", hash = "sha256:4187201a53f8561c015bc745b81a1b2d278967b8de35f3399b84b0695e281d5f"},
- {file = "grpcio-1.62.1-cp310-cp310-win_amd64.whl", hash = "sha256:844d1f3fb11bd1ed362d3fdc495d0770cfab75761836193af166fee113421d66"},
- {file = "grpcio-1.62.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:833379943d1728a005e44103f17ecd73d058d37d95783eb8f0b28ddc1f54d7b2"},
- {file = "grpcio-1.62.1-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:c7fcc6a32e7b7b58f5a7d27530669337a5d587d4066060bcb9dee7a8c833dfb7"},
- {file = "grpcio-1.62.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:fa7d28eb4d50b7cbe75bb8b45ed0da9a1dc5b219a0af59449676a29c2eed9698"},
- {file = "grpcio-1.62.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48f7135c3de2f298b833be8b4ae20cafe37091634e91f61f5a7eb3d61ec6f660"},
- {file = "grpcio-1.62.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71f11fd63365ade276c9d4a7b7df5c136f9030e3457107e1791b3737a9b9ed6a"},
- {file = "grpcio-1.62.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4b49fd8fe9f9ac23b78437da94c54aa7e9996fbb220bac024a67469ce5d0825f"},
- {file = "grpcio-1.62.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:482ae2ae78679ba9ed5752099b32e5fe580443b4f798e1b71df412abf43375db"},
- {file = "grpcio-1.62.1-cp311-cp311-win32.whl", hash = "sha256:1faa02530b6c7426404372515fe5ddf66e199c2ee613f88f025c6f3bd816450c"},
- {file = "grpcio-1.62.1-cp311-cp311-win_amd64.whl", hash = "sha256:5bd90b8c395f39bc82a5fb32a0173e220e3f401ff697840f4003e15b96d1befc"},
- {file = "grpcio-1.62.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:b134d5d71b4e0837fff574c00e49176051a1c532d26c052a1e43231f252d813b"},
- {file = "grpcio-1.62.1-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:d1f6c96573dc09d50dbcbd91dbf71d5cf97640c9427c32584010fbbd4c0e0037"},
- {file = "grpcio-1.62.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:359f821d4578f80f41909b9ee9b76fb249a21035a061a327f91c953493782c31"},
- {file = "grpcio-1.62.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a485f0c2010c696be269184bdb5ae72781344cb4e60db976c59d84dd6354fac9"},
- {file = "grpcio-1.62.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b50b09b4dc01767163d67e1532f948264167cd27f49e9377e3556c3cba1268e1"},
- {file = "grpcio-1.62.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3227c667dccbe38f2c4d943238b887bac588d97c104815aecc62d2fd976e014b"},
- {file = "grpcio-1.62.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3952b581eb121324853ce2b191dae08badb75cd493cb4e0243368aa9e61cfd41"},
- {file = "grpcio-1.62.1-cp312-cp312-win32.whl", hash = "sha256:83a17b303425104d6329c10eb34bba186ffa67161e63fa6cdae7776ff76df73f"},
- {file = "grpcio-1.62.1-cp312-cp312-win_amd64.whl", hash = "sha256:6696ffe440333a19d8d128e88d440f91fb92c75a80ce4b44d55800e656a3ef1d"},
- {file = "grpcio-1.62.1-cp37-cp37m-linux_armv7l.whl", hash = "sha256:e3393b0823f938253370ebef033c9fd23d27f3eae8eb9a8f6264900c7ea3fb5a"},
- {file = "grpcio-1.62.1-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:83e7ccb85a74beaeae2634f10eb858a0ed1a63081172649ff4261f929bacfd22"},
- {file = "grpcio-1.62.1-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:882020c87999d54667a284c7ddf065b359bd00251fcd70279ac486776dbf84ec"},
- {file = "grpcio-1.62.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a10383035e864f386fe096fed5c47d27a2bf7173c56a6e26cffaaa5a361addb1"},
- {file = "grpcio-1.62.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:960edebedc6b9ada1ef58e1c71156f28689978188cd8cff3b646b57288a927d9"},
- {file = "grpcio-1.62.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:23e2e04b83f347d0aadde0c9b616f4726c3d76db04b438fd3904b289a725267f"},
- {file = "grpcio-1.62.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:978121758711916d34fe57c1f75b79cdfc73952f1481bb9583399331682d36f7"},
- {file = "grpcio-1.62.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9084086190cc6d628f282e5615f987288b95457292e969b9205e45b442276407"},
- {file = "grpcio-1.62.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:22bccdd7b23c420a27fd28540fb5dcbc97dc6be105f7698cb0e7d7a420d0e362"},
- {file = "grpcio-1.62.1-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:8999bf1b57172dbc7c3e4bb3c732658e918f5c333b2942243f10d0d653953ba9"},
- {file = "grpcio-1.62.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:d9e52558b8b8c2f4ac05ac86344a7417ccdd2b460a59616de49eb6933b07a0bd"},
- {file = "grpcio-1.62.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1714e7bc935780bc3de1b3fcbc7674209adf5208ff825799d579ffd6cd0bd505"},
- {file = "grpcio-1.62.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8842ccbd8c0e253c1f189088228f9b433f7a93b7196b9e5b6f87dba393f5d5d"},
- {file = "grpcio-1.62.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1f1e7b36bdff50103af95a80923bf1853f6823dd62f2d2a2524b66ed74103e49"},
- {file = "grpcio-1.62.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bba97b8e8883a8038606480d6b6772289f4c907f6ba780fa1f7b7da7dfd76f06"},
- {file = "grpcio-1.62.1-cp38-cp38-win32.whl", hash = "sha256:a7f615270fe534548112a74e790cd9d4f5509d744dd718cd442bf016626c22e4"},
- {file = "grpcio-1.62.1-cp38-cp38-win_amd64.whl", hash = "sha256:e6c8c8693df718c5ecbc7babb12c69a4e3677fd11de8886f05ab22d4e6b1c43b"},
- {file = "grpcio-1.62.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:73db2dc1b201d20ab7083e7041946910bb991e7e9761a0394bbc3c2632326483"},
- {file = "grpcio-1.62.1-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:407b26b7f7bbd4f4751dbc9767a1f0716f9fe72d3d7e96bb3ccfc4aace07c8de"},
- {file = "grpcio-1.62.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:f8de7c8cef9261a2d0a62edf2ccea3d741a523c6b8a6477a340a1f2e417658de"},
- {file = "grpcio-1.62.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd5c8a1af40ec305d001c60236308a67e25419003e9bb3ebfab5695a8d0b369"},
- {file = "grpcio-1.62.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be0477cb31da67846a33b1a75c611f88bfbcd427fe17701b6317aefceee1b96f"},
- {file = "grpcio-1.62.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:60dcd824df166ba266ee0cfaf35a31406cd16ef602b49f5d4dfb21f014b0dedd"},
- {file = "grpcio-1.62.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:973c49086cabab773525f6077f95e5a993bfc03ba8fc32e32f2c279497780585"},
- {file = "grpcio-1.62.1-cp39-cp39-win32.whl", hash = "sha256:12859468e8918d3bd243d213cd6fd6ab07208195dc140763c00dfe901ce1e1b4"},
- {file = "grpcio-1.62.1-cp39-cp39-win_amd64.whl", hash = "sha256:b7209117bbeebdfa5d898205cc55153a51285757902dd73c47de498ad4d11332"},
- {file = "grpcio-1.62.1.tar.gz", hash = "sha256:6c455e008fa86d9e9a9d85bb76da4277c0d7d9668a3bfa70dbe86e9f3c759947"},
-]
-
-[package.extras]
-protobuf = ["grpcio-tools (>=1.62.1)"]
-
-[[package]]
-name = "grpcio-tools"
-version = "1.62.1"
-description = "Protobuf code generator for gRPC"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "grpcio-tools-1.62.1.tar.gz", hash = "sha256:a4991e5ee8a97ab791296d3bf7e8700b1445635cc1828cc98df945ca1802d7f2"},
- {file = "grpcio_tools-1.62.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:f2b404bcae7e2ef9b0b9803b2a95119eb7507e6dc80ea4a64a78be052c30cebc"},
- {file = "grpcio_tools-1.62.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:fdd987a580b4474769adfd40144486f54bcc73838d5ec5d3647a17883ea78e76"},
- {file = "grpcio_tools-1.62.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:07af1a6442e2313cff22af93c2c4dd37ae32b5239b38e0d99e2cbf93de65429f"},
- {file = "grpcio_tools-1.62.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:41384c9ee18e61ef20cad2774ef71bd8854b63efce263b5177aa06fccb84df1f"},
- {file = "grpcio_tools-1.62.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c38006f7702d2ff52122e4c77a47348709374050c76216e84b30a9f06e45afa"},
- {file = "grpcio_tools-1.62.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:08fecc3c5b4e6dd3278f2b9d12837e423c7dcff551ca1e587018b4a0fc5f8019"},
- {file = "grpcio_tools-1.62.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a01e8dcd0f041f6fa6d815c54a2017d032950e310c41d514a8bc041e872c4d12"},
- {file = "grpcio_tools-1.62.1-cp310-cp310-win32.whl", hash = "sha256:dd933b8e0b3c13fe3543d58f849a6a5e0d7987688cb6801834278378c724f695"},
- {file = "grpcio_tools-1.62.1-cp310-cp310-win_amd64.whl", hash = "sha256:2b04844a9382f1bde4b4174e476e654ab3976168d2469cb4b29e352f4f35a5aa"},
- {file = "grpcio_tools-1.62.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:024380536ba71a96cdf736f0954f6ad03f5da609c09edbcc2ca02fdd639e0eed"},
- {file = "grpcio_tools-1.62.1-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:21f14b99e0cd38ad56754cc0b62b2bf3cf75f9f7fc40647da54669e0da0726fe"},
- {file = "grpcio_tools-1.62.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:975ac5fb482c23f3608c16e06a43c8bab4d79c2e2564cdbc25cf753c6e998775"},
- {file = "grpcio_tools-1.62.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50739aaab0c8076ad5957204e71f2e0c9876e11fd8338f7f09de12c2d75163c5"},
- {file = "grpcio_tools-1.62.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:598c54318f0326cf5020aa43fc95a15e933aba4a71943d3bff2677d2d21ddfa1"},
- {file = "grpcio_tools-1.62.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f309bdb33a61f8e049480d41498ee2e525cfb5e959958b326abfdf552bf9b9cb"},
- {file = "grpcio_tools-1.62.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f358effd3c11d66c150e0227f983d54a5cd30e14038566dadcf25f9f6844e6e8"},
- {file = "grpcio_tools-1.62.1-cp311-cp311-win32.whl", hash = "sha256:b76aead9b73f1650a091870fe4e9ed15ac4d8ed136f962042367255199c23594"},
- {file = "grpcio_tools-1.62.1-cp311-cp311-win_amd64.whl", hash = "sha256:d66a5d47eaa427039752fa0a83a425ff2a487b6a0ac30556fd3be2f3a27a0130"},
- {file = "grpcio_tools-1.62.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:575535d039b97d63e6a9abee626d6c7cd47bd8cb73dd00a5c84a98254a2164a4"},
- {file = "grpcio_tools-1.62.1-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:22644c90e43d1a888477899af917979e17364fdd6e9bbb92679cd6a54c4d36c3"},
- {file = "grpcio_tools-1.62.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:156d3e1b227c16e903003a56881dbe60e40f2b4bd66f0bc3b27c53e466e6384d"},
- {file = "grpcio_tools-1.62.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ad7c5691625a85327e5b683443baf73ae790fd5afc938252041ed5cd665e377"},
- {file = "grpcio_tools-1.62.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e140bbc08eea8abf51c0274f45fb1e8350220e64758998d7f3c7f985a0b2496"},
- {file = "grpcio_tools-1.62.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:7444fcab861911525470d398e5638b70d5cbea3b4674a3de92b5c58c5c515d4d"},
- {file = "grpcio_tools-1.62.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e643cd14a5d1e59865cba68a5a6f0175d987f36c5f4cb0db80dee9ed60b4c174"},
- {file = "grpcio_tools-1.62.1-cp312-cp312-win32.whl", hash = "sha256:1344a773d2caa9bb7fbea7e879b84f33740c808c34a5bd2a2768e526117a6b44"},
- {file = "grpcio_tools-1.62.1-cp312-cp312-win_amd64.whl", hash = "sha256:2eea1db3748b2f37b4dce84d8e0c15d9bc811094807cabafe7b0ea47f424dfd5"},
- {file = "grpcio_tools-1.62.1-cp37-cp37m-linux_armv7l.whl", hash = "sha256:45d2e6cf04d27286b6f73e6e20ba3f0a1f6d8f5535e5dcb1356200419bb457f4"},
- {file = "grpcio_tools-1.62.1-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:46ae58e6926773e7315e9005f0f17aacedbc0895a8752bec087d24efa2f1fb21"},
- {file = "grpcio_tools-1.62.1-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:4c28086df31478023a36f45e50767872ab3aed2419afff09814cb61c88b77db4"},
- {file = "grpcio_tools-1.62.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4fba5b339f4797548591036c9481e6895bf920fab7d3dc664d2697f8fb7c0bf"},
- {file = "grpcio_tools-1.62.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23eb3d47f78f509fcd201749b1f1e44b76f447913f7fbb3b8bae20f109086295"},
- {file = "grpcio_tools-1.62.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:fd5d47707bd6bc2b707ece765c362d2a1d2e8f6cd92b04c99fab49a929f3610c"},
- {file = "grpcio_tools-1.62.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d1924a6a943df7c73b9ef0048302327c75962b567451479710da729ead241228"},
- {file = "grpcio_tools-1.62.1-cp37-cp37m-win_amd64.whl", hash = "sha256:fe71ca30aabe42591e84ecb9694c0297dc699cc20c5b24d2cb267fb0fc01f947"},
- {file = "grpcio_tools-1.62.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:1819fd055c1ae672d1d725ec75eefd1f700c18acba0ed9332202be31d69c401d"},
- {file = "grpcio_tools-1.62.1-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:5dbe1f7481dd14b6d477b4bace96d275090bc7636b9883975a08b802c94e7b78"},
- {file = "grpcio_tools-1.62.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:771c051c5ece27ad03e4f2e33624a925f0ad636c01757ab7dbb04a37964af4ba"},
- {file = "grpcio_tools-1.62.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:98209c438b38b6f1276dbc27b1c04e346a75bfaafe72a25a548f2dc5ce71d226"},
- {file = "grpcio_tools-1.62.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2152308e5321cb90fb45aaa84d03d6dedb19735a8779aaf36c624f97b831842d"},
- {file = "grpcio_tools-1.62.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ed1f27dc2b2262c8b8d9036276619c1bb18791311c16ccbf1f31b660f2aad7cf"},
- {file = "grpcio_tools-1.62.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2744947b6c5e907af21133431809ccca535a037356864e32c122efed8cb9de1f"},
- {file = "grpcio_tools-1.62.1-cp38-cp38-win32.whl", hash = "sha256:13b20e269d14ad629ff9a2c9a2450f3dbb119d5948de63b27ffe624fa7aea85a"},
- {file = "grpcio_tools-1.62.1-cp38-cp38-win_amd64.whl", hash = "sha256:999823758e9eacd0095863d06cd6d388be769f80c9abb65cdb11c4f2cfce3fea"},
- {file = "grpcio_tools-1.62.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:941f8a5c31986053e75fa466bcfa743c2bf1b513b7978cf1f4ab4e96a8219d27"},
- {file = "grpcio_tools-1.62.1-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:b9c02c88c77ef6057c6cbeea8922d7c2424aabf46bfc40ddf42a32765ba91061"},
- {file = "grpcio_tools-1.62.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:6abd4eb3ccb444383a40156139acc3aaa73745d395139cb6bc8e2a3429e1e627"},
- {file = "grpcio_tools-1.62.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:449503213d142f8470b331a1c2f346f8457f16c7fe20f531bc2500e271f7c14c"},
- {file = "grpcio_tools-1.62.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a11bcf609d00cfc9baed77ab308223cabc1f0b22a05774a26dd4c94c0c80f1f"},
- {file = "grpcio_tools-1.62.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:5d7bdea33354b55acf40bb4dd3ba7324d6f1ef6b4a1a4da0807591f8c7e87b9a"},
- {file = "grpcio_tools-1.62.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d03b645852d605f43003020e78fe6d573cae6ee6b944193e36b8b317e7549a20"},
- {file = "grpcio_tools-1.62.1-cp39-cp39-win32.whl", hash = "sha256:52b185dfc3bf32e70929310367dbc66185afba60492a6a75a9b1141d407e160c"},
- {file = "grpcio_tools-1.62.1-cp39-cp39-win_amd64.whl", hash = "sha256:63a273b70896d3640b7a883eb4a080c3c263d91662d870a2e9c84b7bbd978e7b"},
-]
-
-[package.dependencies]
-grpcio = ">=1.62.1"
-protobuf = ">=4.21.6,<5.0dev"
-setuptools = "*"
-
-[[package]]
-name = "h11"
-version = "0.14.0"
-description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"},
- {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"},
-]
-
-[[package]]
-name = "idna"
-version = "3.7"
-description = "Internationalized Domain Names in Applications (IDNA)"
-optional = false
-python-versions = ">=3.5"
-files = [
- {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"},
- {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"},
-]
-
-[[package]]
-name = "importlib-metadata"
-version = "7.1.0"
-description = "Read metadata from Python packages"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "importlib_metadata-7.1.0-py3-none-any.whl", hash = "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570"},
- {file = "importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"},
-]
-
-[package.dependencies]
-zipp = ">=0.5"
-
-[package.extras]
-docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
-perf = ["ipython"]
-testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"]
-
-[[package]]
-name = "iniconfig"
-version = "2.0.0"
-description = "brain-dead simple config-ini parsing"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
- {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
-]
-
-[[package]]
-name = "jinja2"
-version = "3.1.3"
-description = "A very fast and expressive template engine."
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"},
- {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"},
-]
-
-[package.dependencies]
-MarkupSafe = ">=2.0"
-
-[package.extras]
-i18n = ["Babel (>=2.7)"]
-
-[[package]]
-name = "mako"
-version = "1.3.3"
-description = "A super-fast templating language that borrows the best ideas from the existing templating languages."
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "Mako-1.3.3-py3-none-any.whl", hash = "sha256:5324b88089a8978bf76d1629774fcc2f1c07b82acdf00f4c5dd8ceadfffc4b40"},
- {file = "Mako-1.3.3.tar.gz", hash = "sha256:e16c01d9ab9c11f7290eef1cfefc093fb5a45ee4a3da09e2fec2e4d1bae54e73"},
-]
-
-[package.dependencies]
-MarkupSafe = ">=0.9.2"
-
-[package.extras]
-babel = ["Babel"]
-lingua = ["lingua"]
-testing = ["pytest"]
-
-[[package]]
-name = "markdown-it-py"
-version = "3.0.0"
-description = "Python port of markdown-it. Markdown parsing, done right!"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"},
- {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"},
-]
-
-[package.dependencies]
-mdurl = ">=0.1,<1.0"
-
-[package.extras]
-benchmarking = ["psutil", "pytest", "pytest-benchmark"]
-code-style = ["pre-commit (>=3.0,<4.0)"]
-compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"]
-linkify = ["linkify-it-py (>=1,<3)"]
-plugins = ["mdit-py-plugins"]
-profiling = ["gprof2dot"]
-rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"]
-testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"]
-
-[[package]]
-name = "markupsafe"
-version = "2.1.5"
-description = "Safely add untrusted strings to HTML/XML markup."
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"},
- {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"},
- {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"},
- {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"},
- {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"},
- {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"},
- {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"},
- {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"},
- {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"},
- {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"},
- {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"},
- {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"},
- {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"},
- {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"},
- {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"},
- {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"},
- {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"},
- {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"},
- {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"},
- {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"},
- {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"},
- {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"},
- {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"},
- {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"},
- {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"},
- {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"},
- {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"},
- {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"},
- {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"},
- {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"},
- {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"},
- {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"},
- {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"},
- {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"},
- {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"},
- {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"},
- {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"},
- {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"},
- {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"},
- {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"},
- {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"},
- {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"},
- {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"},
- {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"},
- {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"},
- {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"},
- {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"},
- {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"},
- {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"},
- {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"},
- {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"},
- {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"},
- {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"},
- {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"},
- {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"},
- {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"},
- {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"},
- {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"},
- {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"},
- {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"},
-]
-
-[[package]]
-name = "mdurl"
-version = "0.1.2"
-description = "Markdown URL utilities"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"},
- {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"},
-]
-
-[[package]]
-name = "mypy"
-version = "1.9.0"
-description = "Optional static typing for Python"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "mypy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8a67616990062232ee4c3952f41c779afac41405806042a8126fe96e098419f"},
- {file = "mypy-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d357423fa57a489e8c47b7c85dfb96698caba13d66e086b412298a1a0ea3b0ed"},
- {file = "mypy-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49c87c15aed320de9b438ae7b00c1ac91cd393c1b854c2ce538e2a72d55df150"},
- {file = "mypy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:48533cdd345c3c2e5ef48ba3b0d3880b257b423e7995dada04248725c6f77374"},
- {file = "mypy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:4d3dbd346cfec7cb98e6cbb6e0f3c23618af826316188d587d1c1bc34f0ede03"},
- {file = "mypy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:653265f9a2784db65bfca694d1edd23093ce49740b2244cde583aeb134c008f3"},
- {file = "mypy-1.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a3c007ff3ee90f69cf0a15cbcdf0995749569b86b6d2f327af01fd1b8aee9dc"},
- {file = "mypy-1.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2418488264eb41f69cc64a69a745fad4a8f86649af4b1041a4c64ee61fc61129"},
- {file = "mypy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:68edad3dc7d70f2f17ae4c6c1b9471a56138ca22722487eebacfd1eb5321d612"},
- {file = "mypy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:85ca5fcc24f0b4aeedc1d02f93707bccc04733f21d41c88334c5482219b1ccb3"},
- {file = "mypy-1.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aceb1db093b04db5cd390821464504111b8ec3e351eb85afd1433490163d60cd"},
- {file = "mypy-1.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0235391f1c6f6ce487b23b9dbd1327b4ec33bb93934aa986efe8a9563d9349e6"},
- {file = "mypy-1.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4d5ddc13421ba3e2e082a6c2d74c2ddb3979c39b582dacd53dd5d9431237185"},
- {file = "mypy-1.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:190da1ee69b427d7efa8aa0d5e5ccd67a4fb04038c380237a0d96829cb157913"},
- {file = "mypy-1.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:fe28657de3bfec596bbeef01cb219833ad9d38dd5393fc649f4b366840baefe6"},
- {file = "mypy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e54396d70be04b34f31d2edf3362c1edd023246c82f1730bbf8768c28db5361b"},
- {file = "mypy-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5e6061f44f2313b94f920e91b204ec600982961e07a17e0f6cd83371cb23f5c2"},
- {file = "mypy-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a10926e5473c5fc3da8abb04119a1f5811a236dc3a38d92015cb1e6ba4cb9e"},
- {file = "mypy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b685154e22e4e9199fc95f298661deea28aaede5ae16ccc8cbb1045e716b3e04"},
- {file = "mypy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:5d741d3fc7c4da608764073089e5f58ef6352bedc223ff58f2f038c2c4698a89"},
- {file = "mypy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:587ce887f75dd9700252a3abbc9c97bbe165a4a630597845c61279cf32dfbf02"},
- {file = "mypy-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f88566144752999351725ac623471661c9d1cd8caa0134ff98cceeea181789f4"},
- {file = "mypy-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61758fabd58ce4b0720ae1e2fea5cfd4431591d6d590b197775329264f86311d"},
- {file = "mypy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e49499be624dead83927e70c756970a0bc8240e9f769389cdf5714b0784ca6bf"},
- {file = "mypy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:571741dc4194b4f82d344b15e8837e8c5fcc462d66d076748142327626a1b6e9"},
- {file = "mypy-1.9.0-py3-none-any.whl", hash = "sha256:a260627a570559181a9ea5de61ac6297aa5af202f06fd7ab093ce74e7181e43e"},
- {file = "mypy-1.9.0.tar.gz", hash = "sha256:3cc5da0127e6a478cddd906068496a97a7618a21ce9b54bde5bf7e539c7af974"},
-]
-
-[package.dependencies]
-mypy-extensions = ">=1.0.0"
-typing-extensions = ">=4.1.0"
-
-[package.extras]
-dmypy = ["psutil (>=4.0)"]
-install-types = ["pip"]
-mypyc = ["setuptools (>=50)"]
-reports = ["lxml"]
-
-[[package]]
-name = "mypy-extensions"
-version = "1.0.0"
-description = "Type system extensions for programs checked with the mypy type checker."
-optional = false
-python-versions = ">=3.5"
-files = [
- {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
- {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
-]
-
-[[package]]
-name = "nr-date"
-version = "2.1.0"
-description = ""
-optional = false
-python-versions = ">=3.6,<4.0"
-files = [
- {file = "nr_date-2.1.0-py3-none-any.whl", hash = "sha256:bd672a9dfbdcf7c4b9289fea6750c42490eaee08036a72059dcc78cb236ed568"},
- {file = "nr_date-2.1.0.tar.gz", hash = "sha256:0643aea13bcdc2a8bc56af9d5e6a89ef244c9744a1ef00cdc735902ba7f7d2e6"},
-]
-
-[[package]]
-name = "nr-stream"
-version = "1.1.5"
-description = ""
-optional = false
-python-versions = ">=3.6,<4.0"
-files = [
- {file = "nr_stream-1.1.5-py3-none-any.whl", hash = "sha256:47e12150b331ad2cb729cfd9d2abd281c9949809729ba461c6aa87dd9927b2d4"},
- {file = "nr_stream-1.1.5.tar.gz", hash = "sha256:eb0216c6bfc61a46d4568dba3b588502c610ec8ddef4ac98f3932a2bd7264f65"},
-]
-
-[[package]]
-name = "nr-util"
-version = "0.8.12"
-description = "General purpose Python utility library."
-optional = false
-python-versions = ">=3.7,<4.0"
-files = [
- {file = "nr.util-0.8.12-py3-none-any.whl", hash = "sha256:91da02ac9795eb8e015372275c1efe54bac9051231ee9b0e7e6f96b0b4e7d2bb"},
- {file = "nr.util-0.8.12.tar.gz", hash = "sha256:a4549c2033d99d2f0379b3f3d233fd2a8ade286bbf0b3ad0cc7cea16022214f4"},
-]
-
-[package.dependencies]
-deprecated = ">=1.2.0,<2.0.0"
-typing-extensions = ">=3.0.0"
-
-[[package]]
-name = "packaging"
-version = "24.0"
-description = "Core utilities for Python packages"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"},
- {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"},
-]
-
-[[package]]
-name = "pathspec"
-version = "0.12.1"
-description = "Utility library for gitignore style pattern matching of file paths."
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"},
- {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"},
-]
-
-[[package]]
-name = "platformdirs"
-version = "4.2.0"
-description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"},
- {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"},
-]
-
-[package.extras]
-docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"]
-test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"]
-
-[[package]]
-name = "pluggy"
-version = "1.4.0"
-description = "plugin and hook calling mechanisms for python"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"},
- {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"},
-]
-
-[package.extras]
-dev = ["pre-commit", "tox"]
-testing = ["pytest", "pytest-benchmark"]
-
-[[package]]
-name = "protobuf"
-version = "4.25.3"
-description = ""
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "protobuf-4.25.3-cp310-abi3-win32.whl", hash = "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa"},
- {file = "protobuf-4.25.3-cp310-abi3-win_amd64.whl", hash = "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8"},
- {file = "protobuf-4.25.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c"},
- {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019"},
- {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d"},
- {file = "protobuf-4.25.3-cp38-cp38-win32.whl", hash = "sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2"},
- {file = "protobuf-4.25.3-cp38-cp38-win_amd64.whl", hash = "sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4"},
- {file = "protobuf-4.25.3-cp39-cp39-win32.whl", hash = "sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4"},
- {file = "protobuf-4.25.3-cp39-cp39-win_amd64.whl", hash = "sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c"},
- {file = "protobuf-4.25.3-py3-none-any.whl", hash = "sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9"},
- {file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"},
-]
-
-[[package]]
-name = "pycparser"
-version = "2.22"
-description = "C parser in Python"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"},
- {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"},
-]
-
-[[package]]
-name = "pydantic"
-version = "2.7.0"
-description = "Data validation using Python type hints"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "pydantic-2.7.0-py3-none-any.whl", hash = "sha256:9dee74a271705f14f9a1567671d144a851c675b072736f0a7b2608fd9e495352"},
- {file = "pydantic-2.7.0.tar.gz", hash = "sha256:b5ecdd42262ca2462e2624793551e80911a1e989f462910bb81aef974b4bb383"},
-]
-
-[package.dependencies]
-annotated-types = ">=0.4.0"
-email-validator = {version = ">=2.0.0", optional = true, markers = "extra == \"email\""}
-pydantic-core = "2.18.1"
-typing-extensions = ">=4.6.1"
-
-[package.extras]
-email = ["email-validator (>=2.0.0)"]
-
-[[package]]
-name = "pydantic-core"
-version = "2.18.1"
-description = "Core functionality for Pydantic validation and serialization"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "pydantic_core-2.18.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ee9cf33e7fe14243f5ca6977658eb7d1042caaa66847daacbd2117adb258b226"},
- {file = "pydantic_core-2.18.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6b7bbb97d82659ac8b37450c60ff2e9f97e4eb0f8a8a3645a5568b9334b08b50"},
- {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df4249b579e75094f7e9bb4bd28231acf55e308bf686b952f43100a5a0be394c"},
- {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d0491006a6ad20507aec2be72e7831a42efc93193d2402018007ff827dc62926"},
- {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ae80f72bb7a3e397ab37b53a2b49c62cc5496412e71bc4f1277620a7ce3f52b"},
- {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:58aca931bef83217fca7a390e0486ae327c4af9c3e941adb75f8772f8eeb03a1"},
- {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1be91ad664fc9245404a789d60cba1e91c26b1454ba136d2a1bf0c2ac0c0505a"},
- {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:667880321e916a8920ef49f5d50e7983792cf59f3b6079f3c9dac2b88a311d17"},
- {file = "pydantic_core-2.18.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f7054fdc556f5421f01e39cbb767d5ec5c1139ea98c3e5b350e02e62201740c7"},
- {file = "pydantic_core-2.18.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:030e4f9516f9947f38179249778709a460a3adb516bf39b5eb9066fcfe43d0e6"},
- {file = "pydantic_core-2.18.1-cp310-none-win32.whl", hash = "sha256:2e91711e36e229978d92642bfc3546333a9127ecebb3f2761372e096395fc649"},
- {file = "pydantic_core-2.18.1-cp310-none-win_amd64.whl", hash = "sha256:9a29726f91c6cb390b3c2338f0df5cd3e216ad7a938762d11c994bb37552edb0"},
- {file = "pydantic_core-2.18.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:9ece8a49696669d483d206b4474c367852c44815fca23ac4e48b72b339807f80"},
- {file = "pydantic_core-2.18.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7a5d83efc109ceddb99abd2c1316298ced2adb4570410defe766851a804fcd5b"},
- {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f7973c381283783cd1043a8c8f61ea5ce7a3a58b0369f0ee0ee975eaf2f2a1b"},
- {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:54c7375c62190a7845091f521add19b0f026bcf6ae674bdb89f296972272e86d"},
- {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd63cec4e26e790b70544ae5cc48d11b515b09e05fdd5eff12e3195f54b8a586"},
- {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:561cf62c8a3498406495cfc49eee086ed2bb186d08bcc65812b75fda42c38294"},
- {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68717c38a68e37af87c4da20e08f3e27d7e4212e99e96c3d875fbf3f4812abfc"},
- {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d5728e93d28a3c63ee513d9ffbac9c5989de8c76e049dbcb5bfe4b923a9739d"},
- {file = "pydantic_core-2.18.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f0f17814c505f07806e22b28856c59ac80cee7dd0fbb152aed273e116378f519"},
- {file = "pydantic_core-2.18.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d816f44a51ba5175394bc6c7879ca0bd2be560b2c9e9f3411ef3a4cbe644c2e9"},
- {file = "pydantic_core-2.18.1-cp311-none-win32.whl", hash = "sha256:09f03dfc0ef8c22622eaa8608caa4a1e189cfb83ce847045eca34f690895eccb"},
- {file = "pydantic_core-2.18.1-cp311-none-win_amd64.whl", hash = "sha256:27f1009dc292f3b7ca77feb3571c537276b9aad5dd4efb471ac88a8bd09024e9"},
- {file = "pydantic_core-2.18.1-cp311-none-win_arm64.whl", hash = "sha256:48dd883db92e92519201f2b01cafa881e5f7125666141a49ffba8b9facc072b0"},
- {file = "pydantic_core-2.18.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:b6b0e4912030c6f28bcb72b9ebe4989d6dc2eebcd2a9cdc35fefc38052dd4fe8"},
- {file = "pydantic_core-2.18.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f3202a429fe825b699c57892d4371c74cc3456d8d71b7f35d6028c96dfecad31"},
- {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3982b0a32d0a88b3907e4b0dc36809fda477f0757c59a505d4e9b455f384b8b"},
- {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25595ac311f20e5324d1941909b0d12933f1fd2171075fcff763e90f43e92a0d"},
- {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:14fe73881cf8e4cbdaded8ca0aa671635b597e42447fec7060d0868b52d074e6"},
- {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca976884ce34070799e4dfc6fbd68cb1d181db1eefe4a3a94798ddfb34b8867f"},
- {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:684d840d2c9ec5de9cb397fcb3f36d5ebb6fa0d94734f9886032dd796c1ead06"},
- {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:54764c083bbe0264f0f746cefcded6cb08fbbaaf1ad1d78fb8a4c30cff999a90"},
- {file = "pydantic_core-2.18.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:201713f2f462e5c015b343e86e68bd8a530a4f76609b33d8f0ec65d2b921712a"},
- {file = "pydantic_core-2.18.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fd1a9edb9dd9d79fbeac1ea1f9a8dd527a6113b18d2e9bcc0d541d308dae639b"},
- {file = "pydantic_core-2.18.1-cp312-none-win32.whl", hash = "sha256:d5e6b7155b8197b329dc787356cfd2684c9d6a6b1a197f6bbf45f5555a98d411"},
- {file = "pydantic_core-2.18.1-cp312-none-win_amd64.whl", hash = "sha256:9376d83d686ec62e8b19c0ac3bf8d28d8a5981d0df290196fb6ef24d8a26f0d6"},
- {file = "pydantic_core-2.18.1-cp312-none-win_arm64.whl", hash = "sha256:c562b49c96906b4029b5685075fe1ebd3b5cc2601dfa0b9e16c2c09d6cbce048"},
- {file = "pydantic_core-2.18.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:3e352f0191d99fe617371096845070dee295444979efb8f27ad941227de6ad09"},
- {file = "pydantic_core-2.18.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0295d52b012cbe0d3059b1dba99159c3be55e632aae1999ab74ae2bd86a33d7"},
- {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56823a92075780582d1ffd4489a2e61d56fd3ebb4b40b713d63f96dd92d28144"},
- {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dd3f79e17b56741b5177bcc36307750d50ea0698df6aa82f69c7db32d968c1c2"},
- {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38a5024de321d672a132b1834a66eeb7931959c59964b777e8f32dbe9523f6b1"},
- {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d2ce426ee691319d4767748c8e0895cfc56593d725594e415f274059bcf3cb76"},
- {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2adaeea59849ec0939af5c5d476935f2bab4b7f0335b0110f0f069a41024278e"},
- {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9b6431559676a1079eac0f52d6d0721fb8e3c5ba43c37bc537c8c83724031feb"},
- {file = "pydantic_core-2.18.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:85233abb44bc18d16e72dc05bf13848a36f363f83757541f1a97db2f8d58cfd9"},
- {file = "pydantic_core-2.18.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:641a018af4fe48be57a2b3d7a1f0f5dbca07c1d00951d3d7463f0ac9dac66622"},
- {file = "pydantic_core-2.18.1-cp38-none-win32.whl", hash = "sha256:63d7523cd95d2fde0d28dc42968ac731b5bb1e516cc56b93a50ab293f4daeaad"},
- {file = "pydantic_core-2.18.1-cp38-none-win_amd64.whl", hash = "sha256:907a4d7720abfcb1c81619863efd47c8a85d26a257a2dbebdb87c3b847df0278"},
- {file = "pydantic_core-2.18.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:aad17e462f42ddbef5984d70c40bfc4146c322a2da79715932cd8976317054de"},
- {file = "pydantic_core-2.18.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:94b9769ba435b598b547c762184bcfc4783d0d4c7771b04a3b45775c3589ca44"},
- {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80e0e57cc704a52fb1b48f16d5b2c8818da087dbee6f98d9bf19546930dc64b5"},
- {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:76b86e24039c35280ceee6dce7e62945eb93a5175d43689ba98360ab31eebc4a"},
- {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12a05db5013ec0ca4a32cc6433f53faa2a014ec364031408540ba858c2172bb0"},
- {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:250ae39445cb5475e483a36b1061af1bc233de3e9ad0f4f76a71b66231b07f88"},
- {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a32204489259786a923e02990249c65b0f17235073149d0033efcebe80095570"},
- {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6395a4435fa26519fd96fdccb77e9d00ddae9dd6c742309bd0b5610609ad7fb2"},
- {file = "pydantic_core-2.18.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2533ad2883f001efa72f3d0e733fb846710c3af6dcdd544fe5bf14fa5fe2d7db"},
- {file = "pydantic_core-2.18.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b560b72ed4816aee52783c66854d96157fd8175631f01ef58e894cc57c84f0f6"},
- {file = "pydantic_core-2.18.1-cp39-none-win32.whl", hash = "sha256:582cf2cead97c9e382a7f4d3b744cf0ef1a6e815e44d3aa81af3ad98762f5a9b"},
- {file = "pydantic_core-2.18.1-cp39-none-win_amd64.whl", hash = "sha256:ca71d501629d1fa50ea7fa3b08ba884fe10cefc559f5c6c8dfe9036c16e8ae89"},
- {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e178e5b66a06ec5bf51668ec0d4ac8cfb2bdcb553b2c207d58148340efd00143"},
- {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:72722ce529a76a4637a60be18bd789d8fb871e84472490ed7ddff62d5fed620d"},
- {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fe0c1ce5b129455e43f941f7a46f61f3d3861e571f2905d55cdbb8b5c6f5e2c"},
- {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4284c621f06a72ce2cb55f74ea3150113d926a6eb78ab38340c08f770eb9b4d"},
- {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1a0c3e718f4e064efde68092d9d974e39572c14e56726ecfaeebbe6544521f47"},
- {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2027493cc44c23b598cfaf200936110433d9caa84e2c6cf487a83999638a96ac"},
- {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:76909849d1a6bffa5a07742294f3fa1d357dc917cb1fe7b470afbc3a7579d539"},
- {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ee7ccc7fb7e921d767f853b47814c3048c7de536663e82fbc37f5eb0d532224b"},
- {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ee2794111c188548a4547eccc73a6a8527fe2af6cf25e1a4ebda2fd01cdd2e60"},
- {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a139fe9f298dc097349fb4f28c8b81cc7a202dbfba66af0e14be5cfca4ef7ce5"},
- {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d074b07a10c391fc5bbdcb37b2f16f20fcd9e51e10d01652ab298c0d07908ee2"},
- {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c69567ddbac186e8c0aadc1f324a60a564cfe25e43ef2ce81bcc4b8c3abffbae"},
- {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:baf1c7b78cddb5af00971ad5294a4583188bda1495b13760d9f03c9483bb6203"},
- {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2684a94fdfd1b146ff10689c6e4e815f6a01141781c493b97342cdc5b06f4d5d"},
- {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:73c1bc8a86a5c9e8721a088df234265317692d0b5cd9e86e975ce3bc3db62a59"},
- {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e60defc3c15defb70bb38dd605ff7e0fae5f6c9c7cbfe0ad7868582cb7e844a6"},
- {file = "pydantic_core-2.18.1.tar.gz", hash = "sha256:de9d3e8717560eb05e28739d1b35e4eac2e458553a52a301e51352a7ffc86a35"},
-]
-
-[package.dependencies]
-typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
-
-[[package]]
-name = "pydantic-settings"
-version = "2.2.1"
-description = "Settings management using Pydantic"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "pydantic_settings-2.2.1-py3-none-any.whl", hash = "sha256:0235391d26db4d2190cb9b31051c4b46882d28a51533f97440867f012d4da091"},
- {file = "pydantic_settings-2.2.1.tar.gz", hash = "sha256:00b9f6a5e95553590434c0fa01ead0b216c3e10bc54ae02e37f359948643c5ed"},
-]
-
-[package.dependencies]
-pydantic = ">=2.3.0"
-python-dotenv = ">=0.21.0"
-pyyaml = {version = ">=6.0.1", optional = true, markers = "extra == \"yaml\""}
-
-[package.extras]
-toml = ["tomli (>=2.0.1)"]
-yaml = ["pyyaml (>=6.0.1)"]
-
-[[package]]
-name = "pydoc-markdown"
-version = "4.8.2"
-description = "Create Python API documentation in Markdown format."
-optional = false
-python-versions = ">=3.7,<4.0"
-files = [
- {file = "pydoc_markdown-4.8.2-py3-none-any.whl", hash = "sha256:203f74119e6bb2f9deba43d452422de7c8ec31955b61e0620fa4dd8c2611715f"},
- {file = "pydoc_markdown-4.8.2.tar.gz", hash = "sha256:fb6c927e31386de17472d42f9bd3d3be2905977d026f6216881c65145aa67f0b"},
-]
-
-[package.dependencies]
-click = ">=7.1,<9.0"
-"databind.core" = ">=4.4.0,<5.0.0"
-"databind.json" = ">=4.4.0,<5.0.0"
-docspec = ">=2.2.1,<3.0.0"
-docspec-python = ">=2.2.1,<3.0.0"
-docstring-parser = ">=0.11,<0.12"
-jinja2 = ">=3.0.0,<4.0.0"
-"nr.util" = ">=0.7.5,<1.0.0"
-PyYAML = ">=5.0,<7.0"
-requests = ">=2.23.0,<3.0.0"
-tomli = ">=2.0.0,<3.0.0"
-tomli_w = ">=1.0.0,<2.0.0"
-watchdog = "*"
-yapf = ">=0.30.0"
-
-[[package]]
-name = "pygments"
-version = "2.17.2"
-description = "Pygments is a syntax highlighting package written in Python."
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"},
- {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"},
-]
-
-[package.extras]
-plugins = ["importlib-metadata"]
-windows-terminal = ["colorama (>=0.4.6)"]
-
-[[package]]
-name = "pytest"
-version = "8.1.1"
-description = "pytest: simple powerful testing with Python"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"},
- {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"},
-]
-
-[package.dependencies]
-colorama = {version = "*", markers = "sys_platform == \"win32\""}
-iniconfig = "*"
-packaging = "*"
-pluggy = ">=1.4,<2.0"
-
-[package.extras]
-testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
-
-[[package]]
-name = "python-dotenv"
-version = "1.0.1"
-description = "Read key-value pairs from a .env file and set them as environment variables"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"},
- {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"},
-]
-
-[package.extras]
-cli = ["click (>=5.0)"]
-
-[[package]]
-name = "pyyaml"
-version = "6.0.1"
-description = "YAML parser and emitter for Python"
-optional = false
-python-versions = ">=3.6"
-files = [
- {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"},
- {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"},
- {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
- {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
- {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
- {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"},
- {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
- {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
- {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
- {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"},
- {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
- {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
- {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
- {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"},
- {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
- {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
- {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
- {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
- {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"},
- {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
- {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
- {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
- {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"},
- {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"},
- {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"},
- {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"},
- {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"},
- {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"},
- {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"},
- {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"},
- {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"},
- {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"},
- {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"},
- {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"},
- {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"},
- {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"},
- {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"},
- {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"},
- {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"},
- {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"},
- {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"},
- {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"},
- {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"},
- {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"},
- {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"},
- {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"},
- {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"},
- {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"},
- {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"},
- {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"},
- {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
-]
-
-[[package]]
-name = "regex"
-version = "2023.12.25"
-description = "Alternative regular expression module, to replace re."
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"},
- {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"},
- {file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"},
- {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c"},
- {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400"},
- {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e"},
- {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4"},
- {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5"},
- {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd"},
- {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704"},
- {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1"},
- {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392"},
- {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423"},
- {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f"},
- {file = "regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630"},
- {file = "regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105"},
- {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6"},
- {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97"},
- {file = "regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887"},
- {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb"},
- {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c"},
- {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b"},
- {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa"},
- {file = "regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7"},
- {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0"},
- {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe"},
- {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80"},
- {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd"},
- {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4"},
- {file = "regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87"},
- {file = "regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f"},
- {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715"},
- {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d"},
- {file = "regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a"},
- {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a"},
- {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5"},
- {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060"},
- {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3"},
- {file = "regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9"},
- {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f"},
- {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c"},
- {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457"},
- {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf"},
- {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d"},
- {file = "regex-2023.12.25-cp312-cp312-win32.whl", hash = "sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5"},
- {file = "regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = "sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232"},
- {file = "regex-2023.12.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:da695d75ac97cb1cd725adac136d25ca687da4536154cdc2815f576e4da11c69"},
- {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d126361607b33c4eb7b36debc173bf25d7805847346dd4d99b5499e1fef52bc7"},
- {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4719bb05094d7d8563a450cf8738d2e1061420f79cfcc1fa7f0a44744c4d8f73"},
- {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd58946bce44b53b06d94aa95560d0b243eb2fe64227cba50017a8d8b3cd3e2"},
- {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22a86d9fff2009302c440b9d799ef2fe322416d2d58fc124b926aa89365ec482"},
- {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aae8101919e8aa05ecfe6322b278f41ce2994c4a430303c4cd163fef746e04f"},
- {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e692296c4cc2873967771345a876bcfc1c547e8dd695c6b89342488b0ea55cd8"},
- {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:263ef5cc10979837f243950637fffb06e8daed7f1ac1e39d5910fd29929e489a"},
- {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d6f7e255e5fa94642a0724e35406e6cb7001c09d476ab5fce002f652b36d0c39"},
- {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:88ad44e220e22b63b0f8f81f007e8abbb92874d8ced66f32571ef8beb0643b2b"},
- {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3a17d3ede18f9cedcbe23d2daa8a2cd6f59fe2bf082c567e43083bba3fb00347"},
- {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d15b274f9e15b1a0b7a45d2ac86d1f634d983ca40d6b886721626c47a400bf39"},
- {file = "regex-2023.12.25-cp37-cp37m-win32.whl", hash = "sha256:ed19b3a05ae0c97dd8f75a5d8f21f7723a8c33bbc555da6bbe1f96c470139d3c"},
- {file = "regex-2023.12.25-cp37-cp37m-win_amd64.whl", hash = "sha256:a6d1047952c0b8104a1d371f88f4ab62e6275567d4458c1e26e9627ad489b445"},
- {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b43523d7bc2abd757119dbfb38af91b5735eea45537ec6ec3a5ec3f9562a1c53"},
- {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:efb2d82f33b2212898f1659fb1c2e9ac30493ac41e4d53123da374c3b5541e64"},
- {file = "regex-2023.12.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7fca9205b59c1a3d5031f7e64ed627a1074730a51c2a80e97653e3e9fa0d415"},
- {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086dd15e9435b393ae06f96ab69ab2d333f5d65cbe65ca5a3ef0ec9564dfe770"},
- {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e81469f7d01efed9b53740aedd26085f20d49da65f9c1f41e822a33992cb1590"},
- {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34e4af5b27232f68042aa40a91c3b9bb4da0eeb31b7632e0091afc4310afe6cb"},
- {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9852b76ab558e45b20bf1893b59af64a28bd3820b0c2efc80e0a70a4a3ea51c1"},
- {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff100b203092af77d1a5a7abe085b3506b7eaaf9abf65b73b7d6905b6cb76988"},
- {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc038b2d8b1470364b1888a98fd22d616fba2b6309c5b5f181ad4483e0017861"},
- {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:094ba386bb5c01e54e14434d4caabf6583334090865b23ef58e0424a6286d3dc"},
- {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5cd05d0f57846d8ba4b71d9c00f6f37d6b97d5e5ef8b3c3840426a475c8f70f4"},
- {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9aa1a67bbf0f957bbe096375887b2505f5d8ae16bf04488e8b0f334c36e31360"},
- {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:98a2636994f943b871786c9e82bfe7883ecdaba2ef5df54e1450fa9869d1f756"},
- {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37f8e93a81fc5e5bd8db7e10e62dc64261bcd88f8d7e6640aaebe9bc180d9ce2"},
- {file = "regex-2023.12.25-cp38-cp38-win32.whl", hash = "sha256:d78bd484930c1da2b9679290a41cdb25cc127d783768a0369d6b449e72f88beb"},
- {file = "regex-2023.12.25-cp38-cp38-win_amd64.whl", hash = "sha256:b521dcecebc5b978b447f0f69b5b7f3840eac454862270406a39837ffae4e697"},
- {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f7bc09bc9c29ebead055bcba136a67378f03d66bf359e87d0f7c759d6d4ffa31"},
- {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e14b73607d6231f3cc4622809c196b540a6a44e903bcfad940779c80dffa7be7"},
- {file = "regex-2023.12.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9eda5f7a50141291beda3edd00abc2d4a5b16c29c92daf8d5bd76934150f3edc"},
- {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc6bb9aa69aacf0f6032c307da718f61a40cf970849e471254e0e91c56ffca95"},
- {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298dc6354d414bc921581be85695d18912bea163a8b23cac9a2562bbcd5088b1"},
- {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f4e475a80ecbd15896a976aa0b386c5525d0ed34d5c600b6d3ebac0a67c7ddf"},
- {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531ac6cf22b53e0696f8e1d56ce2396311254eb806111ddd3922c9d937151dae"},
- {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f3470f7524b6da61e2020672df2f3063676aff444db1daa283c2ea4ed259d6"},
- {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:89723d2112697feaa320c9d351e5f5e7b841e83f8b143dba8e2d2b5f04e10923"},
- {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ecf44ddf9171cd7566ef1768047f6e66975788258b1c6c6ca78098b95cf9a3d"},
- {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:905466ad1702ed4acfd67a902af50b8db1feeb9781436372261808df7a2a7bca"},
- {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:4558410b7a5607a645e9804a3e9dd509af12fb72b9825b13791a37cd417d73a5"},
- {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7e316026cc1095f2a3e8cc012822c99f413b702eaa2ca5408a513609488cb62f"},
- {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3b1de218d5375cd6ac4b5493e0b9f3df2be331e86520f23382f216c137913d20"},
- {file = "regex-2023.12.25-cp39-cp39-win32.whl", hash = "sha256:11a963f8e25ab5c61348d090bf1b07f1953929c13bd2309a0662e9ff680763c9"},
- {file = "regex-2023.12.25-cp39-cp39-win_amd64.whl", hash = "sha256:e693e233ac92ba83a87024e1d32b5f9ab15ca55ddd916d878146f4e3406b5c91"},
- {file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"},
-]
-
-[[package]]
-name = "requests"
-version = "2.31.0"
-description = "Python HTTP for Humans."
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"},
- {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"},
-]
-
-[package.dependencies]
-certifi = ">=2017.4.17"
-charset-normalizer = ">=2,<4"
-idna = ">=2.5,<4"
-urllib3 = ">=1.21.1,<3"
-
-[package.extras]
-socks = ["PySocks (>=1.5.6,!=1.5.7)"]
-use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
-
-[[package]]
-name = "rich"
-version = "13.7.1"
-description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
-optional = false
-python-versions = ">=3.7.0"
-files = [
- {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"},
- {file = "rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"},
-]
-
-[package.dependencies]
-markdown-it-py = ">=2.2.0"
-pygments = ">=2.13.0,<3.0.0"
-
-[package.extras]
-jupyter = ["ipywidgets (>=7.5.1,<9)"]
-
-[[package]]
-name = "ruff"
-version = "0.3.7"
-description = "An extremely fast Python linter and code formatter, written in Rust."
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0e8377cccb2f07abd25e84fc5b2cbe48eeb0fea9f1719cad7caedb061d70e5ce"},
- {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:15a4d1cc1e64e556fa0d67bfd388fed416b7f3b26d5d1c3e7d192c897e39ba4b"},
- {file = "ruff-0.3.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d28bdf3d7dc71dd46929fafeec98ba89b7c3550c3f0978e36389b5631b793663"},
- {file = "ruff-0.3.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:379b67d4f49774ba679593b232dcd90d9e10f04d96e3c8ce4a28037ae473f7bb"},
- {file = "ruff-0.3.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c060aea8ad5ef21cdfbbe05475ab5104ce7827b639a78dd55383a6e9895b7c51"},
- {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:ebf8f615dde968272d70502c083ebf963b6781aacd3079081e03b32adfe4d58a"},
- {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d48098bd8f5c38897b03604f5428901b65e3c97d40b3952e38637b5404b739a2"},
- {file = "ruff-0.3.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da8a4fda219bf9024692b1bc68c9cff4b80507879ada8769dc7e985755d662ea"},
- {file = "ruff-0.3.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c44e0149f1d8b48c4d5c33d88c677a4aa22fd09b1683d6a7ff55b816b5d074f"},
- {file = "ruff-0.3.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3050ec0af72b709a62ecc2aca941b9cd479a7bf2b36cc4562f0033d688e44fa1"},
- {file = "ruff-0.3.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a29cc38e4c1ab00da18a3f6777f8b50099d73326981bb7d182e54a9a21bb4ff7"},
- {file = "ruff-0.3.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5b15cc59c19edca917f51b1956637db47e200b0fc5e6e1878233d3a938384b0b"},
- {file = "ruff-0.3.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e491045781b1e38b72c91247cf4634f040f8d0cb3e6d3d64d38dcf43616650b4"},
- {file = "ruff-0.3.7-py3-none-win32.whl", hash = "sha256:bc931de87593d64fad3a22e201e55ad76271f1d5bfc44e1a1887edd0903c7d9f"},
- {file = "ruff-0.3.7-py3-none-win_amd64.whl", hash = "sha256:5ef0e501e1e39f35e03c2acb1d1238c595b8bb36cf7a170e7c1df1b73da00e74"},
- {file = "ruff-0.3.7-py3-none-win_arm64.whl", hash = "sha256:789e144f6dc7019d1f92a812891c645274ed08af6037d11fc65fcbc183b7d59f"},
- {file = "ruff-0.3.7.tar.gz", hash = "sha256:d5c1aebee5162c2226784800ae031f660c350e7a3402c4d1f8ea4e97e232e3ba"},
-]
-
-[[package]]
-name = "setuptools"
-version = "69.5.1"
-description = "Easily download, build, install, upgrade, and uninstall Python packages"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"},
- {file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"},
-]
-
-[package.extras]
-docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
-testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
-testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
-
-[[package]]
-name = "shellingham"
-version = "1.5.4"
-description = "Tool to Detect Surrounding Shell"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"},
- {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"},
-]
-
-[[package]]
-name = "sniffio"
-version = "1.3.1"
-description = "Sniff out which async library your code is running under"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"},
- {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"},
-]
-
-[[package]]
-name = "sqlalchemy"
-version = "2.0.29"
-description = "Database Abstraction Library"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "SQLAlchemy-2.0.29-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4c142852ae192e9fe5aad5c350ea6befe9db14370b34047e1f0f7cf99e63c63b"},
- {file = "SQLAlchemy-2.0.29-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:99a1e69d4e26f71e750e9ad6fdc8614fbddb67cfe2173a3628a2566034e223c7"},
- {file = "SQLAlchemy-2.0.29-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ef3fbccb4058355053c51b82fd3501a6e13dd808c8d8cd2561e610c5456013c"},
- {file = "SQLAlchemy-2.0.29-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d6753305936eddc8ed190e006b7bb33a8f50b9854823485eed3a886857ab8d1"},
- {file = "SQLAlchemy-2.0.29-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0f3ca96af060a5250a8ad5a63699180bc780c2edf8abf96c58af175921df847a"},
- {file = "SQLAlchemy-2.0.29-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c4520047006b1d3f0d89e0532978c0688219857eb2fee7c48052560ae76aca1e"},
- {file = "SQLAlchemy-2.0.29-cp310-cp310-win32.whl", hash = "sha256:b2a0e3cf0caac2085ff172c3faacd1e00c376e6884b5bc4dd5b6b84623e29e4f"},
- {file = "SQLAlchemy-2.0.29-cp310-cp310-win_amd64.whl", hash = "sha256:01d10638a37460616708062a40c7b55f73e4d35eaa146781c683e0fa7f6c43fb"},
- {file = "SQLAlchemy-2.0.29-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:308ef9cb41d099099fffc9d35781638986870b29f744382904bf9c7dadd08513"},
- {file = "SQLAlchemy-2.0.29-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:296195df68326a48385e7a96e877bc19aa210e485fa381c5246bc0234c36c78e"},
- {file = "SQLAlchemy-2.0.29-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a13b917b4ffe5a0a31b83d051d60477819ddf18276852ea68037a144a506efb9"},
- {file = "SQLAlchemy-2.0.29-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f6d971255d9ddbd3189e2e79d743ff4845c07f0633adfd1de3f63d930dbe673"},
- {file = "SQLAlchemy-2.0.29-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:61405ea2d563407d316c63a7b5271ae5d274a2a9fbcd01b0aa5503635699fa1e"},
- {file = "SQLAlchemy-2.0.29-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:de7202ffe4d4a8c1e3cde1c03e01c1a3772c92858837e8f3879b497158e4cb44"},
- {file = "SQLAlchemy-2.0.29-cp311-cp311-win32.whl", hash = "sha256:b5d7ed79df55a731749ce65ec20d666d82b185fa4898430b17cb90c892741520"},
- {file = "SQLAlchemy-2.0.29-cp311-cp311-win_amd64.whl", hash = "sha256:205f5a2b39d7c380cbc3b5dcc8f2762fb5bcb716838e2d26ccbc54330775b003"},
- {file = "SQLAlchemy-2.0.29-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d96710d834a6fb31e21381c6d7b76ec729bd08c75a25a5184b1089141356171f"},
- {file = "SQLAlchemy-2.0.29-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:52de4736404e53c5c6a91ef2698c01e52333988ebdc218f14c833237a0804f1b"},
- {file = "SQLAlchemy-2.0.29-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c7b02525ede2a164c5fa5014915ba3591730f2cc831f5be9ff3b7fd3e30958e"},
- {file = "SQLAlchemy-2.0.29-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dfefdb3e54cd15f5d56fd5ae32f1da2d95d78319c1f6dfb9bcd0eb15d603d5d"},
- {file = "SQLAlchemy-2.0.29-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a88913000da9205b13f6f195f0813b6ffd8a0c0c2bd58d499e00a30eb508870c"},
- {file = "SQLAlchemy-2.0.29-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fecd5089c4be1bcc37c35e9aa678938d2888845a134dd016de457b942cf5a758"},
- {file = "SQLAlchemy-2.0.29-cp312-cp312-win32.whl", hash = "sha256:8197d6f7a3d2b468861ebb4c9f998b9df9e358d6e1cf9c2a01061cb9b6cf4e41"},
- {file = "SQLAlchemy-2.0.29-cp312-cp312-win_amd64.whl", hash = "sha256:9b19836ccca0d321e237560e475fd99c3d8655d03da80c845c4da20dda31b6e1"},
- {file = "SQLAlchemy-2.0.29-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:87a1d53a5382cdbbf4b7619f107cc862c1b0a4feb29000922db72e5a66a5ffc0"},
- {file = "SQLAlchemy-2.0.29-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a0732dffe32333211801b28339d2a0babc1971bc90a983e3035e7b0d6f06b93"},
- {file = "SQLAlchemy-2.0.29-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90453597a753322d6aa770c5935887ab1fc49cc4c4fdd436901308383d698b4b"},
- {file = "SQLAlchemy-2.0.29-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ea311d4ee9a8fa67f139c088ae9f905fcf0277d6cd75c310a21a88bf85e130f5"},
- {file = "SQLAlchemy-2.0.29-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5f20cb0a63a3e0ec4e169aa8890e32b949c8145983afa13a708bc4b0a1f30e03"},
- {file = "SQLAlchemy-2.0.29-cp37-cp37m-win32.whl", hash = "sha256:e5bbe55e8552019c6463709b39634a5fc55e080d0827e2a3a11e18eb73f5cdbd"},
- {file = "SQLAlchemy-2.0.29-cp37-cp37m-win_amd64.whl", hash = "sha256:c2f9c762a2735600654c654bf48dad388b888f8ce387b095806480e6e4ff6907"},
- {file = "SQLAlchemy-2.0.29-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7e614d7a25a43a9f54fcce4675c12761b248547f3d41b195e8010ca7297c369c"},
- {file = "SQLAlchemy-2.0.29-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:471fcb39c6adf37f820350c28aac4a7df9d3940c6548b624a642852e727ea586"},
- {file = "SQLAlchemy-2.0.29-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:988569c8732f54ad3234cf9c561364221a9e943b78dc7a4aaf35ccc2265f1930"},
- {file = "SQLAlchemy-2.0.29-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dddaae9b81c88083e6437de95c41e86823d150f4ee94bf24e158a4526cbead01"},
- {file = "SQLAlchemy-2.0.29-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:334184d1ab8f4c87f9652b048af3f7abea1c809dfe526fb0435348a6fef3d380"},
- {file = "SQLAlchemy-2.0.29-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:38b624e5cf02a69b113c8047cf7f66b5dfe4a2ca07ff8b8716da4f1b3ae81567"},
- {file = "SQLAlchemy-2.0.29-cp38-cp38-win32.whl", hash = "sha256:bab41acf151cd68bc2b466deae5deeb9e8ae9c50ad113444151ad965d5bf685b"},
- {file = "SQLAlchemy-2.0.29-cp38-cp38-win_amd64.whl", hash = "sha256:52c8011088305476691b8750c60e03b87910a123cfd9ad48576d6414b6ec2a1d"},
- {file = "SQLAlchemy-2.0.29-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3071ad498896907a5ef756206b9dc750f8e57352113c19272bdfdc429c7bd7de"},
- {file = "SQLAlchemy-2.0.29-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dba622396a3170974f81bad49aacebd243455ec3cc70615aeaef9e9613b5bca5"},
- {file = "SQLAlchemy-2.0.29-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b184e3de58009cc0bf32e20f137f1ec75a32470f5fede06c58f6c355ed42a72"},
- {file = "SQLAlchemy-2.0.29-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c37f1050feb91f3d6c32f864d8e114ff5545a4a7afe56778d76a9aec62638ba"},
- {file = "SQLAlchemy-2.0.29-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bda7ce59b06d0f09afe22c56714c65c957b1068dee3d5e74d743edec7daba552"},
- {file = "SQLAlchemy-2.0.29-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:25664e18bef6dc45015b08f99c63952a53a0a61f61f2e48a9e70cec27e55f699"},
- {file = "SQLAlchemy-2.0.29-cp39-cp39-win32.whl", hash = "sha256:77d29cb6c34b14af8a484e831ab530c0f7188f8efed1c6a833a2c674bf3c26ec"},
- {file = "SQLAlchemy-2.0.29-cp39-cp39-win_amd64.whl", hash = "sha256:04c487305ab035a9548f573763915189fc0fe0824d9ba28433196f8436f1449c"},
- {file = "SQLAlchemy-2.0.29-py3-none-any.whl", hash = "sha256:dc4ee2d4ee43251905f88637d5281a8d52e916a021384ec10758826f5cbae305"},
- {file = "SQLAlchemy-2.0.29.tar.gz", hash = "sha256:bd9566b8e58cabd700bc367b60e90d9349cd16f0984973f98a9a09f9c64e86f0"},
-]
-
-[package.dependencies]
-greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""}
-typing-extensions = ">=4.6.0"
-
-[package.extras]
-aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"]
-aioodbc = ["aioodbc", "greenlet (!=0.4.17)"]
-aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"]
-asyncio = ["greenlet (!=0.4.17)"]
-asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"]
-mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"]
-mssql = ["pyodbc"]
-mssql-pymssql = ["pymssql"]
-mssql-pyodbc = ["pyodbc"]
-mypy = ["mypy (>=0.910)"]
-mysql = ["mysqlclient (>=1.4.0)"]
-mysql-connector = ["mysql-connector-python"]
-oracle = ["cx_oracle (>=8)"]
-oracle-oracledb = ["oracledb (>=1.0.1)"]
-postgresql = ["psycopg2 (>=2.7)"]
-postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"]
-postgresql-pg8000 = ["pg8000 (>=1.29.1)"]
-postgresql-psycopg = ["psycopg (>=3.0.7)"]
-postgresql-psycopg2binary = ["psycopg2-binary"]
-postgresql-psycopg2cffi = ["psycopg2cffi"]
-postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"]
-pymysql = ["pymysql"]
-sqlcipher = ["sqlcipher3_binary"]
-
-[[package]]
-name = "sqlmodel"
-version = "0.0.16"
-description = "SQLModel, SQL databases in Python, designed for simplicity, compatibility, and robustness."
-optional = false
-python-versions = ">=3.7,<4.0"
-files = [
- {file = "sqlmodel-0.0.16-py3-none-any.whl", hash = "sha256:b972f5d319580d6c37ecc417881f6ec4d1ad3ed3583d0ac0ed43234a28bf605a"},
- {file = "sqlmodel-0.0.16.tar.gz", hash = "sha256:966656f18a8e9a2d159eb215b07fb0cf5222acfae3362707ca611848a8a06bd1"},
-]
-
-[package.dependencies]
-pydantic = ">=1.10.13,<3.0.0"
-SQLAlchemy = ">=2.0.0,<2.1.0"
-
-[[package]]
-name = "starlette"
-version = "0.37.2"
-description = "The little ASGI library that shines."
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "starlette-0.37.2-py3-none-any.whl", hash = "sha256:6fe59f29268538e5d0d182f2791a479a0c64638e6935d1c6989e63fb2699c6ee"},
- {file = "starlette-0.37.2.tar.gz", hash = "sha256:9af890290133b79fc3db55474ade20f6220a364a0402e0b556e7cd5e1e093823"},
-]
-
-[package.dependencies]
-anyio = ">=3.4.0,<5"
-
-[package.extras]
-full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"]
-
-[[package]]
-name = "structlog"
-version = "24.1.0"
-description = "Structured Logging for Python"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "structlog-24.1.0-py3-none-any.whl", hash = "sha256:3f6efe7d25fab6e86f277713c218044669906537bb717c1807a09d46bca0714d"},
- {file = "structlog-24.1.0.tar.gz", hash = "sha256:41a09886e4d55df25bdcb9b5c9674bccfab723ff43e0a86a1b7b236be8e57b16"},
-]
-
-[package.extras]
-dev = ["structlog[tests,typing]"]
-docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-mermaid", "sphinxext-opengraph", "twisted"]
-tests = ["freezegun (>=0.2.8)", "pretend", "pytest (>=6.0)", "pytest-asyncio (>=0.17)", "simplejson"]
-typing = ["mypy (>=1.4)", "rich", "twisted"]
-
-[[package]]
-name = "toml-cli"
-version = "0.6.1"
-description = "Command line interface to read and write keys/values to/from toml files"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "toml_cli-0.6.1-py3-none-any.whl", hash = "sha256:1aeba7a0aa21ee818051fc40211a79c0ce3041c3ebd96705110d782725120d71"},
- {file = "toml_cli-0.6.1.tar.gz", hash = "sha256:7a7fb28f90c04519191752dacfbf823ab129edd658bd182bac2487d82d7917ed"},
-]
-
-[package.dependencies]
-regex = ">=2020.7.14"
-tomlkit = ">=0.12.1"
-typer = ">=0.3.2"
-
-[[package]]
-name = "tomli"
-version = "2.0.1"
-description = "A lil' TOML parser"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
- {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
-]
-
-[[package]]
-name = "tomli-w"
-version = "1.0.0"
-description = "A lil' TOML writer"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "tomli_w-1.0.0-py3-none-any.whl", hash = "sha256:9f2a07e8be30a0729e533ec968016807069991ae2fd921a78d42f429ae5f4463"},
- {file = "tomli_w-1.0.0.tar.gz", hash = "sha256:f463434305e0336248cac9c2dc8076b707d8a12d019dd349f5c1e382dd1ae1b9"},
-]
-
-[[package]]
-name = "tomlkit"
-version = "0.12.4"
-description = "Style preserving TOML library"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "tomlkit-0.12.4-py3-none-any.whl", hash = "sha256:5cd82d48a3dd89dee1f9d64420aa20ae65cfbd00668d6f094d7578a78efbb77b"},
- {file = "tomlkit-0.12.4.tar.gz", hash = "sha256:7ca1cfc12232806517a8515047ba66a19369e71edf2439d0f5824f91032b6cc3"},
-]
-
-[[package]]
-name = "typeapi"
-version = "2.2.1"
-description = ""
-optional = false
-python-versions = ">=3.8,<4.0"
-files = [
- {file = "typeapi-2.2.1-py3-none-any.whl", hash = "sha256:e5ef719e7e6f5c7b1ae566f8751d094dbd93422203ef43263468a6e69a6ae33c"},
- {file = "typeapi-2.2.1.tar.gz", hash = "sha256:181a30c6dd79c2ed70bba8c50e56a889cbba0f479b187698513c985fbadec47f"},
-]
-
-[package.dependencies]
-typing-extensions = ">=3.0.0"
-
-[[package]]
-name = "typer"
-version = "0.12.3"
-description = "Typer, build great CLIs. Easy to code. Based on Python type hints."
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "typer-0.12.3-py3-none-any.whl", hash = "sha256:070d7ca53f785acbccba8e7d28b08dcd88f79f1fbda035ade0aecec71ca5c914"},
- {file = "typer-0.12.3.tar.gz", hash = "sha256:49e73131481d804288ef62598d97a1ceef3058905aa536a1134f90891ba35482"},
-]
-
-[package.dependencies]
-click = ">=8.0.0"
-rich = ">=10.11.0"
-shellingham = ">=1.3.0"
-typing-extensions = ">=3.7.4.3"
-
-[[package]]
-name = "types-protobuf"
-version = "4.25.0.20240410"
-description = "Typing stubs for protobuf"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "types-protobuf-4.25.0.20240410.tar.gz", hash = "sha256:86576c2e7e691b8b75f4cabec430f7405edef411b5d191e847c91307935b1b38"},
- {file = "types_protobuf-4.25.0.20240410-py3-none-any.whl", hash = "sha256:335b2e8cf9f39c233dbf0f977a2a4fbc2c0bac720225c544cc1412a67ab1e1d3"},
-]
-
-[[package]]
-name = "types-requests"
-version = "2.31.0.20240406"
-description = "Typing stubs for requests"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "types-requests-2.31.0.20240406.tar.gz", hash = "sha256:4428df33c5503945c74b3f42e82b181e86ec7b724620419a2966e2de604ce1a1"},
- {file = "types_requests-2.31.0.20240406-py3-none-any.whl", hash = "sha256:6216cdac377c6b9a040ac1c0404f7284bd13199c0e1bb235f4324627e8898cf5"},
-]
-
-[package.dependencies]
-urllib3 = ">=2"
-
-[[package]]
-name = "typing-extensions"
-version = "4.11.0"
-description = "Backported and Experimental Type Hints for Python 3.8+"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"},
- {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"},
-]
-
-[[package]]
-name = "urllib3"
-version = "2.2.1"
-description = "HTTP library with thread-safe connection pooling, file post, and more."
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"},
- {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"},
-]
-
-[package.extras]
-brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
-h2 = ["h2 (>=4,<5)"]
-socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
-zstd = ["zstandard (>=0.18.0)"]
-
-[[package]]
-name = "uvicorn"
-version = "0.29.0"
-description = "The lightning-fast ASGI server."
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "uvicorn-0.29.0-py3-none-any.whl", hash = "sha256:2c2aac7ff4f4365c206fd773a39bf4ebd1047c238f8b8268ad996829323473de"},
- {file = "uvicorn-0.29.0.tar.gz", hash = "sha256:6a69214c0b6a087462412670b3ef21224fa48cae0e452b5883e8e8bdfdd11dd0"},
-]
-
-[package.dependencies]
-click = ">=7.0"
-h11 = ">=0.8"
-
-[package.extras]
-standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"]
-
-[[package]]
-name = "watchdog"
-version = "4.0.0"
-description = "Filesystem events monitoring"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:39cb34b1f1afbf23e9562501673e7146777efe95da24fab5707b88f7fb11649b"},
- {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c522392acc5e962bcac3b22b9592493ffd06d1fc5d755954e6be9f4990de932b"},
- {file = "watchdog-4.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6c47bdd680009b11c9ac382163e05ca43baf4127954c5f6d0250e7d772d2b80c"},
- {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8350d4055505412a426b6ad8c521bc7d367d1637a762c70fdd93a3a0d595990b"},
- {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c17d98799f32e3f55f181f19dd2021d762eb38fdd381b4a748b9f5a36738e935"},
- {file = "watchdog-4.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4986db5e8880b0e6b7cd52ba36255d4793bf5cdc95bd6264806c233173b1ec0b"},
- {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:11e12fafb13372e18ca1bbf12d50f593e7280646687463dd47730fd4f4d5d257"},
- {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5369136a6474678e02426bd984466343924d1df8e2fd94a9b443cb7e3aa20d19"},
- {file = "watchdog-4.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76ad8484379695f3fe46228962017a7e1337e9acadafed67eb20aabb175df98b"},
- {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:45cc09cc4c3b43fb10b59ef4d07318d9a3ecdbff03abd2e36e77b6dd9f9a5c85"},
- {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eed82cdf79cd7f0232e2fdc1ad05b06a5e102a43e331f7d041e5f0e0a34a51c4"},
- {file = "watchdog-4.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba30a896166f0fee83183cec913298151b73164160d965af2e93a20bbd2ab605"},
- {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d18d7f18a47de6863cd480734613502904611730f8def45fc52a5d97503e5101"},
- {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2895bf0518361a9728773083908801a376743bcc37dfa252b801af8fd281b1ca"},
- {file = "watchdog-4.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87e9df830022488e235dd601478c15ad73a0389628588ba0b028cb74eb72fed8"},
- {file = "watchdog-4.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6e949a8a94186bced05b6508faa61b7adacc911115664ccb1923b9ad1f1ccf7b"},
- {file = "watchdog-4.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6a4db54edea37d1058b08947c789a2354ee02972ed5d1e0dca9b0b820f4c7f92"},
- {file = "watchdog-4.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d31481ccf4694a8416b681544c23bd271f5a123162ab603c7d7d2dd7dd901a07"},
- {file = "watchdog-4.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8fec441f5adcf81dd240a5fe78e3d83767999771630b5ddfc5867827a34fa3d3"},
- {file = "watchdog-4.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:6a9c71a0b02985b4b0b6d14b875a6c86ddea2fdbebd0c9a720a806a8bbffc69f"},
- {file = "watchdog-4.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:557ba04c816d23ce98a06e70af6abaa0485f6d94994ec78a42b05d1c03dcbd50"},
- {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:d0f9bd1fd919134d459d8abf954f63886745f4660ef66480b9d753a7c9d40927"},
- {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f9b2fdca47dc855516b2d66eef3c39f2672cbf7e7a42e7e67ad2cbfcd6ba107d"},
- {file = "watchdog-4.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:73c7a935e62033bd5e8f0da33a4dcb763da2361921a69a5a95aaf6c93aa03a87"},
- {file = "watchdog-4.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:6a80d5cae8c265842c7419c560b9961561556c4361b297b4c431903f8c33b269"},
- {file = "watchdog-4.0.0-py3-none-win32.whl", hash = "sha256:8f9a542c979df62098ae9c58b19e03ad3df1c9d8c6895d96c0d51da17b243b1c"},
- {file = "watchdog-4.0.0-py3-none-win_amd64.whl", hash = "sha256:f970663fa4f7e80401a7b0cbeec00fa801bf0287d93d48368fc3e6fa32716245"},
- {file = "watchdog-4.0.0-py3-none-win_ia64.whl", hash = "sha256:9a03e16e55465177d416699331b0f3564138f1807ecc5f2de9d55d8f188d08c7"},
- {file = "watchdog-4.0.0.tar.gz", hash = "sha256:e3e7065cbdabe6183ab82199d7a4f6b3ba0a438c5a512a68559846ccb76a78ec"},
-]
-
-[package.extras]
-watchmedo = ["PyYAML (>=3.10)"]
-
-[[package]]
-name = "wrapt"
-version = "1.16.0"
-description = "Module for decorators, wrappers and monkey patching."
-optional = false
-python-versions = ">=3.6"
-files = [
- {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"},
- {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"},
- {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"},
- {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"},
- {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"},
- {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"},
- {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"},
- {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"},
- {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"},
- {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"},
- {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"},
- {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"},
- {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"},
- {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"},
- {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"},
- {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"},
- {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"},
- {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"},
- {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"},
- {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"},
- {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"},
- {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"},
- {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"},
- {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"},
- {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"},
- {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"},
- {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"},
- {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"},
- {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"},
- {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"},
- {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"},
- {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"},
- {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"},
- {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"},
- {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"},
- {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"},
- {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"},
- {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"},
- {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"},
- {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"},
- {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"},
- {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"},
- {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"},
- {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"},
- {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"},
- {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"},
- {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"},
- {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"},
- {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"},
- {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"},
- {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"},
- {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"},
- {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"},
- {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"},
- {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"},
- {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"},
- {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"},
- {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"},
- {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"},
- {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"},
- {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"},
- {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"},
- {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"},
- {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"},
- {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"},
- {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"},
- {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"},
- {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"},
- {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"},
- {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"},
-]
-
-[[package]]
-name = "yapf"
-version = "0.40.2"
-description = "A formatter for Python code"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "yapf-0.40.2-py3-none-any.whl", hash = "sha256:adc8b5dd02c0143108878c499284205adb258aad6db6634e5b869e7ee2bd548b"},
- {file = "yapf-0.40.2.tar.gz", hash = "sha256:4dab8a5ed7134e26d57c1647c7483afb3f136878b579062b786c9ba16b94637b"},
-]
-
-[package.dependencies]
-importlib-metadata = ">=6.6.0"
-platformdirs = ">=3.5.1"
-tomli = ">=2.0.1"
-
-[[package]]
-name = "zipp"
-version = "3.18.1"
-description = "Backport of pathlib-compatible object wrapper for zip files"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"},
- {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"},
-]
-
-[package.extras]
-docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
-testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"]
-
-[metadata]
-lock-version = "2.0"
-python-versions = "^3.12"
-content-hash = "fa755d48b0a9c33474b7f2ddc98a922629e8cfc0ced2c1b09643969c19492bbd"
diff --git a/poetry.toml b/poetry.toml
deleted file mode 100644
index ab1033b..0000000
--- a/poetry.toml
+++ /dev/null
@@ -1,2 +0,0 @@
-[virtualenvs]
-in-project = true
diff --git a/profiles.go b/profiles.go
new file mode 100644
index 0000000..139a8e8
--- /dev/null
+++ b/profiles.go
@@ -0,0 +1,189 @@
+package sophrosyne
+
+import (
+ "context"
+ "time"
+)
+
// Profile is a named collection of checks to run together. It carries
// audit timestamps and supports soft deletion via DeletedAt.
type Profile struct {
	ID        string
	Name      string
	Checks    []Check
	CreatedAt time.Time
	UpdatedAt time.Time
	DeletedAt *time.Time // nil unless the profile has been soft-deleted
}
+
// EntityType identifies the entity kind for generic entity handling.
func (p Profile) EntityType() string { return "Profile" }

// EntityID returns the profile's unique identifier.
func (p Profile) EntityID() string { return p.ID }
+
// ProfileService defines CRUD operations over profiles. Lookups are by
// ID or name; deletion is by name.
type ProfileService interface {
	GetProfile(ctx context.Context, id string) (Profile, error)
	GetProfileByName(ctx context.Context, name string) (Profile, error)
	// GetProfiles returns a page of profiles starting at cursor.
	GetProfiles(ctx context.Context, cursor *DatabaseCursor) ([]Profile, error)
	CreateProfile(ctx context.Context, profile CreateProfileRequest) (Profile, error)
	UpdateProfile(ctx context.Context, profile UpdateProfileRequest) (Profile, error)
	DeleteProfile(ctx context.Context, name string) error
}
+
// GetProfileRequest identifies a profile by ID or, when ID is absent,
// by name (the validate tags make the two mutually exclusive).
type GetProfileRequest struct {
	ID   string `json:"id"`
	Name string `json:"name" validate:"required_without=ID,excluded_with=ID"`
}

// GetProfileResponse is the API representation of a profile. Timestamps
// are pre-formatted strings; DeletedAt is omitted when empty.
type GetProfileResponse struct {
	Name      string   `json:"name"`
	Checks    []string `json:"checks"` // check names only, not full Check objects
	CreatedAt string   `json:"createdAt"`
	UpdatedAt string   `json:"updatedAt"`
	DeletedAt string   `json:"deletedAt,omitempty"`
}
+
+func (r *GetProfileResponse) FromProfile(p Profile) *GetProfileResponse {
+ var c []string
+ for _, entry := range p.Checks {
+ c = append(c, entry.Name)
+ }
+ r.Name = p.Name
+ r.Checks = c
+ r.CreatedAt = p.CreatedAt.Format(TimeFormatInResponse)
+ r.UpdatedAt = p.UpdatedAt.Format(TimeFormatInResponse)
+ if p.DeletedAt != nil {
+ r.DeletedAt = p.DeletedAt.Format(TimeFormatInResponse)
+ }
+ return r
+}
+
// GetProfilesRequest asks for a page of profiles; an empty Cursor
// requests the first page.
type GetProfilesRequest struct {
	Cursor string `json:"cursor"`
}

// GetProfilesResponse is one page of profiles plus the cursor for the
// next page and the total number of profiles.
type GetProfilesResponse struct {
	Profiles []GetProfileResponse `json:"profiles"`
	Cursor   string               `json:"cursor"`
	Total    int                  `json:"total"`
}

// CreateProfileRequest creates a profile with the given name and an
// optional list of check names to attach.
type CreateProfileRequest struct {
	Name   string   `json:"name" validate:"required"`
	Checks []string `json:"checks"`
}

// CreateProfileResponse mirrors GetProfileResponse for a newly created
// profile.
type CreateProfileResponse struct {
	GetProfileResponse
}

// UpdateProfileRequest replaces the check list of the profile
// identified by Name.
type UpdateProfileRequest struct {
	Name   string   `json:"name" validate:"required"`
	Checks []string `json:"checks"`
}

// UpdateProfileResponse mirrors GetProfileResponse for an updated
// profile.
type UpdateProfileResponse struct {
	GetProfileResponse
}

// DeleteProfileRequest identifies the profile to delete by name.
type DeleteProfileRequest struct {
	Name string `json:"name" validate:"required"`
}
+
// ProfileServiceCache is a caching decorator around a ProfileService.
// Reads are served from the cache when possible; writes update the
// cache after delegating to the wrapped service. Cache entries are
// keyed by profile ID.
type ProfileServiceCache struct {
	cache          *Cache
	profileService ProfileService
	tracingService TracingService
}
+
// NewProfileServiceCache wraps profileService in a cache whose entry
// TTL comes from config.Services.Profiles.CacheTTL. All operations are
// traced via tracingService.
func NewProfileServiceCache(config *Config, profileService ProfileService, tracingService TracingService) *ProfileServiceCache {
	return &ProfileServiceCache{
		cache:          NewCache(config.Services.Profiles.CacheTTL),
		profileService: profileService,
		tracingService: tracingService,
	}
}
+
+func (p ProfileServiceCache) GetProfile(ctx context.Context, id string) (Profile, error) {
+ ctx, span := p.tracingService.StartSpan(ctx, "ProfileServiceCache.GetProfile")
+ v, ok := p.cache.Get(id)
+ if ok {
+ span.End()
+ return v.(Profile), nil
+ }
+
+ profile, err := p.profileService.GetProfile(ctx, id)
+ if err != nil {
+ span.End()
+ return Profile{}, err
+ }
+
+ p.cache.Set(id, profile)
+ span.End()
+ return profile, nil
+}
+
+func (p ProfileServiceCache) GetProfileByName(ctx context.Context, name string) (Profile, error) {
+ ctx, span := p.tracingService.StartSpan(ctx, "ProfileServiceCache.GetProfileByName")
+ profile, err := p.profileService.GetProfileByName(ctx, name)
+ if err != nil {
+ span.End()
+ return Profile{}, err
+ }
+
+ p.cache.Set(profile.ID, profile)
+ span.End()
+ return profile, nil
+}
+
+func (p ProfileServiceCache) GetProfiles(ctx context.Context, cursor *DatabaseCursor) ([]Profile, error) {
+ ctx, span := p.tracingService.StartSpan(ctx, "ProfileServiceCache.GetProfiles")
+ profiles, err := p.profileService.GetProfiles(ctx, cursor)
+ if err != nil {
+ span.End()
+ return nil, err
+ }
+
+ for _, user := range profiles {
+ p.cache.Set(user.ID, user)
+ }
+
+ span.End()
+ return profiles, nil
+}
+
+func (p ProfileServiceCache) CreateProfile(ctx context.Context, profile CreateProfileRequest) (Profile, error) {
+ ctx, span := p.tracingService.StartSpan(ctx, "ProfileServiceCache.CreateProfile")
+ createProfile, err := p.profileService.CreateProfile(ctx, profile)
+ if err != nil {
+ span.End()
+ return Profile{}, err
+ }
+
+ p.cache.Set(createProfile.ID, createProfile)
+ span.End()
+ return createProfile, nil
+}
+
+func (p ProfileServiceCache) UpdateProfile(ctx context.Context, profile UpdateProfileRequest) (Profile, error) {
+ ctx, span := p.tracingService.StartSpan(ctx, "ProfileServiceCache.UpdateProfile")
+ updateProfile, err := p.profileService.UpdateProfile(ctx, profile)
+ if err != nil {
+ span.End()
+ return Profile{}, err
+ }
+
+ p.cache.Set(updateProfile.ID, updateProfile)
+ span.End()
+ return updateProfile, nil
+}
+
+func (p ProfileServiceCache) DeleteProfile(ctx context.Context, name string) error {
+ ctx, span := p.tracingService.StartSpan(ctx, "ProfileServiceCache.DeleteProfile")
+ err := p.profileService.DeleteProfile(ctx, name)
+ if err != nil {
+ span.End()
+ return err
+ }
+
+ p.cache.Delete(name)
+ span.End()
+ return nil
+}
diff --git a/src/sophrosyne/grpc/checks/checks.proto b/proto/checks/checks.proto
similarity index 80%
rename from src/sophrosyne/grpc/checks/checks.proto
rename to proto/checks/checks.proto
index ab93823..163f3fa 100644
--- a/src/sophrosyne/grpc/checks/checks.proto
+++ b/proto/checks/checks.proto
@@ -2,6 +2,8 @@ syntax = "proto3";
package checks.v1;
+option go_package = "github.com/madsrc/sophrosyne/grpc/checks";
+
message CheckRequest {
oneof check {
string text = 1;
diff --git a/pyproject.toml b/pyproject.toml
deleted file mode 100644
index 306854c..0000000
--- a/pyproject.toml
+++ /dev/null
@@ -1,106 +0,0 @@
-[tool.poetry]
-name = "sophrosyne"
-version = "0.1.0"
-description = ""
-authors = ["Mads R. Havmand "]
-readme = "README.md"
-packages = [
- { include = "sophrosyne", from = "src" },
-]
-
-[tool.poetry.dependencies]
-python = "^3.12"
-fastapi = "^0.110.0"
-pydantic = {extras = ["email"], version = "^2.6.4"}
-pydantic-settings = {extras = ["yaml"], version = "^2.2.1"}
-sqlmodel = "^0.0.16"
-click = "^8.1.7"
-h11 = "^0.14.0"
-grpcio = "^1.62.1"
-protobuf = "<5.0dev"
-uvicorn = "^0.29.0"
-asyncpg = "^0.29.0"
-greenlet = "^3.0.3"
-alembic = "^1.13.1"
-cryptography = "^42.0.5"
-requests = "^2.31.0"
-structlog = "^24.1.0"
-
-[tool.poetry.group.test]
-optional = true
-
-[tool.poetry.group.test.dependencies]
-pytest = "^8.0.0"
-coverage = "^7.2.7"
-toml-cli = "^0.6.0"
-
-[tool.poetry.group.dev]
-optional = true
-
-[tool.poetry.group.dev.dependencies]
-pydoc-markdown = "^4.8.2"
-ruff = "^0.3.4"
-mypy = "^1.9.0"
-grpcio-tools = "^1.62.1"
-grpc-stubs = "^1.53.0.5"
-types-protobuf = "^4.24.0.20240311"
-types-requests = "^2.31.0.20240406"
-better-exceptions = "^0.3.3"
-
-[build-system]
-requires = ["poetry-core"]
-build-backend = "poetry.core.masonry.api"
-
-[tool.pytest.ini_options]
-pythonpath = [ "src" ]
-testpaths = [
- "tests",
-]
-
-[tool.coverage.run]
-branch = true
-data_file = "reports/.coverage"
-source = [ "src" ]
-
-[tool.coverage.xml]
-output = "reports/coverage.xml"
-
-[tool.coverage.report]
-show_missing = true
-
-[tool.coverage.html]
-directory = "reports/htmlcov"
-
-[[tool.pydoc-markdown.loaders]]
-type = "python"
-search_path = [ "src" ]
-packages = [ "sophrosyne" ]
-
-[tool.pydoc-markdown.renderer]
-type = "markdown"
-filename = "docs/api.md"
-
-[tool.ruff]
-extend-exclude = ["*_pb2.py", "*_pb2_grpc.py", "*_pb2.pyi", "*/migrations/versions/*.py"]
-
-[tool.ruff.lint]
-# Enable all `pydocstyle` rules, limiting to those that adhere to the
-# Google convention via `convention = "google"`, below.
-select = ["D", "I", "F"]
-
-[tool.ruff.lint.pydocstyle]
-convention = "google"
-
-[tool.ruff.lint.extend-per-file-ignores]
-# Do not lint test files for pydocstyle
-"tests/*" = ["D"]
-
-[tool.mypy]
-packages = [
- "sophrosyne",
- "tests"
-]
-# https://mypy.readthedocs.io/en/stable/running_mypy.html#mapping-file-paths-to-modules
-mypy_path = "src,tests"
-namespace_packages = true
-explicit_package_bases = true
diff --git a/scans.go b/scans.go
new file mode 100644
index 0000000..f220629
--- /dev/null
+++ b/scans.go
@@ -0,0 +1,5 @@
+package sophrosyne
+
+type PerformScanRequest struct {
+ Profile string `json:"profile"`
+}
diff --git a/sonar-project.properties b/sonar-project.properties
index 9058622..912ce77 100644
--- a/sonar-project.properties
+++ b/sonar-project.properties
@@ -1,2 +1,3 @@
sonar.projectKey=MadsRC_sophrosyne
sonar.organization=madsrc
+sonar.go.golangci-lint.reportPaths=build/report.xml
diff --git a/src/sophrosyne/__init__.py b/src/sophrosyne/__init__.py
deleted file mode 100644
index 405dae1..0000000
--- a/src/sophrosyne/__init__.py
+++ /dev/null
@@ -1,23 +0,0 @@
-"""sophrosyne module.
-
-This module contains the sophrosyne application logic. The sophrosyne application is
-responsible for handling the incoming requests, classifying the content, and
-returning the classification results.
-
-The application is divided into the following modules:
- - api: The API module contains the FastAPI application and the API routers.
- - core: The core module contains the core logic of the application, such as
- database operations and configuration.
- - grpc: The gRPC module contains autogenerated gRPC client code used to
- communicate with upstream services.
-
-The application is started by running the main module. The main module creates
-the FastAPI application and starts the uvicorn server. Requests are received by
-the FastAPI application and routed to the appropriate API endpoint. The API is
-in the style of a RPC-over-HTTP API, where each endpoint corresponds to a
-specific action. The API is __not__ RESTful and does not follow RESTful design
-principles.
-
-The application serves an OpenAPI schema at the `/.well-known/openapi` endpoint.
-Additionally, the application serves a ReDoc UI at the `/docs` endpoint.
-"""
diff --git a/src/sophrosyne/api/__init__.py b/src/sophrosyne/api/__init__.py
deleted file mode 100644
index 5eaf47a..0000000
--- a/src/sophrosyne/api/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-"""Module containing the API classes for the SOPH API.
-
-Attributes:
- api_router (APIRouter): The API router for the API.
-"""
-
-from fastapi import APIRouter
-
-from sophrosyne.api.routers.health import router as health_router
-from sophrosyne.api.v1.api import api_router as v1_api_router
-from sophrosyne.core.config import get_settings
-
-api_router = APIRouter()
-api_router.include_router(v1_api_router, prefix=get_settings().api_v1_str)
-api_router.include_router(health_router, prefix="/health")
diff --git a/src/sophrosyne/api/dependencies.py b/src/sophrosyne/api/dependencies.py
deleted file mode 100644
index c22c3f8..0000000
--- a/src/sophrosyne/api/dependencies.py
+++ /dev/null
@@ -1,166 +0,0 @@
-"""Dependencies for the API."""
-
-from typing import Annotated
-
-from fastapi import Depends, HTTPException
-from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
-from sqlalchemy.ext.asyncio import async_sessionmaker
-from sqlmodel import select
-from sqlmodel.ext.asyncio.session import AsyncSession
-
-from sophrosyne.core.config import Settings
-from sophrosyne.core.config import get_settings as get_config_settings
-from sophrosyne.core.database import engine
-from sophrosyne.core.logging import get_logger
-from sophrosyne.core.models import User
-from sophrosyne.core.safety import Safety
-from sophrosyne.core.security import sign
-
-header_scheme = HTTPBearer(
- description="Authorization header for the API",
- auto_error=False,
-)
-
-
-async def get_db_session():
- """Returns an asynchronous database session.
-
- This function creates an asynchronous session using the `async_sessionmaker` and `AsyncSession` classes.
- The session is yielded to the caller, allowing them to use it within a context manager.
-
- Returns:
- AsyncSession: An asynchronous database session.
-
- Example:
- async with get_db_session() as session:
- # Use the session to perform database operations
- await session.execute(...)
- await session.commit()
- """
- async_session = async_sessionmaker(
- engine, class_=AsyncSession, expire_on_commit=False
- )
- async with async_session() as session:
- yield session
-
-
-async def get_safety_service(
- db_session: Annotated[AsyncSession, Depends(get_db_session)],
-):
- """Retrieves the safety service.
-
- Args:
- db_session (AsyncSession): The database session.
-
- Yields:
- Safety: The safety service.
-
- """
- yield Safety(db_session=db_session)
-
-
-async def _authenticate(
- credentials: Annotated[HTTPAuthorizationCredentials, Depends(header_scheme)],
- db_session: Annotated[AsyncSession, Depends(get_db_session)],
-) -> User | None:
- """Authenticates a request based on provided credentials.
-
- If no credentials are provided, None is returned.
-
- Args:
- credentials (HTTPAuthorizationCredentials): The credentials to check.
- db_session (AsyncSession): The database session to use.
-
- Returns:
- User | None: The authenticated user. If credentials matches no user,
- None is returned
- """
- if credentials is None:
- get_logger().info("authentication", result="fail")
- return None
- result = await db_session.exec(
- select(User).where(User.signed_token == sign(credentials.credentials))
- )
- user = result.first()
- if user is None:
- get_logger().info("authentication", result="fail")
- return None
- get_logger().info("authentication", result="success", user=user.id)
- return user
-
-
-async def is_authenticated(
- user: Annotated[User | None, Depends(_authenticate)],
-) -> bool:
- """Checks that the user is authenticated and returns a boolean.
-
- Args:
- user (User | None): The user to check.
-
- Returns:
- bool: True if the user is authenticated, False otherwise.
- """
- return user is not None
-
-
-async def auth_and_return_user(
- user: Annotated[User | None, Depends(_authenticate)],
-) -> User:
- """Retrieves the current user based on the provided credentials.
-
- Args:
- user (User): The user to authenticate.
-
- Returns:
- User: The current user.
-
- Raises:
- HTTPException: If the user is not authenticated (status code 403).
- """
- if user is None:
- raise HTTPException(status_code=403, detail="Not authenticated")
-
- return user
-
-
-async def get_settings() -> Settings:
- """Retrieves the application settings.
-
- Returns:
- Settings: The application settings.
- """
- return get_config_settings()
-
-
-async def require_admin(
- current_user: Annotated[User, Depends(auth_and_return_user)],
-):
- """Requires the current user to be an admin.
-
- Args:
- current_user (User): The current user.
-
- Raises:
- HTTPException: If the current user is not an admin (status code 403).
- """
- if not current_user.is_admin:
- get_logger().info("user is not an admin", user=current_user.id)
- raise HTTPException(
- status_code=403, detail="Only admins can perform this operation"
- )
-
-
-async def require_active_user(
- current_user: Annotated[User, Depends(auth_and_return_user)],
-):
- """Requires the current user to be active.
-
- Args:
- current_user (User): The current user.
-
- Raises:
- HTTPException: If the current user is not active (status code 403).
- """
- if not current_user.is_active:
- get_logger().info("user is not active", user=current_user.id)
- raise HTTPException(status_code=403, detail="Not authenticated")
diff --git a/src/sophrosyne/api/routers/health.py b/src/sophrosyne/api/routers/health.py
deleted file mode 100644
index 877c62d..0000000
--- a/src/sophrosyne/api/routers/health.py
+++ /dev/null
@@ -1,115 +0,0 @@
-"""This module contains the health router for the application."""
-
-import importlib.metadata
-import time
-from datetime import datetime, timezone
-from typing import Annotated
-
-from fastapi import APIRouter, Depends, Response, status
-from sophrosyne.api.dependencies import get_db_session, get_settings, is_authenticated
-from sophrosyne.core.config import Settings
-from sophrosyne.healthcheck.models import Check, HealthCheck, Status, SubComponent
-from sqlmodel import text
-from sqlmodel.ext.asyncio.session import AsyncSession
-
-router = APIRouter()
-
-
-@router.get("", response_model=HealthCheck)
-async def get_health(
- response: Response,
- settings: Annotated[Settings, Depends(get_settings)],
- db_session: Annotated[AsyncSession, Depends(get_db_session)],
- is_authenticated: Annotated[bool, Depends(is_authenticated)],
-) -> HealthCheck:
- """Get the health of the application.
-
- Args:
- response (Response): The response object.
- settings (Settings): The application settings.
- db_session (AsyncSession): The database session.
- is_authenticated (bool): Flag indicating if the user is authenticated.
-
- Returns:
- HealthCheck: The health of the application.
- """
- response.headers["Cache-Control"] = "no-cache"
- hc: HealthCheck
- if is_authenticated:
- hc = await do_authenticated_healthcheck(db_session=db_session)
- else:
- hc = await do_healthcheck(db_session=db_session)
-
- if hc.status == Status.PASS:
- response.status_code = status.HTTP_200_OK
- else:
- response.status_code = status.HTTP_503_SERVICE_UNAVAILABLE
- return hc
-
-
-@router.get("/ping", response_model=str)
-async def ping() -> str:
- """Ping endpoint to check if the server is running."""
- return "pong"
-
-
-async def do_authenticated_healthcheck(db_session: AsyncSession) -> HealthCheck:
- """Perform an authenticated health check.
-
- Args:
- db_session (AsyncSession): The database session.
-
- Returns:
- HealthCheck: The health check result.
- """
- status: Status
- output: str | None = None
- end: int
- begin = time.perf_counter_ns()
- try:
- await db_session.execute(statement=text("SELECT 1"))
- end = time.perf_counter_ns()
- status = Status.PASS
- except Exception as e:
- end = time.perf_counter_ns()
- status = Status.FAIL
- output = str(e)
-
- return HealthCheck(
- status=status,
- version=importlib.metadata.version("sophrosyne"),
- checks=Check(
- sub_components={
- "database:responseTime": [
- SubComponent(
- status=status,
- output=output,
- observed_value=str(end - begin),
- observed_unit="ns",
- component_type="datastore",
- time=datetime.now(timezone.utc),
- )
- ],
- }
- ),
- )
-
-
-async def do_healthcheck(db_session: AsyncSession) -> HealthCheck:
- """Perform a health check.
-
- Args:
- db_session (AsyncSession): The database session.
-
- Returns:
- HealthCheck: The health check result.
- """
- try:
- await db_session.execute(statement=text("SELECT 1"))
- except Exception:
- return HealthCheck(
- status=Status.FAIL,
- )
- return HealthCheck(
- status=Status.PASS,
- )
diff --git a/src/sophrosyne/api/v1/__init__.py b/src/sophrosyne/api/v1/__init__.py
deleted file mode 100644
index 820eb61..0000000
--- a/src/sophrosyne/api/v1/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-"""API v1 module."""
-
-from sophrosyne.api.v1.tags import Tags # noqa: F401
diff --git a/src/sophrosyne/api/v1/api.py b/src/sophrosyne/api/v1/api.py
deleted file mode 100644
index cda2056..0000000
--- a/src/sophrosyne/api/v1/api.py
+++ /dev/null
@@ -1,22 +0,0 @@
-"""Exposes the API routes for the v1 version of the API.
-
-This module contains the API routes for the v1 version of the API. The routes are
-grouped by the different resources they interact with.
-
-Attributes:
- api_router (APIRouter): The API router for the v1 version of the API.
-"""
-
-
-from fastapi import APIRouter, Depends
-
-from sophrosyne.api.dependencies import auth_and_return_user, require_active_user
-from sophrosyne.api.v1.routers import checks, profiles, safety, users
-
-api_router = APIRouter(
- dependencies=[Depends(auth_and_return_user), Depends(require_active_user)]
-)
-api_router.include_router(safety.router)
-api_router.include_router(users.router)
-api_router.include_router(checks.router)
-api_router.include_router(profiles.router)
diff --git a/src/sophrosyne/api/v1/models.py b/src/sophrosyne/api/v1/models.py
deleted file mode 100644
index 7a63477..0000000
--- a/src/sophrosyne/api/v1/models.py
+++ /dev/null
@@ -1,376 +0,0 @@
-"""SQLModels, Pydantic models and Pydantic schemas for the v1 API.
-
-This module defines the SQLModels, Pydantic models and Pydantic schemas for the
-v1 API of the SOPH API service. The models and schemas are used to define the
-request and response bodies for the API endpoints.
-
-Example:
- The UserCreate Pydantic model is used to define the request body for the
- POST /users/ endpoint. The UserBase Pydantic model is used to define the
- response body for the GET /users/ endpoint.
-
-Any input or output to the v1 API should be validated against these models and
-schemas.
-
-If a model or schema doesn't directy affect the input or output of the API, it
-should be defined in the core.models module instead.
-"""
-
-from typing import Annotated, Literal, Union
-
-from pydantic import EmailStr, Field
-from sqlmodel import SQLModel
-
-from sophrosyne.core.models import (
- CheckBase,
- ProfileBase,
- SafetyServicePayloadImage,
- SafetyServicePayloadText,
- SafetyServicePayloadType,
- UserBase,
- Verdict,
-)
-
-
-class UsersCreateUserResponse(UserBase):
- """Represents the response object for creating a user.
-
- Attributes:
- token (str): The authentication token associated with the user.
- """
-
- token: str
-
-
-class UsersCreateUserRequest(SQLModel):
- """Represents a request to create a new user.
-
- Attributes:
- name (str): The name of the user.
- contact (EmailStr): The contact email of the user.
- is_active (bool, optional): Whether the user is active. Defaults to True.
- default_profile (str | None, optional): The default profile of the user. Defaults to None.
- """
-
- name: str
- contact: EmailStr
- is_active: bool = True
- default_profile: str | None = None
- is_admin: bool = False
-
-
-class UsersListUserResponse(UserBase):
- """Represents the response object for a single user in the list of users.
-
- This class inherits from the `UserBase` class and provides additional functionality specific to the response object.
-
- Attributes:
- Inherits all attributes from the `UserBase` class.
-
- """
-
- pass
-
-
-class UsersListUserRequest(SQLModel):
- """Represents a request to list users.
-
- Attributes:
- name (str): The name of the user.
- """
-
- name: str
-
-
-UsersListUsersResponse = Annotated[
- list[UsersListUserResponse],
- Field(..., description="A list of users"),
-]
-
-
-class UsersUpdateUserResponse(UserBase):
- """Represents the response for updating a user.
-
- This class inherits from the `UserBase` class and includes any additional
- attributes or methods specific to the response for updating a user.
- """
-
- pass
-
-
-class UsersUpdateUserRequest(SQLModel):
- """Represents a request to update a user.
-
- Attributes:
- name (str): The name of the user.
- contact (EmailStr | None, optional): The contact email of the user. Defaults to None.
- new_email (EmailStr | None, optional): The new email of the user. Defaults to None.
- is_active (bool | None, optional): Indicates if the user is active. Defaults to None.
- default_profile (str | None, optional): The default profile of the user. Defaults to None.
- """
-
- name: str
- contact: EmailStr | None = None
- new_email: EmailStr | None = None
- is_active: bool | None = None
- default_profile: str | None = None
- is_admin: bool | None = None
-
-
-class UsersDeleteUserResponse(SQLModel):
- """Represents the response for deleting a user.
-
- Attributes:
- ok (bool): Indicates whether the user deletion was successful or not.
- """
-
- ok: bool
-
-
-class UsersDeleteUserRequest(SQLModel):
- """Represents a request to delete a user.
-
- Attributes:
- name (str): The name of the user to be deleted.
- """
-
- name: str
-
-
-class UsersRotateTokenResponse(SQLModel):
- """Represents the response object for the token rotation operation in the Users API.
-
- Attributes:
- token (str): The new token generated after rotating the existing token.
- """
-
- token: str
-
-
-class UsersRotateTokenRequest(SQLModel):
- """Represents a request to rotate the token for a user.
-
- Attributes:
- name (str): The name of the user.
- """
-
- name: str
-
-
-class ProfilesCreateProfileRequest(SQLModel):
- """Represents a request to create a profile.
-
- Attributes:
- name (str): The name of the profile.
- checks (List[str], optional): A list of checks associated with the profile. Defaults to an empty list.
- """
-
- name: str
- checks: list[str] = []
-
-
-class ProfilesCreateProfileResponse(ProfileBase):
- """Represents the response object for creating a profile.
-
- Attributes:
- checks (str): The list of names of checks associated with the profile.
- """
-
- checks: list[str] = []
-
-
-class ProfilesListProfileRequest(SQLModel):
- """Represents a request to list profiles.
-
- Attributes:
- name (str): The name of the profile.
- """
-
- name: str
-
-
-class ProfilesListProfileResponse(ProfileBase):
- """Represents a response object for listing profiles.
-
- Attributes:
- checks (list[ChecksListChecksResponse]): A list of checks associated with the profile.
- """
-
- checks: list["ChecksListChecksResponse"] = []
-
-
-ProfilesListProfilesResponse = Annotated[
- list[ProfilesListProfileResponse],
- Field(..., description="A list of profiles"),
-]
-
-
-class ProfilesUpdateProfileRequest(SQLModel):
- """Represents a request to update a profile.
-
- Attributes:
- name (str): The name of the profile.
- checks (list[str], optional): A list of check names associated with the profile. Defaults to None.
- """
-
- name: str
- checks: list[str] | None = None
-
-
-class ProfilesUpdateProfileResponse(ProfileBase):
- """Represents the response for updating a profile.
-
- This class inherits from the `ProfileBase` class and provides additional functionality
- specific to updating a profile.
- """
-
- pass
-
-
-class ProfilesDeleteProfileRequest(SQLModel):
- """Represents a request to delete a profile.
-
- Attributes:
- name (str): The name of the profile to be deleted.
- """
-
- name: str
-
-
-class ProfilesDeleteProfileResponse(SQLModel):
- """Represents the response for deleting a profile.
-
- This class inherits from the `SQLModel` class.
- """
-
- ok: bool
-
-
-class ChecksCreateCheckResponse(CheckBase):
- """Represents the response object for creating a check.
-
- Attributes:
- profiles (str): The list of names of profiles associated with the check.
- """
-
- profiles: list[str] = []
-
-
-class ChecksCreateCheckRequest(SQLModel):
- """Represents a request to create a check.
-
- Attributes:
- name (str): The name of the check.
- profiles (list[str], optional): A list of profiles associated with the check. Defaults to an empty list.
- upstream_services (list[str], optional): A list of upstream services for the check.
- config (dict[str, Union[str, int, float, bool]], optional): The configuration for the check. Defaults to an empty dictionary.
- """
-
- name: str
- profiles: list[str] = []
- upstream_services: list[str]
- supported_types: list[SafetyServicePayloadType] = []
- config: dict[str, Union[str, int, float, bool]] = {}
-
-
-class ChecksListCheckResponse(CheckBase):
- """Represents the response for a single check in the list of checks.
-
- Attributes:
- profiles (list[str]): A list of profiles associated with the check.
- """
-
- profiles: list[str] = []
-
-
-class ChecksListCheckRequest(SQLModel):
- """Represents a request to list checks for a specific check name."""
-
- name: str
-
-
-ChecksListChecksResponse = Annotated[
- list[ChecksListCheckResponse],
- Field(..., description="A list of checks"),
-]
-
-
-class ChecksUpdateCheckResponse(CheckBase):
- """Represents the response for updating a check.
-
- This class inherits from the CheckBase class and provides additional functionality
- specific to updating a check.
- """
-
- pass
-
-
-class ChecksUpdateCheckRequest(SQLModel):
- """Represents a request to update a check.
-
- Attributes:
- name (str): The name of the check.
- profiles (list[str], optional): A list of profiles associated with the check. Defaults to None.
- upstream_services (list[str], optional): A list of upstream services for the check. Defaults to None.
- config (dict[str, Union[str, int, float, bool]], optional): The configuration for the check. Defaults to None.
- """
-
- name: str
- profiles: list[str] | None = None
- upstream_services: list[str] | None = None
- config: dict[str, Union[str, int, float, bool]] | None = None
-
-
-class ChecksDeleteCheckResponse(SQLModel):
- """Represents the response for deleting a check.
-
- Attributes:
- ok (bool): Indicates whether the check deletion was successful.
- """
-
- ok: bool
-
-
-class ChecksDeleteCheckRequest(SQLModel):
- """Represents a request to delete a check.
-
- Attributes:
- name (str): The name of the check to be deleted.
- """
-
- name: str
-
-
-class SafetyScanRequestWithText(SafetyServicePayloadText):
- """Represents a safety scan request with text.
-
- Attributes:
- kind (Literal["text"]): The type of payload, which is always "text".
- """
-
- kind: Literal["text"] = SafetyServicePayloadType.TEXT.value
-
-
-class SafetyScanRequestWithImage(SafetyServicePayloadImage):
- """Represents a safety scan request with an image.
-
- Attributes:
- kind (Literal["image"]): The type of payload, which is always "image".
- """
-
- kind: Literal["image"] = SafetyServicePayloadType.IMAGE.value
-
-
-SafetyScanRequest = Annotated[
- Union[SafetyScanRequestWithText, SafetyScanRequestWithImage],
- Field(..., discriminator="kind"),
-]
-
-
-class SafetyScanResponse(Verdict):
- """Represents the response from a safety scan.
-
- This class inherits from the `Verdict` class and provides additional functionality specific to safety scans.
- """
-
- pass
diff --git a/src/sophrosyne/api/v1/routers/__init__.py b/src/sophrosyne/api/v1/routers/__init__.py
deleted file mode 100644
index 2fafbdc..0000000
--- a/src/sophrosyne/api/v1/routers/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-"""API routes for the v1 SOPH API."""
diff --git a/src/sophrosyne/api/v1/routers/checks.py b/src/sophrosyne/api/v1/routers/checks.py
deleted file mode 100644
index 8051e85..0000000
--- a/src/sophrosyne/api/v1/routers/checks.py
+++ /dev/null
@@ -1,183 +0,0 @@
-"""API endpoints for managing checks.
-
-Attributes:
- router (APIRouter): The FastAPI router for the checks endpoints.
-"""
-
-from typing import Sequence
-
-from fastapi import APIRouter, Depends, HTTPException, Query
-from sqlmodel import col, select
-from sqlmodel.ext.asyncio.session import AsyncSession
-
-from sophrosyne.api.dependencies import get_db_session, require_admin
-from sophrosyne.api.v1.models import (
- ChecksCreateCheckRequest,
- ChecksCreateCheckResponse,
- ChecksDeleteCheckRequest,
- ChecksDeleteCheckResponse,
- ChecksListCheckRequest,
- ChecksListCheckResponse,
- ChecksListChecksResponse,
- ChecksUpdateCheckRequest,
- ChecksUpdateCheckResponse,
-)
-from sophrosyne.api.v1.tags import Tags
-from sophrosyne.core.models import Check, Profile
-
-CHECK_NOT_FOUND: str = "Check not found"
-
-router = APIRouter(dependencies=[Depends(require_admin)])
-
-
-@router.post(
- "/checks/create-check", response_model=ChecksCreateCheckResponse, tags=[Tags.checks]
-)
-async def create_check(
- *, db_session: AsyncSession = Depends(get_db_session), req: ChecksCreateCheckRequest
-):
- """Create a new check.
-
- Args:
- db_session (AsyncSession): The database session.
- req (ChecksCreateCheckRequest): The request object containing the check details.
-
- Returns:
- ChecksCreateCheckResponse: The response object containing the created check.
- """
- db_profiles: Sequence[Profile] = []
- if req.profiles is not None:
- result = await db_session.exec(
- select(Profile).where(col(Profile.name).in_(req.profiles))
- )
- db_profiles = result.all()
- # Clear the profiles to avoid "'int' object has no attribute '_sa_instance_state'"
- # error when converting to our SQLModel table model later.
- req.profiles.clear()
-
- db_check = Check.model_validate(req)
- db_check.profiles.extend(db_profiles)
- db_session.add(db_check)
- await db_session.commit()
- await db_session.refresh(db_check)
- return ChecksCreateCheckResponse.model_validate(
- db_check, update={"profiles": [p.name for p in db_check.profiles]}
- )
-
-
-@router.get(
- "/checks/list-checks", response_model=ChecksListChecksResponse, tags=[Tags.checks]
-)
-async def read_checks(
- *,
- db_session: AsyncSession = Depends(get_db_session),
- offset: int = 0,
- limit: int = Query(100, le=100),
-):
- """Retrieve a list of checks.
-
- Args:
- db_session (AsyncSession): The database session.
- offset (int): The offset for pagination.
- limit (int): The maximum number of checks to retrieve.
-
- Returns:
- List[Check]: A list of checks.
-
- Raises:
- HTTPException: If no checks are found.
- """
- result = await db_session.exec(select(Check).offset(offset).limit(limit))
- checks = result.all()
- if not checks:
- raise HTTPException(status_code=404, detail="No checks found")
- return checks
-
-
-@router.post(
- "/checks/list-check", response_model=ChecksListCheckResponse, tags=[Tags.checks]
-)
-async def read_check(
- *, db_session: AsyncSession = Depends(get_db_session), req: ChecksListCheckRequest
-):
- """Retrieve a specific check by its name.
-
- Args:
- db_session (AsyncSession): The database session.
- req (ChecksListCheckRequest): The request object containing the name of the check.
-
- Returns:
- Check: The check object if found.
-
- Raises:
- HTTPException: If the check is not found (status code 404).
- """
- result = await db_session.exec(select(Check).where(Check.name == req.name))
- check = result.first()
- if not check:
- raise HTTPException(status_code=404, detail=CHECK_NOT_FOUND)
- return ChecksListCheckResponse.model_validate(
- check, update={"profiles": [p.name for p in check.profiles]}
- )
-
-
-@router.patch(
- "/checks/update-check", response_model=ChecksUpdateCheckResponse, tags=[Tags.checks]
-)
-async def update_check(
- *,
- db_session: AsyncSession = Depends(get_db_session),
- req: ChecksUpdateCheckRequest,
-):
- """Update a check in the database.
-
- Args:
- db_session (AsyncSession): The database session.
- req (ChecksUpdateCheckRequest): The request object containing the updated check data.
-
- Returns:
- The updated check object.
-
- Raises:
- HTTPException: If the check is not found in the database.
- """
- result = await db_session.exec(select(Check).where(Check.name == req.name))
- db_check = result.first()
- if not db_check:
- raise HTTPException(status_code=404, detail=CHECK_NOT_FOUND)
-
- if req.profiles is not None:
- db_profiles = await db_session.exec(
- select(Profile).where(col(Profile.name).in_(req.profiles))
- )
- db_check.profiles.clear()
- db_check.profiles.extend(db_profiles)
-
- check_data = req.model_dump(exclude_unset=True)
- db_check.sqlmodel_update(check_data)
- db_session.add(db_check)
- await db_session.commit()
- await db_session.refresh(db_check)
- return db_check
-
-
-@router.delete("/checks/delete-check", tags=[Tags.checks])
-async def delete_check(
- *, db_session: AsyncSession = Depends(get_db_session), req: ChecksDeleteCheckRequest
-):
- """Delete a check from the database.
-
- Args:
- db_session (AsyncSession): The database session.
- req (ChecksDeleteCheckRequest): The request object containing the name of the check to delete.
-
- Returns:
- ChecksDeleteCheckResponse: The response object indicating the success of the deletion.
- """
- result = await db_session.exec(select(Check).where(Check.name == req.name))
- db_check = result.first()
- if not db_check:
- raise HTTPException(status_code=404, detail=CHECK_NOT_FOUND)
- await db_session.delete(db_check)
- await db_session.commit()
- return ChecksDeleteCheckResponse(ok=True)
diff --git a/src/sophrosyne/api/v1/routers/profiles.py b/src/sophrosyne/api/v1/routers/profiles.py
deleted file mode 100644
index 9e111c1..0000000
--- a/src/sophrosyne/api/v1/routers/profiles.py
+++ /dev/null
@@ -1,204 +0,0 @@
-"""Profiles API endpoints.
-
-Attributes:
- router (APIRouter): The FastAPI router for the profiles API.
-"""
-
-from fastapi import APIRouter, Depends, HTTPException, Query
-from sqlmodel import col, select
-from sqlmodel.ext.asyncio.session import AsyncSession
-
-from sophrosyne.api.dependencies import get_db_session, require_admin
-from sophrosyne.api.v1 import Tags
-from sophrosyne.api.v1.models import (
- ProfilesCreateProfileRequest,
- ProfilesCreateProfileResponse,
- ProfilesDeleteProfileRequest,
- ProfilesDeleteProfileResponse,
- ProfilesListProfileRequest,
- ProfilesListProfileResponse,
- ProfilesListProfilesResponse,
- ProfilesUpdateProfileRequest,
- ProfilesUpdateProfileResponse,
-)
-from sophrosyne.core.models import Check, Profile
-
-router = APIRouter(dependencies=[Depends(require_admin)])
-
-PROFILE_NOT_FOUND: str = "Profile not found"
-
-
-@router.post(
- "/profiles/create-profile",
- response_model=ProfilesCreateProfileResponse,
- tags=[Tags.profiles],
-)
-async def create_profile(
- *,
- db_session: AsyncSession = Depends(get_db_session),
- req: ProfilesCreateProfileRequest,
-):
- """Create a new profile in the database.
-
- Args:
- db_session (AsyncSession): The database session.
- req (ProfilesCreateProfileRequest): The request object containing the profile data.
-
- Returns:
- The created profile object.
-
- Raises:
- None.
- """
- if req.checks is not None:
- result = await db_session.exec(
- select(Check).where(col(Check.name).in_(req.checks))
- )
- db_checks = result.all()
- # Clear the profiles to avoid "'int' object has no attribute '_sa_instance_state'"
- # error when converting to our SQLModel table model later.
- req.checks.clear()
-
- db_profile = Profile.model_validate(req)
- db_profile.checks.extend(db_checks)
- db_session.add(db_profile)
- await db_session.commit()
- await db_session.refresh(db_profile)
- return ProfilesCreateProfileResponse.model_validate(
- db_profile, update={"checks": [c.name for c in db_profile.checks]}
- )
-
-
-@router.get(
- "/profiles/list-profiles",
- response_model=ProfilesListProfilesResponse,
- tags=[Tags.profiles],
-)
-async def read_profiles(
- *,
- db_session: AsyncSession = Depends(get_db_session),
- offset: int = 0,
- limit: int = Query(100, le=100),
-):
- """Retrieve a list of profiles with pagination support.
-
- Args:
- db_session (AsyncSession): The database session.
- offset (int): The offset for pagination. Defaults to 0.
- limit (int): The maximum number of profiles to retrieve. Defaults to 100.
-
- Returns:
- List[Profile]: A list of profiles matching the query.
-
- Raises:
- HTTPException: If no profiles are found.
- """
- result = await db_session.exec(select(Profile).offset(offset).limit(limit))
- profiles = result.all()
- if not profiles:
- raise HTTPException(status_code=404, detail="No profiles found")
- return profiles
-
-
-@router.post(
- "/profiles/list-profile",
- response_model=ProfilesListProfileResponse,
- tags=[Tags.profiles],
-)
-async def read_profile(
- *,
- db_session: AsyncSession = Depends(get_db_session),
- req: ProfilesListProfileRequest,
-):
- """Retrieve a profile from the database based on the provided name.
-
- Args:
- db_session (AsyncSession): The database session.
- req (ProfilesListProfileRequest): The request object containing the profile name.
-
- Returns:
- Profile: The retrieved profile.
-
- Raises:
- HTTPException: If the profile is not found in the database.
- """
- result = await db_session.exec(select(Profile).where(Profile.name == req.name))
- profile = result.first()
- if not profile:
- raise HTTPException(status_code=404, detail=PROFILE_NOT_FOUND)
- return ProfilesListProfileResponse.model_validate(
- profile, update={"checks": [c.name for c in profile.checks]}
- )
-
-
-@router.patch(
- "/profiles/update-profile",
- response_model=ProfilesUpdateProfileResponse,
- tags=[Tags.profiles],
-)
-async def update_profile(
- *,
- db_session: AsyncSession = Depends(get_db_session),
- req: ProfilesUpdateProfileRequest,
-):
- """Update a profile in the database.
-
- Args:
- db_session (AsyncSession): The database session.
- req (ProfilesUpdateProfileRequest): The request object containing the updated profile data.
-
- Returns:
- The updated profile object.
-
- Raises:
- HTTPException: If the profile is not found in the database.
- """
- result = await db_session.exec(select(Profile).where(Profile.name == req.name))
- db_profile = result.first()
- if not db_profile:
- raise HTTPException(status_code=404, detail=PROFILE_NOT_FOUND)
-
- if req.checks is not None:
- db_checks = await db_session.exec(
- select(Check).where(col(Check.name).in_(req.checks))
- )
- db_profile.checks.clear()
- db_profile.checks.extend(db_checks)
-
- profile_data = req.model_dump(exclude_unset=True)
- db_profile.sqlmodel_update(profile_data)
- db_session.add(db_profile)
- await db_session.commit()
- await db_session.refresh(db_profile)
- return db_profile
-
-
-@router.delete(
- "/profiles/delete-profile",
- response_model=ProfilesDeleteProfileResponse,
- tags=[Tags.profiles],
-)
-async def delete_profile(
- *,
- db_session: AsyncSession = Depends(get_db_session),
- req: ProfilesDeleteProfileRequest,
-):
- """Delete a profile from the database.
-
- Args:
- db_session (AsyncSession): The database session.
- req (ProfilesDeleteProfileRequest): The request object containing the profile name.
-
- Returns:
- ProfilesDeleteProfileResponse: The response object indicating the success of the operation.
-
- Raises:
- HTTPException: If the profile is not found in the database.
- """
- result = await db_session.exec(select(Profile).where(Profile.name == req.name))
- db_profile = result.first()
- if not db_profile:
- raise HTTPException(status_code=404, detail=PROFILE_NOT_FOUND)
- await db_session.delete(db_profile)
- await db_session.commit()
- return ProfilesDeleteProfileResponse(ok=True)
diff --git a/src/sophrosyne/api/v1/routers/safety.py b/src/sophrosyne/api/v1/routers/safety.py
deleted file mode 100644
index ace74dd..0000000
--- a/src/sophrosyne/api/v1/routers/safety.py
+++ /dev/null
@@ -1,58 +0,0 @@
-"""Safety API endpoints.
-
-Attributes:
- router (APIRouter): The FastAPI router for the safety endpoints.
-"""
-
-from typing import Annotated
-
-from fastapi import APIRouter, Body, Depends
-
-from sophrosyne.api.dependencies import auth_and_return_user, get_safety_service
-from sophrosyne.api.v1.models import (
- SafetyScanRequest,
- SafetyScanResponse,
-)
-from sophrosyne.core.config import Settings, get_settings
-from sophrosyne.core.models import (
- SafetyServicePayload,
- SafetyServicePayloadImage,
- SafetyServicePayloadText,
- User,
-)
-from sophrosyne.core.safety import Safety
-
-router = APIRouter()
-
-
-@router.post("/safety/scan", response_model=SafetyScanResponse)
-async def safety(
- req: Annotated[SafetyScanRequest, Body()],
- ss: Annotated[Safety, Depends(get_safety_service)],
- current_user: Annotated[User, Depends(auth_and_return_user)],
- settings: Annotated[Settings, Depends(get_settings)],
-) -> SafetyScanResponse:
- """Endpoint for performing a safety scan.
-
- Args:
- req (SafetyScanRequest): The request payload containing the scan details.
- ss (Safety): The safety service dependency.
- current_user (User): The current user making the request.
- settings (Settings): The application settings.
-
- Returns:
- SafetyScanResponse: The response containing the scan results.
- """
- ssp: SafetyServicePayload
- profile: str
- if current_user.default_profile is None:
- profile = settings.default_profile
- else:
- profile = current_user.default_profile
- if req.kind == "text":
- ssp = SafetyServicePayloadText.model_validate(req.model_dump())
- elif req.kind == "image":
- ssp = SafetyServicePayloadImage.model_validate(req.model_dump())
- ssr = await ss.predict(profile=profile, data=ssp)
-
- return SafetyScanResponse.model_validate(ssr.model_dump())
diff --git a/src/sophrosyne/api/v1/routers/users.py b/src/sophrosyne/api/v1/routers/users.py
deleted file mode 100644
index 1ec39f2..0000000
--- a/src/sophrosyne/api/v1/routers/users.py
+++ /dev/null
@@ -1,231 +0,0 @@
-"""Users API endpoints.
-
-Attributes:
- router (APIRouter): The FastAPI router for the users API.
-"""
-
-from fastapi import APIRouter, Depends, HTTPException, Query
-from sqlmodel import select
-from sqlmodel.ext.asyncio.session import AsyncSession
-
-from sophrosyne.api.dependencies import (
- auth_and_return_user,
- get_db_session,
- require_admin,
-)
-from sophrosyne.api.v1.models import (
- UsersCreateUserRequest,
- UsersCreateUserResponse,
- UsersDeleteUserRequest,
- UsersDeleteUserResponse,
- UsersListUserRequest,
- UsersListUserResponse,
- UsersListUsersResponse,
- UsersRotateTokenRequest,
- UsersRotateTokenResponse,
- UsersUpdateUserRequest,
- UsersUpdateUserResponse,
-)
-from sophrosyne.api.v1.tags import Tags
-from sophrosyne.core.models import User
-from sophrosyne.core.security import new_token, sign
-
-router = APIRouter()
-
-USER_NOT_FOUND = "User not found"
-
-
-@router.post(
- "/users/create-user",
- response_model=UsersCreateUserResponse,
- tags=[Tags.users],
- dependencies=[Depends(require_admin)],
-)
-async def create_user(
- *,
- db_session: AsyncSession = Depends(get_db_session),
- user: UsersCreateUserRequest,
-):
- """Create a new user.
-
- Args:
- db_session (AsyncSession): The database session.
- user (UsersCreateUserRequest): The request payload containing user data.
-
- Returns:
- UsersCreateUserResponse: The newly created user.
-
- Raises:
- None
-
- """
- token = new_token()
- extra_data = {"signed_token": sign(token)}
- db_user = User.model_validate(user, update=extra_data)
- db_session.add(db_user)
- await db_session.commit()
- await db_session.refresh(db_user)
- return UsersCreateUserResponse.model_validate(db_user, update={"token": token})
-
-
-@router.get(
- "/users/list-users",
- response_model=UsersListUsersResponse,
- tags=[Tags.users],
- dependencies=Depends(require_admin),
-)
-async def read_users(
- *,
- db_session: AsyncSession = Depends(get_db_session),
- offset: int = 0,
- limit: int = Query(100, le=100),
-):
- """Retrieve a list of users from the database.
-
- Args:
- db_session (AsyncSession): The database session.
- offset (int): The offset for pagination. Defaults to 0.
- limit (int): The maximum number of users to retrieve. Defaults to 100.
-
- Returns:
- UsersListUsersResponse: A list of user objects.
-
- Raises:
- HTTPException: If no users are found in the database.
- """
- result = await db_session.exec(select(User).offset(offset).limit(limit))
- users = result.all()
- if not users:
- raise HTTPException(status_code=400, detail="No users found")
- return users
-
-
-@router.post(
- "/users/list-user", response_model=UsersListUserResponse, tags=[Tags.users]
-)
-async def read_user(
- *,
- db_session: AsyncSession = Depends(get_db_session),
- req: UsersListUserRequest,
- current_user=Depends(auth_and_return_user),
-):
- """Retrieve a user from the database based on the provided name.
-
- Args:
- db_session (AsyncSession): The database session.
- req (UsersListUserRequest): The request object containing the user's name.
- current_user (User): The current user making the request.
-
- Returns:
- UsersListUserResponse: The user object retrieved from the database.
-
- Raises:
- HTTPException: If the user is not found in the database.
- """
- result = await db_session.exec(select(User).where(User.name == req.name))
- user = result.first()
- if not user:
- raise HTTPException(status_code=400, detail=USER_NOT_FOUND)
- if current_user.name != user.name:
- raise HTTPException(status_code=403, detail=USER_NOT_FOUND)
- return user
-
-
-@router.patch(
- "/users/update-user",
- response_model=UsersUpdateUserResponse,
- tags=[Tags.users],
- dependencies=[Depends(require_admin)],
-)
-async def update_user(
- *,
- db_session: AsyncSession = Depends(get_db_session),
- req: UsersUpdateUserRequest,
-):
- """Update a user in the database.
-
- Args:
- db_session: The database session.
- req: The request object containing the updated user data.
-
- Returns:
- UsersUpdateUserResponse: The updated user object.
-
- Raises:
- HTTPException: If the user is not found in the database.
- """
- result = await db_session.exec(select(User).where(User.name == req.name))
- db_user = result.first()
- if not db_user:
- raise HTTPException(status_code=400, detail=USER_NOT_FOUND)
- user_data = req.model_dump(exclude_unset=True)
- db_user.sqlmodel_update(user_data)
- db_session.add(db_user)
- await db_session.commit()
- await db_session.refresh(db_user)
- return db_user
-
-
-@router.delete(
- "/users/delete-user",
- response_model=UsersDeleteUserResponse,
- tags=[Tags.users],
- dependencies=[Depends(require_admin)],
-)
-async def delete_user(
- *,
- db_session: AsyncSession = Depends(get_db_session),
- req: UsersDeleteUserRequest,
-):
- """Delete a user from the database.
-
- Args:
- db_session (AsyncSession): The database session.
- req (UsersDeleteUserRequest): The request object containing the user's name.
-
- Returns:
- UsersDeleteUserResponse: The response object indicating the success of the operation.
- """
- result = await db_session.exec(select(User).where(User.name == req.name))
- db_user = result.first()
- if not db_user:
- raise HTTPException(status_code=400, detail=USER_NOT_FOUND)
- await db_session.delete(db_user)
- await db_session.commit()
- return UsersDeleteUserResponse(ok=True)
-
-
-@router.post(
- "/users/rotate-token", response_model=UsersRotateTokenResponse, tags=[Tags.users]
-)
-async def rotate_user_token(
- *,
- db_session: AsyncSession = Depends(get_db_session),
- req: UsersRotateTokenRequest,
- current_user=Depends(auth_and_return_user),
-):
- """Create a new token for a user and update it in the database.
-
- Args:
- db_session (AsyncSession): The database session.
- req (UsersRotateTokenRequest): The request object containing the user's name.
- current_user (User): The current user making the request.
-
- Returns:
- UsersRotateTokenResponse: The updated user object with the new token.
-
- Raises:
- HTTPException: If the user is not found in the database.
- """
- result = await db_session.exec(select(User).where(User.name == req.name))
- db_user = result.first()
- if not db_user:
- raise HTTPException(status_code=400, detail=USER_NOT_FOUND)
- if current_user.name != db_user.name and not current_user.is_admin:
- raise HTTPException(status_code=403, detail="Not authorized")
- token = new_token()
- db_user.signed_token = sign(token)
- db_session.add(db_user)
- await db_session.commit()
- await db_session.refresh(db_user)
- return UsersRotateTokenResponse.model_validate(db_user, update={"token": token})
diff --git a/src/sophrosyne/api/v1/tags.py b/src/sophrosyne/api/v1/tags.py
deleted file mode 100644
index b474ab3..0000000
--- a/src/sophrosyne/api/v1/tags.py
+++ /dev/null
@@ -1,19 +0,0 @@
-"""Tags for the API endpoints."""
-
-from enum import Enum
-
-
-class Tags(Enum):
- """Tags for the API endpoints.
-
- Attributes:
- profiles (str): The profiles tag.
- checks (str): The checks tag.
- users (str): The users tag.
- safety (str): The safety tag.
- """
-
- profiles = "profiles"
- checks = "checks"
- users = "users"
- safety = "safety"
diff --git a/src/sophrosyne/commands/__init__.py b/src/sophrosyne/commands/__init__.py
deleted file mode 100644
index ba83db8..0000000
--- a/src/sophrosyne/commands/__init__.py
+++ /dev/null
@@ -1,210 +0,0 @@
-"""Commands for Sophrosyne."""
-
-#
-# Do NOT import any modules from sophrosyne before making sure you've read the
-# docstring of the necessary_evil function.
-#
-
-from contextlib import asynccontextmanager
-
-import click
-from fastapi import FastAPI
-
-from sophrosyne.commands import database
-from sophrosyne.commands.internal import necessary_evil
-from sophrosyne.core import async_cmd
-
-
-@asynccontextmanager
-async def _lifespan(app: FastAPI):
- """FastAPI lifespan event handler."""
- from sophrosyne.core.logging import get_logger
-
- log = get_logger()
- from sophrosyne.core.database import (
- create_db_and_tables,
- create_default_profile,
- create_root_user,
- )
-
- await create_db_and_tables()
- rt = await create_default_profile()
- if rt:
- log.info("default profile created", profile=rt.name)
- rt = await create_root_user()
- if rt:
- log.info("root user created created", token=rt)
- yield
- log.info("app is shutting down")
-
-
-@click.command()
-def version():
- """Get the version of sophrosyne."""
- from importlib.metadata import version
-
- print(version("sophrosyne"))
-
-
-@click.command()
-@click.option("--config", default="config.yaml", help="path to configuration file.")
-@click.option(
- "--pretty",
- is_flag=True,
- default=False,
- help="If set, prints configuration with indents for easier reading.",
-)
-def config(config, pretty):
- """Print the configuration to stdout as JSON."""
- necessary_evil(config)
-
- from sophrosyne.core.config import get_settings
-
- indent = None
- if pretty:
- indent = 2
-
- click.echo(get_settings().model_dump_json(indent=indent))
-
-
-@click.command()
-@click.option("--config", default="config.yaml", help="path to configuration file.")
-@async_cmd
-async def healthcheck(config):
- """Check the health of the sophrosyne API service."""
- necessary_evil(config)
-
- import sys
-
- import requests
-
- from sophrosyne.core.config import get_settings
- from sophrosyne.core.database import (
- engine,
- )
-
- # Disable warnings for insecure requests
- requests.packages.urllib3.disable_warnings()
-
- verify = get_settings().security.outgoing_tls_verify
- if verify and get_settings().security.outgoing_tls_ca_path is not None:
- verify = get_settings().security.outgoing_tls_ca_path
-
- try:
- resp = requests.get(
- f"https://{get_settings().server.listen_host}:{get_settings().server.port}/health/ping",
- verify=verify,
- )
- if resp.status_code != 200 and resp.text != '"pong"':
- click.echo("API returned abnormal response.")
- return sys.exit(1)
- except requests.exceptions.ConnectionError as e:
- # This is not really a nice way of doing this, is there not a better way?
- if "CERTIFICATE_VERIFY_FAILED" in str(e):
- reason = str(e)[str(e).find("certificate verify failed: ") :]
- reason = reason.removeprefix("certificate verify failed: ")
- reason = reason[: reason.rfind(" (")]
- reason = reason.strip()
- click.echo(f"SSL/TLS verification failure: {reason}")
- else:
- click.echo("API is not responding.")
- return sys.exit(1)
-
- from sqlalchemy.ext.asyncio import async_sessionmaker
- from sqlmodel.ext.asyncio.session import AsyncSession
-
- from sophrosyne.api.routers.health import do_authenticated_healthcheck
-
- db_session = async_sessionmaker(
- bind=engine,
- class_=AsyncSession,
- expire_on_commit=False,
- )
- async with db_session() as session:
- hc = await do_authenticated_healthcheck(db_session=session)
- if hc.status == "pass":
- click.echo("The server is healthy.")
- else:
- click.echo("The server is not healthy.")
- return sys.exit(1)
-
-
-@click.command()
-@click.option("--config", default="config.yaml", help="path to configuration file.")
-def run(config):
- """Run the sophrosyne API service."""
- necessary_evil(config)
-
- import sys
-
- import uvicorn
- from fastapi import FastAPI
- from fastapi.middleware.cors import CORSMiddleware
-
- from sophrosyne.api import api_router
- from sophrosyne.core.config import get_settings
- from sophrosyne.core.logging import LoggingMiddleware
- from sophrosyne.core.security import TLS
-
- try:
- get_settings().security.assert_non_default_cryptographic_material()
- except ValueError as e:
- print(f"configuration error: {e}")
- sys.exit(1)
-
- tls = TLS(
- certificate_path=get_settings().security.certificate_path,
- key_path=get_settings().security.key_path,
- key_password=get_settings().security.key_password,
- )
-
- app = FastAPI(
- lifespan=_lifespan,
- openapi_url="/.well-known/openapi",
- redoc_url="/docs",
- )
-
- app.add_middleware(
- CORSMiddleware,
- allow_origins=get_settings().backend_cors_origins,
- allow_credentials=False,
- allow_methods=["*"],
- allow_headers=[],
- )
- app.add_middleware(
- LoggingMiddleware,
- )
- app.include_router(api_router)
-
- uvicorn.run(
- app,
- host=get_settings().server.listen_host,
- port=get_settings().server.port,
- log_level="info",
- log_config=None,
- access_log=False,
- ssl_certfile=tls.to_path(input=tls.certificate),
- ssl_keyfile=tls.to_path(input=tls.private_key),
- # Mypy complains about ssl_keyfile_password being a bytes object, when
- # the argument expects a str. It works because internally in uvicorn,
- # it is passed to the ssl.SSLContext.load_cert_chain() method, which
- # expects a bytes, string or None object. This is probably a bug in
- # uvicorn, but it works as expected.
- ssl_keyfile_password=tls.private_key_password, # type: ignore
- )
-
-
-@click.group()
-def root():
- """Sophrosyne - A content moderation API."""
- pass
-
-
-def setup_and_run_commands():
- """Setup the CLI commands and execute the root command."""
- root.add_command(version)
- root.add_command(run)
- root.add_command(healthcheck)
- root.add_command(config)
- root.add_command(database.cmd)
- root()
diff --git a/src/sophrosyne/commands/database.py b/src/sophrosyne/commands/database.py
deleted file mode 100644
index 1a6cfd5..0000000
--- a/src/sophrosyne/commands/database.py
+++ /dev/null
@@ -1,83 +0,0 @@
-"""Migration commands for the underlying database."""
-
-import click
-
-from sophrosyne.commands.internal import necessary_evil
-from sophrosyne.core import async_cmd
-
-_CONFIG_HELP_TEXT = "path to configuration file."
-
-
-@click.group(name="database")
-def cmd():
- """Database management related commands."""
- pass
-
-
-@cmd.command()
-@click.option(
- "--revision",
- required=True,
- help='The ID of the revision you\'d like to upgrade to. Use "head" to upgrade to the latest.',
- confirmation_prompt=True,
-)
-@click.option("--config", default="config.yaml", help=_CONFIG_HELP_TEXT)
-@async_cmd
-async def upgrade(config, revision: str):
- """Update the database."""
- necessary_evil(config)
- from sophrosyne.core.database import upgrade
-
- await upgrade(revision=revision)
-
-
-@cmd.command()
-@click.option(
- "--revision",
- required=True,
- help='The Id of the revision you\'d like to downgrade to. Use "base" to completely wipe the database.',
- confirmation_prompt=True,
-)
-@click.option("--config", default="config.yaml", help=_CONFIG_HELP_TEXT)
-@async_cmd
-async def downgrade(config, revision: str):
- """Downgrade the database."""
- necessary_evil(config)
- from sophrosyne.core.database import downgrade
-
- await downgrade(revision=revision)
-
-
-@click.group(name="show")
-def show():
- """Commands to read metadata from the database."""
- pass
-
-
-@show.command()
-@click.option("--verbose", default=False, is_flag=True)
-@click.option("--config", default="config.yaml", help=_CONFIG_HELP_TEXT)
-@async_cmd
-async def history(config, verbose: bool):
- """Show the migration history of the database."""
- necessary_evil(config)
- from sophrosyne.core.database import history
-
- await history(verbose=verbose)
-
-
-@show.command()
-@click.option("--verbose", default=False, is_flag=True)
-@click.option("--config", default="config.yaml", help=_CONFIG_HELP_TEXT)
-@async_cmd
-async def current(config, verbose: bool):
- """Show the current migration revision of the database."""
- necessary_evil(config)
- from sophrosyne.core.database import current
-
- await current(verbose=verbose)
-
-
-cmd.add_command(show)
-cmd.add_command(upgrade)
-cmd.add_command(downgrade)
diff --git a/src/sophrosyne/commands/internal/__init__.py b/src/sophrosyne/commands/internal/__init__.py
deleted file mode 100644
index ad6becc..0000000
--- a/src/sophrosyne/commands/internal/__init__.py
+++ /dev/null
@@ -1,45 +0,0 @@
-"""Internal things."""
-
-#
-# Do NOT import any modules from sophrosyne before making sure you've read the
-# docstring of the _necessary_evil function.
-#
-
-
-def necessary_evil(path: str):
- """Run some initial setup.
-
- This code, as the name implies, is a necessary evil to make up for a
- missing feature, or perhaps my own personal shortcomings, of Pydantic. It
- does not seem possible to dynamically specify external configuration files
- via `model_config` in Pydantic, forcing us to have the value of the
- `yaml_file` argument be a variable that is set at module import time. This
- unfortunately creates the side effect that the location of the yaml file
- must be known the config module is imported.
-
- The way this is handled is to have this function take care of setting the
- necessary environment variables to configure the config module before
- importing it.
-
- It is imperative that this function is run before any other modules from
- sophrosyne is imported. This is because many other modules import the
- config module, and if that happens before this function is run, everything
- breaks.
-
- Additionally, because this function is run early and by pretty much all
- commands, it is also used to centralize other things such as initialization
- of logging.
- """
- import os
-
- if "SOPH__CONFIG_YAML_FILE" not in os.environ:
- os.environ["SOPH__CONFIG_YAML_FILE"] = path
- import sophrosyne.core.config # noqa: F401
- from sophrosyne.core.config import get_settings
- from sophrosyne.core.logging import initialize_logging
-
- initialize_logging(
- log_level=get_settings().logging.level_as_int,
- format=get_settings().logging.format,
- event_field=get_settings().logging.event_field,
- )
diff --git a/src/sophrosyne/core/__init__.py b/src/sophrosyne/core/__init__.py
deleted file mode 100644
index ebecccc..0000000
--- a/src/sophrosyne/core/__init__.py
+++ /dev/null
@@ -1,41 +0,0 @@
-"""Core module for the SOPH API.
-
-This module contains the core logic of the SOPH API service. It defines the
-database operations, configuration, and utility functions not tied to a
-specific API version.
-"""
-
-import asyncio
-from functools import wraps
-
-
-def async_cmd(func):
- """Decorator to run an async function as a synchronous command.
-
- This decorator allows you to use async functions as synchronous commands in Click.
- It uses the `asyncio.run()` function to run the async function in a synchronous manner.
-
- Args:
- func (Callable): The async function to be decorated.
-
- Returns:
- Callable: The decorated function.
-
- Example:
- @async_cmd
- async def my_async_command():
- # async code here
-
- if __name__ == "__main__":
- my_async_command()
-
- Reference:
- This decorator is based on the solution provided in the following StackOverflow post:
- https://stackoverflow.com/questions/67558717/how-can-i-test-async-click-commands-from-an-async-pytest-function
- """
-
- @wraps(func)
- def wrapper(*args, **kwargs):
- return asyncio.run(func(*args, **kwargs))
-
- return wrapper
diff --git a/src/sophrosyne/core/checks.py b/src/sophrosyne/core/checks.py
deleted file mode 100644
index cacf5e3..0000000
--- a/src/sophrosyne/core/checks.py
+++ /dev/null
@@ -1,107 +0,0 @@
-"""The checks module contains the logic to run checks on the data supplied to the SOPH API service.
-
-Checks may either be completely implemented in this module
-or may call out to external services to perform the check.
-"""
-
-from random import choice
-
-import grpc
-
-from sophrosyne.core.models import (
- CheckBase,
- SafetyServicePayload,
- SafetyServicePayloadImage,
- SafetyServicePayloadText,
- SafetyServicePayloadType,
-)
-from sophrosyne.grpc.checks import checks_pb2, checks_pb2_grpc
-
-
-class Check(CheckBase):
- """Base class for all checks.
-
- Attributes:
- name (str): The name of the check.
- description (str): A description of the check.
- """
-
- def __init__(self, name: str, description: str):
- """Initializes the Check class.
-
- Args:
- name (str): The name of the check.
- description (str): A description of the check.
- """
- self.name = name
- self.description = description
-
- def handle_dummy(self) -> bool:
- """Handle the dummy check.
-
- Will return with the result specified in the config. If no result is
- specified, will return False.
-
- Returns:
- bool: The result of the dummy check.
- """
- if self.config is None:
- return False
- if "result" not in self.config:
- return False
-
- if isinstance(self.config["result"], bool):
- return self.config["result"]
- if isinstance(self.config["result"], str):
- if self.config["result"].lower() == "true":
- return True
- if self.config["result"].lower() == "false":
- return False
- if isinstance(self.config["result"], int):
- return bool(self.config["result"])
- if isinstance(self.config["result"], float):
- return bool(self.config["result"])
-
- return False
-
- def type_is_supported(self, data: SafetyServicePayload) -> bool:
- """Determines if the check supports the given data type.
-
- Args:
- data (SafetyServicePayload): The data to check.
-
- Returns:
- bool: True if the check supports the data type, False otherwise.
- """
- if isinstance(data, SafetyServicePayloadText):
- return SafetyServicePayloadType.TEXT in self.supported_types
- if isinstance(data, SafetyServicePayloadImage):
- return SafetyServicePayloadType.IMAGE in self.supported_types
- return False
-
- def run(self, data: SafetyServicePayload) -> bool:
- """Run the check on the data.
-
- Args:
- data (SafetyServicePayload): The data to run the check on.
-
- Raises:
- NotImplementedError: If the check is not implemented for the given data type.
-
- Returns:
- bool: True if the data passes the check, False otherwise.
- """
- if not self.type_is_supported(data):
- raise NotImplementedError("Check not implemented for data type")
- if isinstance(data, SafetyServicePayloadText):
- rpc_payload = checks_pb2.CheckRequest(text=data.text)
- elif isinstance(data, SafetyServicePayloadImage):
- rpc_payload = checks_pb2.CheckRequest(image=data.image)
- else:
- raise NotImplementedError("Check not implemented for data type")
- if self.name.startswith("local:dummy:"):
- return self.handle_dummy()
- channel = grpc.insecure_channel(choice(self.upstream_services))
- stub = checks_pb2_grpc.CheckServiceStub(channel)
- call = stub.Check(rpc_payload)
- return call.result
diff --git a/src/sophrosyne/core/config.py b/src/sophrosyne/core/config.py
deleted file mode 100644
index 106c969..0000000
--- a/src/sophrosyne/core/config.py
+++ /dev/null
@@ -1,429 +0,0 @@
-"""Configuration module for the SOPH API.
-
-This module contains the configuration settings for the SOPH API. The settings
-are loaded from environment variables, a YAML configuration file, and secrets
-files. The settings are organized into classes that represent different parts
-of the configuration. The settings are loaded using the `pydantic-settings`
-library, which provides a way to load settings from multiple sources and
-validate the settings.
-
-The settings are loaded in the following order of priority:
-1. Secrets files
-2. Environment variables
-3. YAML configuration file
-
-Dynamic module behaviour:
- Because of a limitation in the `pydantic-settings` library, namely that it
- does not support dynamically setting model configuration (e.g., `secrets_dir`
- and `yaml_file`), the default values for these settings are set using
- environment variables. This allows the settings to be configured using
- dynamically set environment variables, although this is not ideal.
-
- The behaviour of this module that is considered dynamic are:
-
- - The path to the configuration file.
- The value used by this module is available as the `config_file` attribute,
- and the environment variable used to set this value is
- `SOPH__CONFIG_YAML_FILE`. The default value is `config.yaml`.
- - The path to the directory containing secrets files.
- The value used by this module is available as the `secrets_dir` attribute,
- and the environment variable used to set this value is
- `SOPH__CONFIG_SECRETS_DIR`. The default value is `/run/secrets` on Linux
- and an empty string on macOS. The empty string is used on macOS purely for
- testing purposes, as the `/run` directory is not available on macOS.
- - Whether to create the secrets directory if it does not exist.
- The value used by this module is available as the `create_secrets_dir`
- attribute, and the environment variable used to set this value is
- `SOPH__CONFIG_CREATE_SECRETS_DIR`. The default value is `false`.
-
- Environment variables used to dynamically alter the behaviour of this module
- is carefully chosen to avoid conflicts with the settings themselves. The
- prefix `SOPH__CONFIG_` is used to avoid conflicts.
-
- The module behaviour controlled by these environment variables is run once
- when the module is imported. The settings are then loaded using the
- configured settings sources.
-
-Configuration via secrets files:
- The secrets files are loaded from the directory specified by the
- `secrets_dir` attribute. The files in these directories will be read and
- their literal contents will be used as the values for the settings. The
- secrets files are expected to be named after the settings they are providing
- secrets for, taking into account the environment prefix.
-
- This also means that the name of the secrets file for a setting is identical
- to the environment variable used to set the value of the setting. For
- example, the secrets file for the `password` setting in the `Database` class
- is expected to be named `SOPH_DATABASE__PASSWORD`.
-
- Configuration applied via will override all other sources of configuration.
-
-Configuration via YAML configuration file:
- The YAML configuration file is loaded from the path specified by the
- `config_file` attribute. The settings in the YAML file are expected to be
- organized in the same structure as the settings classes in this module. The
- settings in the YAML file will be used to override the default values of the
- settings.
-
-Configuration via environment variables:
- The settings can be configured using environment variables. The environment
- variables are expected to be named after the settings they are configuring,
- taking into account the environment prefix. The environment variables are
- expected to be in uppercase and use underscores to separate words. For
- example, the environment variable for the `password` setting in the
- `Database` class is expected to be named `SOPH_DATABASE__PASSWORD`.
-
- The environment variables will be used to override the default values of the
- settings as well as the values loaded from the YAML configuration file, but
- will be overridden by values loaded from secrets files.
-
- When providing lists via environment variables, the list should be a JSON
- array. For example, to provide a list of backend CORS origins, the
- environment variable should be named `SOPH_BACKEND_CORS_ORIGINS` and the
- value could be `'["http://localhost:3000", "http://localhost:3001"]'`.
-
-Users of this module is encouraged to use the `get_settings` function to get the
-settings object, as this function uses a LRU cache to ensure that the settings
-are only loaded once.
-
-Example:
- To get the settings object, use the `get_settings` function:
- settings = get_settings()
-
- The settings object can then be used to access the settings values:
- print(settings.database.host)
-
-
-Attributes:
- config_file (str): The path to the configuration file.
- secrets_dir (str): The path to the directory containing secrets files.
- create_secrets_dir (bool): Whether to create the secrets directory if it
- does not exist.
-"""
-
-import base64
-import logging
-import os
-import sys
-from functools import lru_cache
-from typing import Annotated, List, Literal, Tuple, Type
-
-from pydantic import (
- AnyHttpUrl,
- Base64Encoder,
- EmailStr,
- EncodedBytes,
- Field,
- computed_field,
-)
-from pydantic_settings import (
- BaseSettings,
- PydanticBaseSettingsSource,
- SettingsConfigDict,
- YamlConfigSettingsSource,
-)
-
-config_file = os.environ.get("SOPH__CONFIG_YAML_FILE", "config.yaml")
-secrets_dir = os.environ.get(
- "SOPH__CONFIG_SECRETS_DIR", ("" if sys.platform == "darwin" else "/run/secrets")
-)
-create_secrets_dir = (
- os.environ.get("SOPH__CONFIG_CREATE_SECRETS_DIR", "false").lower() == "true"
-)
-
-if create_secrets_dir and secrets_dir == "":
- raise ValueError(
- "Cannot create secrets dir when secrets dir is empty. Configure SOPH__CONFIG_SECRETS_DIR"
- )
-
-if create_secrets_dir:
- os.makedirs(secrets_dir, exist_ok=True)
-
-
-class Base64EncoderSansNewline(Base64Encoder):
- """Encode Base64 without adding a trailing newline.
-
- The default Base64Bytes encoder in PydanticV2 appends a trailing newline
- when encoding. See https://github.com/pydantic/pydantic/issues/9072
- """
-
- @classmethod
- def encode(cls, value: bytes) -> bytes: # noqa: D102
- return base64.b64encode(value)
-
-
-Base64Bytes = Annotated[bytes, EncodedBytes(encoder=Base64EncoderSansNewline)]
-
-
-class Logging(BaseSettings):
- """Configuration class for the logging settings.
-
- Attributes of this class can be overriden by environment variables, a YAML
- configuration file, and secrets files. The environment variables and secret
- files must be named after the attributes they are setting, with the
- environment prefix and nested delimiter taken into account.
-
- The environment prefix is `SOPH_DATABASE__`.
-
- Attributes:
- level (Literal["info", "debug"]): The log level to use. Defaults to "info".
- event_field (str): The name of the field to use for the main part of the log. Defaults to "event".
- format (Literal["development", "production"]): The format of the logs. Defaults to "production".
- """
-
- level: Literal["info", "debug"] = "info"
- event_field: str = "event"
- format: Literal["development", "production"] = "production"
-
- @computed_field
- def level_as_int(self) -> int:
- """Provides the log_level as an integer."""
- if self.level.lower() == "debug":
- return logging.DEBUG
- else:
- return logging.INFO
-
- model_config = SettingsConfigDict(
- secrets_dir=secrets_dir, env_prefix="SOPH_logging__"
- )
-
-
-class Database(BaseSettings):
- """Configuration class for the database settings.
-
- Attributes of this class can be overriden by environment variables, a YAML
- configuration file, and secrets files. The environment variables and secret
- files must be named after the attributes they are setting, with the
- environment prefix and nested delimiter taken into account.
-
- The environment prefix is `SOPH_DATABASE__`.
-
- Attributes:
- host (str): The host of the database.
- port (int): The port of the database.
- database (str): The name of the database.
- password (str): The password for the database.
- user (str): The user for the database.
- """
-
- host: str = "localhost"
- port: int = 5432
- database: str = "postgres"
- password: str = "postgres"
- user: str = "postgres"
-
- @computed_field
- def dsn(self) -> str:
- """Returns the Data Source Name (DSN) for connecting to the PostgreSQL database.
-
- The DSN is constructed using the user, password, host, port, and
- database attributes of the Config object.
-
- Returns:
- str: The Data Source Name (DSN) for connecting to the PostgreSQL
- database.
- """
- return f"postgresql+asyncpg://{self.user}:{self.password}@{self.host}:{self.port}/{self.database}"
-
- model_config = SettingsConfigDict(
- secrets_dir=secrets_dir, env_prefix="SOPH_database__"
- )
-
-
-class Development(BaseSettings):
- """Configuration class for development environment.
-
- These settings should never be changed in production, as they are meant for
- development purposes only.
-
- Attributes of this class can be overriden by environment variables, a YAML
- configuration file, and secrets files. The environment variables and secret
- files must be named after the attributes they are setting, with the
- environment prefix and nested delimiter taken into account.
-
- The environment prefix is `SOPH_DEVELOPMENT__`.
-
- Attributes:
- static_root_token (str): Override the random token generated at first statup.
- sqlalchemy_echo (bool): Instruct SQLAlchemy to log SQL commands.
- """
-
- static_root_token: str = ""
- sqlalchemy_echo: bool = False
-
- model_config = SettingsConfigDict(
- secrets_dir=secrets_dir, env_prefix="SOPH_development__"
- )
-
-
-_default_key = b"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="
-
-
-class Security(BaseSettings):
- """Configuration class for security settings.
-
- Attributes of this class can be overriden by environment variables, a YAML
- configuration file, and secrets files. The environment variables and secret
- files must be named after the attributes they are setting, with the
- environment prefix and nested delimiter taken into account.
-
- The environment prefix is `SOPH_SECURITY__`.
-
- Attributes:
- token_length (int): The number of bytes to generate for tokens used by the application.
- site_key (bytes): The site key used for protection tokens at rest. Value must be provided as a base64 encoded string. Minimum length is 32 bytes, and maximum length is 64 bytes.
- salt (bytes): The salt used for protection tokens at rest. Value must be provided as a base64 encoded string. Minimum length is 32 bytes.
- certificate_path (str): The path to the certificate file. If empty, a self-signed certificate will be generated.
- key_path (str): The path to the key file. Used for TLS.
- key_password (str): The password for the key file.
- outgoing_tls_verify (bool): Whether to verify outgoing TLS connections.
- outgoing_tls_ca_path (str): The path to the CA certificate file for outgoing TLS connections.
- """
-
- token_length: int = 128
- site_key: Annotated[Base64Bytes, Field(min_length=32, max_length=64)] = _default_key
- salt: Annotated[Base64Bytes, Field(min_length=32)] = _default_key
- certificate_path: str | None = None
- key_path: str | None = None
- key_password: str | None = None
- outgoing_tls_verify: bool = True
- outgoing_tls_ca_path: str | None = None
-
- def assert_non_default_cryptographic_material(self) -> None:
- """Asserts that important cryptographic materials do not have a default value.
-
- This function must be called as soon as theres a slight possibility that
- the cryptographic key material provided by this class is needed.
-
- Raises:
- ValueError: If site_key or salt has the default value, a ValueError
- will be raised.
- """
- if self.site_key == b"\x00" * 32:
- raise ValueError("security.site_key must be set")
- if self.salt == b"\x00" * 32:
- raise ValueError("security.salt must be set")
-
- model_config = SettingsConfigDict(
- secrets_dir=secrets_dir, env_prefix="SOPH_security__"
- )
-
-
-class Server(BaseSettings):
- """Configuration class for server settings.
-
- Attributes of this class can be overriden by environment variables, a YAML
- configuration file, and secrets files. The environment variables and secret
- files must be named after the attributes they are setting, with the
- environment prefix and nested delimiter taken into account.
-
- The environment prefix is `SOPH_SERVER__` and the nested delimiter is `__`.
-
- Attributes:
- port (int): The port to run the server on.
- listen_host (str): The host to listen on.
- """
-
- port: int = 8000
- listen_host: str = "0.0.0.0"
-
- model_config = SettingsConfigDict(
- secrets_dir=secrets_dir, env_prefix="SOPH_server__"
- )
-
-
-class Settings(BaseSettings):
- """Represents the settings for the application.
-
- Attributes of this class can be overriden by environment variables, a YAML
- configuration file, and secrets files. The environment variables and secret
- files must be named after the attributes they are setting, with the
- environment prefix and nested delimiter taken into account.
-
- The environment prefix is `SOPH_`, and the nested delimiter is `__`.
-
- Attributes:
- api_v1_str (str): The API version string.
- backend_cors_origins (List[AnyHttpUrl]): The list of backend CORS
- origins.
- root_contact (EmailStr): The root contact email address.
- hostnames (List[str]): The list of hostnames that the server should respond to. If generating a certificate, these values are used as the Common Name (CN) and Subject Alternate Name (SAN) in the certificate.
- checks (Checks): The checks configuration.
- database (Database): The database configuration.
- development (Development): The development configuration.
-
- Methods:
- settings_customise_sources: Customize the sources for loading settings.
- """
-
- api_v1_str: str = "/v1"
- backend_cors_origins: List[AnyHttpUrl] = []
- default_profile: str = "default"
-
- root_contact: EmailStr = "replaceme@withareal.email" # type: ignore NOSONAR
- hostnames: List[str] = ["localhost"]
- database: Database = Database()
- security: Security = Security()
- server: Server = Server()
- development: Development = Development()
- logging: Logging = Logging()
-
- model_config = SettingsConfigDict(
- yaml_file=config_file,
- secrets_dir=secrets_dir,
- env_prefix="SOPH_",
- env_nested_delimiter="__",
- )
-
- @classmethod
- def settings_customise_sources(
- cls,
- settings_cls: Type[BaseSettings],
- init_settings: PydanticBaseSettingsSource,
- env_settings: PydanticBaseSettingsSource,
- dotenv_settings: PydanticBaseSettingsSource,
- file_secret_settings: PydanticBaseSettingsSource,
- ) -> Tuple[PydanticBaseSettingsSource, ...]:
- """Customize the sources for loading settings.
-
- Creating this class method, which pydantic will call behind the scenes,
- allows us to customize the sources for loading settings. The return
- value of this method is a tuple of settings sources that will be used
- to load the settings. The order of the sources in the tuple defines
- the priority of the sources. The first source in the tuple has the
- highest priority, and the last source in the tuple has the lowest
- priority.
-
- Args:
- settings_cls (Type[BaseSettings]): The settings class.
- init_settings (PydanticBaseSettingsSource): The initial settings source.
- env_settings (PydanticBaseSettingsSource): The environment settings source.
- dotenv_settings (PydanticBaseSettingsSource): The dotenv settings source.
- file_secret_settings (PydanticBaseSettingsSource): The file secret settings source.
-
- Returns:
- Tuple[PydanticBaseSettingsSource, ...]: A tuple of customized settings sources.
- """
- return (
- init_settings,
- file_secret_settings,
- env_settings,
- YamlConfigSettingsSource(settings_cls),
- )
-
-
-@lru_cache
-def get_settings():
- """Retrieves the settings object.
-
- This function is backed by an LRU cache to ensure that the settings are
- only loaded once, and that the same settings object is returned on
- subsequent calls.
-
- The object returned by this function is not safe to modify, as it is shared
- between all users of this function.
-
- Returns:
- Settings: The settings object.
- """
- return Settings()
diff --git a/src/sophrosyne/core/database.py b/src/sophrosyne/core/database.py
deleted file mode 100644
index 3b2a4a0..0000000
--- a/src/sophrosyne/core/database.py
+++ /dev/null
@@ -1,161 +0,0 @@
-"""This module is responsible for creating the database and tables, and also for creating the root user."""
-
-from alembic import command, config
-from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine
-from sqlmodel import SQLModel, select
-from sqlmodel.ext.asyncio.session import AsyncSession
-
-from sophrosyne.core.config import get_settings
-from sophrosyne.core.logging import get_logger
-from sophrosyne.core.models import Profile, User
-from sophrosyne.core.security import new_token, sign
-
-engine = create_async_engine(
- get_settings().database.dsn,
- echo=get_settings().development.sqlalchemy_echo,
- future=True,
-)
-
-log = get_logger()
-
-
-def alembic_config() -> config.Config:
- """Creates an returns a configuration for alembic."""
- cfg = config.Config()
- cfg.set_main_option("script_location", "sophrosyne:migrations")
- cfg.set_main_option("sqlalchemy.url", get_settings().database.dsn)
-
- return cfg
-
-
-async def create_db_and_tables():
- """Create the database and tables."""
- cfg = alembic_config()
-
- def stamp(connection):
- cfg.attributes["connection"] = connection
- command.stamp(cfg, "head")
-
- async with engine.begin() as conn:
- await conn.run_sync(SQLModel.metadata.create_all)
- await conn.run_sync(stamp)
-
-
-async def create_default_profile():
- """Create the default profile if it does not exist."""
- async_session = async_sessionmaker(
- engine, class_=AsyncSession, expire_on_commit=False
- )
- async with async_session() as session:
- result = await session.exec(
- select(Profile).where(Profile.name == get_settings().default_profile)
- )
- p = result.first()
- if p:
- return ""
-
- profile = Profile(name=get_settings().default_profile)
- session.add(profile)
- await session.commit()
-
- return profile
-
-
-async def create_root_user() -> str:
- """Create the root user if it does not exist."""
- async_session = async_sessionmaker(
- engine, class_=AsyncSession, expire_on_commit=False
- )
- async with async_session() as session:
- result = await session.exec(
- select(User).where(User.contact == get_settings().root_contact)
- )
- u = result.first()
- if u:
- return ""
-
- token = new_token()
- if get_settings().development.static_root_token != "":
- token = get_settings().development.static_root_token
- log.warn("static root token in use")
- user = User(
- name="root",
- contact=get_settings().root_contact,
- signed_token=sign(token),
- is_active=True,
- is_admin=True,
- )
- session.add(user)
- await session.commit()
-
- return token
-
-
-async def upgrade(revision: str):
- """Run database upgrade migration using alembic.
-
- Args:
- revision (str): The ID of the revision to upgrade the database do.
- """
- cfg = alembic_config()
-
- def _upgrade(revision: str):
- def execute(connection):
- cfg.attributes["connection"] = connection
- command.upgrade(cfg, revision)
-
- return execute
-
- async with engine.begin() as conn:
- await conn.run_sync(_upgrade(revision))
-
-
-async def downgrade(revision: str):
- """Run database downgrade migration using alembic.
-
- Args:
- revision (str): The ID of the revision to downgrade the database do.
- """
- cfg = alembic_config()
-
- def _downgrade(revision: str):
- def execute(connection):
- cfg.attributes["connection"] = connection
- command.downgrade(cfg, revision)
-
- return execute
-
- async with engine.begin() as conn:
- await conn.run_sync(_downgrade(revision))
-
-
-async def history(verbose: bool):
- """Show the database migration history.
-
- Args:
- verbose (bool): Be verbose in the output.
- """
- cfg = alembic_config()
-
- def show(connection):
- cfg.attributes["connection"] = connection
- command.history(cfg, verbose=verbose, indicate_current=True)
-
- async with engine.begin() as conn:
- await conn.run_sync(show)
-
-
-async def current(verbose: bool):
- """Show the current database migration.
-
- Args:
- verbose (bool): Be verbose in the output.
- """
- cfg = alembic_config()
-
- def show(connection):
- cfg.attributes["connection"] = connection
- command.current(cfg, verbose=verbose)
-
- async with engine.begin() as conn:
- await conn.run_sync(show)
diff --git a/src/sophrosyne/core/logging.py b/src/sophrosyne/core/logging.py
deleted file mode 100644
index d97cad7..0000000
--- a/src/sophrosyne/core/logging.py
+++ /dev/null
@@ -1,179 +0,0 @@
-"""Logging provides utilities that allow the app to standardise on logging."""
-
-import logging
-import random
-import sys
-import time
-from typing import Any, Literal
-
-import structlog
-from fastapi import Request, Response
-from starlette.middleware.base import BaseHTTPMiddleware
-
-
-def get_logger() -> Any:
- """Get a logger.
-
- Primary purpose of this function is to not have other modules import
- structlog.
- """
- return structlog.get_logger()
-
-
-def initialize_logging(
- log_level: int = logging.NOTSET,
- clear_handlers: bool = True,
- event_field: str = "event",
- format: Literal["development", "production"] = "production",
-) -> None:
- """Set up logging.
-
- Will set up logging using structlog. In addition to setting up structlog,
- the root logger of the Python standard library (`logging`) will be
- reconfigured to use the same formatting as structlog.
-
- All logs will be written to standard out as JSON.
-
- If the `log_format` attribute of the `sophrosyne.core.config.Settings` class
- equals `development`, then logs will be pretty printed.
-
- By default, all handlers of the Python standard library logging package
- will be cleared, unless the `clear_handlers` argument is set to `False`.
-
- Additionally, the function will set clear the handlers and set logger to
- propagate for the following modules:
-
- - `uvicorn`
- - `uvicorn.error`
-
- This is in order to prevent them from setting their own format.
-
- Args:
- log_level (int): Logging level to use. Defaults to logging.NOTSET.
- clear_handlers (bool): Existing handlers should be cleared. Defaults to True.
- event_field (str): The name of the field that will contain the logged event. Defaults to "event".
- format (Literal["development", "production"]): Decides how the log will be formatted.
- """
- if clear_handlers:
- logging.getLogger().handlers.clear()
-
- processors: list[structlog.types.Processor] = [
- structlog.contextvars.merge_contextvars,
- structlog.stdlib.add_logger_name,
- structlog.stdlib.add_log_level,
- structlog.processors.StackInfoRenderer(),
- structlog.processors.TimeStamper(fmt="iso"),
- ]
-
- if event_field != "event":
- processors.append(structlog.processors.EventRenamer(to=event_field))
-
- log_renderer: structlog.typing.Processor
- if format == "development":
- processors.append(structlog.dev.set_exc_info)
- log_renderer = structlog.dev.ConsoleRenderer(event_key=event_field)
- else:
- processors.append(structlog.processors.format_exc_info)
- log_renderer = structlog.processors.JSONRenderer()
-
- structlog.configure(
- processors=processors
- + [structlog.stdlib.ProcessorFormatter.wrap_for_formatter],
- cache_logger_on_first_use=True,
- logger_factory=structlog.stdlib.LoggerFactory(),
- )
-
- formatter = structlog.stdlib.ProcessorFormatter(
- processors=[
- structlog.stdlib.ProcessorFormatter.remove_processors_meta,
- log_renderer,
- ],
- foreign_pre_chain=processors,
- )
-
- h = logging.StreamHandler()
- h.setFormatter(formatter)
-
- r = logging.getLogger()
- r.addHandler(h)
- r.setLevel(log_level)
-
- for name in ["uvicorn", "uvicorn.error"]:
- logging.getLogger(name).handlers.clear()
- logging.getLogger(name).propagate = True
-
- def handle_exception(exc_type, exc_value, exc_traceback):
- """Log any uncaught exception.
-
- Ignores KeyboardInterrupt from Ctrl+C.
- """
- if issubclass(exc_type, KeyboardInterrupt):
- sys.__excepthook__(exc_type, exc_value, exc_traceback)
- return
-
- r.error("Uncaught exception", exc_info=(exc_type, exc_value, exc_traceback))
-
- sys.excepthook = handle_exception
-
-
-class LoggingMiddleware(BaseHTTPMiddleware):
- """Middleware to ensure requests are logged.
-
- This middleware will ensure that an access log entry is created. The access
- log will be generated with an event with the text `http request served`.
-
- The middleware will additionally create a 16byte pseudo-random request
- identifier (represented as a 32byte hex string), attach it to the loggers
- context vars and add it to the outgoing HTTP response in the form of a
- `X-Request-ID` header.
-
- Attributes:
- app: A FastAPI app.
- """
-
- def __init__(
- self,
- app,
- ):
- """Initialize the LoggingMiddleware.
-
- Args:
- app: A FastAPI app.
- """
- super().__init__(app)
-
- async def dispatch(self, request: Request, call_next):
- """Serve request with the middleware."""
- structlog.contextvars.clear_contextvars()
- request_id = "%032x" % random.randrange(16**32)
- structlog.contextvars.bind_contextvars(request_id=request_id)
-
- start_time = time.perf_counter_ns()
- response: Response = Response(status_code=418)
- try:
- response = await call_next(request)
- except Exception:
- structlog.stdlib.get_logger().exception("uncaught exception")
- raise
- finally:
- process_time = time.perf_counter_ns() - start_time
- status_code = response.status_code
- response.headers["X-Request-ID"] = request_id
- request.client
- client_port = request.client.port # type: ignore
- client_host = request.client.host # type: ignore
- http_method = request.method
- http_version = request.scope["http_version"]
- structlog.stdlib.get_logger().info(
- "http request served",
- http={
- "url": str(request.url),
- "status_code": status_code,
- "method": http_method,
- "version": http_version,
- },
- network={"client": {"ip": client_host, "port": client_port}},
- duration=process_time,
- )
-
- return response
diff --git a/src/sophrosyne/core/models.py b/src/sophrosyne/core/models.py
deleted file mode 100644
index 14d56e2..0000000
--- a/src/sophrosyne/core/models.py
+++ /dev/null
@@ -1,206 +0,0 @@
-"""SQLModel, Pydantic models and Pydantic schemas for the SOPH API.
-
-This module defines the SQLModels, Pydantic models and Pydantic schemas for the
-SOPH API service. Any model or schema that is used in the core logic of the
-application should be defined here. If a model or schema is used to define the
-request or response body of an API endpoint, it should be defined in the
-models module of the API version it belongs to.
-
-Example:
- The User model is used to define the User table in the database. The
- UserBase model is used to define the base fields of the User model.
-
-Any model or schema that is used in the core logic of the application should be
-validated against these models and schemas.
-"""
-
-from datetime import datetime
-from enum import Enum
-from typing import Union
-
-from pydantic import BaseModel, EmailStr
-from sqlmodel import (
- ARRAY,
- JSON,
- AutoString,
- Column,
- Field,
- Relationship,
- SQLModel,
- String,
-)
-
-from sophrosyne.core.config import get_settings
-
-
-class SafetyServicePayloadType(str, Enum):
- """Enum class representing the payload types for the Safety Service."""
-
- TEXT = "text"
- IMAGE = "image"
-
-
-class UserBase(SQLModel):
- """Base class for all users.
-
- Attributes:
- name (str): The name of the user.
- contact (EmailStr): The contact email of the user.
- is_active (bool): Whether the user is active or not.
- default_profile (str): The default profile of the user.
- """
-
- name: str = Field(unique=True, index=True)
- created_at: datetime = Field(default_factory=datetime.utcnow)
- contact: EmailStr = Field(sa_type=AutoString)
- is_active: bool = Field(default=True)
- default_profile: str | None = Field(
- default=get_settings().default_profile, foreign_key="profile.name"
- )
-
-
-class User(UserBase, table=True):
- """Model for the User table in the database.
-
- Attributes:
- id (int): The ID of the user.
- signed_token (str): The token of the user, signed with sophrosyne.core.security.sign.
- """
-
- id: int | None = Field(default=None, primary_key=True)
- signed_token: str = Field(index=True, unique=True)
- is_admin: bool = Field(default=False)
-
-
-class ProfileCheckAssociation(SQLModel, table=True):
- """Model for the ProfileCheckAssociation table in the database.
-
- Attributes:
- profile_id (int): The ID of the profile.
- check_id (int): The ID of the check.
- """
-
- profile_id: int | None = Field(
- default=None, foreign_key="profile.id", primary_key=True
- )
- check_id: int | None = Field(default=None, foreign_key="check.id", primary_key=True)
-
-
-class CheckBase(SQLModel):
- """Model for the Check table in the database.
-
- Attributes:
- name (str): The name of the check.
- created_at (datetime): The creation date of the check.
- upstream_services (list[str]): The list of upstream services for the check.
- config (dict[str, Union[str, int, float, bool]]): The configuration for the check.
- """
-
- name: str = Field(unique=True, index=True)
- created_at: datetime = Field(default_factory=datetime.utcnow)
- upstream_services: list[str] = Field(
- default_factory=list, sa_column=Column(ARRAY(String))
- )
- supported_types: list[SafetyServicePayloadType] = Field(
- default_factory=list, sa_column=Column(ARRAY(String))
- )
- config: dict[str, Union[str, int, float, bool]] = Field(
- default_factory=dict, sa_column=Column(JSON)
- )
-
-
-class Check(CheckBase, table=True):
- """Model for the Check table in the database.
-
- Attributes:
- id (int): The ID of the check.
- profiles (list[Profile]): The profiles that use the check.
- """
-
- id: int | None = Field(default=None, primary_key=True)
- profiles: list["Profile"] = Relationship(
- back_populates="checks",
- link_model=ProfileCheckAssociation,
- # Prevent SQLAlchemy from lazy loading the relationship. See https://stackoverflow.com/questions/74252768/missinggreenlet-greenlet-spawn-has-not-been-called
- sa_relationship_kwargs={"lazy": "selectin"},
- )
-
-
-class ProfileBase(SQLModel):
- """Model for the Profile table in the database.
-
- Attributes:
- name (str): The name of the profile.
- created_at (datetime): The creation date of the profile.
- """
-
- name: str = Field(unique=True, index=True)
- created_at: datetime = Field(default_factory=datetime.utcnow)
-
-
-class Profile(ProfileBase, table=True):
- """Represents a profile in the system.
-
- Attributes:
- id (int | None): The ID of the profile. Defaults to None.
- checks (list["Check"]): The list of checks associated with the profile.
- """
-
- id: int | None = Field(default=None, primary_key=True)
- checks: list["Check"] = Relationship(
- back_populates="profiles",
- link_model=ProfileCheckAssociation,
- # Prevent SQLAlchemy from lazy loading the relationship. See https://stackoverflow.com/questions/74252768/missinggreenlet-greenlet-spawn-has-not-been-called
- sa_relationship_kwargs={"lazy": "selectin"},
- )
-
-
-class SafetyServicePayloadText(BaseModel):
- """Represents the payload for analyzing the safety of a text.
-
- Attributes:
- text (str): The text to be analyzed for safety.
- """
-
- text: str = Field(
- title="Text",
- description="The text to be analyzed for safety.",
- min_length=1,
- max_length=1000,
- )
-
-
-class SafetyServicePayloadImage(BaseModel):
- """Represents the payload for analyzing the safety of an image.
-
- Attributes:
- image (str): The image to be analyzed for safety.
- """
-
- image: str = Field(
- title="Image",
- description="The image to be analyzed for safety.",
- min_length=1,
- max_length=1000,
- )
-
-
-SafetyServicePayload = Union[SafetyServicePayloadText, SafetyServicePayloadImage]
-
-
-class Verdict(BaseModel):
- """Represents a safety verdict.
-
- Attributes:
- verdict (bool): The safety verdict.
- checks (dict[str, bool]): The safety checks.
- """
-
- verdict: bool = Field(
- title="Verdict",
- description="The safety verdict.",
- )
- checks: dict[str, bool] = Field(
- title="Checks",
- description="The safety checks.",
- )
diff --git a/src/sophrosyne/core/safety.py b/src/sophrosyne/core/safety.py
deleted file mode 100644
index a8e11d9..0000000
--- a/src/sophrosyne/core/safety.py
+++ /dev/null
@@ -1,80 +0,0 @@
-"""Safety module for performing safety checks on profiles."""
-
-from typing import Iterable
-
-from fastapi import HTTPException
-from sqlmodel import select
-from sqlmodel.ext.asyncio.session import AsyncSession
-
-from sophrosyne.core.checks import Check
-from sophrosyne.core.models import Profile, SafetyServicePayload, Verdict
-
-
-class Safety:
- """Safety class for performing safety checks on profiles.
-
- Attributes:
- db_session (AsyncSession): The database session to use.
- """
-
- db_session: AsyncSession
-
- def __init__(self, db_session: AsyncSession) -> None:
- """Initialize the Safety class with a database session.
-
- Args:
- db_session (AsyncSession): The database session to use.
- """
- self.db_session = db_session
-
- async def predict(self, profile: str, data: SafetyServicePayload) -> Verdict:
- """Predict the safety verdict for a given profile and data.
-
- Args:
- profile (str): The name of the profile to check.
- data (SafetyServicePayload): The data to perform safety checks on.
-
- Returns:
- Verdict: The safety verdict.
- """
- result = await self.db_session.exec(
- select(Profile).where(Profile.name == profile)
- )
- db_profile = result.first()
- if not db_profile:
- raise HTTPException(status_code=404, detail="Profile not found")
- check_results: dict[str, bool] = {}
- for check in db_profile.checks:
- check_results[check.name] = Check.model_validate(check.model_dump()).run(
- data
- )
-
- # Bug / Point of Contention
- # If there are no checks associated with the profile, the verdict will
- # always be True. This may not be the desired behavior - if there are no
- # checks being performed, how can we assume it is safe? May be better to
- # return a False verdict in this case.
-
- return Verdict(verdict=_all(check_results.values()), checks=check_results)
-
-
-def _all(iterable: Iterable[object]) -> bool:
- """Custom implementation of `all`.
-
- Differs from the built-in `all` in that it returns False if the iterable is
- empty.
-
- Args:
- iterable (Iterable[object]): The iterable to check.
-
- Returns:
- bool: True if all elements are truthy, False otherwise.
- """
- count_t: int = 0
- count_f: int = 0
- for item in iterable:
- if bool(item):
- count_t += 1
- else:
- count_f += 1
- return count_t > count_f
diff --git a/src/sophrosyne/core/security.py b/src/sophrosyne/core/security.py
deleted file mode 100644
index d15ae0a..0000000
--- a/src/sophrosyne/core/security.py
+++ /dev/null
@@ -1,194 +0,0 @@
-"""Security module for the SOPH API.
-
-This module contains the security-related functions for the SOPH API service.
-It provides functions for generating tokens and signing data.
-
-On Authentication:
- This module also provides the means with which to authenticate users by
- their tokens. As we need to identify a user solely by their token, using a
- per-user salt is un-tenable as it would require us to iterate over all
- users, while hashing the key with the users salt to find the user that the
- token matches. Instead, we use a keyed HMAC signature to sign the token,
- which allows us to verify the token without needing to iterate over all
- users.
-
- Using keyed functions for authentication is a recommendation by OWASP, from
- their Password Storage Cheat Sheet: https://owasp.deteact.com/cheat/cheatsheets/Password_Storage_Cheat_Sheet.html#leverage-keyed-functions
-
- The HMAC signature is generated by hashing the token with a secret site-wide
- key, and a site-wide salt. The salt is used to prevent rainbow table
- attacks, while the secret key is used to prevent brute-force attacks. The
- salt and site-wide key is provided via the settings, and should be kept
- secret.
-"""
-
-import datetime
-import hashlib
-import hmac
-import ipaddress
-import tempfile
-from secrets import token_bytes, token_hex
-
-from cryptography import x509
-from cryptography.hazmat.backends import default_backend
-from cryptography.hazmat.primitives import hashes, serialization
-from cryptography.hazmat.primitives.asymmetric import ec
-from cryptography.x509.oid import NameOID
-
-from sophrosyne.core.config import get_settings
-
-
-def new_token() -> str:
- """Generate a new token used for authentication.
-
- Returns:
- str: The new token.
- """
- return token_hex(get_settings().security.token_length)
-
-
-def sign(data: str) -> str:
- """Sign the data using the HMAC signature.
-
- Args:
- data (str): The data to sign.
-
- Returns:
- str: The HMAC signature of the data.
- """
- settings = get_settings()
- secret_key = settings.security.site_key
- salt = settings.security.salt
- return (
- settings.security.salt.hex()
- + hmac.new(secret_key, salt + data.encode(), hashlib.sha256).hexdigest()
- )
-
-
-class TLS:
- """TLS class for the SOPH API.
-
- This class contains the logic to create a TLS context for the SOPH API service.
- """
-
- certificate: x509.Certificate
- private_key: ec.EllipticCurvePrivateKey
- private_key_password: bytes
- public_key: ec.EllipticCurvePublicKey
-
- def __init__(
- self,
- certificate_path: str | None = None,
- key_path: str | None = None,
- key_password: bytes | None = None,
- ):
- """Initializes the TLS class."""
- if key_path is not None:
- with open(key_path, "rb") as file:
- k = serialization.load_pem_private_key(
- file.read(), password=key_password, backend=default_backend()
- )
- if isinstance(k, ec.EllipticCurvePrivateKey):
- self.private_key = k
- else:
- raise ValueError("Key is not an EllipticCurvePrivateKey")
- else:
- self.private_key = ec.generate_private_key(
- ec.SECP256R1(), default_backend()
- )
- if key_password is None:
- key_password = token_bytes(128)
-
- # We do not reuse the key password, as it is not required after the key
- # is loaded. A new one is generated and stored in the class for use if
- # there is a need to write the key to persistent storage.
- self.private_key_password = key_password
- self.public_key = self.private_key.public_key()
-
- if certificate_path is not None:
- with open(certificate_path, "rb") as file:
- self.certificate = x509.load_pem_x509_certificate(
- file.read(), default_backend()
- )
- else:
- self.generate_certificate(get_settings().hostnames)
-
- def generate_certificate(self, hostnames=list[str]):
- """Generates a new certificate."""
- if self.private_key is None:
- raise ValueError("Key is required to generate a certificate.")
- cert_builder = x509.CertificateBuilder()
- cert_builder = cert_builder.issuer_name(
- x509.Name(
- [
- x509.NameAttribute(NameOID.COMMON_NAME, hostnames[0]),
- ]
- )
- )
- cert_builder = cert_builder.subject_name(
- x509.Name(
- [
- x509.NameAttribute(NameOID.COMMON_NAME, hostnames[0]),
- ]
- )
- )
- cert_builder = cert_builder.serial_number(x509.random_serial_number())
- cert_builder = cert_builder.not_valid_before(datetime.datetime.utcnow())
- cert_builder = cert_builder.not_valid_after(
- datetime.datetime.utcnow() + datetime.timedelta(days=365)
- )
- cert_builder.add_extension(
- x509.BasicConstraints(ca=False, path_length=None), critical=True
- )
- for host in hostnames:
- try:
- ipaddress.ip_address(host)
- cert_builder = cert_builder.add_extension(
- x509.SubjectAlternativeName(
- [x509.IPAddress(ipaddress.ip_address(host))]
- ),
- critical=False,
- )
- except ValueError:
- cert_builder = cert_builder.add_extension(
- x509.SubjectAlternativeName([x509.DNSName(host)]), critical=False
- )
- cert_builder = cert_builder.public_key(self.public_key)
- self.certificate = cert_builder.sign(
- private_key=self.private_key,
- algorithm=hashes.SHA256(),
- backend=default_backend(),
- )
-
- def to_path(
- self,
- input: x509.Certificate | ec.EllipticCurvePrivateKey,
- ) -> str:
- """Converts the input to a path.
-
- Args:
- input (Union[x509.Certificate, ec.EllipticCurvePrivateKey]): The input to convert.
-
- Returns:
- str: The path to the input.
- """
- with tempfile.NamedTemporaryFile(delete=False) as temp:
- if isinstance(input, x509.Certificate):
- temp.write(
- input.public_bytes(
- encoding=serialization.Encoding.PEM,
- )
- )
- elif isinstance(input, ec.EllipticCurvePrivateKey):
- temp.write(
- input.private_bytes(
- encoding=serialization.Encoding.PEM,
- format=serialization.PrivateFormat.TraditionalOpenSSL,
- encryption_algorithm=(
- serialization.BestAvailableEncryption(
- password=self.private_key_password
- )
- ),
- )
- )
- return temp.name
diff --git a/src/sophrosyne/grpc/__init__.py b/src/sophrosyne/grpc/__init__.py
deleted file mode 100644
index 8d7baee..0000000
--- a/src/sophrosyne/grpc/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-"""grpc related modules."""
diff --git a/src/sophrosyne/grpc/checks/__init__.py b/src/sophrosyne/grpc/checks/__init__.py
deleted file mode 100644
index 15a0a8c..0000000
--- a/src/sophrosyne/grpc/checks/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-"""grpc modules for checks."""
diff --git a/src/sophrosyne/grpc/checks/checks_pb2.py b/src/sophrosyne/grpc/checks/checks_pb2.py
deleted file mode 100644
index cea371e..0000000
--- a/src/sophrosyne/grpc/checks/checks_pb2.py
+++ /dev/null
@@ -1,30 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler. DO NOT EDIT!
-# source: sophrosyne/grpc/checks/checks.proto
-# Protobuf Python Version: 4.25.1
-"""Generated protocol buffer code."""
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import descriptor_pool as _descriptor_pool
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf.internal import builder as _builder
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-
-
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n#sophrosyne/grpc/checks/checks.proto\x12\tchecks.v1\"8\n\x0c\x43heckRequest\x12\x0e\n\x04text\x18\x01 \x01(\tH\x00\x12\x0f\n\x05image\x18\x02 \x01(\tH\x00\x42\x07\n\x05\x63heck\"0\n\rCheckResponse\x12\x0e\n\x06result\x18\x01 \x01(\x08\x12\x0f\n\x07\x64\x65tails\x18\x02 \x01(\t2L\n\x0c\x43heckService\x12<\n\x05\x43heck\x12\x17.checks.v1.CheckRequest\x1a\x18.checks.v1.CheckResponse\"\x00\x62\x06proto3')
-
-_globals = globals()
-_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
-_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'sophrosyne.grpc.checks.checks_pb2', _globals)
-if _descriptor._USE_C_DESCRIPTORS == False:
- DESCRIPTOR._options = None
- _globals['_CHECKREQUEST']._serialized_start=50
- _globals['_CHECKREQUEST']._serialized_end=106
- _globals['_CHECKRESPONSE']._serialized_start=108
- _globals['_CHECKRESPONSE']._serialized_end=156
- _globals['_CHECKSERVICE']._serialized_start=158
- _globals['_CHECKSERVICE']._serialized_end=234
-# @@protoc_insertion_point(module_scope)
diff --git a/src/sophrosyne/grpc/checks/checks_pb2.pyi b/src/sophrosyne/grpc/checks/checks_pb2.pyi
deleted file mode 100644
index d5b4c75..0000000
--- a/src/sophrosyne/grpc/checks/checks_pb2.pyi
+++ /dev/null
@@ -1,21 +0,0 @@
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from typing import ClassVar as _ClassVar, Optional as _Optional
-
-DESCRIPTOR: _descriptor.FileDescriptor
-
-class CheckRequest(_message.Message):
- __slots__ = ("text", "image")
- TEXT_FIELD_NUMBER: _ClassVar[int]
- IMAGE_FIELD_NUMBER: _ClassVar[int]
- text: str
- image: str
- def __init__(self, text: _Optional[str] = ..., image: _Optional[str] = ...) -> None: ...
-
-class CheckResponse(_message.Message):
- __slots__ = ("result", "details")
- RESULT_FIELD_NUMBER: _ClassVar[int]
- DETAILS_FIELD_NUMBER: _ClassVar[int]
- result: bool
- details: str
- def __init__(self, result: bool = ..., details: _Optional[str] = ...) -> None: ...
diff --git a/src/sophrosyne/grpc/checks/checks_pb2_grpc.py b/src/sophrosyne/grpc/checks/checks_pb2_grpc.py
deleted file mode 100644
index e042a58..0000000
--- a/src/sophrosyne/grpc/checks/checks_pb2_grpc.py
+++ /dev/null
@@ -1,66 +0,0 @@
-# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
-"""Client and server classes corresponding to protobuf-defined services."""
-import grpc
-
-from sophrosyne.grpc.checks import checks_pb2 as sophrosyne_dot_grpc_dot_checks_dot_checks__pb2
-
-
-class CheckServiceStub(object):
- """Missing associated documentation comment in .proto file."""
-
- def __init__(self, channel):
- """Constructor.
-
- Args:
- channel: A grpc.Channel.
- """
- self.Check = channel.unary_unary(
- '/checks.v1.CheckService/Check',
- request_serializer=sophrosyne_dot_grpc_dot_checks_dot_checks__pb2.CheckRequest.SerializeToString,
- response_deserializer=sophrosyne_dot_grpc_dot_checks_dot_checks__pb2.CheckResponse.FromString,
- )
-
-
-class CheckServiceServicer(object):
- """Missing associated documentation comment in .proto file."""
-
- def Check(self, request, context):
- """Missing associated documentation comment in .proto file."""
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
-
-
-def add_CheckServiceServicer_to_server(servicer, server):
- rpc_method_handlers = {
- 'Check': grpc.unary_unary_rpc_method_handler(
- servicer.Check,
- request_deserializer=sophrosyne_dot_grpc_dot_checks_dot_checks__pb2.CheckRequest.FromString,
- response_serializer=sophrosyne_dot_grpc_dot_checks_dot_checks__pb2.CheckResponse.SerializeToString,
- ),
- }
- generic_handler = grpc.method_handlers_generic_handler(
- 'checks.v1.CheckService', rpc_method_handlers)
- server.add_generic_rpc_handlers((generic_handler,))
-
-
- # This class is part of an EXPERIMENTAL API.
-class CheckService(object):
- """Missing associated documentation comment in .proto file."""
-
- @staticmethod
- def Check(request,
- target,
- options=(),
- channel_credentials=None,
- call_credentials=None,
- insecure=False,
- compression=None,
- wait_for_ready=None,
- timeout=None,
- metadata=None):
- return grpc.experimental.unary_unary(request, target, '/checks.v1.CheckService/Check',
- sophrosyne_dot_grpc_dot_checks_dot_checks__pb2.CheckRequest.SerializeToString,
- sophrosyne_dot_grpc_dot_checks_dot_checks__pb2.CheckResponse.FromString,
- options, channel_credentials,
- insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
diff --git a/src/sophrosyne/healthcheck/__init__.py b/src/sophrosyne/healthcheck/__init__.py
deleted file mode 100644
index 8d68ddb..0000000
--- a/src/sophrosyne/healthcheck/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-"""Healthcheck module."""
diff --git a/src/sophrosyne/healthcheck/models.py b/src/sophrosyne/healthcheck/models.py
deleted file mode 100644
index de814d0..0000000
--- a/src/sophrosyne/healthcheck/models.py
+++ /dev/null
@@ -1,182 +0,0 @@
-"""Healthcheck models.
-
-Models are based on the following expired IETF healthcheck draft:
-https://datatracker.ietf.org/doc/html/draft-inadarei-api-health-check
-
-Attributes:
- Status (Enum): The status of the health check.
- SubComponent (BaseModel): The subcomponent of the health check.
- Check (BaseModel): The check of the health check.
- HealthCheck (BaseModel): The health check.
-"""
-
-from enum import Enum
-from typing import Any
-
-from pydantic import (
- AwareDatetime,
- BaseModel,
- Field,
- model_serializer,
-)
-
-
-class Status(str, Enum):
- """Represents the status of a health check.
-
- Attributes:
- PASS (str): Indicates a successful health check.
- FAIL (str): Indicates a failed health check.
- WARN (str): Indicates a warning for a health check.
- """
-
- PASS = "pass"
- FAIL = "fail"
- WARN = "warn"
-
-
-class SubComponent(BaseModel):
- """Represents a subcomponent of a health check.
-
- Attributes:
- component_id (str | None): The ID of the subcomponent.
- component_type (str | None): The type of the subcomponent.
- observed_value (str | int | float | list | dict | bool | None): The observed value of the subcomponent.
- observed_unit (str | None): The unit of measurement for the observed value.
- status (Status | None): The status of the subcomponent.
- affected_endpoints (list[str] | None): The endpoints affected by the subcomponent.
- time (AwareDatetime | None): The timestamp of when the subcomponent was observed.
- output (str | None): The output of the subcomponent.
- links (dict[str, str] | None): Additional links related to the subcomponent.
- additional_keys (dict[str, str] | None): Additional custom keys and values for the subcomponent.
- """
-
- component_id: str | None = Field(default=None, serialization_alias="componentId")
- component_type: str | None = Field(
- default=None, serialization_alias="componentType"
- )
- observed_value: str | int | float | list | dict | bool | None = Field(
- default=None, serialization_alias="observedValue"
- )
- observed_unit: str | None = Field(default=None, serialization_alias="observedUnit")
- status: Status | None = Field(default=None)
- affected_endpoints: list[str] | None = Field(
- default=None, serialization_alias="affectedEndpoints"
- )
- time: AwareDatetime | None = Field(default=None)
- output: str | None = Field(default=None)
- links: dict[str, str] | None = Field(default=None)
-
- additional_keys: dict[str, str] | None = Field(
- default=None, serialization_alias="additionalKeys"
- )
-
- @model_serializer
- def ser_model(self) -> dict[str, Any]:
- """Serializes the model object into a dictionary.
-
- Returns:
- dict[str, Any]: The serialized model as a dictionary.
- """
- out: dict[str, Any] = {}
- if self.component_id is not None:
- out["componentId"] = self.component_id
- if self.component_type is not None:
- out["componentType"] = self.component_type
- if self.observed_value is not None:
- out["observedValue"] = self.observed_value
- if self.observed_unit is not None:
- out["observedUnit"] = self.observed_unit
- if self.status is not None:
- out["status"] = self.status
- if self.affected_endpoints is not None and self.status != Status.PASS:
- out["affectedEndpoints"] = self.affected_endpoints
- if self.time is not None:
- out["time"] = self.time
- if self.output is not None and self.status != Status.PASS:
- out["output"] = self.output
- if self.links is not None:
- out["links"] = self.links
-
- if self.additional_keys is not None:
- for key, value in self.additional_keys.items():
- if key not in out: # Do not overwrite existing keys
- out[key] = value
- return out
-
-
-class Check(BaseModel):
- """Represents a health check.
-
- Attributes:
- sub_components (dict[str, list[SubComponent]] | None): A dictionary mapping sub-component names to lists of SubComponent objects.
- """
-
- sub_components: dict[str, list[SubComponent]] | None = Field(default=None)
-
- @model_serializer
- def ser_model(self) -> dict[str, Any] | None:
- """Serialize the Check object to a dictionary.
-
- Returns:
- dict[str, Any] | None: The serialized Check object, or None if sub_components is None.
- """
- out: dict[str, Any] = {}
- if self.sub_components is None:
- return None
- for key, value in self.sub_components.items():
- out[key] = [v.ser_model() for v in value]
-
- return out
-
-
-class HealthCheck(BaseModel):
- """Represents a health check object.
-
- Attributes:
- status (Status): The status of the health check.
- version (str | None): The version of the health check (default: None).
- release_ID (str | None): The release ID of the health check (default: None).
- notes (str | None): Additional notes for the health check (default: None).
- output (str | None): The output of the health check (default: None).
- checks (Check | None): The checks performed for the health check (default: None).
- links (dict[str, str] | None): Links related to the health check (default: None).
- service_id (str | None): The service ID of the health check (default: None).
- description (str | None): The description of the health check (default: None).
- """
-
- status: Status = Field()
- version: str | None = Field(default=None)
- release_ID: str | None = Field(default=None, serialization_alias="releaseId")
- notes: str | None = Field(default=None)
- output: str | None = Field(default=None)
- checks: Check | None = Field(default=None)
- links: dict[str, str] | None = Field(default=None)
- service_id: str | None = Field(default=None, serialization_alias="serviceId")
- description: str | None = Field(default=None)
-
- @model_serializer
- def ser_model(self) -> dict[str, Any]:
- """Serializes the model into a dictionary.
-
- Returns:
- dict[str, Any]: The serialized model.
- """
- out: dict[str, Any] = {"status": self.status}
- if self.version is not None:
- out["version"] = self.version
- if self.release_ID is not None:
- out["releaseId"] = self.release_ID
- if self.notes is not None:
- out["notes"] = self.notes
- if self.output is not None and self.status != Status.PASS:
- out["output"] = self.output
- if self.checks is not None:
- out["checks"] = self.checks.ser_model()
- if self.links is not None:
- out["links"] = self.links
- if self.service_id is not None:
- out["serviceId"] = self.service_id
- if self.description is not None:
- out["description"] = self.description
- return out
diff --git a/src/sophrosyne/main.py b/src/sophrosyne/main.py
deleted file mode 100644
index 6e38799..0000000
--- a/src/sophrosyne/main.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# -*- coding: utf-8 -*-
-"""Main module for the SOPH API.
-
-This module is the entry point for the SOPH API service. It creates the FastAPI
-application and starts the uvicorn server.
-
-Example:
- To start the SOPH API service, run the following command:
- $ python -m sophrosyne.main run
-
- It's also to run use a filepath to the main module:
- $ python src/sophrosyne/main.py run
-"""
-
-#
-#
-# Do NOT import any modules from sophrosyne, except the commands module, before
-# you've read the docstring for the sophrosyne.commands._necessary_evil
-# function.
-#
-#
-
-import sys
-
-# Remove local directory from sys.path to avoid importing local modules by mistake
-# instead of the installed ones. Currently, if this is not in place, the local
-# `grpc` module will be imported instead of the installed `grpc` module.
-sys.path = sys.path[1:]
-
-from sophrosyne.commands import setup_and_run_commands
-
-if __name__ == "__main__":
- setup_and_run_commands()
diff --git a/src/sophrosyne/migrations/env.py b/src/sophrosyne/migrations/env.py
deleted file mode 100644
index 47a6d42..0000000
--- a/src/sophrosyne/migrations/env.py
+++ /dev/null
@@ -1,90 +0,0 @@
-"""environment setup for alembic."""
-
-import asyncio
-from logging.config import fileConfig
-
-from alembic import context
-from sqlalchemy import pool
-from sqlalchemy.engine import Connection
-from sqlalchemy.ext.asyncio import AsyncEngine, async_engine_from_config
-from sqlmodel import SQLModel
-
-# this is the Alembic Config object, which provides
-# access to the values within the .ini file in use.
-config = context.config
-
-# Interpret the config file for Python logging.
-# This line sets up loggers basically.
-if config.config_file_name is not None:
- fileConfig(config.config_file_name)
-
-# add your model's MetaData object here
-# for 'autogenerate' support
-# from myapp import mymodel
-# target_metadata = mymodel.Base.metadata
-target_metadata = SQLModel.metadata
-
-# other values from the config, defined by the needs of env.py,
-# can be acquired:
-# my_important_option = config.get_main_option("my_important_option")
-# ... etc.
-
-
-def run_migrations_offline() -> None:
- """Run migrations in 'offline' mode.
-
- This configures the context with just a URL
- and not an Engine, though an Engine is acceptable
- here as well. By skipping the Engine creation
- we don't even need a DBAPI to be available.
-
- Calls to context.execute() here emit the given string to the
- script output.
- """
- url = config.get_main_option("sqlalchemy.url")
- context.configure(
- url=url,
- target_metadata=target_metadata,
- literal_binds=True,
- dialect_opts={"paramstyle": "named"},
- )
-
- with context.begin_transaction():
- context.run_migrations()
-
-
-def run_migrations_online() -> None:
- """Run migrations in 'online' mode."""
- connectable = context.config.attributes.get("connection", None)
- if connectable is None:
- connectable = async_engine_from_config(
- config.get_section(config.config_ini_section, {}),
- prefix="sqlalchemy.",
- poolclass=pool.NullPool,
- )
-
- if isinstance(connectable, AsyncEngine):
- asyncio.run(run_async_migrations(connectable))
- else:
- do_run_migrations(connectable)
-
-
-def do_run_migrations(connection: Connection) -> None:
- """Run migrations synchronously."""
- context.configure(connection=connection, target_metadata=target_metadata)
-
- with context.begin_transaction():
- context.run_migrations()
-
-
-async def run_async_migrations(connectable) -> None:
- """Run migrations asynchronously."""
- async with connectable.connect() as connection:
- await connection.run_sync(do_run_migrations)
- await connectable.dispose()
-
-
-if context.is_offline_mode():
- run_migrations_offline()
-else:
- run_migrations_online()
diff --git a/src/sophrosyne/migrations/script.py.mako b/src/sophrosyne/migrations/script.py.mako
deleted file mode 100644
index 6ce3351..0000000
--- a/src/sophrosyne/migrations/script.py.mako
+++ /dev/null
@@ -1,27 +0,0 @@
-"""${message}
-
-Revision ID: ${up_revision}
-Revises: ${down_revision | comma,n}
-Create Date: ${create_date}
-
-"""
-from typing import Sequence, Union
-
-from alembic import op
-import sqlalchemy as sa
-import sqlmodel
-${imports if imports else ""}
-
-# revision identifiers, used by Alembic.
-revision: str = ${repr(up_revision)}
-down_revision: Union[str, None] = ${repr(down_revision)}
-branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
-depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
-
-
-def upgrade() -> None:
- ${upgrades if upgrades else "pass"}
-
-
-def downgrade() -> None:
- ${downgrades if downgrades else "pass"}
diff --git a/src/sophrosyne/migrations/versions/089f4a23cd3c_init.py b/src/sophrosyne/migrations/versions/089f4a23cd3c_init.py
deleted file mode 100644
index cdd115c..0000000
--- a/src/sophrosyne/migrations/versions/089f4a23cd3c_init.py
+++ /dev/null
@@ -1,70 +0,0 @@
-"""empty message
-
-Revision ID: 089f4a23cd3c
-Revises:
-Create Date: 2024-04-05 23:51:41.954611
-
-"""
-from typing import Sequence, Union
-
-import sqlalchemy as sa
-import sqlmodel
-from alembic import op
-
-# revision identifiers, used by Alembic.
-revision: str = '089f4a23cd3c'
-down_revision: Union[str, None] = None
-branch_labels: Union[str, Sequence[str], None] = None
-depends_on: Union[str, Sequence[str], None] = None
-
-
-def upgrade() -> None:
- # ### commands auto generated by Alembic - please adjust! ###
- op.create_table('check',
- sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
- sa.Column('created_at', sa.DateTime(), nullable=False),
- sa.Column('id', sa.Integer(), nullable=False),
- sa.PrimaryKeyConstraint('id')
- )
- op.create_index(op.f('ix_check_name'), 'check', ['name'], unique=True)
- op.create_table('profile',
- sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
- sa.Column('created_at', sa.DateTime(), nullable=False),
- sa.Column('id', sa.Integer(), nullable=False),
- sa.PrimaryKeyConstraint('id')
- )
- op.create_index(op.f('ix_profile_name'), 'profile', ['name'], unique=True)
- op.create_table('profilecheckassociation',
- sa.Column('profile_id', sa.Integer(), nullable=False),
- sa.Column('check_id', sa.Integer(), nullable=False),
- sa.ForeignKeyConstraint(['check_id'], ['check.id'], ),
- sa.ForeignKeyConstraint(['profile_id'], ['profile.id'], ),
- sa.PrimaryKeyConstraint('profile_id', 'check_id')
- )
- op.create_table('user',
- sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
- sa.Column('created_at', sa.DateTime(), nullable=False),
- sa.Column('contact', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
- sa.Column('is_active', sa.Boolean(), nullable=False),
- sa.Column('default_profile', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
- sa.Column('id', sa.Integer(), nullable=False),
- sa.Column('token', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
- sa.ForeignKeyConstraint(['default_profile'], ['profile.name'], ),
- sa.PrimaryKeyConstraint('id')
- )
- op.create_index(op.f('ix_user_name'), 'user', ['name'], unique=True)
- op.create_index(op.f('ix_user_token'), 'user', ['token'], unique=True)
- # ### end Alembic commands ###
-
-
-def downgrade() -> None:
- # ### commands auto generated by Alembic - please adjust! ###
- op.drop_index(op.f('ix_user_token'), table_name='user')
- op.drop_index(op.f('ix_user_name'), table_name='user')
- op.drop_table('user')
- op.drop_table('profilecheckassociation')
- op.drop_index(op.f('ix_profile_name'), table_name='profile')
- op.drop_table('profile')
- op.drop_index(op.f('ix_check_name'), table_name='check')
- op.drop_table('check')
- # ### end Alembic commands ###
diff --git a/tests/__init__.py b/tests/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/tests/integration/auth01/bruno.json b/tests/integration/auth01/bruno.json
deleted file mode 100644
index d1d986c..0000000
--- a/tests/integration/auth01/bruno.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
- "version": "1",
- "name": "integration_auth01",
- "type": "collection",
- "ignore": []
-}
diff --git a/tests/integration/auth01/collection.bru b/tests/integration/auth01/collection.bru
deleted file mode 100644
index e69de29..0000000
diff --git a/tests/integration/auth01/config.yaml b/tests/integration/auth01/config.yaml
deleted file mode 100644
index 3f169ba..0000000
--- a/tests/integration/auth01/config.yaml
+++ /dev/null
@@ -1,8 +0,0 @@
-security:
- site_key: 'lecGtZB0czpTHnCAKAbK3biwvDb+uFUZZ6yQL4CAb3lC8RpokUfAr0cBp8CNzYXLHOl+8paSnJDifq4F7Rx70A=='
- salt: 'BtNgvcpdVBxV7OQ9hIm4XJGOru0k7GonkIJKBcjQoNY='
- key_path: 'build/server.key'
- certificate_path: 'build/server.crt'
- outgoing_tls_ca_path: 'build/server.crt'
-database:
- host: 'db'
diff --git a/tests/integration/auth01/docker-compose.yml b/tests/integration/auth01/docker-compose.yml
deleted file mode 100644
index 889a677..0000000
--- a/tests/integration/auth01/docker-compose.yml
+++ /dev/null
@@ -1,47 +0,0 @@
-services:
-
- tester:
- image: ghcr.io/madsrc/bru:latest
- depends_on:
- api:
- condition: service_healthy
- command: run --env integration --insecure --env-var root_token="${ROOT_TOKEN}" ./tests -r
- volumes:
- - ./tests:/app/tests:ro
- - ./bruno.json:/app/bruno.json:ro
- - ./environments:/app/environments:ro
- - ./collection.bru:/app/collection.bru:ro
-
- api:
- image: sophrosyne:${VERSION}
- ports:
- - "8000:8000"
- environment:
- - SOPH_DEVELOPMENT__STATIC_ROOT_TOKEN=${ROOT_TOKEN}
- - SOPH__CONFIG_YAML_FILE=/app/integration.yaml
- depends_on:
- db:
- condition: service_healthy
- volumes:
- - ./config.yaml:/app/integration.yaml:ro
- - ../../../build:/app/build:ro
- healthcheck:
- test: ["CMD", "python", "-m", "sophrosyne.main", "healthcheck"]
- interval: 5s
- timeout: 5s
- retries: 5
-
- db:
- image: postgres
- restart: always
- # set shared memory limit when using docker-compose
- shm_size: 128mb
- ports:
- - "5432:5432"
- environment:
- POSTGRES_PASSWORD: postgres
- healthcheck:
- test: ["CMD-SHELL", "pg_isready -U postgres"]
- interval: 5s
- timeout: 5s
- retries: 5
diff --git a/tests/integration/auth01/environments/integration.bru b/tests/integration/auth01/environments/integration.bru
deleted file mode 100644
index 32811a7..0000000
--- a/tests/integration/auth01/environments/integration.bru
+++ /dev/null
@@ -1,8 +0,0 @@
-vars {
- scheme: https://
- host: api
- port: 8000
-}
-vars:secret [
- root_token
-]
diff --git a/tests/integration/auth01/tests/authenticated-healthcheck.bru b/tests/integration/auth01/tests/authenticated-healthcheck.bru
deleted file mode 100644
index f154a5d..0000000
--- a/tests/integration/auth01/tests/authenticated-healthcheck.bru
+++ /dev/null
@@ -1,26 +0,0 @@
-meta {
- name: authenticated-healthcheck
- type: http
- seq: 2
-}
-
-get {
- url: {{scheme}}{{host}}:{{port}}/health
- body: json
- auth: bearer
-}
-
-headers {
- Content-Type: application/json
-}
-
-auth:bearer {
- token: {{root_token}}
-}
-
-assert {
- res.status: eq 200
- res.body.status: pass
- res.body.version: isDefined
- res.body.checks: isDefined
-}
diff --git a/tests/integration/auth01/tests/bad-authentication-healthcheck.bru b/tests/integration/auth01/tests/bad-authentication-healthcheck.bru
deleted file mode 100644
index 285f4eb..0000000
--- a/tests/integration/auth01/tests/bad-authentication-healthcheck.bru
+++ /dev/null
@@ -1,26 +0,0 @@
-meta {
- name: authenticated-healthcheck
- type: http
- seq: 3
-}
-
-get {
- url: {{scheme}}{{host}}:{{port}}/health
- body: json
- auth: bearer
-}
-
-headers {
- Content-Type: application/json
-}
-
-auth:bearer {
- token: badToken
-}
-
-assert {
- res.status: eq 200
- res.body.status: pass
- res.body.version: isUndefined
- res.body.checks: isUndefined
-}
diff --git a/tests/integration/auth01/tests/unauthenticated-healthcheck.bru b/tests/integration/auth01/tests/unauthenticated-healthcheck.bru
deleted file mode 100644
index eb58cf0..0000000
--- a/tests/integration/auth01/tests/unauthenticated-healthcheck.bru
+++ /dev/null
@@ -1,22 +0,0 @@
-meta {
- name: unauthenticated-healthcheck
- type: http
- seq: 1
-}
-
-get {
- url: {{scheme}}{{host}}:{{port}}/health
- body: json
- auth: none
-}
-
-headers {
- Content-Type: application/json
-}
-
-assert {
- res.status: eq 200
- res.body.status: pass
- res.body.version: isUndefined
- res.body.checks: isUndefined
-}
diff --git a/tests/integration/auth_required/bruno.json b/tests/integration/auth_required/bruno.json
deleted file mode 100644
index dc78460..0000000
--- a/tests/integration/auth_required/bruno.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
- "version": "1",
- "name": "integration_auth_required",
- "type": "collection",
- "ignore": []
-}
diff --git a/tests/integration/auth_required/collection.bru b/tests/integration/auth_required/collection.bru
deleted file mode 100644
index e69de29..0000000
diff --git a/tests/integration/auth_required/config.yaml b/tests/integration/auth_required/config.yaml
deleted file mode 100644
index 3f169ba..0000000
--- a/tests/integration/auth_required/config.yaml
+++ /dev/null
@@ -1,8 +0,0 @@
-security:
- site_key: 'lecGtZB0czpTHnCAKAbK3biwvDb+uFUZZ6yQL4CAb3lC8RpokUfAr0cBp8CNzYXLHOl+8paSnJDifq4F7Rx70A=='
- salt: 'BtNgvcpdVBxV7OQ9hIm4XJGOru0k7GonkIJKBcjQoNY='
- key_path: 'build/server.key'
- certificate_path: 'build/server.crt'
- outgoing_tls_ca_path: 'build/server.crt'
-database:
- host: 'db'
diff --git a/tests/integration/auth_required/docker-compose.yml b/tests/integration/auth_required/docker-compose.yml
deleted file mode 100644
index 889a677..0000000
--- a/tests/integration/auth_required/docker-compose.yml
+++ /dev/null
@@ -1,47 +0,0 @@
-services:
-
- tester:
- image: ghcr.io/madsrc/bru:latest
- depends_on:
- api:
- condition: service_healthy
- command: run --env integration --insecure --env-var root_token="${ROOT_TOKEN}" ./tests -r
- volumes:
- - ./tests:/app/tests:ro
- - ./bruno.json:/app/bruno.json:ro
- - ./environments:/app/environments:ro
- - ./collection.bru:/app/collection.bru:ro
-
- api:
- image: sophrosyne:${VERSION}
- ports:
- - "8000:8000"
- environment:
- - SOPH_DEVELOPMENT__STATIC_ROOT_TOKEN=${ROOT_TOKEN}
- - SOPH__CONFIG_YAML_FILE=/app/integration.yaml
- depends_on:
- db:
- condition: service_healthy
- volumes:
- - ./config.yaml:/app/integration.yaml:ro
- - ../../../build:/app/build:ro
- healthcheck:
- test: ["CMD", "python", "-m", "sophrosyne.main", "healthcheck"]
- interval: 5s
- timeout: 5s
- retries: 5
-
- db:
- image: postgres
- restart: always
- # set shared memory limit when using docker-compose
- shm_size: 128mb
- ports:
- - "5432:5432"
- environment:
- POSTGRES_PASSWORD: postgres
- healthcheck:
- test: ["CMD-SHELL", "pg_isready -U postgres"]
- interval: 5s
- timeout: 5s
- retries: 5
diff --git a/tests/integration/auth_required/environments/integration.bru b/tests/integration/auth_required/environments/integration.bru
deleted file mode 100644
index 32811a7..0000000
--- a/tests/integration/auth_required/environments/integration.bru
+++ /dev/null
@@ -1,8 +0,0 @@
-vars {
- scheme: https://
- host: api
- port: 8000
-}
-vars:secret [
- root_token
-]
diff --git a/tests/integration/auth_required/tests/checks/create-check.bru b/tests/integration/auth_required/tests/checks/create-check.bru
deleted file mode 100644
index 445f755..0000000
--- a/tests/integration/auth_required/tests/checks/create-check.bru
+++ /dev/null
@@ -1,21 +0,0 @@
-meta {
- name: create-check
- type: http
- seq: 1
-}
-
-post {
- url: {{scheme}}{{host}}:{{port}}/v1/checks/create-check
- body: json
- auth: none
-}
-
-headers {
- Content-Type: application/json
-}
-
-assert {
- res.status: eq 403
- res.body: isDefined
- res.body.detail: Not authenticated
-}
diff --git a/tests/integration/auth_required/tests/checks/delete-check.bru b/tests/integration/auth_required/tests/checks/delete-check.bru
deleted file mode 100644
index 40193ea..0000000
--- a/tests/integration/auth_required/tests/checks/delete-check.bru
+++ /dev/null
@@ -1,21 +0,0 @@
-meta {
- name: delete-check
- type: http
- seq: 5
-}
-
-delete {
- url: {{scheme}}{{host}}:{{port}}/v1/checks/delete-check
- body: json
- auth: none
-}
-
-headers {
- Content-Type: application/json
-}
-
-assert {
- res.status: eq 403
- res.body: isDefined
- res.body.detail: Not authenticated
-}
diff --git a/tests/integration/auth_required/tests/checks/list-check.bru b/tests/integration/auth_required/tests/checks/list-check.bru
deleted file mode 100644
index 95d1d71..0000000
--- a/tests/integration/auth_required/tests/checks/list-check.bru
+++ /dev/null
@@ -1,21 +0,0 @@
-meta {
- name: list-check
- type: http
- seq: 3
-}
-
-post {
- url: {{scheme}}{{host}}:{{port}}/v1/checks/list-check
- body: json
- auth: none
-}
-
-headers {
- Content-Type: application/json
-}
-
-assert {
- res.status: eq 403
- res.body: isDefined
- res.body.detail: Not authenticated
-}
diff --git a/tests/integration/auth_required/tests/checks/list-checks.bru b/tests/integration/auth_required/tests/checks/list-checks.bru
deleted file mode 100644
index 4cf0779..0000000
--- a/tests/integration/auth_required/tests/checks/list-checks.bru
+++ /dev/null
@@ -1,21 +0,0 @@
-meta {
- name: list-checks
- type: http
- seq: 2
-}
-
-get {
- url: {{scheme}}{{host}}:{{port}}/v1/checks/list-checks
- body: json
- auth: none
-}
-
-headers {
- Content-Type: application/json
-}
-
-assert {
- res.status: eq 403
- res.body: isDefined
- res.body.detail: Not authenticated
-}
diff --git a/tests/integration/auth_required/tests/checks/update-check.bru b/tests/integration/auth_required/tests/checks/update-check.bru
deleted file mode 100644
index 8edc42d..0000000
--- a/tests/integration/auth_required/tests/checks/update-check.bru
+++ /dev/null
@@ -1,21 +0,0 @@
-meta {
- name: update-check
- type: http
- seq: 4
-}
-
-patch {
- url: {{scheme}}{{host}}:{{port}}/v1/checks/update-check
- body: json
- auth: none
-}
-
-headers {
- Content-Type: application/json
-}
-
-assert {
- res.status: eq 403
- res.body: isDefined
- res.body.detail: Not authenticated
-}
diff --git a/tests/integration/auth_required/tests/profiles/create-profile.bru b/tests/integration/auth_required/tests/profiles/create-profile.bru
deleted file mode 100644
index 25c3cef..0000000
--- a/tests/integration/auth_required/tests/profiles/create-profile.bru
+++ /dev/null
@@ -1,21 +0,0 @@
-meta {
- name: create-profile
- type: http
- seq: 1
-}
-
-post {
- url: {{scheme}}{{host}}:{{port}}/v1/profiles/create-profile
- body: json
- auth: none
-}
-
-headers {
- Content-Type: application/json
-}
-
-assert {
- res.status: eq 403
- res.body: isDefined
- res.body.detail: Not authenticated
-}
diff --git a/tests/integration/auth_required/tests/profiles/delete-profile.bru b/tests/integration/auth_required/tests/profiles/delete-profile.bru
deleted file mode 100644
index 3a7663d..0000000
--- a/tests/integration/auth_required/tests/profiles/delete-profile.bru
+++ /dev/null
@@ -1,21 +0,0 @@
-meta {
- name: delete-profile
- type: http
- seq: 5
-}
-
-delete {
- url: {{scheme}}{{host}}:{{port}}/v1/profiles/delete-profile
- body: json
- auth: none
-}
-
-headers {
- Content-Type: application/json
-}
-
-assert {
- res.status: eq 403
- res.body: isDefined
- res.body.detail: Not authenticated
-}
diff --git a/tests/integration/auth_required/tests/profiles/list-profile.bru b/tests/integration/auth_required/tests/profiles/list-profile.bru
deleted file mode 100644
index 8bbda38..0000000
--- a/tests/integration/auth_required/tests/profiles/list-profile.bru
+++ /dev/null
@@ -1,21 +0,0 @@
-meta {
- name: list-profile
- type: http
- seq: 3
-}
-
-post {
- url: {{scheme}}{{host}}:{{port}}/v1/profiles/list-profile
- body: json
- auth: none
-}
-
-headers {
- Content-Type: application/json
-}
-
-assert {
- res.status: eq 403
- res.body: isDefined
- res.body.detail: Not authenticated
-}
diff --git a/tests/integration/auth_required/tests/profiles/list-profiles.bru b/tests/integration/auth_required/tests/profiles/list-profiles.bru
deleted file mode 100644
index 4d4296e..0000000
--- a/tests/integration/auth_required/tests/profiles/list-profiles.bru
+++ /dev/null
@@ -1,21 +0,0 @@
-meta {
- name: list-profiles
- type: http
- seq: 2
-}
-
-get {
- url: {{scheme}}{{host}}:{{port}}/v1/profiles/list-profiles
- body: json
- auth: none
-}
-
-headers {
- Content-Type: application/json
-}
-
-assert {
- res.status: eq 403
- res.body: isDefined
- res.body.detail: Not authenticated
-}
diff --git a/tests/integration/auth_required/tests/profiles/update-profile.bru b/tests/integration/auth_required/tests/profiles/update-profile.bru
deleted file mode 100644
index a43a8d2..0000000
--- a/tests/integration/auth_required/tests/profiles/update-profile.bru
+++ /dev/null
@@ -1,21 +0,0 @@
-meta {
- name: update-profile
- type: http
- seq: 4
-}
-
-patch {
- url: {{scheme}}{{host}}:{{port}}/v1/profiles/update-profile
- body: json
- auth: none
-}
-
-headers {
- Content-Type: application/json
-}
-
-assert {
- res.status: eq 403
- res.body: isDefined
- res.body.detail: Not authenticated
-}
diff --git a/tests/integration/auth_required/tests/safety/scan.bru b/tests/integration/auth_required/tests/safety/scan.bru
deleted file mode 100644
index cc4bdad..0000000
--- a/tests/integration/auth_required/tests/safety/scan.bru
+++ /dev/null
@@ -1,21 +0,0 @@
-meta {
- name: scan
- type: http
- seq: 1
-}
-
-post {
- url: {{scheme}}{{host}}:{{port}}/v1/safety/scan
- body: json
- auth: none
-}
-
-headers {
- Content-Type: application/json
-}
-
-assert {
- res.status: eq 403
- res.body: isDefined
- res.body.detail: Not authenticated
-}
diff --git a/tests/integration/auth_required/tests/users/create-user.bru b/tests/integration/auth_required/tests/users/create-user.bru
deleted file mode 100644
index 4ea4388..0000000
--- a/tests/integration/auth_required/tests/users/create-user.bru
+++ /dev/null
@@ -1,21 +0,0 @@
-meta {
- name: create-user
- type: http
- seq: 1
-}
-
-post {
- url: {{scheme}}{{host}}:{{port}}/v1/users/create-user
- body: json
- auth: none
-}
-
-headers {
- Content-Type: application/json
-}
-
-assert {
- res.status: eq 403
- res.body: isDefined
- res.body.detail: Not authenticated
-}
diff --git a/tests/integration/auth_required/tests/users/delete-user.bru b/tests/integration/auth_required/tests/users/delete-user.bru
deleted file mode 100644
index d2372b5..0000000
--- a/tests/integration/auth_required/tests/users/delete-user.bru
+++ /dev/null
@@ -1,21 +0,0 @@
-meta {
- name: delete-user
- type: http
- seq: 5
-}
-
-delete {
- url: {{scheme}}{{host}}:{{port}}/v1/users/delete-user
- body: json
- auth: none
-}
-
-headers {
- Content-Type: application/json
-}
-
-assert {
- res.status: eq 403
- res.body: isDefined
- res.body.detail: Not authenticated
-}
diff --git a/tests/integration/auth_required/tests/users/list-user.bru b/tests/integration/auth_required/tests/users/list-user.bru
deleted file mode 100644
index 5e8f5fa..0000000
--- a/tests/integration/auth_required/tests/users/list-user.bru
+++ /dev/null
@@ -1,21 +0,0 @@
-meta {
- name: list-user
- type: http
- seq: 3
-}
-
-post {
- url: {{scheme}}{{host}}:{{port}}/v1/users/list-user
- body: json
- auth: none
-}
-
-headers {
- Content-Type: application/json
-}
-
-assert {
- res.status: eq 403
- res.body: isDefined
- res.body.detail: Not authenticated
-}
diff --git a/tests/integration/auth_required/tests/users/list-users.bru b/tests/integration/auth_required/tests/users/list-users.bru
deleted file mode 100644
index 4e6fe34..0000000
--- a/tests/integration/auth_required/tests/users/list-users.bru
+++ /dev/null
@@ -1,21 +0,0 @@
-meta {
- name: list-users
- type: http
- seq: 2
-}
-
-get {
- url: {{scheme}}{{host}}:{{port}}/v1/users/list-users
- body: json
- auth: none
-}
-
-headers {
- Content-Type: application/json
-}
-
-assert {
- res.status: eq 403
- res.body: isDefined
- res.body.detail: Not authenticated
-}
diff --git a/tests/integration/auth_required/tests/users/rotate-token.bru b/tests/integration/auth_required/tests/users/rotate-token.bru
deleted file mode 100644
index b9df3d2..0000000
--- a/tests/integration/auth_required/tests/users/rotate-token.bru
+++ /dev/null
@@ -1,21 +0,0 @@
-meta {
- name: rotate-token
- type: http
- seq: 6
-}
-
-post {
- url: {{scheme}}{{host}}:{{port}}/v1/users/rotate-token
- body: json
- auth: none
-}
-
-headers {
- Content-Type: application/json
-}
-
-assert {
- res.status: eq 403
- res.body: isDefined
- res.body.detail: Not authenticated
-}
diff --git a/tests/integration/auth_required/tests/users/update-users.bru b/tests/integration/auth_required/tests/users/update-users.bru
deleted file mode 100644
index e6e728c..0000000
--- a/tests/integration/auth_required/tests/users/update-users.bru
+++ /dev/null
@@ -1,21 +0,0 @@
-meta {
- name: update-user
- type: http
- seq: 4
-}
-
-patch {
- url: {{scheme}}{{host}}:{{port}}/v1/users/update-user
- body: json
- auth: none
-}
-
-headers {
- Content-Type: application/json
-}
-
-assert {
- res.status: eq 403
- res.body: isDefined
- res.body.detail: Not authenticated
-}
diff --git a/tests/integration/doc.go b/tests/integration/doc.go
new file mode 100644
index 0000000..cac324b
--- /dev/null
+++ b/tests/integration/doc.go
@@ -0,0 +1,45 @@
+// Sophrosyne
+// Copyright (C) 2024 Mads R. Havmand
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+// Package integration contains integration tests for Sophrosyne. The integration tests are done in the style of
+// black-box testing and thus don't try to test private/internal functionality that doesn't affect the public
+// footprint of Sophrosyne.
+//
+// Our integration tests rely heavily on [github.com/testcontainers/testcontainers-go] in order to spin up a
+// realistic, yet ephemeral and reproducible test environment. There have previously been issues with running
+// testcontainers-go with Podman and Colima, so for the time being, using Docker is preferred.
+//
+// The tests take the form of an outside observer / client, and as such the actual Sophrosyne application will be
+// started as a container and the tests will interact with this container, usually via HTTP. In every test, the first
+// order of business is to run the [setupEnv] function, as this will bootstrap everything. It is not recommended to
+// spin up a new environment for every test, setting up the environment and taking it down again takes 5-10 seconds.
+// Only create a brand-new environment if absolutely necessary.
+//
+// [setupEnv] returns a [testEnv] struct that contains everything necessary to talk to the integration test environment.
+//
+// The test environment will attempt to unmarshal every log from Sophrosyne as JSON, and if this isn't possible, the
+// running test will fail.
+//
+// Before running the integration tests, the code has to be built first. This does not happen as part of running the
+// tests. As they run in Docker, even if running on macOS, you will need to ensure the binary is built for Linux.
+// On a macOS M-series machine, the software can be built and a Docker image created and loaded by running:
+//
+// mise run build:dist --goos=linux --goarch=arm64
+// mise run build:docker
+// cat build/sophrosyne.tar | docker load
+//
+// This also applies if making changes to the code, and you want to test these changes.
+package integration
diff --git a/tests/integration/healthy_instance/bruno.json b/tests/integration/healthy_instance/bruno.json
deleted file mode 100644
index ad1942e..0000000
--- a/tests/integration/healthy_instance/bruno.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
- "version": "1",
- "name": "integration_healthy_instance",
- "type": "collection",
- "ignore": []
-}
diff --git a/tests/integration/healthy_instance/collection.bru b/tests/integration/healthy_instance/collection.bru
deleted file mode 100644
index 6181bdb..0000000
--- a/tests/integration/healthy_instance/collection.bru
+++ /dev/null
@@ -1,7 +0,0 @@
-auth {
- mode: bearer
-}
-
-auth:bearer {
- token: {{root_token}}
-}
diff --git a/tests/integration/healthy_instance/config.yaml b/tests/integration/healthy_instance/config.yaml
deleted file mode 100644
index 3f169ba..0000000
--- a/tests/integration/healthy_instance/config.yaml
+++ /dev/null
@@ -1,8 +0,0 @@
-security:
- site_key: 'lecGtZB0czpTHnCAKAbK3biwvDb+uFUZZ6yQL4CAb3lC8RpokUfAr0cBp8CNzYXLHOl+8paSnJDifq4F7Rx70A=='
- salt: 'BtNgvcpdVBxV7OQ9hIm4XJGOru0k7GonkIJKBcjQoNY='
- key_path: 'build/server.key'
- certificate_path: 'build/server.crt'
- outgoing_tls_ca_path: 'build/server.crt'
-database:
- host: 'db'
diff --git a/tests/integration/healthy_instance/docker-compose.yml b/tests/integration/healthy_instance/docker-compose.yml
deleted file mode 100644
index 889a677..0000000
--- a/tests/integration/healthy_instance/docker-compose.yml
+++ /dev/null
@@ -1,47 +0,0 @@
-services:
-
- tester:
- image: ghcr.io/madsrc/bru:latest
- depends_on:
- api:
- condition: service_healthy
- command: run --env integration --insecure --env-var root_token="${ROOT_TOKEN}" ./tests -r
- volumes:
- - ./tests:/app/tests:ro
- - ./bruno.json:/app/bruno.json:ro
- - ./environments:/app/environments:ro
- - ./collection.bru:/app/collection.bru:ro
-
- api:
- image: sophrosyne:${VERSION}
- ports:
- - "8000:8000"
- environment:
- - SOPH_DEVELOPMENT__STATIC_ROOT_TOKEN=${ROOT_TOKEN}
- - SOPH__CONFIG_YAML_FILE=/app/integration.yaml
- depends_on:
- db:
- condition: service_healthy
- volumes:
- - ./config.yaml:/app/integration.yaml:ro
- - ../../../build:/app/build:ro
- healthcheck:
- test: ["CMD", "python", "-m", "sophrosyne.main", "healthcheck"]
- interval: 5s
- timeout: 5s
- retries: 5
-
- db:
- image: postgres
- restart: always
- # set shared memory limit when using docker-compose
- shm_size: 128mb
- ports:
- - "5432:5432"
- environment:
- POSTGRES_PASSWORD: postgres
- healthcheck:
- test: ["CMD-SHELL", "pg_isready -U postgres"]
- interval: 5s
- timeout: 5s
- retries: 5
diff --git a/tests/integration/healthy_instance/environments/integration.bru b/tests/integration/healthy_instance/environments/integration.bru
deleted file mode 100644
index 84adbd8..0000000
--- a/tests/integration/healthy_instance/environments/integration.bru
+++ /dev/null
@@ -1,8 +0,0 @@
-vars {
- scheme: https://
- host: api
- port: 8000
-}
-vars:secret [
- bearer_token
-]
diff --git a/tests/integration/healthy_instance/tests/inactiveUser01/create-user.bru b/tests/integration/healthy_instance/tests/inactiveUser01/create-user.bru
deleted file mode 100644
index af11ee6..0000000
--- a/tests/integration/healthy_instance/tests/inactiveUser01/create-user.bru
+++ /dev/null
@@ -1,43 +0,0 @@
-meta {
- name: create-user
- type: http
- seq: 1
-}
-
-post {
- url: {{scheme}}{{host}}:{{port}}/v1/users/create-user
- body: json
- auth: none
-}
-
-headers {
- Content-Type: application/json
-}
-
-body:json {
- {
- "name":"inactiveUser01",
- "contact":"test@email.com",
- "is_admin": true
- }
-}
-
-assert {
- res.status: eq 200
- res.body.name: eq inactiveUser01
- res.body.created_at: isDefined
- res.body.contact: eq test@email.com
- res.body.is_active: eq true
- res.body.default_profile: eq default
- res.body.token: isDefined
-}
-
-script:post-response {
- bru.setEnvVar("user_token", res.body.token);
-}
-
-tests {
- test("should use bearer token", function() {
- expect(req.getHeader("Authorization")).to.equal("Bearer "+bru.getEnvVar("root_token"));
- })
-}
diff --git a/tests/integration/healthy_instance/tests/inactiveUser01/delete-user.bru b/tests/integration/healthy_instance/tests/inactiveUser01/delete-user.bru
deleted file mode 100644
index 968074b..0000000
--- a/tests/integration/healthy_instance/tests/inactiveUser01/delete-user.bru
+++ /dev/null
@@ -1,26 +0,0 @@
-meta {
- name: delete-user
- type: http
- seq: 4
-}
-
-delete {
- url: {{scheme}}{{host}}:{{port}}/v1/users/delete-user
- body: json
- auth: none
-}
-
-headers {
- Content-Type: application/json
-}
-
-body:json {
- {
- "name":"inactiveUser01"
- }
-}
-
-assert {
- res.status: eq 200
- res.body.ok: eq true
-}
diff --git a/tests/integration/healthy_instance/tests/inactiveUser01/list-users.bru b/tests/integration/healthy_instance/tests/inactiveUser01/list-users.bru
deleted file mode 100644
index 7ba8c0d..0000000
--- a/tests/integration/healthy_instance/tests/inactiveUser01/list-users.bru
+++ /dev/null
@@ -1,30 +0,0 @@
-meta {
- name: list-users
- type: http
- seq: 3
-}
-
-get {
- url: {{scheme}}{{host}}:{{port}}/v1/users/list-users
- body: json
- auth: bearer
-}
-
-headers {
- Content-Type: application/json
-}
-
-auth:bearer {
- token: {{user_token}}
-}
-
-assert {
- res.status: eq 403
- res.body.detail: eq Not authenticated
-}
-
-tests {
- test("should have used token from previous call", function() {
- expect(req.getHeader("Authorization")).to.equal("Bearer "+bru.getEnvVar("user_token"));
- })
-}
diff --git a/tests/integration/healthy_instance/tests/inactiveUser01/update-user.bru b/tests/integration/healthy_instance/tests/inactiveUser01/update-user.bru
deleted file mode 100644
index de396af..0000000
--- a/tests/integration/healthy_instance/tests/inactiveUser01/update-user.bru
+++ /dev/null
@@ -1,41 +0,0 @@
-meta {
- name: update-user
- type: http
- seq: 2
-}
-
-patch {
- url: {{scheme}}{{host}}:{{port}}/v1/users/update-user
- body: json
- auth: bearer
-}
-
-headers {
- Content-Type: application/json
-}
-
-auth:bearer {
- token: {{user_token}}
-}
-
-body:json {
- {
- "name":"inactiveUser01",
- "is_active": false
- }
-}
-
-assert {
- res.status: eq 200
- res.body.name: eq inactiveUser01
- res.body.created_at: isDefined
- res.body.contact: eq test@email.com
- res.body.is_active: eq false
- res.body.default_profile: eq default
-}
-
-tests {
- test("should have used token from previous call", function() {
- expect(req.getHeader("Authorization")).to.equal("Bearer "+bru.getEnvVar("user_token"));
- })
-}
diff --git a/tests/integration/healthy_instance/tests/scanProcedure01/create-check.bru b/tests/integration/healthy_instance/tests/scanProcedure01/create-check.bru
deleted file mode 100644
index 4d3a824..0000000
--- a/tests/integration/healthy_instance/tests/scanProcedure01/create-check.bru
+++ /dev/null
@@ -1,39 +0,0 @@
-meta {
- name: create-check
- type: http
- seq: 1
-}
-
-post {
- url: {{scheme}}{{host}}:{{port}}/v1/checks/create-check
- body: json
- auth: none
-}
-
-headers {
- Content-Type: application/json
-}
-
-body:json {
- {
- "name":"local:dummy:scanProcedure01",
- "upstream_services": ["127.0.0.1:50051"],
- "supported_types": ["text"],
- "config": {
- "result": true
- }
- }
-}
-
-assert {
- res.status: eq 200
- res.body.name: eq local:dummy:scanProcedure01
- res.body.upstream_services: length 1
- res.body.upstream_services[0]: eq 127.0.0.1:50051
- res.body.config: isDefined
- res.body.config.result: eq 1
- res.body.profiles: isDefined
- res.body.profiles: length 0
- res.body.supported_types: length 1
- res.body.supported_types[0]: eq text
-}
diff --git a/tests/integration/healthy_instance/tests/scanProcedure01/create-profile.bru b/tests/integration/healthy_instance/tests/scanProcedure01/create-profile.bru
deleted file mode 100644
index 49b3d01..0000000
--- a/tests/integration/healthy_instance/tests/scanProcedure01/create-profile.bru
+++ /dev/null
@@ -1,30 +0,0 @@
-meta {
- name: create-profile
- type: http
- seq: 2
-}
-
-post {
- url: {{scheme}}{{host}}:{{port}}/v1/profiles/create-profile
- body: json
- auth: none
-}
-
-headers {
- Content-Type: application/json
-}
-
-body:json {
- {
- "name":"scanProcedure01",
- "checks":["local:dummy:scanProcedure01"]
- }
-}
-
-assert {
- res.status: eq 200
- res.body.name: eq scanProcedure01
- res.body.checks: length 1
- res.body.checks[0]: eq local:dummy:scanProcedure01
- res.body.created_at: isDefined
-}
diff --git a/tests/integration/healthy_instance/tests/scanProcedure01/delete-check.bru b/tests/integration/healthy_instance/tests/scanProcedure01/delete-check.bru
deleted file mode 100644
index ca3b03f..0000000
--- a/tests/integration/healthy_instance/tests/scanProcedure01/delete-check.bru
+++ /dev/null
@@ -1,26 +0,0 @@
-meta {
- name: delete-check
- type: http
- seq: 7
-}
-
-delete {
- url: {{scheme}}{{host}}:{{port}}/v1/checks/delete-check
- body: json
- auth: none
-}
-
-headers {
- Content-Type: application/json
-}
-
-body:json {
- {
- "name":"local:dummy:scanProcedure01"
- }
-}
-
-assert {
- res.status: eq 200
- res.body.ok: eq true
-}
diff --git a/tests/integration/healthy_instance/tests/scanProcedure01/delete-profile.bru b/tests/integration/healthy_instance/tests/scanProcedure01/delete-profile.bru
deleted file mode 100644
index b6ee040..0000000
--- a/tests/integration/healthy_instance/tests/scanProcedure01/delete-profile.bru
+++ /dev/null
@@ -1,26 +0,0 @@
-meta {
- name: delete-profile
- type: http
- seq: 8
-}
-
-delete {
- url: {{scheme}}{{host}}:{{port}}/v1/profiles/delete-profile
- body: json
- auth: none
-}
-
-headers {
- Content-Type: application/json
-}
-
-body:json {
- {
- "name":"scanProcedure01"
- }
-}
-
-assert {
- res.status: eq 200
- res.body.ok: eq true
-}
diff --git a/tests/integration/healthy_instance/tests/scanProcedure01/list-check local:dummy:scanProcedure01.bru b/tests/integration/healthy_instance/tests/scanProcedure01/list-check local:dummy:scanProcedure01.bru
deleted file mode 100644
index 314d10d..0000000
--- a/tests/integration/healthy_instance/tests/scanProcedure01/list-check local:dummy:scanProcedure01.bru
+++ /dev/null
@@ -1,35 +0,0 @@
-meta {
- name: list-check local:dummy:scanProcedure01
- type: http
- seq: 3
-}
-
-post {
- url: {{scheme}}{{host}}:{{port}}/v1/checks/list-check
- body: json
- auth: none
-}
-
-headers {
- Content-Type: application/json
-}
-
-body:json {
- {
- "name":"local:dummy:scanProcedure01"
- }
-}
-
-assert {
- res.status: eq 200
- res.body.name: eq local:dummy:scanProcedure01
- res.body.upstream_services: length 1
- res.body.upstream_services[0]: eq 127.0.0.1:50051
- res.body.config: isDefined
- res.body.config.result: eq 1
- res.body.profiles: isDefined
- res.body.profiles: length 1
- res.body.supported_types: length 1
- res.body.supported_types[0]: eq text
- res.body.profiles[0]: eq scanProcedure01
-}
diff --git a/tests/integration/healthy_instance/tests/scanProcedure01/reset default profile for root.bru b/tests/integration/healthy_instance/tests/scanProcedure01/reset default profile for root.bru
deleted file mode 100644
index 7670203..0000000
--- a/tests/integration/healthy_instance/tests/scanProcedure01/reset default profile for root.bru
+++ /dev/null
@@ -1,35 +0,0 @@
-meta {
- name: reset default profile for root
- type: http
- seq: 6
-}
-
-patch {
- url: {{scheme}}{{host}}:{{port}}/v1/users/update-user
- body: json
- auth: none
-}
-
-headers {
- Content-Type: application/json
-}
-
-body:json {
- {
- "name":"root",
- "default_profile":"default"
- }
-}
-
-assert {
- res.status: eq 200
- res.body.name: eq root
- res.body.created_at: isDefined
- res.body.contact: eq replaceme@withareal.email
- res.body.is_active: eq true
- res.body.default_profile: eq default
-}
-
-script:pre-request {
- //console.log("user_token from previous request: "+bru.getEnvVar("user_token"))
-}
diff --git a/tests/integration/healthy_instance/tests/scanProcedure01/scan.bru b/tests/integration/healthy_instance/tests/scanProcedure01/scan.bru
deleted file mode 100644
index f56a079..0000000
--- a/tests/integration/healthy_instance/tests/scanProcedure01/scan.bru
+++ /dev/null
@@ -1,27 +0,0 @@
-meta {
- name: scan
- type: http
- seq: 5
-}
-
-post {
- url: {{scheme}}{{host}}:{{port}}/v1/safety/scan
- body: json
- auth: none
-}
-
-headers {
- Content-Type: application/json
-}
-
-body:json {
- {
- "text": "this is something"
- }
-}
-
-assert {
- res.status: eq 200
- res.body.verdict: eq true
- res.body.checks: isDefined 1
-}
diff --git a/tests/integration/healthy_instance/tests/scanProcedure01/set default profile for root.bru b/tests/integration/healthy_instance/tests/scanProcedure01/set default profile for root.bru
deleted file mode 100644
index f2572cc..0000000
--- a/tests/integration/healthy_instance/tests/scanProcedure01/set default profile for root.bru
+++ /dev/null
@@ -1,35 +0,0 @@
-meta {
- name: set default profile for root
- type: http
- seq: 4
-}
-
-patch {
- url: {{scheme}}{{host}}:{{port}}/v1/users/update-user
- body: json
- auth: none
-}
-
-headers {
- Content-Type: application/json
-}
-
-body:json {
- {
- "name":"root",
- "default_profile":"scanProcedure01"
- }
-}
-
-assert {
- res.status: eq 200
- res.body.name: eq root
- res.body.created_at: isDefined
- res.body.contact: eq replaceme@withareal.email
- res.body.is_active: eq true
- res.body.default_profile: eq scanProcedure01
-}
-
-script:pre-request {
- //console.log("user_token from previous request: "+bru.getEnvVar("user_token"))
-}
diff --git a/tests/integration/healthy_instance/tests/users/create-user.bru b/tests/integration/healthy_instance/tests/users/create-user.bru
deleted file mode 100644
index ed7cc9f..0000000
--- a/tests/integration/healthy_instance/tests/users/create-user.bru
+++ /dev/null
@@ -1,32 +0,0 @@
-meta {
- name: create-user
- type: http
- seq: 3
-}
-
-post {
- url: {{scheme}}{{host}}:{{port}}/v1/users/create-user
- body: json
- auth: none
-}
-
-headers {
- Content-Type: application/json
-}
-
-body:json {
- {
- "name":"testuserone",
- "contact":"test@email.com"
- }
-}
-
-assert {
- res.status: eq 200
- res.body.name: eq testuserone
- res.body.created_at: isDefined
- res.body.contact: eq test@email.com
- res.body.is_active: eq true
- res.body.default_profile: eq default
- res.body.token: isDefined
-}
diff --git a/tests/integration/healthy_instance/tests/users/delete-user.bru b/tests/integration/healthy_instance/tests/users/delete-user.bru
deleted file mode 100644
index 912710b..0000000
--- a/tests/integration/healthy_instance/tests/users/delete-user.bru
+++ /dev/null
@@ -1,26 +0,0 @@
-meta {
- name: delete-user
- type: http
- seq: 8
-}
-
-delete {
- url: {{scheme}}{{host}}:{{port}}/v1/users/delete-user
- body: json
- auth: none
-}
-
-headers {
- Content-Type: application/json
-}
-
-body:json {
- {
- "name":"testuserone"
- }
-}
-
-assert {
- res.status: eq 200
- res.body.ok: eq true
-}
diff --git a/tests/integration/healthy_instance/tests/users/list-user as user.bru b/tests/integration/healthy_instance/tests/users/list-user as user.bru
deleted file mode 100644
index add82ce..0000000
--- a/tests/integration/healthy_instance/tests/users/list-user as user.bru
+++ /dev/null
@@ -1,40 +0,0 @@
-meta {
- name: list-user as user
- type: http
- seq: 5
-}
-
-post {
- url: {{scheme}}{{host}}:{{port}}/v1/users/list-user
- body: json
- auth: bearer
-}
-
-headers {
- Content-Type: application/json
-}
-
-auth:bearer {
- token: {{user_token}}
-}
-
-body:json {
- {
- "name":"testuserone"
- }
-}
-
-assert {
- res.status: eq 200
- res.body.name: eq testuserone
- res.body.created_at: isDefined
- res.body.is_active: eq true
- res.body.default_profile: eq default
- res.body.id: isUndefined
-}
-
-tests {
- test("should have used token from previous call", function() {
- expect(req.getHeader("Authorization")).to.equal("Bearer "+bru.getEnvVar("user_token"));
- })
-}
diff --git a/tests/integration/healthy_instance/tests/users/list-user.bru b/tests/integration/healthy_instance/tests/users/list-user.bru
deleted file mode 100644
index 2710510..0000000
--- a/tests/integration/healthy_instance/tests/users/list-user.bru
+++ /dev/null
@@ -1,30 +0,0 @@
-meta {
- name: list-user
- type: http
- seq: 2
-}
-
-post {
- url: {{scheme}}{{host}}:{{port}}/v1/users/list-user
- body: json
- auth: none
-}
-
-headers {
- Content-Type: application/json
-}
-
-body:json {
- {
- "name":"root"
- }
-}
-
-assert {
- res.status: eq 200
- res.body.name: eq root
- res.body.created_at: isDefined
- res.body.is_active: eq true
- res.body.default_profile: eq default
- res.body.id: isUndefined
-}
diff --git a/tests/integration/healthy_instance/tests/users/list-users.bru b/tests/integration/healthy_instance/tests/users/list-users.bru
deleted file mode 100644
index 6c00ae3..0000000
--- a/tests/integration/healthy_instance/tests/users/list-users.bru
+++ /dev/null
@@ -1,24 +0,0 @@
-meta {
- name: list-users
- type: http
- seq: 1
-}
-
-get {
- url: {{scheme}}{{host}}:{{port}}/v1/users/list-users
- body: json
- auth: none
-}
-
-headers {
- Content-Type: application/json
-}
-
-assert {
- res.status: eq 200
- res.body[0].name: eq root
- res.body[0].created_at: isDefined
- res.body[0].is_active: eq true
- res.body[0].default_profile: eq default
- res.body: length 1
-}
diff --git a/tests/integration/healthy_instance/tests/users/rotate-token as root.bru b/tests/integration/healthy_instance/tests/users/rotate-token as root.bru
deleted file mode 100644
index abd005f..0000000
--- a/tests/integration/healthy_instance/tests/users/rotate-token as root.bru
+++ /dev/null
@@ -1,26 +0,0 @@
-meta {
- name: rotate-token as root
- type: http
- seq: 6
-}
-
-post {
- url: {{scheme}}{{host}}:{{port}}/v1/users/rotate-token
- body: json
- auth: none
-}
-
-headers {
- Content-Type: application/json
-}
-
-body:json {
- {
- "name":"testuserone"
- }
-}
-
-assert {
- res.status: eq 200
- res.body.token: isDefined
-}
diff --git a/tests/integration/healthy_instance/tests/users/rotate-token own token.bru b/tests/integration/healthy_instance/tests/users/rotate-token own token.bru
deleted file mode 100644
index f71921b..0000000
--- a/tests/integration/healthy_instance/tests/users/rotate-token own token.bru
+++ /dev/null
@@ -1,31 +0,0 @@
-meta {
- name: rotate-token own token
- type: http
- seq: 4
-}
-
-post {
- url: {{scheme}}{{host}}:{{port}}/v1/users/rotate-token
- body: json
- auth: none
-}
-
-headers {
- Content-Type: application/json
-}
-
-body:json {
- {
- "name":"testuserone"
- }
-}
-
-assert {
- res.status: eq 200
- res.body.token: isDefined
-}
-
-script:post-response {
- bru.setEnvVar("user_token", res.body.token);
- //console.log("user_token set to :"+bru.getEnvVar("user_token"))
-}
diff --git a/tests/integration/healthy_instance/tests/users/update-user.bru b/tests/integration/healthy_instance/tests/users/update-user.bru
deleted file mode 100644
index 827815a..0000000
--- a/tests/integration/healthy_instance/tests/users/update-user.bru
+++ /dev/null
@@ -1,35 +0,0 @@
-meta {
- name: update-user
- type: http
- seq: 7
-}
-
-patch {
- url: {{scheme}}{{host}}:{{port}}/v1/users/update-user
- body: json
- auth: none
-}
-
-headers {
- Content-Type: application/json
-}
-
-body:json {
- {
- "name":"testuserone",
- "is_active": false
- }
-}
-
-assert {
- res.status: eq 200
- res.body.name: eq testuserone
- res.body.created_at: isDefined
- res.body.contact: eq test@email.com
- res.body.is_active: eq false
- res.body.default_profile: eq default
-}
-
-script:pre-request {
- //console.log("user_token from previous request: "+bru.getEnvVar("user_token"))
-}
diff --git a/tests/integration/startup_test.go b/tests/integration/startup_test.go
new file mode 100644
index 0000000..d4c31a9
--- /dev/null
+++ b/tests/integration/startup_test.go
@@ -0,0 +1,280 @@
+// Sophrosyne
+// Copyright (C) 2024 Mads R. Havmand
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+package integration
+
+import (
+ "bufio"
+ "bytes"
+ "context"
+ "crypto/rand"
+ "crypto/tls"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "net"
+ "net/http"
+ "net/url"
+ "testing"
+ "time"
+
+ "github.com/stretchr/testify/require"
+ "github.com/testcontainers/testcontainers-go/modules/postgres"
+ "github.com/testcontainers/testcontainers-go/network"
+ "github.com/testcontainers/testcontainers-go/wait"
+
+ "github.com/testcontainers/testcontainers-go"
+)
+
+type testEnv struct {
+ t *testing.T
+ database *postgres.PostgresContainer
+ api testcontainers.Container
+ network *testcontainers.DockerNetwork
+ rootToken string
+ httpClient *http.Client
+ endpoint string
+ healthEndpoint *url.URL
+ rpcEndpoint *url.URL
+}
+
+func (te testEnv) Close(ctx context.Context) {
+ var err error
+	if te.database != nil {
+		err = errors.Join(err, te.database.Terminate(ctx))
+	}
+	if te.api != nil {
+		err = errors.Join(err, te.api.Terminate(ctx))
+	}
+	if te.network != nil {
+		err = errors.Join(err, te.network.Remove(ctx))
+	}
+
+ require.NoError(te.t, err, "could not clean up test environment")
+}
+
+func setupEnv(ctx context.Context, t *testing.T) testEnv {
+ t.Helper()
+ te := testEnv{t: t}
+
+ nw, err := network.New(ctx,
+ network.WithCheckDuplicate(),
+ network.WithAttachable(),
+ network.WithDriver("bridge"),
+ )
+ require.NoError(t, err)
+ te.network = nw
+
+ dbName := "users"
+ dbUser := "user"
+ dbPassword := "password"
+
+ postgresContainer, err := postgres.RunContainer(ctx,
+ testcontainers.WithImage("docker.io/postgres:16-alpine"),
+ postgres.WithDatabase(dbName),
+ postgres.WithUsername(dbUser),
+ postgres.WithPassword(dbPassword),
+ testcontainers.WithWaitStrategy(
+ wait.ForLog("database system is ready to accept connections").
+ WithOccurrence(2).
+ WithStartupTimeout(5*time.Second),
+ ),
+ network.WithNetwork(nil, nw),
+ )
+ if err != nil {
+ t.Fatalf("failed to start container: %s", err)
+ }
+
+ te.database = postgresContainer
+
+ _, err = postgresContainer.Endpoint(ctx, "")
+ require.NoError(t, err)
+ pgIP, err := postgresContainer.ContainerIP(ctx)
+ require.NoError(t, err)
+
+ siteKey := make([]byte, 64)
+ salt := make([]byte, 32)
+ _, err = rand.Read(siteKey)
+ require.NoError(t, err)
+ _, err = rand.Read(salt)
+ require.NoError(t, err)
+
+ siteKeyContent := bytes.NewReader(siteKey)
+	saltContent := bytes.NewReader(salt)
+ r := bytes.NewReader([]byte(fmt.Sprintf(`database:
+ host: %s
+ port: %s
+ user: user
+ password: password
+ name: users`, pgIP, "5432")))
+
+ req := testcontainers.ContainerRequest{
+ Image: "sophrosyne:0.0.0",
+ ExposedPorts: []string{"8080/tcp"},
+ WaitingFor: wait.ForLog("Starting server"),
+ Cmd: []string{"--secretfiles", "/security.salt,/security.siteKey", "run"},
+ Files: []testcontainers.ContainerFile{
+ {
+ Reader: r,
+ ContainerFilePath: "/config.yaml",
+ FileMode: 0644,
+ },
+ {
+ Reader: saltContent,
+ ContainerFilePath: "/security.salt",
+ FileMode: 0644,
+ },
+ {
+ Reader: siteKeyContent,
+ ContainerFilePath: "/security.siteKey",
+ FileMode: 0644,
+ },
+ },
+ Networks: []string{nw.Name},
+ LogConsumerCfg: &testcontainers.LogConsumerConfig{
+ Opts: []testcontainers.LogProductionOption{testcontainers.WithLogProductionTimeout(10 * time.Second)},
+ Consumers: []testcontainers.LogConsumer{ensureJSON{t: te.t}},
+ },
+ }
+ sophC, err := testcontainers.GenericContainer(ctx, testcontainers.GenericContainerRequest{
+ ContainerRequest: req,
+ Started: true,
+ })
+ if err != nil {
+ t.Fatalf("Could not start sophrosyne: %s", err)
+ }
+ te.api = sophC
+
+ te.rootToken = extractToken(t, ctx, te.api)
+ require.NotEmpty(t, te.rootToken, "unable to extract root token")
+
+ te.httpClient = newHTTPClient(t)
+
+ te.endpoint, err = te.api.Endpoint(ctx, "")
+ require.NoError(t, err)
+
+ te.healthEndpoint, err = url.Parse(fmt.Sprintf("https://%s/healthz", te.endpoint))
+ require.NoError(t, err)
+
+ te.rpcEndpoint, err = url.Parse(fmt.Sprintf("https://%s/v1/rpc", te.endpoint))
+ require.NoError(t, err)
+
+ return te
+}
+
+func extractToken(t *testing.T, ctx context.Context, c testcontainers.Container) string {
+ t.Helper()
+
+ rc, err := c.Logs(ctx)
+ require.NoError(t, err)
+
+ var count int
+ buf := bufio.NewReader(rc)
+ for {
+ count = count + 1
+ require.Less(t, count, 100, "unable to extract token within first 100 log lines")
+ line, err := buf.ReadString('\n')
+ require.NoError(t, err)
+ var d map[string]interface{}
+ err = json.Unmarshal([]byte(line), &d)
+ require.NoError(t, err)
+ if d["token"] != nil {
+ return d["token"].(string)
+ }
+ }
+}
+
+type ensureJSON struct {
+ t *testing.T
+}
+
+func (e ensureJSON) Accept(l testcontainers.Log) {
+ e.t.Helper()
+ var cnt map[string]interface{}
+ err := json.Unmarshal(l.Content, &cnt)
+ require.NoError(e.t, err, "could not unmarshal log: '%s'", string(l.Content))
+}
+
+func newHTTPClient(t *testing.T) *http.Client {
+ t.Helper()
+ return &http.Client{
+ Transport: &http.Transport{
+ TLSClientConfig: &tls.Config{
+ InsecureSkipVerify: true,
+ },
+ },
+ }
+}
+
+func TestStartup(t *testing.T) {
+
+ ctx := context.Background()
+
+ te := setupEnv(ctx, t)
+ t.Cleanup(func() {
+ te.Close(ctx)
+ })
+
+ t.Run("API served via TLS", func(t *testing.T) {
+ conf := &tls.Config{
+ InsecureSkipVerify: true,
+ }
+ tlsConn, err := tls.Dial("tcp", te.endpoint, conf)
+ require.NoError(t, err)
+ _, err = fmt.Fprintf(tlsConn, "GET / HTTP/1.0\r\n\r\n")
+ require.NoError(t, err)
+ status, err := bufio.NewReader(tlsConn).ReadString('\n')
+ require.NoError(t, err)
+ require.Equal(t, "HTTP/1.0 404 Not Found\r\n", status)
+ require.NoError(t, tlsConn.Close())
+ })
+
+ // The Go default HTTP server responds with `HTTP/1.0 400 Bad Request
+ //
+ //Client sent an HTTP request to an HTTPS server.` when receiving an HTTP request on an HTTPS listener.
+ t.Run("API not served via plaintext", func(t *testing.T) {
+ rawConn, err := net.Dial("tcp", te.endpoint)
+ require.NoError(t, err)
+ _, err = fmt.Fprintf(rawConn, "GET / HTTP/1.0\r\n\r\n")
+ require.NoError(t, err)
+ status, err := bufio.NewReader(rawConn).ReadString('\n')
+ require.NoError(t, err)
+ require.Equal(t, "HTTP/1.0 400 Bad Request\r\n", status)
+ require.NoError(t, rawConn.Close())
+ })
+
+	// When a client terminates the TLS handshake due to a bad certificate, in this case because it doesn't trust the
+	// certificate, the server logs a remote error. This test ensures that when that happens, it is logged. Because
+	// of the LogConsumer added in setupEnv, if this log cannot be unmarshalled as JSON, it fails. Thus this test
+	// ensures that it is logged as JSON.
+	t.Run("client remote error logged as JSON", func(t *testing.T) {
+ tlsConn, err := tls.Dial("tcp", te.endpoint, &tls.Config{})
+ require.Error(t, err)
+ require.Nil(t, tlsConn)
+ })
+
+ t.Run("Health endpoint is available", func(t *testing.T) {
+ res, err := te.httpClient.Get(te.healthEndpoint.String())
+ require.NoError(t, err)
+ require.Equal(t, http.StatusOK, res.StatusCode)
+ })
+
+ t.Run("RPC endpoint is available", func(t *testing.T) {
+ res, err := te.httpClient.Get(te.rpcEndpoint.String())
+ require.NoError(t, err)
+ require.Equal(t, http.StatusUnauthorized, res.StatusCode)
+ })
+}
diff --git a/users.go b/users.go
new file mode 100644
index 0000000..f9313e7
--- /dev/null
+++ b/users.go
@@ -0,0 +1,314 @@
+package sophrosyne
+
+import (
+ "context"
+ "fmt"
+ "time"
+)
+
+type User struct {
+ ID string
+ Name string
+ Email string
+ Token []byte
+ IsAdmin bool
+ DefaultProfile Profile
+ CreatedAt time.Time
+ UpdatedAt time.Time
+ DeletedAt *time.Time
+}
+
+func (u User) EntityType() string {
+ return "User"
+}
+
+func (u User) EntityID() string {
+ return u.ID
+}
+
+type UserService interface {
+ GetUser(ctx context.Context, id string) (User, error)
+ GetUserByEmail(ctx context.Context, email string) (User, error)
+ GetUserByName(ctx context.Context, name string) (User, error)
+ GetUserByToken(ctx context.Context, token []byte) (User, error)
+ // Returns a list of users less than, or equal to, the configured page size.
+ // Configuration of the page size is an implementation detail, but should be
+ // derived from [Config.Services.Users.PageSize].
+ //
+ // The cursor is used to paginate the results. [DatabaseCursor.Position] is
+ // treated as the last read ID. If the cursor is nil, the first page of
+ // results should be returned.
+ //
+ // When the users have been read, but before returning the list of users,
+ // the cursor must be advanced to the ID of the last user returned. If no
+ // users are returned, or if it is known that a subsequent call would return
+ // zero users, the cursors Reset method must be called.
+ //
+ // The returned list of users should be ordered by ID in ascending order.
+ GetUsers(ctx context.Context, cursor *DatabaseCursor) ([]User, error)
+ CreateUser(ctx context.Context, user CreateUserRequest) (User, error)
+ UpdateUser(ctx context.Context, user UpdateUserRequest) (User, error)
+ DeleteUser(ctx context.Context, name string) error
+ RotateToken(ctx context.Context, name string) ([]byte, error)
+}
+
+type GetUserRequest struct {
+ ID string `json:"id"`
+ Email string `json:"email"`
+ Name string `json:"name"`
+}
+
+func (p GetUserRequest) Validate(interface{}) error {
+ if p.ID == "" && p.Name == "" && p.Email == "" {
+ return fmt.Errorf("one of ID, Name or Email must be provided")
+ }
+ if p.ID != "" && (p.Name != "" || p.Email != "") {
+ return fmt.Errorf("only one of ID, Name or Email must be provided")
+ }
+ return nil
+}
+
+type GetUserResponse struct {
+ Name string `json:"name"`
+ Email string `json:"email"`
+ IsAdmin bool `json:"is_admin"`
+ CreatedAt string `json:"created_at"`
+ UpdatedAt string `json:"updated_at"`
+ DeletedAt string `json:"deleted_at,omitempty"`
+}
+
+func (r *GetUserResponse) FromUser(u User) *GetUserResponse {
+ r.Name = u.Name
+ r.Email = u.Email
+ r.IsAdmin = u.IsAdmin
+ r.CreatedAt = u.CreatedAt.Format(TimeFormatInResponse)
+ r.UpdatedAt = u.UpdatedAt.Format(TimeFormatInResponse)
+ if u.DeletedAt != nil {
+ r.DeletedAt = u.DeletedAt.Format(TimeFormatInResponse)
+ }
+
+ return r
+}
+
+type GetUsersRequest struct {
+ Cursor string `json:"cursor"`
+}
+
+type GetUsersResponse struct {
+ Users []GetUserResponse `json:"users"`
+ Cursor string `json:"cursor"`
+ Total int `json:"total"`
+}
+
+type CreateUserRequest struct {
+ Name string `json:"name" validate:"required"`
+ Email string `json:"email" validate:"required"`
+ IsAdmin bool `json:"is_admin"`
+}
+
+type CreateUserResponse struct {
+ Name string `json:"name"`
+ Email string `json:"email"`
+ Token []byte `json:"token"`
+ IsAdmin bool `json:"is_admin"`
+ CreatedAt string `json:"created_at"`
+ UpdatedAt string `json:"updated_at"`
+ DeletedAt string `json:"deleted_at,omitempty"`
+}
+
+func (r *CreateUserResponse) FromUser(u User) *CreateUserResponse {
+ r.Name = u.Name
+ r.Email = u.Email
+ r.Token = u.Token
+ r.IsAdmin = u.IsAdmin
+ r.CreatedAt = u.CreatedAt.Format(TimeFormatInResponse)
+ r.UpdatedAt = u.UpdatedAt.Format(TimeFormatInResponse)
+ if u.DeletedAt != nil {
+ r.DeletedAt = u.DeletedAt.Format(TimeFormatInResponse)
+ }
+
+ return r
+}
+
+type UpdateUserRequest struct {
+ Name string `json:"name" validate:"required"`
+ Email string `json:"email"`
+ IsAdmin bool `json:"is_admin"`
+}
+
+type UpdateUserResponse struct {
+ Name string `json:"name"`
+ Email string `json:"email"`
+ IsAdmin bool `json:"is_admin"`
+ CreatedAt string `json:"created_at"`
+ UpdatedAt string `json:"updated_at"`
+ DeletedAt string `json:"deleted_at,omitempty"`
+}
+
+func (r *UpdateUserResponse) FromUser(u User) *UpdateUserResponse {
+ r.Name = u.Name
+ r.Email = u.Email
+ r.IsAdmin = u.IsAdmin
+ r.CreatedAt = u.CreatedAt.Format(TimeFormatInResponse)
+ r.UpdatedAt = u.UpdatedAt.Format(TimeFormatInResponse)
+ if u.DeletedAt != nil {
+ r.DeletedAt = u.DeletedAt.Format(TimeFormatInResponse)
+ }
+
+ return r
+}
+
+type DeleteUserRequest struct {
+ Name string `json:"name" validate:"required"`
+}
+
+type RotateTokenRequest struct {
+ Name string `json:"name" validate:"required"`
+}
+
+type RotateTokenResponse struct {
+ Token []byte `json:"token"`
+}
+
+func (r *RotateTokenResponse) FromUser(u User) *RotateTokenResponse {
+ r.Token = u.Token
+
+ return r
+}
+
+type UserContextKey struct{}
+
+type UserServiceCache struct {
+ cache *Cache
+ userService UserService
+ tracingService TracingService
+}
+
+func NewUserServiceCache(config *Config, userService UserService, tracingService TracingService) *UserServiceCache {
+ return &UserServiceCache{
+ cache: NewCache(config.Services.Users.CacheTTL),
+ userService: userService,
+ tracingService: tracingService,
+ }
+}
+
+func (c *UserServiceCache) GetUser(ctx context.Context, id string) (User, error) {
+ ctx, span := c.tracingService.StartSpan(ctx, "UserServiceCache.GetUser")
+ v, ok := c.cache.Get(id)
+ if ok {
+ span.End()
+ return v.(User), nil
+ }
+
+ user, err := c.userService.GetUser(ctx, id)
+ if err != nil {
+ span.End()
+ return User{}, err
+ }
+
+ c.cache.Set(id, user)
+ span.End()
+ return user, nil
+}
+
+func (c *UserServiceCache) GetUserByEmail(ctx context.Context, email string) (User, error) {
+ ctx, span := c.tracingService.StartSpan(ctx, "UserServiceCache.GetUserByEmail")
+ user, err := c.userService.GetUserByEmail(ctx, email)
+ if err != nil {
+ span.End()
+ return User{}, err
+ }
+
+ c.cache.Set(user.ID, user)
+ span.End()
+ return user, nil
+}
+
+func (c *UserServiceCache) GetUserByName(ctx context.Context, name string) (User, error) {
+ ctx, span := c.tracingService.StartSpan(ctx, "UserServiceCache.GetUserByName")
+ user, err := c.userService.GetUserByName(ctx, name)
+ if err != nil {
+ span.End()
+ return User{}, err
+ }
+
+ c.cache.Set(user.ID, user)
+ span.End()
+ return user, nil
+}
+
+func (c *UserServiceCache) GetUserByToken(ctx context.Context, token []byte) (User, error) {
+ ctx, span := c.tracingService.StartSpan(ctx, "UserServiceCache.GetUserByToken")
+ user, err := c.userService.GetUserByToken(ctx, token)
+ if err != nil {
+ span.End()
+ return User{}, err
+ }
+
+ c.cache.Set(user.ID, user)
+ span.End()
+ return user, nil
+}
+
+func (c *UserServiceCache) GetUsers(ctx context.Context, cursor *DatabaseCursor) ([]User, error) {
+ ctx, span := c.tracingService.StartSpan(ctx, "UserServiceCache.GetUsers")
+ users, err := c.userService.GetUsers(ctx, cursor)
+ if err != nil {
+ span.End()
+ return nil, err
+ }
+
+ for _, user := range users {
+ c.cache.Set(user.ID, user)
+ }
+
+ span.End()
+ return users, nil
+}
+
+func (c *UserServiceCache) CreateUser(ctx context.Context, req CreateUserRequest) (User, error) {
+ ctx, span := c.tracingService.StartSpan(ctx, "UserServiceCache.CreateUser")
+ user, err := c.userService.CreateUser(ctx, req)
+ if err != nil {
+ span.End()
+ return User{}, err
+ }
+
+ c.cache.Set(user.ID, user)
+ span.End()
+ return user, nil
+}
+
+func (c *UserServiceCache) UpdateUser(ctx context.Context, req UpdateUserRequest) (User, error) {
+ ctx, span := c.tracingService.StartSpan(ctx, "UserServiceCache.UpdateUser")
+ user, err := c.userService.UpdateUser(ctx, req)
+ if err != nil {
+ span.End()
+ return User{}, err
+ }
+
+ c.cache.Set(user.ID, user)
+ span.End()
+ return user, nil
+}
+
+func (c *UserServiceCache) DeleteUser(ctx context.Context, id string) error {
+ ctx, span := c.tracingService.StartSpan(ctx, "UserServiceCache.DeleteUser")
+ err := c.userService.DeleteUser(ctx, id)
+ if err != nil {
+ span.End()
+ return err
+ }
+
+ c.cache.Delete(id)
+ span.End()
+ return nil
+}
+
+func (c *UserServiceCache) RotateToken(ctx context.Context, id string) ([]byte, error) {
+ return c.userService.RotateToken(ctx, id)
+}
+
+func (c *UserServiceCache) Health(ctx context.Context) (bool, []byte) {
+	return true, []byte(`{"ok": true}`)
+}