diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 00000000..04c2d7d8
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,81 @@
+# exclude unnecessary dirs/files
+projects
+web-app/node_modules
+server/pytest.ini
+server/test*
+server/coverage*
+server/celerybeat*
+
+.git
+.travis.yml
+.swagger-codegen-ignore
+README.md
+tox.ini
+git_push.sh
+test-requirements.txt
+setup.py
+
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+env/
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+.hypothesis/
+venv/
+.python-version
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# IPython Notebook
+.ipynb_checkpoints
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 00000000..f0057c6d
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,9 @@
+# demo data
+logs/
+projects/
+
+*.log
+.DS_Store
+.idea
+
+.mergin.env
\ No newline at end of file
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 00000000..ccb5dae5
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,68 @@
+# multi-stage build requires at least docker-ce 17.06
+FROM ubuntu:18.04 AS builder
+MAINTAINER Martin Varga "martin.varga@lutraconsulting.co.uk"
+
+# some packages in the official repo are too old, so use custom ones:
+# node.js from the NodeSource PPA (needed for the npm packages)
+RUN apt-get update -y && \
+    apt-get install -y --no-install-recommends \
+    python \
+    python-pip \
+    gnupg \
+    wget && \
+    wget https://deb.nodesource.com/setup_10.x --no-check-certificate && \
+    bash setup_10.x && \
+    apt-get install -y --no-install-recommends \
+    nodejs && \
+    rm -rf /var/lib/apt/lists/*
+
+# server files need no build step, they are just passed through the builder stage
+COPY ./server /mergin/server
+
+# build frontend app
+COPY ./web-app /mergin/web-app
+WORKDIR /mergin/web-app
+RUN npm install
+RUN npm run build
+
+CMD echo 'Build is finished.'
+
+FROM ubuntu:18.04
+MAINTAINER Martin Varga "martin.varga@lutraconsulting.co.uk"
+
+RUN apt-get update -y && \
+    apt-get install -y --no-install-recommends \
+    musl-dev \
+    python3 \
+    python3-pip \
+    python3-setuptools \
+    iputils-ping \
+    gcc build-essential binutils cmake extra-cmake-modules && \
+    rm -rf /var/lib/apt/lists/*
+
+
+# needed for geodiff
+RUN pip3 install --upgrade pip
+RUN ln -s /usr/lib/x86_64-linux-musl/libc.so /lib/libc.musl-x86_64.so.1
+
+# create the mergin user that the container runs as
+RUN groupadd -r mergin -g 901
+RUN groupadd -r mergin-family -g 999
+RUN useradd -u 901 -r --home-dir /app --create-home -g mergin -G mergin-family -s /sbin/nologin mergin
+
+WORKDIR /app
+COPY --from=builder /mergin/server .
+
+RUN pip3 install pipenv==2018.11.26
+# for locale settings see http://click.pocoo.org/5/python3/
+ENV LC_ALL=C.UTF-8
+ENV LANG=C.UTF-8
+RUN pipenv install --system --deploy
+
+USER mergin
+COPY ./entrypoint.sh .
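+
+# Example (hypothetical tag and values): the image can also be built and run
+# standalone, mirroring the docker-compose setup (env file, data volume, port 5000):
+#   docker build -t mergin:dev . && docker run --env-file mergin.env -v $(pwd)/projects:/data -p 5000:5000 mergin:dev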
+ENTRYPOINT ["./entrypoint.sh"]
\ No newline at end of file
diff --git a/README.md b/README.md
index 7d1b5a78..f64acf47 100644
--- a/README.md
+++ b/README.md
@@ -5,5 +5,58 @@ https://public.cloudmergin.com/
 
 Store and track changes to your geo-data
 
-**This repository does not contain source code of Mergin project - it is used for tracking of feature requests and issues reported by Mergin users.**
+## Running with Docker
+Adjust the configuration, e.g. replace the 'fixme' entries:
+```shell
+$ cp mergin.env.template mergin.env
+```
+Run with Docker Compose:
+```shell
+$ export TAG=2021.6  # specify version
+$ docker-compose up -d
+$ docker exec -it mergin-server flask init-db
+$ docker exec -it mergin-server flask add-user admin topsecret --is-admin --email admin@example.com
+$ sudo chown -R 901:999 ./projects/
+$ sudo chmod g+s ./projects/
+```
+Projects are saved locally in the `./projects` folder.
+
+## Running locally (for dev)
+Install dependencies and run services:
+
+```shell
+$ docker run -d --rm --name mergin_db -p 5002:5432 -e POSTGRES_PASSWORD=postgres postgres:10
+$ docker run -d --rm --name redis -p 6379:6379 redis
+```
+
+### Server
+```shell
+$ pip3 install pipenv
+$ cd server
+$ pipenv install --dev --three
+$ pipenv run flask init-db
+$ pipenv run flask add-user admin topsecret --is-admin --email admin@example.com
+$ pipenv run celery worker -A src.run_celery.celery --loglevel=info &
+$ pipenv run flask run  # run dev server on port 5000
+```
+
+### Web app
+```shell
+$ sudo apt install nodejs
+$ cd web-app
+$ npm install
+$ npm run serve
+```
+then open your browser at:
+```
+http://localhost:8080
+```
+
+## Running tests
+To launch the unit tests, run:
+```shell
+$ docker run -d --rm --name testing_pg -p 5435:5432 -e POSTGRES_PASSWORD=postgres postgres:10
+$ cd server
+$ pipenv run pytest --cov-report html --cov=src test
+```
\ No newline at end of file
diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100644
index 00000000..60ac4bbf
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,35 @@
+version: "3.7"
+
+services:
+  db:
+    image: postgres:10
+    container_name: mergin-db
+    ports:
+      - 5432:5432
+    environment:
+      - POSTGRES_USER=postgres
+      - POSTGRES_PASSWORD=postgres
+
+  server:
+    image: mergin:${TAG}
+    build:
+      context: .
+      dockerfile: Dockerfile
+    container_name: mergin-server
+    volumes:
+      - ./projects:/data  # map data dir to host
+    env_file:
+      - mergin.env
+    environment:
+      - VERSION=${TAG}
+    ports:
+      - 5000:5000
+    depends_on:
+      - db
+    links:
+      - db
+  redis:
+    image: redis
+    container_name: mergin-redis
+    ports:
+      - 6379:6379
diff --git a/entrypoint.sh b/entrypoint.sh
new file mode 100755
index 00000000..e7deb14f
--- /dev/null
+++ b/entrypoint.sh
@@ -0,0 +1,14 @@
+#!/bin/bash
+# make sure all files created by gunicorn (mergin server) have proper permissions
+umask 0027
+
+# We store a base config in config.py and override things as needed
+# using the environment variable GUNICORN_CMD_ARGS.
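+# Example (hypothetical values): override the bind address or worker count at
+# deploy time without editing config.py, e.g.
+#   GUNICORN_CMD_ARGS="--bind 0.0.0.0:5000 --workers 4" ./entrypoint.sh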
+
+/bin/bash -c "celery beat -A src.run_celery.celery --loglevel=info &"
+/bin/bash -c "celery worker -A src.run_celery.celery --loglevel=info &"
+/bin/bash -c "gunicorn --config config.py mergin:application"
+
diff --git a/mergin.env.template b/mergin.env.template
new file mode 100644
index 00000000..ce56f99e
--- /dev/null
+++ b/mergin.env.template
@@ -0,0 +1,35 @@
+SWAGGER_UI=0
+SECRET_KEY=fixme
+SECURITY_PASSWORD_SALT=fixme
+LOCAL_PROJECTS=/data
+MAINTENANCE_FILE=/data/MAINTENANCE
+USER_SELF_REGISTRATION=1
+USE_X_ACCEL=0
+GEODIFF_LOGGER_LEVEL=2
+TEMP_DIR=/data/tmp
+MERGIN_TESTING=0
+MAX_CHUNK_SIZE=10485760 # 10 MB
+TEMP_EXPIRATION=7
+DELETED_PROJECT_EXPIRATION=7
+CLOSED_ACCOUNT_EXPIRATION=1
+DEFAULT_STORAGE_SIZE=104857600 # 100 MB
+# database
+DB_USER=postgres
+DB_PASSWORD=postgres
+DB_HOST=172.17.0.1
+DB_PORT=5432
+DB_APPLICATION_NAME=mergin
+# SMTP settings
+MAIL_SUPPRESS_SEND=1 # set to 0 in production
+MAIL_SERVER=fixme
+MAIL_DEFAULT_SENDER=fixme
+MAIL_USERNAME=fixme
+# for email templates
+MERGIN_BASE_URL=your-server-url
+MERGIN_LOGO_URL=""
+CONTACT_EMAIL=fixme
+# integration with Slack
+SLACK_HOOK_URL=fixme
+# integration with celery and redis
+CELERY_BROKER_URL="redis://172.17.0.1:6379/0"
+CELERY_RESULT_BACKEND="redis://172.17.0.1:6379/0"
diff --git a/server/.coveragerc b/server/.coveragerc
new file mode 100644
index 00000000..fc5e49f0
--- /dev/null
+++ b/server/.coveragerc
@@ -0,0 +1,18 @@
+# .coveragerc to control coverage.py
+[run]
+omit =
+    # omit auto-generated files
+    src/models/base_model_.py
+    src/encoder.py
+    src/util.py
+    src/auth/commands.py
+    # omit test dir
+    src/test/*
+
+[report]
+exclude_lines =
+    pragma: no cover
+    raise NotImplementedError
+
+[html]
+directory = coverage_html_report
\ No newline at end of file
diff --git a/server/.env b/server/.env
new file mode 100644
index 00000000..2fd1681e
--- /dev/null
+++ b/server/.env
@@ -0,0 +1,2 @@
+FLASK_APP=commands.py
+GEODIFF_LOGGER_LEVEL="2"
diff --git a/server/.gitignore b/server/.gitignore
new file mode 100644
index 00000000..829c1dbc
--- /dev/null
+++ b/server/.gitignore
@@ -0,0 +1,80 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+env/
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+.hypothesis/
+venv/
+.python-version
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# IPython Notebook
+.ipynb_checkpoints
+
+.pytest_cache/
+
+# report from pytest, dir defined in .coveragerc
+coverage_html_report
+
+# autogenerated template from web-app
+src/templates/app.html
+
+pygeodiff
+
+# related to local deployment
+src/version.py
+deployment/grafana-database/
+celerybeat-schedule
+celerybeat.pid
diff --git a/server/.test.env b/server/.test.env
new file mode 100644
index 00000000..abee15b5
--- /dev/null
+++ b/server/.test.env
@@ -0,0 +1,15 @@
+DB_USER=postgres
+DB_PASSWORD=postgres
+DB_HOST=localhost
+DB_PORT=5435
+LOCAL_PROJECTS=/tmp
+TESTING=1
+MAIL_SUPPRESS_SEND=1
+WTF_CSRF_ENABLED=0
+CELERY_BROKER_URL='memory://'
+CELERY_RESULT_BACKEND=''
+MAIL_DEFAULT_SENDER='test@test.com'
+MERGIN_STAGING=0
+TEMP_DIR=/tmp/mergin_tmp
+MERGIN_SUBSCRIPTIONS=0
+MERGIN_TESTING=1
diff --git a/server/Pipfile b/server/Pipfile
new file mode 100644
index 00000000..4ab6a0af
--- /dev/null
+++ b/server/Pipfile
@@ -0,0 +1,51 @@
+[[source]]
+url = "https://pypi.org/simple"
+verify_ssl = true
+name = "pypi"
+
+[packages]
+connexion = {extras = ["swagger-ui"], version = "==2.7.0"}
+python-dateutil = "==2.6.0"
+flask-marshmallow = "==0.10.1"
+marshmallow-sqlalchemy = "==0.17.0"
+psycopg2-binary = "==2.8.6"
+itsdangerous = "==1.1.0"
+requests-toolbelt = "==0.8.0"
+Flask-SQLAlchemy = "==2.4.0"
+zipfly = "==6.0.3"
+gunicorn = {extras = ["gevent"], version = "==19.9"}
+python-dotenv = "==0.9.1"
+flask-login = "==0.4.0"
+bcrypt = "==3.1.3"
+wtforms = "==2.1"
+flask-wtf = "==0.14.2"
+flask-mail = "==0.9.1"
+safe = "==0.4"
+flask-migrate = "==2.6.0"
+wtforms-json = "==0.3.3"
+pytz = "==2019.1"
+scikit-build = "==0.10.0"
+pygeodiff = "==0.8.6"
+pathvalidate = "==0.29.0"
+werkzeug = "==0.16"
+celery = "==4.4.7"
+redis = "==3.4.1"
+result = "==0.5"
+jsons = "==1.1.2"
+binaryornot = "==0.4.4"
+python-decouple = "==3.4"
+
+
+[dev-packages]
+pytest = "==6.2.2"
+pytest-cov = "==2.6.1"
+pylint = "==2.3.1"
+responses = "==0.10.7"
+pytest-dotenv = "==0.4.0"
+pytest-selenium = "==2.0.1"
+# requirements for pytest running with Python < 3.8
+importlib-metadata = "==3.4.0"
+typing-extensions = "==3.7.4"
+
+[requires]
+python_version = "3.6"
diff --git a/server/Pipfile.lock b/server/Pipfile.lock
new file mode 100644
index 00000000..38da6acc
--- /dev/null
+++ b/server/Pipfile.lock
@@ -0,0 +1,1205 @@
+{
+    "_meta": {
+        "hash": {
+            "sha256": "e042162203b7474d658b8c8dab4add5c562e3b9327cb73d9b828812573d66d52"
+        },
+        "pipfile-spec": 6,
+        "requires": {
+            "python_version": "3.6"
+        },
+        "sources": [
+            {
+                "name": "pypi",
+                "url": "https://pypi.org/simple",
+                "verify_ssl": true
+            }
+        ]
+    },
+    "default": {
+        "alembic": {
+            "hashes": [
+                "sha256:8a259f0a4c8b350b03579d77ce9e810b19c65bf0af05f84efb69af13ad50801e",
+                "sha256:e27fd67732c97a1c370c33169ef4578cf96436fa0e7dcfaeeef4a917d0737d56"
+            ],
+            "version": "==1.5.8"
+        },
+        "amqp": {
+            "hashes": [
+                "sha256:70cdb10628468ff14e57ec2f751c7aa9e48e7e3651cfd62d431213c0c4e58f21",
+                "sha256:aa7f313fb887c91f15474c1229907a04dac0b8135822d6603437803424c0aa59"
+            ],
+            "version": "==2.6.1"
+        },
+        "attrs": {
+            "hashes": [
+                "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6",
+
"sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700" + ], + "version": "==20.3.0" + }, + "bcrypt": { + "hashes": [ + "sha256:05b35b9842b009b44496fa5433ce462f69966291e50fbd471dbb427f399f748f", + "sha256:06280fe19ffbcf6cf904de25190dd6fcd313e30bc79da305f5642a8295d1616e", + "sha256:1f3054d4da7c4a84b797d9130c36425b6e28134e9e67cd47b393774ea7168a3d", + "sha256:329c547f5525808aecd77ccade9fd443e381f9d91fc8164da1dae9c195e1f1f5", + "sha256:35cfdfa3f64f8bad68d25f261bdc60fbb9e43f164881c496cb590bf8001aad83", + "sha256:4396a33e112907f5978d5c40ca858b79fb1f6afd1caf3f8721c1411593d83bc2", + "sha256:47d47df72e9f0462c8065a82da7b87f4b2577eb8a3fc855bcea165c2293beb84", + "sha256:4eb357ab2cd27f4c6151d33130c667e9245beb9d1e6779ccf7e196d4cc20ffa8", + "sha256:5129442f3f131c1210734699ffa2b7f956786e3e943d38a3158ce174708d0296", + "sha256:5817b2b70a074cdab74c3871a2018d6931f8ef5148844006b8868b2be26f5abe", + "sha256:5929214153af8e3e461777837e0de1a00f2ced48d921e55fb952b4d5702e1be2", + "sha256:5dce51509227741323469b5c5ac6c6ccb2b2380800acc72e60bdf1b3359a5deb", + "sha256:60c8aa69d18ee750d3678d67a265f1cf3559eabef98f90e3e646c3ff41e8795d", + "sha256:6645c8d0ad845308de3eb9be98b6fd22a46ec5412bfc664a423e411cdd8f5488", + "sha256:66e3e1c105c1b811cf25974ee1eff53f2ebf4203c2e2c90e4b1097a837ba4a66", + "sha256:69dc348c0b6b34b855f3f340756b35f0398e18f0637fc815be567c664cba7d1b", + "sha256:70b69c68aede91eb96df81227c0a060ead75a008c3d7e4858dc165265d125c44", + "sha256:73aabf7967c4dc319644c8bcfcbfe8106a475b16af481e0c12402dce2fbac44d", + "sha256:7f08048c4315f56f02dcef350c9b149a1b7a88c46d316e16a83f1f25a71323c6", + "sha256:81eb609e0e15336ce6d6acf37a2e4a89c5a2030b76bc7a907e7010d5b4332c38", + "sha256:84b65684f53c4b6b9d9f61d7f9306e286613187bdf46f30d9a0bbadccfde2205", + "sha256:8b32df00118b8e7de99eb5ca78374eff1a3934ca42972a54283ddcb85d77164d", + "sha256:9c3b27bcc772958a4a051ae333de802423ebe0a5dd4e58e642e6e9d641cd7f3c", + "sha256:9cdc6fcd0eda471b66aca565ce214639f1ad1c5f3a71a205c492d04d139bb75b", + "sha256:b67e222177bfd51532955307a1ec9afd5e9c90ba2ff9561acae562d1e75c6ac1", + "sha256:b7679e478041ad8eae3d70e73dc7f6a2e913142a5fc35a6cabfcb7af977559c1", + "sha256:b949fd8cab330cddb101d4d5fa1b02772b887c31280f10ee4530c2090c378b13", + "sha256:bebccf76fad01fe91426023b3a41ee991a3ab539f3d3d74b7acdb75baf111224", + "sha256:c22f3e2c29cb592b65020605649f6d3cd4cf626e6cf97ce1843547e5ea4d5f68", + "sha256:c68f89a235752c2c191f17ca9a5a2496cb278b57ceb3753621f502a3f576fec5", + "sha256:c8e236eabdaf230e5c49f26c28f95bc3788ba1296765a4e1b98143741fcd779c", + "sha256:cdbcff01afda970b8d8e57633204f4501eddbe2c03932f51e7030aebddaf72d5", + "sha256:e0d47491bd496f823c7fdfdd6154b3e68b8ddf07556210bc46d39af31035f90a", + "sha256:f05cf87b6787b19c0daf9a7b89ac98708e48903120ccc04e4120528daba280fb", + "sha256:f1255ae53392e6e3a40955fa363d9522c915d3245c83029fd7a8ef500cd5f3be", + "sha256:f2bfc588ff4e779e3659b8db348f5fb7b8192d63efc4bf15ecc50a72c820a221" + ], + "index": "pypi", + "version": "==3.1.3" + }, + "billiard": { + "hashes": [ + "sha256:299de5a8da28a783d51b197d496bef4f1595dd023a93a4f59dde1886ae905547", + "sha256:87103ea78fa6ab4d5c751c4909bcff74617d985de7fa8b672cf8618afd5a875b" + ], + "version": "==3.6.4.0" + }, + "binaryornot": { + "hashes": [ + "sha256:359501dfc9d40632edc9fac890e19542db1a287bbcfa58175b66658392018061", + "sha256:b8b71173c917bddcd2c16070412e369c3ed7f0528926f70cac18a6c97fd563e4" + ], + "index": "pypi", + "version": "==0.4.4" + }, + "blinker": { + "hashes": [ + "sha256:471aee25f3992bd325afa3772f1063dbdbbca947a041b8b89466dc00d606f8b6" + ], + "version": "==1.4" + }, + 
"celery": { + "hashes": [ + "sha256:a92e1d56e650781fb747032a3997d16236d037c8199eacd5217d1a72893bca45", + "sha256:d220b13a8ed57c78149acf82c006785356071844afe0b27012a4991d44026f9f" + ], + "index": "pypi", + "version": "==4.4.7" + }, + "certifi": { + "hashes": [ + "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c", + "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830" + ], + "version": "==2020.12.5" + }, + "cffi": { + "hashes": [ + "sha256:005a36f41773e148deac64b08f233873a4d0c18b053d37da83f6af4d9087b813", + "sha256:0857f0ae312d855239a55c81ef453ee8fd24136eaba8e87a2eceba644c0d4c06", + "sha256:1071534bbbf8cbb31b498d5d9db0f274f2f7a865adca4ae429e147ba40f73dea", + "sha256:158d0d15119b4b7ff6b926536763dc0714313aa59e320ddf787502c70c4d4bee", + "sha256:1f436816fc868b098b0d63b8920de7d208c90a67212546d02f84fe78a9c26396", + "sha256:2894f2df484ff56d717bead0a5c2abb6b9d2bf26d6960c4604d5c48bbc30ee73", + "sha256:29314480e958fd8aab22e4a58b355b629c59bf5f2ac2492b61e3dc06d8c7a315", + "sha256:34eff4b97f3d982fb93e2831e6750127d1355a923ebaeeb565407b3d2f8d41a1", + "sha256:35f27e6eb43380fa080dccf676dece30bef72e4a67617ffda586641cd4508d49", + "sha256:3d3dd4c9e559eb172ecf00a2a7517e97d1e96de2a5e610bd9b68cea3925b4892", + "sha256:43e0b9d9e2c9e5d152946b9c5fe062c151614b262fda2e7b201204de0b99e482", + "sha256:48e1c69bbacfc3d932221851b39d49e81567a4d4aac3b21258d9c24578280058", + "sha256:51182f8927c5af975fece87b1b369f722c570fe169f9880764b1ee3bca8347b5", + "sha256:58e3f59d583d413809d60779492342801d6e82fefb89c86a38e040c16883be53", + "sha256:5de7970188bb46b7bf9858eb6890aad302577a5f6f75091fd7cdd3ef13ef3045", + "sha256:65fa59693c62cf06e45ddbb822165394a288edce9e276647f0046e1ec26920f3", + "sha256:69e395c24fc60aad6bb4fa7e583698ea6cc684648e1ffb7fe85e3c1ca131a7d5", + "sha256:6c97d7350133666fbb5cf4abdc1178c812cb205dc6f41d174a7b0f18fb93337e", + "sha256:6e4714cc64f474e4d6e37cfff31a814b509a35cb17de4fb1999907575684479c", + "sha256:72d8d3ef52c208ee1c7b2e341f7d71c6fd3157138abf1a95166e6165dd5d4369", + "sha256:8ae6299f6c68de06f136f1f9e69458eae58f1dacf10af5c17353eae03aa0d827", + "sha256:8b198cec6c72df5289c05b05b8b0969819783f9418e0409865dac47288d2a053", + "sha256:99cd03ae7988a93dd00bcd9d0b75e1f6c426063d6f03d2f90b89e29b25b82dfa", + "sha256:9cf8022fb8d07a97c178b02327b284521c7708d7c71a9c9c355c178ac4bbd3d4", + "sha256:9de2e279153a443c656f2defd67769e6d1e4163952b3c622dcea5b08a6405322", + "sha256:9e93e79c2551ff263400e1e4be085a1210e12073a31c2011dbbda14bda0c6132", + "sha256:9ff227395193126d82e60319a673a037d5de84633f11279e336f9c0f189ecc62", + "sha256:a465da611f6fa124963b91bf432d960a555563efe4ed1cc403ba5077b15370aa", + "sha256:ad17025d226ee5beec591b52800c11680fca3df50b8b29fe51d882576e039ee0", + "sha256:afb29c1ba2e5a3736f1c301d9d0abe3ec8b86957d04ddfa9d7a6a42b9367e396", + "sha256:b85eb46a81787c50650f2392b9b4ef23e1f126313b9e0e9013b35c15e4288e2e", + "sha256:bb89f306e5da99f4d922728ddcd6f7fcebb3241fc40edebcb7284d7514741991", + "sha256:cbde590d4faaa07c72bf979734738f328d239913ba3e043b1e98fe9a39f8b2b6", + "sha256:cd2868886d547469123fadc46eac7ea5253ea7fcb139f12e1dfc2bbd406427d1", + "sha256:d42b11d692e11b6634f7613ad8df5d6d5f8875f5d48939520d351007b3c13406", + "sha256:f2d45f97ab6bb54753eab54fffe75aaf3de4ff2341c9daee1987ee1837636f1d", + "sha256:fd78e5fee591709f32ef6edb9a015b4aa1a5022598e36227500c8f4e02328d9c" + ], + "version": "==1.14.5" + }, + "chardet": { + "hashes": [ + "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa", + "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5" + ], + 
"version": "==4.0.0" + }, + "click": { + "hashes": [ + "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a", + "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc" + ], + "version": "==7.1.2" + }, + "clickclick": { + "hashes": [ + "sha256:4efb13e62353e34c5eef7ed6582c4920b418d7dedc86d819e22ee089ba01802c", + "sha256:c8f33e6d9ec83f68416dd2136a7950125bd256ec39ccc9a85c6e280a16be2bb5" + ], + "version": "==20.10.2" + }, + "connexion": { + "extras": [ + "swagger-ui" + ], + "hashes": [ + "sha256:1ccfac57d4bb7adf4295ba6f5e48f5a1f66057df6a0713417766c9b5235182ee", + "sha256:5439e9659a89c4380d93a07acfbf3380d70be4130574de8881e5f0dfec7ad0e2" + ], + "index": "pypi", + "version": "==2.7.0" + }, + "flask": { + "hashes": [ + "sha256:4efa1ae2d7c9865af48986de8aeb8504bf32c7f3d6fdc9353d34b21f4b127060", + "sha256:8a4fdd8936eba2512e9c85df320a37e694c93945b33ef33c89946a340a238557" + ], + "version": "==1.1.2" + }, + "flask-login": { + "hashes": [ + "sha256:d25e356b14a59f52da0ab30c31c2ad285fa23a840f0f6971df7ed247c77082a7", + "sha256:f9149b63ec6b32aec44acb061ad851eb4eb065e742341147d116d69f8e35ae2b" + ], + "index": "pypi", + "version": "==0.4.0" + }, + "flask-mail": { + "hashes": [ + "sha256:22e5eb9a940bf407bcf30410ecc3708f3c56cc44b29c34e1726fe85006935f41" + ], + "index": "pypi", + "version": "==0.9.1" + }, + "flask-marshmallow": { + "hashes": [ + "sha256:4f507f883838b397638a3a36c7d36ee146b255a49db952f5d9de3f6f4522e8a8", + "sha256:69e99e3a123393894884a032ae2d11e6bdf4519a505819b66cec7eda32057741" + ], + "index": "pypi", + "version": "==0.10.1" + }, + "flask-migrate": { + "hashes": [ + "sha256:8626af845e6071ef80c70b0dc16d373f761c981f0ad61bb143a529cab649e725", + "sha256:c1601dfd46b9204233935e5d73473cd7fa959db7a4b0e894c7aa7a9e8aeebf0e" + ], + "index": "pypi", + "version": "==2.6.0" + }, + "flask-sqlalchemy": { + "hashes": [ + "sha256:0c9609b0d72871c540a7945ea559c8fdf5455192d2db67219509aed680a3d45a", + "sha256:8631bbea987bc3eb0f72b1f691d47bd37ceb795e73b59ab48586d76d75a7c605" + ], + "index": "pypi", + "version": "==2.4.0" + }, + "flask-wtf": { + "hashes": [ + "sha256:5d14d55cfd35f613d99ee7cba0fc3fbbe63ba02f544d349158c14ca15561cc36", + "sha256:d9a9e366b32dcbb98ef17228e76be15702cd2600675668bca23f63a7947fd5ac" + ], + "index": "pypi", + "version": "==0.14.2" + }, + "gevent": { + "hashes": [ + "sha256:16574e4aa902ebc7bad564e25aa9740a82620fdeb61e0bbf5cbc32e84c13cb6a", + "sha256:188c3c6da67e17ffa28f960fc80f8b7e4ba0f4efdc7519822c9d3a1784ca78ea", + "sha256:1e5af63e452cc1758924528a2ba6d3e472f5338e1534b7233cd01d3429fc1082", + "sha256:242e32cc011ad7127525ca9181aef3379ce4ad9c733aefe311ecf90248ad9a6f", + "sha256:2a9ae0a0fd956cbbc9c326b8f290dcad2b58acfb2e2732855fe1155fb110a04d", + "sha256:33741e3cd51b90483b14f73b6a3b32b779acf965aeb91d22770c0c8e0c937b73", + "sha256:3694f393ab08372bd337b9bc8eebef3ccab3c1623ef94536762a1eee68821449", + "sha256:464ec84001ba5108a9022aded4c5e69ea4d13ef11a2386d3ec37c1d08f3074c9", + "sha256:520cc2a029a9eef436e4e56b007af7859315cafa21937d43c1d5269f12f2c981", + "sha256:77b65a68c83e1c680f52dc39d5e5406763dd10a18ce08420665504b6f047962e", + "sha256:7bdfee07be5eee4f687bf90c54c2a65c909bcf2b6c4878faee51218ffa5d5d3e", + "sha256:969743debf89d6409423aaeae978437cc042247f91f5801e946a07a0a3b59148", + "sha256:96f704561a9dd9a817c67f2e279e23bfad6166cf95d63d35c501317e17f68bcf", + "sha256:9f99c3ec61daed54dc074fbcf1a86bcf795b9dfac2f6d4cdae6dfdb8a9125692", + "sha256:a130a1885603eabd8cea11b3e1c3c7333d4341b537eca7f0c4794cb5c7120db1", + 
"sha256:a54b9c7516c211045d7897a73a4ccdc116b3720c9ad3c591ef9592b735202a3b", + "sha256:ac98570649d9c276e39501a1d1cbf6c652b78f57a0eb1445c5ff25ff80336b63", + "sha256:afaeda9a7e8e93d0d86bf1d65affe912366294913fe43f0d107145dc32cd9545", + "sha256:b6ffc1131e017aafa70d7ec19cc24010b19daa2f11d5dc2dc191a79c3c9ea147", + "sha256:ba0c6ad94614e9af4240affbe1b4839c54da5a0a7e60806c6f7f69c1a7f5426e", + "sha256:bdb3677e77ab4ebf20c4752ac49f3b1e47445678dd69f82f9905362c68196456", + "sha256:c2c4326bb507754ef354635c05f560a217c171d80f26ca65bea81aa59b1ac179", + "sha256:cfb2878c2ecf27baea436bb9c4d8ab8c2fa7763c3916386d5602992b6a056ff3", + "sha256:e370e0a861db6f63c75e74b6ee56a40f5cdac90212ec404621445afa12bfc94b", + "sha256:e8a5d9fcf5d031f2e4c499f5f4b53262face416e22e8769078354f641255a663", + "sha256:ecff28416c99e0f73137f35849c3027cc3edde9dc13b7707825ebbf728623928", + "sha256:f0498df97a303da77e180a9368c9228b0fc94d10dd2ce79fc5ebb63fec0d2fc9", + "sha256:f91fd07b9cf642f24e58ed381e19ec33e28b8eee8726c19b026ea24fcc9ff897" + ], + "version": "==21.1.2" + }, + "greenlet": { + "hashes": [ + "sha256:0a77691f0080c9da8dfc81e23f4e3cffa5accf0f5b56478951016d7cfead9196", + "sha256:0ddd77586553e3daf439aa88b6642c5f252f7ef79a39271c25b1d4bf1b7cbb85", + "sha256:111cfd92d78f2af0bc7317452bd93a477128af6327332ebf3c2be7df99566683", + "sha256:122c63ba795fdba4fc19c744df6277d9cfd913ed53d1a286f12189a0265316dd", + "sha256:181300f826625b7fd1182205b830642926f52bd8cdb08b34574c9d5b2b1813f7", + "sha256:1a1ada42a1fd2607d232ae11a7b3195735edaa49ea787a6d9e6a53afaf6f3476", + "sha256:1bb80c71de788b36cefb0c3bb6bfab306ba75073dbde2829c858dc3ad70f867c", + "sha256:1d1d4473ecb1c1d31ce8fd8d91e4da1b1f64d425c1dc965edc4ed2a63cfa67b2", + "sha256:292e801fcb3a0b3a12d8c603c7cf340659ea27fd73c98683e75800d9fd8f704c", + "sha256:2c65320774a8cd5fdb6e117c13afa91c4707548282464a18cf80243cf976b3e6", + "sha256:4365eccd68e72564c776418c53ce3c5af402bc526fe0653722bc89efd85bf12d", + "sha256:5352c15c1d91d22902582e891f27728d8dac3bd5e0ee565b6a9f575355e6d92f", + "sha256:58ca0f078d1c135ecf1879d50711f925ee238fe773dfe44e206d7d126f5bc664", + "sha256:5d4030b04061fdf4cbc446008e238e44936d77a04b2b32f804688ad64197953c", + "sha256:5d69bbd9547d3bc49f8a545db7a0bd69f407badd2ff0f6e1a163680b5841d2b0", + "sha256:5f297cb343114b33a13755032ecf7109b07b9a0020e841d1c3cedff6602cc139", + "sha256:62afad6e5fd70f34d773ffcbb7c22657e1d46d7fd7c95a43361de979f0a45aef", + "sha256:647ba1df86d025f5a34043451d7c4a9f05f240bee06277a524daad11f997d1e7", + "sha256:719e169c79255816cdcf6dccd9ed2d089a72a9f6c42273aae12d55e8d35bdcf8", + "sha256:7cd5a237f241f2764324396e06298b5dee0df580cf06ef4ada0ff9bff851286c", + "sha256:875d4c60a6299f55df1c3bb870ebe6dcb7db28c165ab9ea6cdc5d5af36bb33ce", + "sha256:90b6a25841488cf2cb1c8623a53e6879573010a669455046df5f029d93db51b7", + "sha256:94620ed996a7632723a424bccb84b07e7b861ab7bb06a5aeb041c111dd723d36", + "sha256:b5f1b333015d53d4b381745f5de842f19fe59728b65f0fbb662dafbe2018c3a5", + "sha256:c5b22b31c947ad8b6964d4ed66776bcae986f73669ba50620162ba7c832a6b6a", + "sha256:c93d1a71c3fe222308939b2e516c07f35a849c5047f0197442a4d6fbcb4128ee", + "sha256:cdb90267650c1edb54459cdb51dab865f6c6594c3a47ebd441bc493360c7af70", + "sha256:cfd06e0f0cc8db2a854137bd79154b61ecd940dce96fad0cba23fe31de0b793c", + "sha256:d3789c1c394944084b5e57c192889985a9f23bd985f6d15728c745d380318128", + "sha256:da7d09ad0f24270b20f77d56934e196e982af0d0a2446120cb772be4e060e1a2", + "sha256:df3e83323268594fa9755480a442cabfe8d82b21aba815a71acf1bb6c1776218", + "sha256:df8053867c831b2643b2c489fe1d62049a98566b1646b194cc815f13e27b90df", + 
"sha256:e1128e022d8dce375362e063754e129750323b67454cac5600008aad9f54139e", + "sha256:e6e9fdaf6c90d02b95e6b0709aeb1aba5affbbb9ccaea5502f8638e4323206be", + "sha256:eac8803c9ad1817ce3d8d15d1bb82c2da3feda6bee1153eec5c58fa6e5d3f770", + "sha256:eb333b90036358a0e2c57373f72e7648d7207b76ef0bd00a4f7daad1f79f5203", + "sha256:ed1d1351f05e795a527abc04a0d82e9aecd3bdf9f46662c36ff47b0b00ecaf06", + "sha256:f3dc68272990849132d6698f7dc6df2ab62a88b0d36e54702a8fd16c0490e44f", + "sha256:f59eded163d9752fd49978e0bab7a1ff21b1b8d25c05f0995d140cc08ac83379", + "sha256:f5e2d36c86c7b03c94b8459c3bd2c9fe2c7dab4b258b8885617d44a22e453fb7", + "sha256:f6f65bf54215e4ebf6b01e4bb94c49180a589573df643735107056f7a910275b", + "sha256:f8450d5ef759dbe59f84f2c9f77491bb3d3c44bc1a573746daf086e70b14c243", + "sha256:f97d83049715fd9dec7911860ecf0e17b48d8725de01e45de07d8ac0bd5bc378" + ], + "markers": "python_version >= '3'", + "version": "==1.0.0" + }, + "gunicorn": { + "extras": [ + "gevent" + ], + "hashes": [ + "sha256:aa8e0b40b4157b36a5df5e599f45c9c76d6af43845ba3b3b0efe2c70473c2471", + "sha256:fa2662097c66f920f53f70621c6c58ca4a3c4d3434205e608e121b5b3b71f4f3" + ], + "index": "pypi", + "version": "==19.9" + }, + "idna": { + "hashes": [ + "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6", + "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0" + ], + "version": "==2.10" + }, + "inflection": { + "hashes": [ + "sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417", + "sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2" + ], + "version": "==0.5.1" + }, + "isodate": { + "hashes": [ + "sha256:2e364a3d5759479cdb2d37cce6b9376ea504db2ff90252a2e5b7cc89cc9ff2d8", + "sha256:aa4d33c06640f5352aca96e4b81afd8ab3b47337cc12089822d6f322ac772c81" + ], + "version": "==0.6.0" + }, + "itsdangerous": { + "hashes": [ + "sha256:321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19", + "sha256:b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749" + ], + "index": "pypi", + "version": "==1.1.0" + }, + "jinja2": { + "hashes": [ + "sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419", + "sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6" + ], + "version": "==2.11.3" + }, + "jsons": { + "hashes": [ + "sha256:7b813225eb4b13b85faa6d911f390f1823b2b94b4bf91598ff056fe21adb25e5" + ], + "index": "pypi", + "version": "==1.1.2" + }, + "jsonschema": { + "hashes": [ + "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163", + "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a" + ], + "version": "==3.2.0" + }, + "kombu": { + "hashes": [ + "sha256:be48cdffb54a2194d93ad6533d73f69408486483d189fe9f5990ee24255b0e0a", + "sha256:ca1b45faac8c0b18493d02a8571792f3c40291cf2bcf1f55afed3d8f3aa7ba74" + ], + "version": "==4.6.11" + }, + "mako": { + "hashes": [ + "sha256:17831f0b7087c313c0ffae2bcbbd3c1d5ba9eeac9c38f2eb7b50e8c99fe9d5ab", + "sha256:aea166356da44b9b830c8023cd9b557fa856bd8b4035d6de771ca027dfc5cc6e" + ], + "version": "==1.1.4" + }, + "markupsafe": { + "hashes": [ + "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473", + "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161", + "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235", + "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5", + "sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42", + 
"sha256:195d7d2c4fbb0ee8139a6cf67194f3973a6b3042d742ebe0a9ed36d8b6f0c07f", + "sha256:22c178a091fc6630d0d045bdb5992d2dfe14e3259760e713c490da5323866c39", + "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff", + "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b", + "sha256:2beec1e0de6924ea551859edb9e7679da6e4870d32cb766240ce17e0a0ba2014", + "sha256:3b8a6499709d29c2e2399569d96719a1b21dcd94410a586a18526b143ec8470f", + "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1", + "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e", + "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183", + "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66", + "sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b", + "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1", + "sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15", + "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1", + "sha256:6f1e273a344928347c1290119b493a1f0303c52f5a5eae5f16d74f48c15d4a85", + "sha256:6fffc775d90dcc9aed1b89219549b329a9250d918fd0b8fa8d93d154918422e1", + "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e", + "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b", + "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905", + "sha256:7fed13866cf14bba33e7176717346713881f56d9d2bcebab207f7a036f41b850", + "sha256:84dee80c15f1b560d55bcfe6d47b27d070b4681c699c572af2e3c7cc90a3b8e0", + "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735", + "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d", + "sha256:98bae9582248d6cf62321dcb52aaf5d9adf0bad3b40582925ef7c7f0ed85fceb", + "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e", + "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d", + "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c", + "sha256:a6a744282b7718a2a62d2ed9d993cad6f5f585605ad352c11de459f4108df0a1", + "sha256:acf08ac40292838b3cbbb06cfe9b2cb9ec78fce8baca31ddb87aaac2e2dc3bc2", + "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21", + "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2", + "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5", + "sha256:b1dba4527182c95a0db8b6060cc98ac49b9e2f5e64320e2b56e47cb2831978c7", + "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b", + "sha256:b7d644ddb4dbd407d31ffb699f1d140bc35478da613b441c582aeb7c43838dd8", + "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6", + "sha256:bf5aa3cbcfdf57fa2ee9cd1822c862ef23037f5c832ad09cfea57fa846dec193", + "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f", + "sha256:caabedc8323f1e93231b52fc32bdcde6db817623d33e100708d9a68e1f53b26b", + "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f", + "sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2", + "sha256:d53bc011414228441014aa71dbec320c66468c1030aae3a6e29778a3382d96e5", + "sha256:d73a845f227b0bfe8a7455ee623525ee656a9e2e749e4742706d80a6065d5e2c", + "sha256:d9be0ba6c527163cbed5e0857c451fcd092ce83947944d6c14bc95441203f032", + "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7", + "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be", + 
"sha256:feb7b34d6325451ef96bc0e36e1a6c0c1c64bc1fbec4b854f4529e51887b1621" + ], + "version": "==1.1.1" + }, + "marshmallow": { + "hashes": [ + "sha256:0dd42891a5ef288217ed6410917f3c6048f585f8692075a0052c24f9bfff9dfd", + "sha256:16e99cb7f630c0ef4d7d364ed0109ac194268dde123966076ab3dafb9ae3906b" + ], + "version": "==3.11.1" + }, + "marshmallow-sqlalchemy": { + "hashes": [ + "sha256:d662fc8103a06b59aafbc33c8ab4f1318799d66aae4f660c1e7e278bc45ef69e", + "sha256:f69e740ea8d3b678fd712263e5dd739c94482cd8840e2f6cf4314919e6547cee" + ], + "index": "pypi", + "version": "==0.17.0" + }, + "openapi-schema-validator": { + "hashes": [ + "sha256:215b516d0942f4e8e2446cf3f7d4ff2ed71d102ebddcc30526d8a3f706ab1df6", + "sha256:a4b2712020284cee880b4c55faa513fbc2f8f07f365deda6098f8ab943c9f0df", + "sha256:b65d6c2242620bfe76d4c749b61cd9657e4528895a8f4fb6f916085b508ebd24" + ], + "version": "==0.1.5" + }, + "openapi-spec-validator": { + "hashes": [ + "sha256:4083fc5aac3e9f751c2a82d4ec5cf3adad5f967d0faf31495d8b56a0b0f9705c", + "sha256:53ba3d884e98ff2062d5ada025aa590541dcd665b8f81067dc82dd61c0923759", + "sha256:e11df7c559339027bd04f2399bc82474983129a6a7a6a0421eaa95e2c844d686" + ], + "version": "==0.3.0" + }, + "packaging": { + "hashes": [ + "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5", + "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a" + ], + "version": "==20.9" + }, + "pathvalidate": { + "hashes": [ + "sha256:6d18aac8478ac97456b7fb639dc456429863fefd0f75e77d33b3166bca7c5651", + "sha256:bf250828f40d94881df764950cffca14bb3fa83783d9f239125a6b5e23a5646e" + ], + "index": "pypi", + "version": "==0.29.0" + }, + "psycopg2-binary": { + "hashes": [ + "sha256:0deac2af1a587ae12836aa07970f5cb91964f05a7c6cdb69d8425ff4c15d4e2c", + "sha256:0e4dc3d5996760104746e6cfcdb519d9d2cd27c738296525d5867ea695774e67", + "sha256:11b9c0ebce097180129e422379b824ae21c8f2a6596b159c7659e2e5a00e1aa0", + "sha256:15978a1fbd225583dd8cdaf37e67ccc278b5abecb4caf6b2d6b8e2b948e953f6", + "sha256:1fabed9ea2acc4efe4671b92c669a213db744d2af8a9fc5d69a8e9bc14b7a9db", + "sha256:2dac98e85565d5688e8ab7bdea5446674a83a3945a8f416ad0110018d1501b94", + "sha256:42ec1035841b389e8cc3692277a0bd81cdfe0b65d575a2c8862cec7a80e62e52", + "sha256:6422f2ff0919fd720195f64ffd8f924c1395d30f9a495f31e2392c2efafb5056", + "sha256:6a32f3a4cb2f6e1a0b15215f448e8ce2da192fd4ff35084d80d5e39da683e79b", + "sha256:7312e931b90fe14f925729cde58022f5d034241918a5c4f9797cac62f6b3a9dd", + "sha256:7d92a09b788cbb1aec325af5fcba9fed7203897bbd9269d5691bb1e3bce29550", + "sha256:833709a5c66ca52f1d21d41865a637223b368c0ee76ea54ca5bad6f2526c7679", + "sha256:89705f45ce07b2dfa806ee84439ec67c5d9a0ef20154e0e475e2b2ed392a5b83", + "sha256:8cd0fb36c7412996859cb4606a35969dd01f4ea34d9812a141cd920c3b18be77", + "sha256:950bc22bb56ee6ff142a2cb9ee980b571dd0912b0334aa3fe0fe3788d860bea2", + "sha256:a0c50db33c32594305b0ef9abc0cb7db13de7621d2cadf8392a1d9b3c437ef77", + "sha256:a0eb43a07386c3f1f1ebb4dc7aafb13f67188eab896e7397aa1ee95a9c884eb2", + "sha256:aaa4213c862f0ef00022751161df35804127b78adf4a2755b9f991a507e425fd", + "sha256:ac0c682111fbf404525dfc0f18a8b5f11be52657d4f96e9fcb75daf4f3984859", + "sha256:ad20d2eb875aaa1ea6d0f2916949f5c08a19c74d05b16ce6ebf6d24f2c9f75d1", + "sha256:b4afc542c0ac0db720cf516dd20c0846f71c248d2b3d21013aa0d4ef9c71ca25", + "sha256:b8a3715b3c4e604bcc94c90a825cd7f5635417453b253499664f784fc4da0152", + "sha256:ba28584e6bca48c59eecbf7efb1576ca214b47f05194646b081717fa628dfddf", + "sha256:ba381aec3a5dc29634f20692349d73f2d21f17653bda1decf0b52b11d694541f", + 
"sha256:bd1be66dde2b82f80afb9459fc618216753f67109b859a361cf7def5c7968729", + "sha256:c2507d796fca339c8fb03216364cca68d87e037c1f774977c8fc377627d01c71", + "sha256:cec7e622ebc545dbb4564e483dd20e4e404da17ae07e06f3e780b2dacd5cee66", + "sha256:d14b140a4439d816e3b1229a4a525df917d6ea22a0771a2a78332273fd9528a4", + "sha256:d1b4ab59e02d9008efe10ceabd0b31e79519da6fb67f7d8e8977118832d0f449", + "sha256:d5227b229005a696cc67676e24c214740efd90b148de5733419ac9aaba3773da", + "sha256:e1f57aa70d3f7cc6947fd88636a481638263ba04a742b4a37dd25c373e41491a", + "sha256:e74a55f6bad0e7d3968399deb50f61f4db1926acf4a6d83beaaa7df986f48b1c", + "sha256:e82aba2188b9ba309fd8e271702bd0d0fc9148ae3150532bbb474f4590039ffb", + "sha256:ee69dad2c7155756ad114c02db06002f4cded41132cc51378e57aad79cc8e4f4", + "sha256:f5ab93a2cb2d8338b1674be43b442a7f544a0971da062a5da774ed40587f18f5" + ], + "index": "pypi", + "version": "==2.8.6" + }, + "pycparser": { + "hashes": [ + "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0", + "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705" + ], + "version": "==2.20" + }, + "pygeodiff": { + "hashes": [ + "sha256:015554b1849f98233e4edf4bb2fafa003f6f5443d9a2447fb894f943ca7e2137", + "sha256:025ac9d8db9d144ffbf20dadc15e429e8cb526fc14277c7072cc9802fdf878a5", + "sha256:0df51544a5133e608a5541ac8b904058f1c511828f08441291ffccab96e9f132", + "sha256:4da89ba9d9836e708819343cefe383b77a9d61808fac58b4cb0b3a157c03671a", + "sha256:87614130d5762ec90cf2d55b9634d15a85d67febd352066cf4fac91f33aad19d", + "sha256:950f0eef75869dbe6dc987b6a858cdc54dcd9fab3cc300528ad529c41abcdbfc", + "sha256:9c6b10c6fc0d490ce7ecd65ff1828d775065ac67382faa739dbb0f14dec0bea6", + "sha256:b0d05552df55da1a619bf694a99ad7602b83195a0a2157434604fe8c1eced632", + "sha256:b60f3ed72779600d3a93e2709211dedc2ab76f79f200091b827cf7d785871300", + "sha256:d69085feddac198206eaa877755e601b67935a1eff2dde7f3701084c665d04bb", + "sha256:dcf550e3492a3de4652737ecf4f43b6ed418b1f7f2f4f607fbc0a2a62177777c" + ], + "index": "pypi", + "version": "==0.8.6" + }, + "pyparsing": { + "hashes": [ + "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1", + "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b" + ], + "version": "==2.4.7" + }, + "pyrsistent": { + "hashes": [ + "sha256:2e636185d9eb976a18a8a8e96efce62f2905fea90041958d8cc2a189756ebf3e" + ], + "version": "==0.17.3" + }, + "python-dateutil": { + "hashes": [ + "sha256:3acbef017340600e9ff8f2994d8f7afd6eacb295383f286466a6df3961e486f0", + "sha256:537bf2a8f8ce6f6862ad705cd68f9e405c0b5db014aa40fa29eab4335d4b1716", + "sha256:62a2f8df3d66f878373fd0072eacf4ee52194ba302e00082828e0d263b0418d2" + ], + "index": "pypi", + "version": "==2.6.0" + }, + "python-decouple": { + "hashes": [ + "sha256:2e5adb0263a4f963b58d7407c4760a2465d464ee212d733e2a2c179e54c08d8f", + "sha256:a8268466e6389a639a20deab9d880faee186eb1eb6a05e54375bdf158d691981" + ], + "index": "pypi", + "version": "==3.4" + }, + "python-dotenv": { + "hashes": [ + "sha256:122290a38ece9fe4f162dc7c95cae3357b983505830a154d3c98ef7f6c6cea77", + "sha256:4a205787bc829233de2a823aa328e44fd9996fedb954989a21f1fc67c13d7a77" + ], + "index": "pypi", + "version": "==0.9.1" + }, + "python-editor": { + "hashes": [ + "sha256:1bf6e860a8ad52a14c3ee1252d5dc25b2030618ed80c022598f00176adc8367d", + "sha256:51fda6bcc5ddbbb7063b2af7509e43bd84bfc32a4ff71349ec7847713882327b", + "sha256:5f98b069316ea1c2ed3f67e7f5df6c0d8f10b689964a4a811ff64f0106819ec8" + ], + "version": "==1.0.4" + }, + "pytz": { + "hashes": [ + 
"sha256:303879e36b721603cc54604edcac9d20401bdbe31e1e4fdee5b9f98d5d31dfda", + "sha256:d747dd3d23d77ef44c6a3526e274af6efeb0a6f1afd5a69ba4d5be4098c8e141" + ], + "index": "pypi", + "version": "==2019.1" + }, + "pyyaml": { + "hashes": [ + "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf", + "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696", + "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393", + "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77", + "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922", + "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5", + "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8", + "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10", + "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc", + "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018", + "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e", + "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253", + "sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347", + "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183", + "sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541", + "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb", + "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185", + "sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc", + "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db", + "sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa", + "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46", + "sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122", + "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b", + "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63", + "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df", + "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc", + "sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247", + "sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6", + "sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0" + ], + "version": "==5.4.1" + }, + "redis": { + "hashes": [ + "sha256:0dcfb335921b88a850d461dc255ff4708294943322bd55de6cfd68972490ca1f", + "sha256:b205cffd05ebfd0a468db74f0eedbff8df1a7bfc47521516ade4692991bb0833" + ], + "index": "pypi", + "version": "==3.4.1" + }, + "requests": { + "hashes": [ + "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804", + "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e" + ], + "version": "==2.25.1" + }, + "requests-toolbelt": { + "hashes": [ + "sha256:42c9c170abc2cacb78b8ab23ac957945c7716249206f90874651971a4acff237", + "sha256:f6a531936c6fa4c6cfce1b9c10d5c4f498d16528d2a54a22ca00011205a187b5" + ], + "index": "pypi", + "version": "==0.8.0" + }, + "result": { + "hashes": [ + "sha256:46f039a2d17e47709c13e29af359c3fa91fd5cacddba2a8109fdcb514e6ff471", + "sha256:f4563ff615b1147822d13eb363fbda202511fcbf281b3cf7acf0723ca7cb612b" + ], + "index": "pypi", + "version": "==0.5" + }, + "safe": { + "hashes": [ + "sha256:4e7315087d80fdbbc781362a656376cfa273c5c68a8abc0412e159bf541fcbda", + 
"sha256:a2fdac9fe8a9dcf02b438201d6ce0b7be78f85dc6492d03edfb89be2adf489de" + ], + "index": "pypi", + "version": "==0.4" + }, + "scikit-build": { + "hashes": [ + "sha256:7342017cc82dd6178e3b19377389b8a8d1f8b429d9cdb315cfb1094e34a0f526", + "sha256:e343cd0f012e4cc282132324223a15e6bae23c77f9c3e7f3b3b067a0db08d3b2" + ], + "index": "pypi", + "version": "==0.10.0" + }, + "six": { + "hashes": [ + "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", + "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" + ], + "version": "==1.15.0" + }, + "sqlalchemy": { + "hashes": [ + "sha256:0140f6dac2659fa6783e7029085ab0447d8eb23cf4d831fb907588d27ba158f7", + "sha256:034b42a6a59bf4ddc57e5a38a9dbac83ccd94c0b565ba91dba4ff58149706028", + "sha256:03a503ecff0cc2be3ad4dafd220eaff13721edb11c191670b7662932fb0a5c3a", + "sha256:069de3a701d33709236efe0d06f38846b738b19c63d45cc47f54590982ba7802", + "sha256:1735e06a3d5b0793d5ee2d952df8a5c63edaff6383c2210c9b5c93dc2ea4c315", + "sha256:19633df6be629200ff3c026f2837e1dd17908fb1bcea860290a5a45e6fa5148e", + "sha256:1e14fa32969badef9c309f55352e5c46f321bd29f7c600556caacdaa3eddfcf6", + "sha256:31e941d6db8b026bc63e46ef71e877913f128bd44260b90c645432626b7f9a47", + "sha256:452c4e002be727cb6f929dbd32bbc666a0921b86555b8af09709060ed3954bd3", + "sha256:45a720029756800628359192630fffdc9660ab6f27f0409bd24d9e09d75d6c18", + "sha256:4a2e7f037d3ca818d6d0490e3323fd451545f580df30d62b698da2f247015a34", + "sha256:4a7d4da2acf6d5d068fb41c48950827c49c3c68bfb46a1da45ea8fbf7ed4b471", + "sha256:4ad4044eb86fbcbdff2106e44f479fbdac703d77860b3e19988c8a8786e73061", + "sha256:4f631edf45a943738fa77612e85fc5c5d3fb637c4f5a530f7eedd1a7cd7a70a7", + "sha256:6389b10e23329dc8b5600c1a84e3da2628d0f437d8a5cd05aefd1470ec571dd1", + "sha256:6ebd58e73b7bd902688c0bb8dbabb0c36b756f02cc7b27ad5efa2f380c611f95", + "sha256:7180830ea1082b96b94884bc352b274e29b45151b6ee911bf1fd79cba2de659b", + "sha256:789be639501445d85fd4ca41d04f0f5c6cbb6deb0c6826aaa6f22774fe84ef94", + "sha256:7d89add44938ea4f52c7641d5805c9e154fed4381e874ef3221483eeb191a96d", + "sha256:842b0d4698381aac047f8ae57409c90b7e63ebabf5bc02814ddc8eaefd13499e", + "sha256:8f96d4b6a49d3f0f109365bb6303ae5d266d3f90280ca68cf8b2c46032491038", + "sha256:961b089e64c2ad29ad367487dd3ba1aa3eeba56bc82037ce91732baaa0f6ca90", + "sha256:96de1d4a2e05d4a017087cb29cd6a8ebfeecfd0e9f872880b1a589f011c1c02e", + "sha256:98214f04802a3fc740038744d8981a8f2fdca710f791ca125fc4792737d9f3a7", + "sha256:9cf94161cb55507cee147bf8abcfd3c076b353ad18743296764dd81108ea74f8", + "sha256:9fdf0713166f33e5e6ea98cf59deb305cb323131277f6880de6c509f468076f8", + "sha256:a41ab83ecfadf38a47bdfaf4e488f71579df47a711e1ab1dce30d34c7c25bd00", + "sha256:ac14fee167653ec6dee32d6aa4d501d90ae1bfbbc3eb5816940bccf227f0d617", + "sha256:b8b7d66ee8b8ac272adce0af1342a60854f0d89686e6d3318127a6a82a2f765c", + "sha256:bb1072fdf48ba870c0fe81bee8babe4ba2f096fb56bb4f3e0c2386a7626e405c", + "sha256:cd823071b97c1a6ac3af9e43b5d861126a1304033dcd18dfe354a02ec45642fe", + "sha256:d08173144aebdf30c21a331b532db16535cfa83deed12e8703fa6c67c0894ffc", + "sha256:e7d76312e904aa4ea221a92c0bc2e299ad46e4580e2d72ca1f7e6d31dce5bfab", + "sha256:f772e4428d413c0affe2a34836278fbe9df9a9c0940705860c2d3a4b50af1a66" + ], + "version": "==1.4.11" + }, + "swagger-ui-bundle": { + "hashes": [ + "sha256:f5255f786cde67a2638111f4a7d04355836743198a83c4ecbe815d9fc384b0c8", + "sha256:f5691167f2e9f73ecbe8229a89454ae5ea958f90bb0d4583ed7adaae598c4122" + ], + "version": "==0.0.8" + }, + "typish": { + "hashes": [ + 
"sha256:36f940e39d0f4d2bd4c524d167936024308bc52ef22ec718ce5c4e38a3ba5a95" + ], + "version": "==1.9.2" + }, + "urllib3": { + "hashes": [ + "sha256:2f4da4594db7e1e110a944bb1b551fdf4e6c136ad42e4234131391e21eb5b0df", + "sha256:e7b021f7241115872f92f43c6508082facffbd1c048e3c6e2bb9c2a157e28937" + ], + "version": "==1.26.4" + }, + "vine": { + "hashes": [ + "sha256:133ee6d7a9016f177ddeaf191c1f58421a1dcc6ee9a42c58b34bed40e1d2cd87", + "sha256:ea4947cc56d1fd6f2095c8d543ee25dad966f78692528e68b4fada11ba3f98af" + ], + "version": "==1.3.0" + }, + "werkzeug": { + "hashes": [ + "sha256:7280924747b5733b246fe23972186c6b348f9ae29724135a6dfc1e53cea433e7", + "sha256:e5f4a1f98b52b18a93da705a7458e55afb26f32bff83ff5d19189f92462d65c4" + ], + "index": "pypi", + "version": "==0.16.0" + }, + "wheel": { + "hashes": [ + "sha256:78b5b185f0e5763c26ca1e324373aadd49182ca90e825f7853f4b2509215dc0e", + "sha256:e11eefd162658ea59a60a0f6c7d493a7190ea4b9a85e335b33489d9f17e0245e" + ], + "version": "==0.36.2" + }, + "wtforms": { + "hashes": [ + "sha256:ffdf10bd1fa565b8233380cb77a304cd36fd55c73023e91d4b803c96bc11d46f" + ], + "index": "pypi", + "version": "==2.1" + }, + "wtforms-json": { + "hashes": [ + "sha256:3d3193819a83daa14b3cb5e029fe807fb4a70172ec299abc07d9d58d51b2ecc4" + ], + "index": "pypi", + "version": "==0.3.3" + }, + "zipfly": { + "hashes": [ + "sha256:ac78c6feb76313548b8e0256df5bbb13611ab7a9983b64ef3d1d76570d11ad71" + ], + "index": "pypi", + "version": "==6.0.3" + }, + "zope.event": { + "hashes": [ + "sha256:2666401939cdaa5f4e0c08cf7f20c9b21423b95e88f4675b1443973bdb080c42", + "sha256:5e76517f5b9b119acf37ca8819781db6c16ea433f7e2062c4afc2b6fbedb1330" + ], + "version": "==4.5.0" + }, + "zope.interface": { + "hashes": [ + "sha256:08f9636e99a9d5410181ba0729e0408d3d8748026ea938f3b970a0249daa8192", + "sha256:0b465ae0962d49c68aa9733ba92a001b2a0933c317780435f00be7ecb959c702", + "sha256:0cba8477e300d64a11a9789ed40ee8932b59f9ee05f85276dbb4b59acee5dd09", + "sha256:0cee5187b60ed26d56eb2960136288ce91bcf61e2a9405660d271d1f122a69a4", + "sha256:0ea1d73b7c9dcbc5080bb8aaffb776f1c68e807767069b9ccdd06f27a161914a", + "sha256:0f91b5b948686659a8e28b728ff5e74b1be6bf40cb04704453617e5f1e945ef3", + "sha256:15e7d1f7a6ee16572e21e3576d2012b2778cbacf75eb4b7400be37455f5ca8bf", + "sha256:17776ecd3a1fdd2b2cd5373e5ef8b307162f581c693575ec62e7c5399d80794c", + "sha256:194d0bcb1374ac3e1e023961610dc8f2c78a0f5f634d0c737691e215569e640d", + "sha256:1c0e316c9add0db48a5b703833881351444398b04111188069a26a61cfb4df78", + "sha256:205e40ccde0f37496904572035deea747390a8b7dc65146d30b96e2dd1359a83", + "sha256:273f158fabc5ea33cbc936da0ab3d4ba80ede5351babc4f577d768e057651531", + "sha256:2876246527c91e101184f63ccd1d716ec9c46519cc5f3d5375a3351c46467c46", + "sha256:2c98384b254b37ce50eddd55db8d381a5c53b4c10ee66e1e7fe749824f894021", + "sha256:2e5a26f16503be6c826abca904e45f1a44ff275fdb7e9d1b75c10671c26f8b94", + "sha256:334701327f37c47fa628fc8b8d28c7d7730ce7daaf4bda1efb741679c2b087fc", + "sha256:3748fac0d0f6a304e674955ab1365d515993b3a0a865e16a11ec9d86fb307f63", + "sha256:3c02411a3b62668200910090a0dff17c0b25aaa36145082a5a6adf08fa281e54", + "sha256:3dd4952748521205697bc2802e4afac5ed4b02909bb799ba1fe239f77fd4e117", + "sha256:3f24df7124c323fceb53ff6168da70dbfbae1442b4f3da439cd441681f54fe25", + "sha256:469e2407e0fe9880ac690a3666f03eb4c3c444411a5a5fddfdabc5d184a79f05", + "sha256:4de4bc9b6d35c5af65b454d3e9bc98c50eb3960d5a3762c9438df57427134b8e", + "sha256:5208ebd5152e040640518a77827bdfcc73773a15a33d6644015b763b9c9febc1", + 
"sha256:52de7fc6c21b419078008f697fd4103dbc763288b1406b4562554bd47514c004", + "sha256:5bb3489b4558e49ad2c5118137cfeaf59434f9737fa9c5deefc72d22c23822e2", + "sha256:5dba5f530fec3f0988d83b78cc591b58c0b6eb8431a85edd1569a0539a8a5a0e", + "sha256:5dd9ca406499444f4c8299f803d4a14edf7890ecc595c8b1c7115c2342cadc5f", + "sha256:5f931a1c21dfa7a9c573ec1f50a31135ccce84e32507c54e1ea404894c5eb96f", + "sha256:63b82bb63de7c821428d513607e84c6d97d58afd1fe2eb645030bdc185440120", + "sha256:66c0061c91b3b9cf542131148ef7ecbecb2690d48d1612ec386de9d36766058f", + "sha256:6f0c02cbb9691b7c91d5009108f975f8ffeab5dff8f26d62e21c493060eff2a1", + "sha256:71aace0c42d53abe6fc7f726c5d3b60d90f3c5c055a447950ad6ea9cec2e37d9", + "sha256:7d97a4306898b05404a0dcdc32d9709b7d8832c0c542b861d9a826301719794e", + "sha256:7df1e1c05304f26faa49fa752a8c690126cf98b40b91d54e6e9cc3b7d6ffe8b7", + "sha256:8270252effc60b9642b423189a2fe90eb6b59e87cbee54549db3f5562ff8d1b8", + "sha256:867a5ad16892bf20e6c4ea2aab1971f45645ff3102ad29bd84c86027fa99997b", + "sha256:877473e675fdcc113c138813a5dd440da0769a2d81f4d86614e5d62b69497155", + "sha256:8892f89999ffd992208754851e5a052f6b5db70a1e3f7d54b17c5211e37a98c7", + "sha256:9a9845c4c6bb56e508651f005c4aeb0404e518c6f000d5a1123ab077ab769f5c", + "sha256:a1e6e96217a0f72e2b8629e271e1b280c6fa3fe6e59fa8f6701bec14e3354325", + "sha256:a8156e6a7f5e2a0ff0c5b21d6bcb45145efece1909efcbbbf48c56f8da68221d", + "sha256:a9506a7e80bcf6eacfff7f804c0ad5350c8c95b9010e4356a4b36f5322f09abb", + "sha256:af310ec8335016b5e52cae60cda4a4f2a60a788cbb949a4fbea13d441aa5a09e", + "sha256:b0297b1e05fd128d26cc2460c810d42e205d16d76799526dfa8c8ccd50e74959", + "sha256:bf68f4b2b6683e52bec69273562df15af352e5ed25d1b6641e7efddc5951d1a7", + "sha256:d0c1bc2fa9a7285719e5678584f6b92572a5b639d0e471bb8d4b650a1a910920", + "sha256:d4d9d6c1a455d4babd320203b918ccc7fcbefe308615c521062bc2ba1aa4d26e", + "sha256:db1fa631737dab9fa0b37f3979d8d2631e348c3b4e8325d6873c2541d0ae5a48", + "sha256:dd93ea5c0c7f3e25335ab7d22a507b1dc43976e1345508f845efc573d3d779d8", + "sha256:f44e517131a98f7a76696a7b21b164bcb85291cee106a23beccce454e1f433a4", + "sha256:f7ee479e96f7ee350db1cf24afa5685a5899e2b34992fb99e1f7c1b0b758d263" + ], + "version": "==5.4.0" + } + }, + "develop": { + "astroid": { + "hashes": [ + "sha256:4db03ab5fc3340cf619dbc25e42c2cc3755154ce6009469766d7143d1fc2ee4e", + "sha256:8a398dfce302c13f14bab13e2b14fe385d32b73f4e4853b9bdfb64598baa1975" + ], + "version": "==2.5.6" + }, + "attrs": { + "hashes": [ + "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6", + "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700" + ], + "version": "==20.3.0" + }, + "certifi": { + "hashes": [ + "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c", + "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830" + ], + "version": "==2020.12.5" + }, + "chardet": { + "hashes": [ + "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa", + "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5" + ], + "version": "==4.0.0" + }, + "coverage": { + "hashes": [ + "sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c", + "sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6", + "sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45", + "sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a", + "sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03", + 
"sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529", + "sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a", + "sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a", + "sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2", + "sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6", + "sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759", + "sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53", + "sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a", + "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4", + "sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff", + "sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502", + "sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793", + "sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb", + "sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905", + "sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821", + "sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b", + "sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81", + "sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0", + "sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b", + "sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3", + "sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184", + "sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701", + "sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a", + "sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82", + "sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638", + "sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5", + "sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083", + "sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6", + "sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90", + "sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465", + "sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a", + "sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3", + "sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e", + "sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066", + "sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf", + "sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b", + "sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae", + "sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669", + "sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873", + "sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b", + "sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6", + "sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb", + "sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160", + "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c", + "sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079", + "sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d", + 
"sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6" + ], + "version": "==5.5" + }, + "idna": { + "hashes": [ + "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6", + "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0" + ], + "version": "==2.10" + }, + "importlib-metadata": { + "hashes": [ + "sha256:ace61d5fc652dc280e7b6b4ff732a9c2d40db2c0f92bc6cb74e07b73d53a1771", + "sha256:fa5daa4477a7414ae34e95942e4dd07f62adf589143c875c133c1e53c4eff38d" + ], + "index": "pypi", + "version": "==3.4.0" + }, + "iniconfig": { + "hashes": [ + "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3", + "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32" + ], + "version": "==1.1.1" + }, + "isort": { + "hashes": [ + "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1", + "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd" + ], + "version": "==4.3.21" + }, + "lazy-object-proxy": { + "hashes": [ + "sha256:17e0967ba374fc24141738c69736da90e94419338fd4c7c7bef01ee26b339653", + "sha256:1fee665d2638491f4d6e55bd483e15ef21f6c8c2095f235fef72601021e64f61", + "sha256:22ddd618cefe54305df49e4c069fa65715be4ad0e78e8d252a33debf00f6ede2", + "sha256:24a5045889cc2729033b3e604d496c2b6f588c754f7a62027ad4437a7ecc4837", + "sha256:410283732af311b51b837894fa2f24f2c0039aa7f220135192b38fcc42bd43d3", + "sha256:4732c765372bd78a2d6b2150a6e99d00a78ec963375f236979c0626b97ed8e43", + "sha256:489000d368377571c6f982fba6497f2aa13c6d1facc40660963da62f5c379726", + "sha256:4f60460e9f1eb632584c9685bccea152f4ac2130e299784dbaf9fae9f49891b3", + "sha256:5743a5ab42ae40caa8421b320ebf3a998f89c85cdc8376d6b2e00bd12bd1b587", + "sha256:85fb7608121fd5621cc4377a8961d0b32ccf84a7285b4f1d21988b2eae2868e8", + "sha256:9698110e36e2df951c7c36b6729e96429c9c32b3331989ef19976592c5f3c77a", + "sha256:9d397bf41caad3f489e10774667310d73cb9c4258e9aed94b9ec734b34b495fd", + "sha256:b579f8acbf2bdd9ea200b1d5dea36abd93cabf56cf626ab9c744a432e15c815f", + "sha256:b865b01a2e7f96db0c5d12cfea590f98d8c5ba64ad222300d93ce6ff9138bcad", + "sha256:bf34e368e8dd976423396555078def5cfc3039ebc6fc06d1ae2c5a65eebbcde4", + "sha256:c6938967f8528b3668622a9ed3b31d145fab161a32f5891ea7b84f6b790be05b", + "sha256:d1c2676e3d840852a2de7c7d5d76407c772927addff8d742b9808fe0afccebdf", + "sha256:d7124f52f3bd259f510651450e18e0fd081ed82f3c08541dffc7b94b883aa981", + "sha256:d900d949b707778696fdf01036f58c9876a0d8bfe116e8d220cfd4b15f14e741", + "sha256:ebfd274dcd5133e0afae738e6d9da4323c3eb021b3e13052d8cbd0e457b1256e", + "sha256:ed361bb83436f117f9917d282a456f9e5009ea12fd6de8742d1a4752c3017e93", + "sha256:f5144c75445ae3ca2057faac03fda5a902eff196702b0a24daf1d6ce0650514b" + ], + "version": "==1.6.0" + }, + "mccabe": { + "hashes": [ + "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42", + "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f" + ], + "version": "==0.6.1" + }, + "packaging": { + "hashes": [ + "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5", + "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a" + ], + "version": "==20.9" + }, + "pluggy": { + "hashes": [ + "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0", + "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d" + ], + "version": "==0.13.1" + }, + "py": { + "hashes": [ + "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3", + 
"sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a" + ], + "version": "==1.10.0" + }, + "pylint": { + "hashes": [ + "sha256:5d77031694a5fb97ea95e828c8d10fc770a1df6eb3906067aaed42201a8a6a09", + "sha256:723e3db49555abaf9bf79dc474c6b9e2935ad82230b10c1138a71ea41ac0fff1" + ], + "index": "pypi", + "version": "==2.3.1" + }, + "pyparsing": { + "hashes": [ + "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1", + "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b" + ], + "version": "==2.4.7" + }, + "pytest": { + "hashes": [ + "sha256:9d1edf9e7d0b84d72ea3dbcdfd22b35fb543a5e8f2a60092dd578936bf63d7f9", + "sha256:b574b57423e818210672e07ca1fa90aaf194a4f63f3ab909a2c67ebb22913839" + ], + "index": "pypi", + "version": "==6.2.2" + }, + "pytest-base-url": { + "hashes": [ + "sha256:7f1f32e08c2ee751e59e7f5880235b46e83496adc5cba5a01ca218c6fe81333d", + "sha256:8b6523a1a3af73c317bdae97b722dfb55a7336733d1ad411eb4a4931347ba77a" + ], + "version": "==1.4.2" + }, + "pytest-cov": { + "hashes": [ + "sha256:0ab664b25c6aa9716cbf203b17ddb301932383046082c081b9848a0edf5add33", + "sha256:230ef817450ab0699c6cc3c9c8f7a829c34674456f2ed8df1fe1d39780f7c87f" + ], + "index": "pypi", + "version": "==2.6.1" + }, + "pytest-dotenv": { + "hashes": [ + "sha256:12ff70de238cdc2c8d838ef92b720325ceb6ae2c9967be5f4e606a36de48345c", + "sha256:a3a0adf9e17cf7c9e38ec597a74cc902ac76baa1cc5a9bd7581a3711930ad057" + ], + "index": "pypi", + "version": "==0.4.0" + }, + "pytest-html": { + "hashes": [ + "sha256:3ee1cf319c913d19fe53aeb0bc400e7b0bc2dbeb477553733db1dad12eb75ee3", + "sha256:b7f82f123936a3f4d2950bc993c2c1ca09ce262c9ae12f9ac763a2401380b455" + ], + "version": "==3.1.1" + }, + "pytest-metadata": { + "hashes": [ + "sha256:576055b8336dd4a9006dd2a47615f76f2f8c30ab12b1b1c039d99e834583523f", + "sha256:71b506d49d34e539cc3cfdb7ce2c5f072bea5c953320002c95968e0238f8ecf1" + ], + "version": "==1.11.0" + }, + "pytest-selenium": { + "hashes": [ + "sha256:a0008e6dce7c68501369c1c543420f5906ffada493d4ff0c5d9d5ccdf4022203", + "sha256:fd632e0b657be6360f6319445eb0f475872d488b67634f791561851d55e390b1" + ], + "index": "pypi", + "version": "==2.0.1" + }, + "pytest-variables": { + "hashes": [ + "sha256:ccf4afcd70de1f5f18b4463758a19f24647a9def1805f675e80db851c9e00ac0", + "sha256:f79851e4c92a94c93d3f1d02377b5ac97cc8800392e87d108d2cbfda774ecc2a" + ], + "version": "==1.9.0" + }, + "python-dotenv": { + "hashes": [ + "sha256:122290a38ece9fe4f162dc7c95cae3357b983505830a154d3c98ef7f6c6cea77", + "sha256:4a205787bc829233de2a823aa328e44fd9996fedb954989a21f1fc67c13d7a77" + ], + "index": "pypi", + "version": "==0.9.1" + }, + "requests": { + "hashes": [ + "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804", + "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e" + ], + "version": "==2.25.1" + }, + "responses": { + "hashes": [ + "sha256:46d4e546a19fc6106bc7e804edd4551ef04690405e41e7e750ebc295d042623b", + "sha256:93b1e0f2f960c0f3304ca4436856241d64c33683ef441431b9caf1d05d9d9e23" + ], + "index": "pypi", + "version": "==0.10.7" + }, + "selenium": { + "hashes": [ + "sha256:2d7131d7bc5a5b99a2d9b04aaf2612c411b03b8ca1b1ee8d3de5845a9be2cb3c", + "sha256:deaf32b60ad91a4611b98d8002757f29e6f2c2d5fcaf202e1c9ad06d6772300d" + ], + "version": "==3.141.0" + }, + "six": { + "hashes": [ + "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", + "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" + ], + "version": "==1.15.0" + }, + "tenacity": { + "hashes": [ 
+ "sha256:baed357d9f35ec64264d8a4bbf004c35058fad8795c5b0d8a7dc77ecdcbb8f39", + "sha256:e14d191fb0a309b563904bbc336582efe2037de437e543b38da749769b544d7f" + ], + "version": "==6.3.1" + }, + "toml": { + "hashes": [ + "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", + "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f" + ], + "version": "==0.10.2" + }, + "typing-extensions": { + "hashes": [ + "sha256:2ed632b30bb54fc3941c382decfd0ee4148f5c591651c9272473fea2c6397d95", + "sha256:b1edbbf0652660e32ae780ac9433f4231e7339c7f9a8057d0f042fcbcea49b87", + "sha256:d8179012ec2c620d3791ca6fe2bf7979d979acdbef1fca0bc56b37411db682ed" + ], + "index": "pypi", + "version": "==3.7.4" + }, + "urllib3": { + "hashes": [ + "sha256:2f4da4594db7e1e110a944bb1b551fdf4e6c136ad42e4234131391e21eb5b0df", + "sha256:e7b021f7241115872f92f43c6508082facffbd1c048e3c6e2bb9c2a157e28937" + ], + "version": "==1.26.4" + }, + "wrapt": { + "hashes": [ + "sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7" + ], + "version": "==1.12.1" + }, + "zipp": { + "hashes": [ + "sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76", + "sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098" + ], + "version": "==3.4.1" + } + } +} diff --git a/server/commands.py b/server/commands.py new file mode 100644 index 00000000..1b14babd --- /dev/null +++ b/server/commands.py @@ -0,0 +1,187 @@ +# Copyright (C) 2018 Lutra Consulting Limited. All rights reserved. +# Do not distribute without the express permission of the author. + +import os +import json +import shutil +import click +from datetime import datetime +from sqlalchemy.schema import MetaData +from marshmallow import ValidationError +from wtforms.validators import ValidationError + +from src import create_app, db +from src.forms import namespace_validation +from src.auth.models import User, UserProfile +from src.models.db_models import Project +from src.util import is_name_allowed, mergin_secure_filename + +app = create_app() + + +@app.cli.command() +def init_db(): + """Re-creates app's database""" + print('Database initialization ...') + db.drop_all(bind=None) + db.create_all(bind=None) + print('Done. 
+    db.session.commit()
+    print('Done. Tables created.')
+
+
+class JsonEncoder(json.JSONEncoder):
+    def default(self, o):
+        if isinstance(o, datetime):
+            return o.isoformat()
+
+
+@app.cli.command()
+def dump_db():
+    """Dumps data from database in JSON format (to stdout)"""
+    metadata = MetaData()
+    metadata.reflect(bind=db.engine)
+
+    result = {}
+    for table in metadata.sorted_tables:
+        result[table.name] = [dict(row) for row in db.engine.execute(table.select())]
+
+    print(json.dumps(result, cls=JsonEncoder, indent=2))
+
+
+@app.cli.command()
+@click.argument('input-file')
+def load_db(input_file):
+    """Load data from JSON file into the database"""
+    with open(input_file) as json_file:
+        data = json.load(json_file)
+
+    connection = db.engine.connect()
+    trans = connection.begin()
+    metadata = MetaData()
+    metadata.reflect(bind=db.engine)
+
+    try:
+        for table in metadata.sorted_tables:
+            items = data.get(table.name, [])
+            for item in items:
+                connection.execute(table.insert(), **item)
+        trans.commit()
+    except Exception:
+        trans.rollback()
+        raise
+
+
+@app.cli.command()
+def check_location_file():
+    """ Log files whose stored location differs from the sanitized location, for every project version """
+    # check every project
+    for project in Project.query.all():
+
+        # check every version of project
+        for version in project.versions.all():
+
+            # check every file of version
+            for _file in version.files:
+
+                location = os.path.join(project.storage.project_dir, _file['location'])
+                if os.path.join(project.storage.project_dir, version.name) not in location:
+                    continue
+
+                location_after_sanitized = os.path.join(
+                    project.storage.project_dir, version.name, mergin_secure_filename(_file['path']))
+                if location != location_after_sanitized:
+                    print(
+                        f'- original and sanitized locations differ: `{location}` - `{location_after_sanitized}`')
+                    if os.path.exists(location):
+                        print(f'- but location found `{location}`')
+                else:
+                    if not os.path.exists(location):
+                        print(f'- location not found: `{location}`')
+
+
+@app.cli.command()
+def check_broken_username():
+    """ Log usernames that fail namespace validation """
+
+    # check every user
+    class UserField(object):
+        def __init__(self, data):
+            self.data = data
+
+    for user in User.query.all():
+        try:
+            namespace_validation(
+                None, UserField(user.username))
+        except ValidationError as e:
+            print(f'{user.username} - {e}')
+
+
+@app.cli.command()
+def check_broken_project_name():
+    """ Log project names that fail the name check """
+    for project in Project.query.all():
+        if not is_name_allowed(project.name):
+            print(f'{project.name}')
+
+
+@app.cli.command()
+def find_projects_with_missing_dirs():
+    """ Log broken projects whose associated project directories are missing on the file system """
+    print("Missing project directories: ")
+    for project in Project.query.all():
+        if not os.path.exists(project.storage.project_dir):
+            print(f"{project.namespace}/{project.name}: {project.storage.project_dir}")
+
+
+@app.cli.command()
+@click.argument('username')
+@click.argument('password')
+@click.option('--is-admin', is_flag=True)
+@click.option('--email', required=True)
+def add_user(username, password, is_admin, email):  # pylint: disable=W0612
+    """Create user account"""
+    create_user(username, password, is_admin, email)
+
+
+def create_user(username, password, is_admin, email):
+    user = User.query.filter_by(username=username).first()
+    if user:
+        print("ERROR: User {} already exists!\n".format(user.username))
+        return
+
+    user = User(username=username, passwd=password, is_admin=is_admin, email=email)
+    user.profile = UserProfile()
+    user.active = True
+    db.session.add(user)
+    db.session.commit()
+
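These commands can also be exercised without a live deployment through Flask's CLI test runner; a minimal sketch, assuming the app can reach a configured database (user details are illustrative):
```python
# minimal sketch: drive the CLI commands above via Flask's test runner
# (assumes the app's database is reachable; user details are illustrative)
from commands import app

runner = app.test_cli_runner()
print(runner.invoke(args=['init-db']).output)  # "Database initialization ..."
result = runner.invoke(args=['add-user', 'alice', 'topsecret', '--email', 'alice@example.com'])
assert result.exit_code == 0
```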
+
+
+@app.cli.command()
+@click.argument('project-name')
+@click.option('--version', required=True)
+@click.option('--directory', type=click.Path(), required=True)
+def download_project(project_name, version, directory):  # pylint: disable=W0612
+    """ Download files for project at particular version """
+    ns, name = project_name.split('/')
+    project = Project.query.filter_by(namespace=ns, name=name).first()
+    if not project:
+        print("ERROR: Project does not exist")
+        return
+    pv = next((v for v in project.versions if v.name == version), None)
+    if not pv:
+        print("ERROR: Project version does not exist")
+        return
+    if os.path.exists(directory):
+        print(f"ERROR: Destination directory {directory} already exists")
+        return
+
+    os.mkdir(directory)
+    files = pv.files
+    for f in files:
+        project.storage.restore_versioned_file(f['path'], version)
+        f_dir = os.path.dirname(f["path"])
+        if f_dir:
+            os.makedirs(os.path.join(directory, f_dir), exist_ok=True)
+        shutil.copy(os.path.join(project.storage.project_dir, f["location"]), os.path.join(directory, f["path"]))
+    print("Project downloaded successfully")
diff --git a/server/config.py b/server/config.py
new file mode 100644
index 00000000..25f8d355
--- /dev/null
+++ b/server/config.py
@@ -0,0 +1,73 @@
+"""
+
+    Common gunicorn configuration
+
+    Gunicorn uses the following order of precedence for configuration
+    values:
+
+    1. Command-line (highest)
+    2. Environment variable
+    3. File referenced by --config (lowest)
+
+    Put configuration common across all deployment environments here and
+    add environment-specific items to the GUNICORN_CMD_ARGS env var.
+
+=======
+    WARNING: You may be tempted to *pythonise* the names of the
+    command-line arguments - don't. Check the page below first: its
+    headings are exactly the names these variables should have:
+
+    https://docs.gunicorn.org/en/stable/settings.html#settings
+"""
+
+bind = '0.0.0.0:5000'
+
+worker_class = 'gevent'
+
+workers = 2
+
+worker_connections = 1000
+
+backlog = 480
+
+keepalive = 30
+
+accesslog = '-'
+
+errorlog = '-'
+
+access_log_format = '[ACCESS] %({x-forwarded-for}i)s %(m)s %(U)s %(q)s %(H)s %(s)s %(B)s %(f)s "%(a)s" %(D)s %(p)s'
+
+logconfig = 'gunicorn-logging.conf'
+
+max_requests = 20000
+
+max_requests_jitter = 5000
+
+"""
+    The following server hook is executed when a worker times out. It
+    allows us to print a traceback which may well indicate where in the
+    code we stopped telling the master process we were still alive.
+
+    This function was lifted directly from here:
+    https://github.com/benoitc/gunicorn/blob/master/examples/example_config.py
+
+"""
+
+
+def worker_abort(worker):
+    worker.log.info("worker received SIGABRT signal")
+
+    # get traceback info
+    import threading, sys, traceback
+    id2name = {th.ident: th.name for th in threading.enumerate()}
+    code = []
+    for threadId, stack in sys._current_frames().items():
+        code.append("\n# Thread: %s(%d)" % (id2name.get(threadId, ""),
+                                            threadId))
+        for filename, lineno, name, line in traceback.extract_stack(stack):
+            code.append('File: "%s", line %d, in %s' % (filename,
+                                                        lineno, name))
+            if line:
+                code.append("  %s" % (line.strip()))
+    worker.log.info("\n".join(code))
\ No newline at end of file
diff --git a/server/gunicorn-logging.conf b/server/gunicorn-logging.conf
new file mode 100644
index 00000000..ebb44b0a
--- /dev/null
+++ b/server/gunicorn-logging.conf
@@ -0,0 +1,42 @@
+[loggers]
+keys=root, gunicorn.error, gunicorn.access
+
+[handlers]
+keys=console_stdout, console_stderr
+
+[formatters]
+keys=errors, access
+
+[logger_root]
+level=INFO
+handlers=console_stderr
+
+[logger_gunicorn.error]
+level=INFO
+handlers=console_stderr
+propagate=0
+qualname=gunicorn.error
+
+[logger_gunicorn.access]
+level=INFO
+handlers=console_stdout
+propagate=0
+qualname=gunicorn.access
+
+[handler_console_stdout]
+class=StreamHandler
+formatter=access
+args=(sys.stdout, )
+
+[handler_console_stderr]
+class=StreamHandler
+formatter=errors
+args=(sys.stderr, )
+
+[formatter_errors]
+format=[%(levelname)s] [%(process)d] %(message)s
+class=mergin.OneLineExceptionFormatter
+
+[formatter_access]
+format=%(message)s
+class=mergin.OneLineExceptionFormatter
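The `logconfig` setting above makes gunicorn load this file; when debugging log formatting it can also be loaded standalone - a minimal sketch, assuming `mergin.py` (which defines `OneLineExceptionFormatter`, see below) is on the import path:
```python
# minimal sketch: load the gunicorn logging config outside gunicorn
# (assumes mergin.OneLineExceptionFormatter, defined below, is importable)
import logging
import logging.config

logging.config.fileConfig('gunicorn-logging.conf', disable_existing_loggers=False)
logging.getLogger('gunicorn.error').info('logging configured')
```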
diff --git a/server/mergin.py b/server/mergin.py
new file mode 100644
index 00000000..47dfdd5a
--- /dev/null
+++ b/server/mergin.py
@@ -0,0 +1,36 @@
+# Copyright (C) 2018 Lutra Consulting Limited. All rights reserved.
+# Do not distribute without the express permission of the author.
+
+# we need to monkey patch before app is loaded to resolve this:
+# "gunicorn/workers/ggevent.py:65: MonkeyPatchWarning: Monkey-patching ssl after ssl has already been imported may lead
+# to errors, including RecursionError on Python 3.6. It may also silently lead to incorrect behaviour on Python 3.7.
+# Please monkey-patch earlier. See https://github.com/gevent/gevent/issues/1016.
+# Modules that had direct imports (NOT patched): ['urllib3.util', 'urllib3.util.ssl']"
+# which comes from using the requests lib (and its deps) in webhooks
+
+import os
+if not os.getenv("NO_MONKEY_PATCH", False):
+    import gevent.monkey
+    gevent.monkey.patch_all(subprocess=True)
+
+import logging
+from src import create_app
+
+application = create_app()
+
+
+class OneLineExceptionFormatter(logging.Formatter):
+    """
+    Reformat exceptions with traceback to be a single line.
+
+    Please note that for custom flask/logging you need to pass exc_info=True for non-exception levels
+
+    :Example:
+    >>> application.logger.error("Crazy long \\n exception msg", exc_info=True)
+    [2019-11-20 16:49:09 +0100] [17950] [ERROR] Crazy long || exception msg ||Traceback (most recent call last):||
+    File "/__init__.py", line 163, in ping|| x = 1 / 0||ZeroDivisionError: division by zero
+    """
+    def format(self, record):
+        msg = super().format(record)  # format message according to the configured formatter
+        if record.exc_text:
+            msg = msg.replace('\n', '||')
+        return msg
diff --git a/server/pytest.ini b/server/pytest.ini
new file mode 100644
index 00000000..2b8f6c0c
--- /dev/null
+++ b/server/pytest.ini
@@ -0,0 +1,4 @@
+[pytest]
+env_files =
+    .env
+    .test.env
\ No newline at end of file
diff --git a/server/src/.env b/server/src/.env
new file mode 100644
index 00000000..1c0f22bf
--- /dev/null
+++ b/server/src/.env
@@ -0,0 +1,7 @@
+# only for dev
+SECRET_KEY='top-secret'
+SECURITY_PASSWORD_SALT='top-secret'
+MAIL_DEFAULT_SENDER=''
+MAIL_USERNAME=''
+MAIL_PASSWORD=''
+FLASK_DEBUG=1
diff --git a/server/src/__init__.py b/server/src/__init__.py
new file mode 100644
index 00000000..8cfba0e9
--- /dev/null
+++ b/server/src/__init__.py
@@ -0,0 +1,466 @@
+# Copyright (C) 2018 Lutra Consulting Limited. All rights reserved.
+# Do not distribute without the express permission of the author.
+import logging
+import os
+from operator import and_
+
+import connexion
+from sqlalchemy.schema import MetaData
+from flask_sqlalchemy import SQLAlchemy
+from flask_marshmallow import Marshmallow
+from flask import json, jsonify, render_template, send_from_directory, request, abort, url_for, current_app
+from flask_login import current_user
+from flask_wtf.csrf import generate_csrf
+from flask_migrate import Migrate
+from flask_mail import Mail
+import wtforms_json
+from sqlalchemy import desc, asc, or_
+
+from . import encoder
+from .forms import SendEmailForm, AccessPermissionForm
+from .mergin_utils import get_blacklisted_dirs, get_blacklisted_files
+from .webhooks import WebhookManager, Webhook
+
+
+class PostgresAlchemy(SQLAlchemy):
+    def apply_driver_hacks(self, app, info, options):
+        options.update({
+            'json_serializer': json.dumps
+        })
+        super(PostgresAlchemy, self).apply_driver_hacks(app, info, options)
+
+
+convention = {
+    "ix": "ix_%(column_0_label)s",
+    "uq": "uq_%(table_name)s_%(column_0_name)s",
+    "ck": "ck_%(table_name)s_%(constraint_name)s",
+    "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
+    "pk": "pk_%(table_name)s"
+}
+
+wtforms_json.init()
+metadata = MetaData(naming_convention=convention)
+db = PostgresAlchemy(metadata=metadata)
+ma = Marshmallow()
+mail = Mail()
+wm = WebhookManager()
+
+# signals
+SIG_NEW_PROJECT = 'new-project'
+SIG_NEW_USER = 'new-user'
+SIG_DELETED_USER = 'deleted-user'
+SIG_PROJECT_TRANSFERED = 'project-transferred'
+SIG_NEW_ORGANISATION = 'new-organisation'
+SIG_DELETED_ORGANISATION = 'deleted-organisation'
+
+
+class SlackWebhook(Webhook):
+    """ Class for sending notifications to Slack """
+
+    def __init__(self):
+        slack_url = os.environ.get('SLACK_HOOK_URL', '')
+        super().__init__('Slack', slack_url)
+
+    def format_data(self, data):
+        # check this for reference https://api.slack.com/messaging/composing/formatting#basics
+        return {'text': data}
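`SlackWebhook` is just one handler; additional handlers can be attached to the same signals, e.g. from a deployment-specific module. A minimal sketch of a second, hypothetical handler, assuming only the `Webhook` interface as exercised above (class name and URL are illustrative):
```python
# minimal sketch: wire a second, hypothetical handler into the webhook manager
# (assumes only the Webhook interface used by SlackWebhook above)
from src import wm, SIG_NEW_PROJECT
from src.webhooks import Webhook


class AuditWebhook(Webhook):
    """ Illustrative handler posting events to an internal audit endpoint """

    def __init__(self):
        super().__init__('Audit', 'https://audit.example.com/hook')  # assumed URL

    def format_data(self, data):
        return {'event': data}


wm.register_signal(SIG_NEW_PROJECT)
wm.connect_handler(SIG_NEW_PROJECT, AuditWebhook())
```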
+
+
+this_dir = os.path.dirname(os.path.realpath(__file__))
+
+class InitDBError(Exception):
+    pass
+
+
+def create_app():
+    from .permissions import require_project, ProjectPermissions
+    from .models.db_models import Project, ProjectAccess, ProjectVersion, Namespace, ProjectTransfer, Upload, Account, RemovedProject
+    from .models.schemas import ProjectSchema, ProjectListSchema, RemovedProjectSchema
+    from .config import Configuration
+    from . import auth, organisation  # imported as modules so their init_app() can be called below
+    from .auth import auth_required
+    from .auth.models import User, UserProfile
+    from .auth.schemas import UserSchema
+    from .controllers import project_controller
+    from .celery import celery, send_email_async
+    from .organisation.models import Organisation, OrganisationInvitation
+    from .db_events import register_events
+    from .storages.disk import move_to_tmp
+
+    app = connexion.FlaskApp(__name__, specification_dir=os.path.join(this_dir, os.pardir))
+    app.app.json_encoder = encoder.JSONEncoder
+
+    api_options = {"swagger_ui": Configuration}
+    app.add_api('swagger.yaml', arguments={'title': 'Mergin'}, options=api_options)
+
+    app.app.config.from_object(Configuration)
+
+    db.init_app(app.app)
+    ma.init_app(app.app)
+    auth.init_app(app.app)
+    Migrate(app.app, db)
+    mail.init_app(app.app)
+    organisation.init_app(app.app)
+
+    slack = SlackWebhook()
+    # register and connect some basic signals
+    signals = [SIG_NEW_USER, SIG_DELETED_USER, SIG_NEW_PROJECT]
+    for signal in signals:
+        wm.register_signal(signal)
+        wm.connect_handler(signal, slack)
+
+    # Adjust CSRF policy for API
+    csrf = app.app.extensions['csrf']
+
+    @app.app.before_request
+    def check_maintenance():
+        allowed_endpoints = ["/project/by_names", "/auth/login"]
+        if request.method in ['POST', 'PUT', 'PATCH', 'DELETE'] and os.path.isfile(current_app.config['MAINTENANCE_FILE']) and all(path not in request.path for path in allowed_endpoints):
+            abort(503, "Service unavailable due to maintenance, please try again later")
+
+    def custom_protect():
+        if request.path.startswith('/v1/') and 'session' not in request.cookies:
+            # Disable csrf for non-web clients
+            return
+        return csrf._protect()
+
+    csrf._protect = csrf.protect
+    csrf.protect = custom_protect
+
+    # Cannot read csrf token from data (can be large stream)!
+    # Read csrf token only from headers for API endpoints
+    _get_csrf_token = csrf._get_csrf_token
+
+    def get_csrf_token():
+        if request.path.startswith('/v1/'):
+            for header_name in app.app.config['WTF_CSRF_HEADERS']:
+                csrf_token = request.headers.get(header_name)
+                if csrf_token:
+                    return csrf_token
+        return _get_csrf_token()
+
+    csrf._get_csrf_token = get_csrf_token
+
+    def get_startup_data():
+        from .organisation import find_organisations_by_username, OrganisationSchema
+
+        data = {
+            "version": app.app.config['VERSION']
+        }
+        if current_user.is_authenticated and current_user.active:
+            schema = UserSchema()
+            data["user"] = schema.dump(current_user)
+            organisations = find_organisations_by_username(current_user.username)
+            data['organisations'] = OrganisationSchema(many=True, context={"user": current_user}).dump(organisations)
+            projects_count = Project.query.filter(Project.creator_id == current_user.id).filter_by(namespace=current_user.username).count()
+            data["user"]["has_project"] = projects_count > 0
+        if app.app.config["USER_SELF_REGISTRATION"]:
+            data["registration"] = url_for('auth.self_register_user')
+        return data
+
+    # update celery config with flask app config
+    celery.conf.update(app.app.config)
+
+    @app.route("/assets/<path:filename>")
+    def send_asset(filename):  # pylint: disable=W0612
+        return send_from_directory(app.app.config["PUBLIC_DIR"], filename)  # pragma: no cover
+
+    def web_app(path=None):  # pylint: disable=W0613,W0612  # pragma: no cover
+        """ Default view function to render vue application """
+        data = get_startup_data()
+        data['csrf'] = generate_csrf()
+        return render_template("app.html", data=data)
+
+    # register frontend routes as flask default view endpoint
+    rules = [
+        '/', '/signup', '/login', '/login/', '/users', '/users/', '/projects',
+        '/projects/', '/organisations', '/organisations/', '/profile', '/dashboard'
+    ]
+    for rule in rules:
+        app.add_url_rule(rule, 'web_app', web_app, methods=["GET"])
+
+    @app.route('/admin', methods=['GET'])
+    @app.route('/admin/', methods=['GET'])
+    @auth_required(permissions=['admin'])
+    def admin_web_app(path=None):  # pylint: disable=W0613,W0612  # pragma: no cover
+        data = get_startup_data()
+        data['csrf'] = generate_csrf()
+        return render_template("app.html", data=data)
+
+    @app.route('/ping', methods=['GET'])
+    def ping():  # pylint: disable=W0612
+        """ healthcheck and basic service info endpoint """
+        supported_endpoints = {
+            "project": {
+                "GET": [
+                    "/project", "/project/{namespace}/{project_name}",
+                    "/project/version/{namespace}/{project_name}"
+                ],
+                "POST": [
+                    "/project/{namespace}",
+                    "/project/clone/{namespace}/{project_name}"
+                ],
+                "DELETE": [
+                    "/project/{namespace}/{project_name}"
+                ],
+                "PUT": [
+                    "/project/{namespace}/{project_name}"
+                ]
+            },
+            "data_sync": {
+                "GET": [
+                    "/project/download/{namespace}/{project_name}",
+                    "/project/raw/{namespace}/{project_name}",
+                    "/resource/history/{namespace}/{project_name}/{file}"
+                ],
+                "POST": [
+                    "/project/push/cancel/{transaction_id}",
+                    "/project/push/finish/{transaction_id}",
+                    "/project/push/{namespace}/{project_name}",
+                    "/project/push/chunk/{transaction_id}/{chunk_id}"
+                ]
+            },
+            "user": {
+                "GET": [
+                    "/user/{username}"
+                ],
+                "POST": [
+                    "/auth/login",
+                    "/auth/register"
+                ]
+            }
+        }
+        status = json.dumps({
+            "service": "Mergin",
+            "status": "online",
+            "base_url": "v1",
+            "endpoints": supported_endpoints,
+            "version": app.app.config['VERSION'],
+            "blacklist_dirs": get_blacklisted_dirs(app.app.config['BLACKLIST']),
+            "blacklist_files": get_blacklisted_files(app.app.config['BLACKLIST']),
+            "maintenance": os.path.isfile(app.app.config['MAINTENANCE_FILE']),
+            "subscriptions_enabled": app.app.config["MERGIN_SUBSCRIPTIONS"]
+        })
+        return status, 200
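The `/ping` endpoint doubles as a smoke test; a minimal client-side sketch (the server URL is illustrative):
```python
# minimal sketch: poll the healthcheck endpoint
# (server URL is illustrative)
import requests

resp = requests.get("http://localhost:5000/ping")
info = resp.json()
print(info["status"], info["version"], "maintenance:", info["maintenance"])
```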
+
+    # reading raw input stream not supported in connexion so far
+    # https://github.com/zalando/connexion/issues/592
+    # and as workaround we use custom Flask endpoint in create_app function
+    @app.route('/v1/project/push/chunk/<transaction_id>/<chunk_id>', methods=['POST'])
+    @auth_required
+    def chunk_upload(transaction_id, chunk_id):
+        return project_controller.chunk_upload(transaction_id, chunk_id)
+
+    @app.route('/app/project/access_request/<namespace>/<project_name>', methods=['POST'])
+    @auth_required
+    def create_project_access_request(namespace, project_name):  # noqa: E501
+        from .models.db_models import AccessRequest
+        if not current_user.active:
+            return "You are not active anymore", 409
+
+        project = Project.query.filter(Project.name == project_name, Project.namespace == namespace).first_or_404()
+        if current_user.id in project.access.readers:
+            return "You already have access to project", 409
+
+        access_request = AccessRequest.query.filter_by(namespace=namespace, project_id=project.id, user_id=current_user.id).first()
+        if access_request:
+            return "Project access request already exists", 409
+
+        access_request = AccessRequest(project, current_user.id)
+        db.session.add(access_request)
+        db.session.commit()
+        # notify project owners
+        owners = User.query.join(UserProfile)\
+            .filter(User.verified_email, User.id.in_(project.access.owners))\
+            .filter(UserProfile.receive_notifications)\
+            .all()
+        for owner in owners:
+            email_data = {
+                "subject": "Project access requested",
+                "html": render_template(
+                    "email/project_access_request.html",
+                    expire=access_request.expire,
+                    link=f"{request.url_root.rstrip('/')}/projects/{project.namespace}/{project.name}/settings",
+                    user=current_user.username, username=owner.username,
+                    project_name=f"{project.namespace}/{project.name}"
+                ),
+                "recipients": [owner.email],
+                "sender": app.app.config['MAIL_DEFAULT_SENDER']
+            }
+            send_email_async.delay(**email_data)
+        return "", 200
+
+    @app.route('/app/project/access_request/<int:request_id>', methods=['DELETE'])
+    @auth_required
+    def delete_project_access_request(request_id):  # noqa: E501
+        from .models.db_models import AccessRequest
+        access_request = AccessRequest.query.get_or_404(request_id)
+        if current_user.id in access_request.project.access.owners \
+                or current_user.id == access_request.project.creator \
+                or current_user.id == access_request.user_id:
+            AccessRequest.query.filter(AccessRequest.id == request_id).delete()
+            db.session.commit()
+            return "", 200
+        else:
+            return "You don't have permissions to remove project access request", 403
+
+    @app.route('/app/project/accept/request/<int:request_id>', methods=['POST'])
+    @auth_required
+    def accept_project_access_request(request_id):
+        from .models.db_models import AccessRequest
+
+        form = AccessPermissionForm()
+        if form.validate():
+            permission = form.permissions.data
+            access_request = AccessRequest.query.get_or_404(request_id)
+            if current_user.id in access_request.project.access.owners or current_user.id == access_request.project.creator:
+                access_request.accept(permission)
+                return "", 200
+            else:
+                return "You don't have permissions to accept project access request", 403
+        else:
+            return jsonify(form.errors), 400
+
+    @app.route('/app/project/access_requests', methods=['GET'])
+    @auth_required
+    def get_project_access_requests():
+        from .models.db_models import AccessRequest
+        from .models.schemas import ProjectAccessRequestSchema
+
+        access_requests = AccessRequest.query.filter((AccessRequest.user_id == current_user.id) | (AccessRequest.namespace == current_user.username)).all()
+        return jsonify(ProjectAccessRequestSchema(many=True).dump(access_requests)), 200
+
+    @app.route('/app/removed-project', methods=['GET'])
+    @auth_required(permissions=['admin'])
+    def paginate_removed_projects():  # noqa: E501
+        page = int(request.args.get('page', 1))
+        per_page = int(request.args.get('per_page', 25))
+        order_by = request.args.get('order_by', None)
+        descending = str(request.args.get('descending', 'false')) == 'true'
+
+        projects = RemovedProject.query
+        if descending and order_by:
+            projects = projects.order_by(desc(RemovedProject.__table__.c[order_by]))
+        elif not descending and order_by:
+            projects = projects.order_by(asc(RemovedProject.__table__.c[order_by]))
+        paginated = projects.paginate(page, per_page)
+        data = RemovedProjectSchema(many=True).dump(paginated.items)
+        data = {'projects': data, 'count': paginated.total}
+        return data, 200
+
+    @app.route('/app/removed-project/restore/<int:id>', methods=['POST'])
+    @auth_required(permissions=['admin'])
+    def restore_project(id):  # noqa: E501
+        rp = RemovedProject.query.get_or_404(id)
+        project_name = f"{rp.namespace}/{rp.name}"
+        project = Project.query.filter(or_(and_(Project.name == rp.name, Project.namespace == rp.namespace), Project.id == rp.properties["id"])).first()
+        if project:
+            abort(409, f"Failed to restore: project {project_name} already exists")
+
+        creator = User.query.get(rp.properties['creator_id'])
+        if not (creator and creator.active):
+            abort(400, f"Failed to restore: creator of project {project_name} is not available")
+
+        # create new project, restore metadata and recreate project versions
+        p = Project(rp.name, rp.properties["storage_params"], creator, rp.namespace)
+        for attr in ("id", "created", "updated", "files", "tags", "disk_usage", "latest_version"):
+            setattr(p, attr, rp.properties.get(attr))
+        p.access = ProjectAccess(p, public=False)
+        db.session.add(p)
+        for version in rp.properties["versions"]:
+            pv = ProjectVersion(p, version["name"], version["author"], version["changes"],
+                                version["files"], version["ip_address"], version["user_agent"])
+            pv.created = version["created"]
+            pv.ip_geolocation_country = version["ip_geolocation_country"]
+            db.session.add(pv)
+        db.session.delete(rp)
+        db.session.commit()
+        return "", 201
+
+    @app.route('/app/removed-project/<int:id>', methods=['DELETE'])
+    @auth_required(permissions=['admin'])
+    def retire_removed_project(id):  # noqa: E501
+        rp = RemovedProject.query.get_or_404(id)
+        db.session.delete(rp)
+        db.session.commit()
+        rp_dir = os.path.abspath(os.path.join(current_app.config['LOCAL_PROJECTS'], rp.properties["storage_params"]["location"]))
+        if os.path.exists(rp_dir):
+            move_to_tmp(rp_dir)
+        return "", 204
+
+    @app.route('/app/templates', methods=['GET'])
+    @auth_required
+    def template_projects():  # pylint: disable=W0612
+        projects = Project.query.filter(Project.creator.has(username='TEMPLATES')).all()
+        project_schema = ProjectListSchema(many=True)
+        return jsonify(project_schema.dump(projects)), 200
+
+    @app.route('/app/email_notification', methods=['POST'])
+    @auth_required(permissions=['admin'])
+    def send_email_notification():
+        """
+        Send
email composed in web UI to all selected users as BCC. + """ + form = SendEmailForm() + if form.validate(): + users = User.query.join( + UserProfile).filter(User.verified_email, + User.username.in_(form.users.data)).filter( + UserProfile.receive_notifications).all() + if not users: + return jsonify({"success": True}) + + email_data = { + 'subject': form.subject.data, + 'html': form.message.data, + 'recipients': [app.app.config['MAIL_DEFAULT_SENDER']], + 'bcc': [user.email for user in users], + 'sender': app.app.config['MAIL_DEFAULT_SENDER'] + } + send_email_async.delay(**email_data) + return jsonify({"success": True}) + return jsonify(form.errors), 404 + + if app.app.config['DEBUG']: # pragma: no cover + @app.route("/dev/init") # pylint: disable=W0613,W0612 + def init(): # pylint: disable=W0612 + response = jsonify(get_startup_data()) + response.headers.set('X-CSRF-Token', generate_csrf()) + return response + + # Enable SQL debugging + # logging.basicConfig() + # logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO) + + register_events() + application = app.app + + # REGISTER BLUEPRINTS + from .controllers.account_controller import account + application.register_blueprint(account, url_prefix='/app') + + @application.after_request + def log_bad_request(response): + """ Log bad requests for easier debugging """ + if response.status_code == 400: + if response.json.get("detail"): + # default response from connexion (check against swagger.yaml) + logging.warning(f'HTTP 400: {response.json["detail"]}') + else: + # either WTF form validation error or custom validation with abort(400) + logging.warning(f'HTTP 400: {response.data}') + elif response.status_code == 409: + # request which would result in conflict, e.g. creating the same project again + logging.warning(f'HTTP 409: {response.data}') + elif response.status_code == 422: + # request was valid but still could not be processed, e.g. geodiff error + logging.error(f'HTTP 422: {response.data}', exc_info=True) + else: + # ignore other errors + pass + + return response + + return application diff --git a/server/src/auth/__init__.py b/server/src/auth/__init__.py new file mode 100644 index 00000000..10c68ed8 --- /dev/null +++ b/server/src/auth/__init__.py @@ -0,0 +1,404 @@ +# Copyright (C) 2018 Lutra Consulting Limited. All rights reserved. +# Do not distribute without the express permission of the author. + +import functools +import os +from datetime import datetime, timedelta + +from flask import request, current_app, jsonify, url_for, render_template, redirect, abort, Blueprint +from flask_login import LoginManager, login_user, logout_user, current_user +from itsdangerous import URLSafeTimedSerializer, BadSignature, BadTimeSignature +from flask_mail import Mail +from flask_wtf.csrf import CSRFProtect +from sqlalchemy import func + +from .models import User, UserProfile, LoginHistory +from .schemas import UserSchema, UserSearchSchema, AccountSearchSchema +from .forms import (LoginForm, RegisterUserForm, UserPasswordForm, ResetPasswordForm, + UserRegistrationForm, UserForm, UserProfileDataForm, UserChangePasswordForm) +from .bearer import decode_token +from .. 
import db, wm, SIG_NEW_USER, SIG_DELETED_USER + +_permissions = {} +def register_permission(name, fn): + _permissions[name] = fn + +register_permission('admin', lambda user: user.is_admin) + + +def auth_required(f=None, permissions=None): + if f is None: + if permissions: + permissions_fn = [] + for name in permissions: + if name not in _permissions: + raise KeyError('Unknown permission: {}'.format(name)) + permissions_fn.append(_permissions[name]) + return functools.partial(auth_required, permissions=permissions_fn) + + @functools.wraps(f) + def wrapped_func(*args, **kwargs): + if not current_user or not current_user.is_authenticated: + return 'Authentication information is missing or invalid.', 401 + if permissions: + for check_permission in permissions: + if not check_permission(current_user): + return 'Permission denied.', 403 + return f(*args, **kwargs) + return wrapped_func + + +def authenticate(login, password): + if '@' in login: + query = {"email": login} + else: + query = {"username": login} + user = User.query.filter_by(**query).one_or_none() + if user and user.check_password(password): + return user + + +def generate_confirmation_token(app, email): + serializer = URLSafeTimedSerializer(app.config['SECRET_KEY']) + return serializer.dumps(email, salt=app.config['SECURITY_PASSWORD_SALT']) + + +def send_confirmation_email(app, user, url, template, header): + from ..celery import send_email_async + token = generate_confirmation_token(app, user.email) + confirm_url = url_for(url, token=token, _external=True) + html = render_template(template, subject=header, confirm_url=confirm_url, user=user) + email_data = { + 'subject': header, + 'html': html, + 'recipients': [user.email], + 'sender': app.config['MAIL_DEFAULT_SENDER'] + } + send_email_async.delay(**email_data) + + +def do_register_user(username, email, password): + user = User(username.strip(), email.strip(), password, False) + user.profile = UserProfile() + db.session.add(user) + db.session.commit() + wm.emit_signal(SIG_NEW_USER, request.path, msg=f'New user *{username}* has been registered') + + +def init_app(app): + csrf = CSRFProtect(app) # pylint: disable=W0612 + login_manager = LoginManager(app) + mail = Mail(app) + app.mail = mail + auth = Blueprint('auth', __name__, template_folder='templates') + + @login_manager.user_loader + def load_user(user_id): # pylint: disable=W0613,W0612 + return User.query.get(user_id) + + @login_manager.header_loader + def load_user_from_header(header_val): # pylint: disable=W0613,W0612 + if header_val.startswith('Bearer'): + header_val = header_val.replace('Bearer ', '', 1) + try: + data = decode_token(app.config['SECRET_KEY'], header_val, app.config['BEARER_TOKEN_EXPIRATION']) + user = User.query.filter_by(id=data['user_id'], username=data['username'], email=data['email']).one_or_none() + if user and user.active: + return user + except (BadSignature, BadTimeSignature, KeyError): + pass + + @auth.route('/login', methods=['POST']) + def login(): # pylint: disable=W0613,W0612 + form = LoginForm() + if form.validate(): + user = authenticate(form.login.data, form.password.data) + if user and user.active: + login_user(user) + if not os.path.isfile(current_app.config['MAINTENANCE_FILE']): + LoginHistory.add_record(user.username, request) + schema = UserSchema() + return jsonify(schema.dump(user)) + elif not user: + abort(401, 'Invalid username or password') + elif not user.active: + abort(401, 'Account is not active, please contact administrators') + return jsonify(form.errors), 401 + + 
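The permission checks used by `auth_required` live in the small `_permissions` registry defined at the top of this module, so new checks can be plugged in without touching the decorator. A minimal sketch (the "staff" rule and route are illustrative, and placement is assumed to be inside `init_app`, where the `auth` blueprint and `jsonify` are in scope):
```python
# minimal sketch: register a custom permission and guard a blueprint route with it
# (the "staff" check and the route are illustrative)
register_permission('staff', lambda user: user.is_admin or user.username.endswith('.staff'))

@auth.route('/staff_only', methods=['GET'])
@auth_required(permissions=['staff'])
def staff_only():
    return jsonify({"success": True})
```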
+    @auth.route('/change_password', methods=['POST'])
+    @auth_required
+    def change_password():  # pylint: disable=W0613,W0612
+        form = UserChangePasswordForm()
+        if form.validate_on_submit():
+            if not current_user.check_password(form.old_password.data):
+                form.errors['old_password'] = ['The old password is incorrect']
+                return jsonify(form.errors), 400
+            current_user.assign_password(form.password.data)
+            db.session.add(current_user)
+            db.session.commit()
+            return jsonify({"success": True})
+        return jsonify(form.errors), 400
+
+    @auth.route('/user_profile_by_name/<username>', methods=['GET'])
+    @auth_required(permissions=['admin'])
+    def get_user_profile_by_username(username):
+        user = User.query.filter(User.username == username).first_or_404()
+        data = UserSchema().dumps(user)
+        return data, 200
+
+    @auth.route('/delete_account', methods=['DELETE'])
+    @auth_required
+    def delete_account():  # pylint: disable=W0613,W0612
+        """ Delete user account.
+        User and all its references are permanently removed from db and disk.
+        """
+        from ..models.db_models import Account
+        from ..celery import send_email_async
+
+        username = current_user.username
+        recipient = current_app.config['MAIL_DEFAULT_SENDER']
+        account = Account.query.filter_by(type="user", owner_id=current_user.id).first()
+        db.session.delete(account)
+        db.session.commit()
+        db.session.delete(current_user)
+        db.session.commit()
+
+        # send email notification to the admin
+        # TODO: this is just notification for mergin admin, we might want to do something better for statistics
+        # TODO: also it might be cleaner to do it outside of auth module, e.g. on delete signal
+        email_data = {
+            'subject': f'{username} has been deleted',
+            'html': f'user {username} has deleted their account',
+            'recipients': [recipient],
+            'sender': current_app.config['MAIL_DEFAULT_SENDER']
+        }
+        send_email_async.delay(**email_data)
+
+        wm.emit_signal(SIG_DELETED_USER, request.path, msg=f'User *{username}* has been deleted')
+        return jsonify({"success": True})
+
+    @auth.route('/logout')
+    @auth_required
+    def logout():  # pylint: disable=W0613,W0612
+        logout_user()
+        return jsonify({"success": True})
+
+    @auth.route('/resend_confirm_email')
+    @auth_required
+    def resend_confirm_email():  # pylint: disable=W0613,W0612
+        send_confirmation_email(
+            app,
+            current_user,
+            'auth.confirm_email',
+            'email/email_confirmation.html',
+            'Email confirmation'
+        )
+        return jsonify({"success": True})
+
+    def confirm_token(token, expiration=3600*24*3):
+        serializer = URLSafeTimedSerializer(app.config['SECRET_KEY'])
+        try:
+            email = serializer.loads(
+                token,
+                salt=app.config['SECURITY_PASSWORD_SALT'],
+                max_age=expiration
+            )
+        except Exception:
+            return
+        return email
+
+    @auth.route('/user', methods=['POST'])
+    @auth_required(permissions=['admin'])
+    def register_user():  # pylint: disable=W0613,W0612
+        form = RegisterUserForm()
+        if form.validate():
+            do_register_user(form.username.data, form.email.data, '')
+            return jsonify({"success": True}), 200
+        return jsonify(form.errors), 400
+
+    @auth.route('/confirm/<token>', methods=['GET'])
+    def confirm_email(token):  # pylint: disable=W0613,W0612
+        from ..celery import send_email_async
+
+        email = confirm_token(token)
+        if not email:
+            abort(400, "Invalid token")
+
+        user = User.query.filter_by(email=email).first_or_404()
+        if user.verified_email:
+            return render_template('email_verified.html')
+
+        if not user.check_password(''):
+            user.verified_email = True
+            db.session.add(user)
+            db.session.commit()
+            # send welcome email if user is freshly registered
+            if user.profile.registration_date and user.profile.registration_date > datetime.utcnow() - timedelta(hours=1):
+                html = render_template('email/welcome_email.html', subject="Welcome to Mergin!")
+                email_data = {
+                    'subject': "Welcome to Mergin",
+                    'html': html,
+                    'recipients': [user.email],
+                    'sender': app.config['MAIL_DEFAULT_SENDER']
+                }
+                send_email_async.delay(**email_data)
+
+        return render_template('email_verified.html')
+
+    @auth.route('/password_changed', methods=['GET'])
+    def password_changed():  # pylint: disable=W0613,W0612
+        return render_template('password_reset_complete.html')  # pragma: no cover
+
+    @auth.route('/confirm_password/<token>', methods=['GET', 'POST'])
+    def confirm_new_password(token):  # pylint: disable=W0613,W0612
+        email = confirm_token(token)
+        if not email:
+            abort(400, "Invalid token")
+
+        user = User.query.filter_by(email=email).first_or_404()
+        # user should confirm email first
+        if not user.active:
+            abort(400, "Account is not active")
+
+        form = UserPasswordForm(request.form)
+        if request.method == 'POST':
+            form = UserPasswordForm()
+            if form.validate():
+                user.assign_password(form.password.data)
+                db.session.add(user)
+                db.session.commit()
+                return redirect(url_for('auth.password_changed'))
+
+        return render_template('change_password_form.html', form=form)
+
+    @auth.route('/password_reset', methods=['POST'])
+    def password_reset():  # pylint: disable=W0613,W0612
+        form = ResetPasswordForm()
+        if not form.validate():
+            return jsonify(form.errors), 404
+
+        user = User.query.filter(func.lower(User.email) == func.lower(form.email.data.strip())).one_or_none()
+        if not user:
+            return jsonify({"email": ["Account with given email does not exist"]}), 404
+        if not user.active:
+            # user should confirm email first
+            return jsonify({"email": ["Account is not active"]}), 400
+
+        send_confirmation_email(
+            app,
+            user,
+            '.confirm_new_password',
+            'email/password_reset.html',
+            'Password reset'
+        )
+        return jsonify({"success": True})
+
+    if app.config['USER_SELF_REGISTRATION']:
+        @auth.route('/signup', methods=['POST'])
+        def self_register_user():  # pylint: disable=W0613,W0612
+            form = UserRegistrationForm()
+            if form.validate_on_submit():
+                do_register_user(form.username.data, form.email.data, form.password.data)
+                user = User.query.filter(User.username == form.username.data).first()
+                send_confirmation_email(
+                    app,
+                    user,
+                    'auth.confirm_email',
+                    'email/user_registration.html',
+                    'Email confirmation'
+                )
+                login_user(user)
+                LoginHistory.add_record(user.username, request)
+                schema = UserSchema()
+                return jsonify(schema.dump(user))
+            return jsonify(form.errors), 400
+
+    @auth.route('/user/<username>', methods=['DELETE'])
+    @auth_required(permissions=['admin'])
+    def delete_user(username):  # pylint: disable=W0613,W0612
+        from ..models.db_models import Account
+
+        user = User.query.filter_by(username=username).first_or_404("User not found")
+        account = Account.query.filter_by(type="user", owner_id=user.id).first()
+        db.session.delete(account)
+        db.session.commit()
+        db.session.delete(user)
+        db.session.commit()
+        return jsonify({"success": True})
+
+    @auth.route('/user/profile', methods=['POST'])
+    @auth_required
+    def update_user_profile():  # pylint: disable=W0613,W0612
+        form = UserProfileDataForm.from_json(request.json)
+        email_changed = current_user.email != form.email.data.strip()
+        if not form.validate_on_submit():
+            return jsonify(form.errors), 400
+        if email_changed:
+            user = User.query.filter(func.lower(User.email) == func.lower(form.email.data.strip())).first()
+            if user:
+                form.email.errors.append("Email already exists")
+                return jsonify(form.errors), 400
+            current_user.verified_email = False
+
+        form.update_obj(current_user.profile)
+        form.update_obj(current_user)
+        db.session.add(current_user)
+        db.session.commit()
+        if email_changed:
+            resend_confirm_email()
+
+        return jsonify({"success": True})
+
+    @auth.route('/user/<username>', methods=['POST'])
+    @auth_required(permissions=['admin'])
+    def update_user(username):  # pylint: disable=W0613,W0612
+        form = UserForm.from_json(request.json)
+        if not form.validate_on_submit():
+            return jsonify(form.errors), 400
+
+        user = User.query.filter_by(username=username).first_or_404("User not found")
+        form.update_obj(user)
+        db.session.add(user)
+        db.session.commit()
+        return jsonify(UserSchema().dump(user))
+
+    @auth.route('/user/search', methods=['GET'])
+    @auth_required
+    def search_users():  # pylint: disable=W0613,W0612
+        """
+        Search by the 'like' param returns results in order: 1.) match at the start of words, 2.) match anywhere;
+        both ordered by username. Lookup by 'id' or 'names' returns plain filtered results.
+        """
+        query = None
+        ids = request.args.get('id')
+        names = request.args.get('names')
+        like = request.args.get('like')
+        users = User.query.filter_by(active=True)
+        schema = UserSearchSchema(many=True)
+        if ids:
+            ids = request.args.get('id')
+            try:
+                ids = map(int, ids.split(','))
+                query = User.id.in_(ids)
+            except (ValueError, AttributeError):
+                pass
+        elif names:
+            names = names.split(",")
+            query = User.username.in_(names)
+        elif like:
+            ilike = "{}%".format(like)
+            # match on start of words
+            query_username_prioritized = User.username.ilike(ilike) | User.username.op("~")(f'[\\.|\\-|_| ]{like}.*')
+            users1 = users.filter(query_username_prioritized).order_by(User.username).limit(10).all()
+            if len(users1) < 10:
+                # match anywhere except the previous results
+                query_match_anywhere = User.username.ilike(f"%{ilike}") & User.id.notin_([usr.id for usr in users1])
+                users2 = users.filter(query_match_anywhere).order_by(User.username).limit(10 - len(users1)).all()
+                users1.extend(users2)
+            return jsonify(schema.dump(users1))
+        if query is not None:
+            users = users.filter(query).order_by(User.username)
+
+        return jsonify(schema.dump(users.limit(10).all()))
+
+    app.register_blueprint(auth, url_prefix='/auth')
diff --git a/server/src/auth/bearer.py b/server/src/auth/bearer.py
new file mode 100644
index 00000000..72bb79b5
--- /dev/null
+++ b/server/src/auth/bearer.py
@@ -0,0 +1,28 @@
+# Copyright (C) 2018 Lutra Consulting Limited. All rights reserved.
+# Do not distribute without the express permission of the author.
+
+import hashlib
+from itsdangerous import URLSafeTimedSerializer
+from flask.sessions import TaggedJSONSerializer
+
+
+def decode_token(secret_key, token, max_age=None):
+    salt = 'bearer-session'
+    serializer = TaggedJSONSerializer()
+    signer_kwargs = {
+        'key_derivation': 'hmac',
+        'digest_method': hashlib.sha1
+    }
+    s = URLSafeTimedSerializer(secret_key, salt=salt, serializer=serializer, signer_kwargs=signer_kwargs)
+    return s.loads(token, max_age=max_age)
+
+
+def encode_token(secret_key, data):
+    salt = 'bearer-session'
+    serializer = TaggedJSONSerializer()
+    signer_kwargs = {
+        'key_derivation': 'hmac',
+        'digest_method': hashlib.sha1
+    }
+    s = URLSafeTimedSerializer(secret_key, salt=salt, serializer=serializer, signer_kwargs=signer_kwargs)
+    return s.dumps(data)
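A minimal round-trip sketch of these helpers; the secret and payload are illustrative (in the server the secret comes from `SECRET_KEY` and the payload mirrors what `load_user_from_header` expects):
```python
# minimal sketch: issue and verify a bearer token with the helpers above
# (secret and payload are illustrative)
from src.auth.bearer import encode_token, decode_token

token = encode_token('top-secret', {'user_id': 1, 'username': 'admin', 'email': 'admin@example.com'})
data = decode_token('top-secret', token, max_age=3600)  # raises if the token is older than max_age
assert data['username'] == 'admin'
```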
diff --git a/server/src/auth/forms.py b/server/src/auth/forms.py
new file mode 100644
index 00000000..35f26d83
--- /dev/null
+++ b/server/src/auth/forms.py
@@ -0,0 +1,121 @@
+# Copyright (C) 2018 Lutra Consulting Limited. All rights reserved.
+# Do not distribute without the express permission of the author.
+
+import safe
+from flask_wtf import FlaskForm
+from sqlalchemy import func
+from wtforms import StringField, PasswordField, BooleanField, FormField, validators, IntegerField, SelectField
+from wtforms.validators import DataRequired, Optional, Length, ValidationError, Email, EqualTo
+from wtforms.compat import iteritems
+
+from ..forms import namespace_validation
+from .models import User
+
+
+def whitespace_filter(obj):
+    return obj.strip() if isinstance(obj, str) else obj
+
+
+class UpdateForm(FlaskForm):
+    """
+    Base class for forms with a reasonable update strategy.
+    Doesn't overwrite optional fields that were not passed in the data!
+    """
+    def update_obj(self, obj):
+        for name, field in iteritems(self._fields):
+            is_optional = any((isinstance(v, Optional) for v in field.validators))
+            # update only required fields or passed optional fields
+            if not is_optional or (field.data or field.raw_data != []):
+                field.populate_obj(obj, name)
+
+
+class PasswordValidator:
+
+    def __init__(self, min_length=8):
+        self.min_length = min_length
+
+    def __call__(self, form, field):
+        if len(field.data) < self.min_length:
+            raise ValidationError("Passwords must be at least {} characters long.".format(self.min_length))
+
+        strength = safe.check(field.data, self.min_length)
+        if not strength:
+            raise ValidationError('Password is not strong enough')
+
+
+class LoginForm(FlaskForm):
+    """ Form with username and password fields for user to sign in.
""" + login = StringField(validators=[DataRequired(), Length(max=80)]) + password = PasswordField(validators=[DataRequired()]) + + +class RegisterUserForm(FlaskForm): + username = StringField( + 'Username', + validators=[validators.Length(min=4, max=25), namespace_validation], + filters=(whitespace_filter, ) + ) + email = StringField( + 'Email Address', + validators=[DataRequired(), Email()], + filters=(whitespace_filter, ) + ) + + def validate(self): + if not super().validate(): + return False + + # check for existence in Namespace table + from ..models.db_models import Namespace + ns = Namespace.query.filter(func.lower(Namespace.name) == func.lower(self.username.data.strip())).first() + if ns: + self.username.errors.append("Already exists") + return False + user = User.query.filter( + (func.lower(User.username) == func.lower(self.username.data.strip())) | + (func.lower(User.email) == func.lower(self.email.data.strip()))).first() + if user: + if user.username.lower() == self.username.data.lower().strip(): + self.username.errors.append("Already exists") + if user.email.lower() == self.email.data.lower().strip(): + self.email.errors.append("Already exists") + return False + return True + + +class ResetPasswordForm(FlaskForm): + email = StringField('Email Address', [DataRequired(), Email()], filters=(whitespace_filter, )) + + +class UserPasswordForm(FlaskForm): + password = PasswordField( + 'Password', + [DataRequired(), PasswordValidator(min_length=8)] + ) + confirm = PasswordField('Confirm password', [ + DataRequired(), + EqualTo('password', message='Passwords must match') + ]) + + +class UserRegistrationForm(RegisterUserForm, UserPasswordForm): + pass + + +class UserChangePasswordForm(UserPasswordForm): + old_password = StringField( + 'Old Password', [DataRequired()]) + + +class UserForm(UpdateForm): + is_admin = BooleanField('Admin', [Optional()]) + active = BooleanField('Active', [Optional()]) + + +class UserProfileDataForm(UpdateForm): + """ This form is for user profile update """ + receive_notifications = BooleanField( + 'Receive notifications', [Optional()]) + first_name = StringField('First Name', [Optional()], filters=(whitespace_filter, )) + last_name = StringField('Last Name', [Optional()], filters=(whitespace_filter, )) + email = StringField('Email', [Optional(), Email()], filters=(whitespace_filter, )) diff --git a/server/src/auth/models.py b/server/src/auth/models.py new file mode 100644 index 00000000..a94538a1 --- /dev/null +++ b/server/src/auth/models.py @@ -0,0 +1,97 @@ +# Copyright (C) 2018 Lutra Consulting Limited. All rights reserved. +# Do not distribute without the express permission of the author. +import datetime +import bcrypt +from .. 
import db
+from ..mergin_utils import get_user_agent, get_ip
+
+
+class User(db.Model):
+    id = db.Column(db.Integer, primary_key=True)
+
+    username = db.Column(db.String(80), unique=True, info={'label': 'Username'})
+    email = db.Column(db.String(120), unique=True)
+
+    passwd = db.Column(db.String(80), info={'label': 'Password'})  # salted + hashed
+
+    active = db.Column(db.Boolean, default=True)
+    is_admin = db.Column(db.Boolean)
+    verified_email = db.Column(db.Boolean, default=False)
+    inactive_since = db.Column(db.DateTime(), nullable=True, index=True)
+
+    def __init__(self, username, email, passwd, is_admin=False):
+        self.username = username
+        self.email = email
+        self.assign_password(passwd)
+        self.is_admin = is_admin
+
+    def __repr__(self):
+        return '<User %s>' % self.username
+
+    def check_password(self, password):
+        if isinstance(password, str):
+            password = password.encode('utf-8')
+        return bcrypt.checkpw(password, self.passwd.encode('utf-8'))
+
+    def assign_password(self, password):
+        if isinstance(password, str):
+            password = password.encode('utf-8')
+        self.passwd = bcrypt.hashpw(password, bcrypt.gensalt()).decode('utf-8')
+
+    @property
+    def is_authenticated(self):
+        """ For Flask-Login """
+        return True
+
+    @property
+    def is_active(self):
+        """ For Flask-Login """
+        return self.active
+
+    @property
+    def is_anonymous(self):
+        """ For Flask-Login """
+        return False
+
+    def get_id(self):
+        """ For Flask-Login ... must return unicode user ID """
+        return str(self.id)
+
+
+class UserProfile(db.Model):
+    user_id = db.Column(db.Integer, db.ForeignKey("user.id", ondelete="CASCADE"), primary_key=True)
+    receive_notifications = db.Column(db.Boolean, default=True, index=True)
+    first_name = db.Column(db.String(256), nullable=True, info={'label': 'First name'})
+    last_name = db.Column(db.String(256), nullable=True, info={'label': 'Last name'})
+    registration_date = db.Column(db.DateTime(), nullable=True, info={'label': 'Date of creation of user account'}, default=datetime.datetime.utcnow)
+
+    user = db.relationship("User",
+                           uselist=False,
+                           backref=db.backref("profile", single_parent=True, uselist=False, cascade="all,delete"))
+
+
+class LoginHistory(db.Model):
+    id = db.Column(db.Integer, primary_key=True)
+    timestamp = db.Column(db.DateTime(), default=datetime.datetime.utcnow, index=True)
+    username = db.Column(db.String, index=True)
+    user_agent = db.Column(db.String, index=True)
+    ip_address = db.Column(db.String, index=True)
+    ip_geolocation_country = db.Column(db.String, index=True)
+
+    def __init__(self, username, ua, ip):
+        self.username = username
+        self.user_agent = ua
+        self.ip_address = ip
+
+    @staticmethod
+    def add_record(username, request):
+        ua = get_user_agent(request)
+        ip = get_ip(request)
+        # ignore login attempts coming from urllib - related to db sync tool
+        if "DB-sync" in ua:
+            return
+        lh = LoginHistory(username, ua, ip)
+        db.session.add(lh)
+        db.session.commit()
diff --git a/server/src/auth/schemas.py b/server/src/auth/schemas.py
new file mode 100644
index 00000000..ba7f531d
--- /dev/null
+++ b/server/src/auth/schemas.py
@@ -0,0 +1,86 @@
+# Copyright (C) 2018 Lutra Consulting Limited. All rights reserved.
+# Do not distribute without the express permission of the author.
+
+from marshmallow import fields
+
+from ..
import ma +from .models import User, UserProfile + + +class UserProfileSchema(ma.ModelSchema): + name = ma.Function( + lambda obj: + f'{obj.first_name if obj.first_name else ""} {obj.last_name if obj.last_name else ""}'.strip(), dump_only=True) + storage = fields.Method("get_storage", dump_only=True) + disk_usage = fields.Method("get_disk_usage", dump_only=True) + organisations = fields.Method("get_user_organisations", dump_only=True) + has_project = fields.Method("_has_project", dump_only=True) + + def get_storage(self, obj): + from ..models.db_models import Namespace + + ns = Namespace.query.filter_by(name=obj.user.username).first() + return ns.storage + + def get_disk_usage(self, obj): + from ..models.db_models import Namespace + + ns = Namespace.query.filter_by(name=obj.user.username).first() + return ns.disk_usage() + + def get_user_organisations(self, obj): + """ Return dictionary of organisation name: role for organisations user is member of """ + from ..organisation.models import Organisation + org_map = {} + organisations = Organisation.find_by_member_id(obj.user_id) + for org in organisations: + org_map[org.name] = org.get_member_role(obj.user_id) + return org_map + + def _has_project(self, obj): + from ..models.db_models import Project + + projects_count = Project.query.filter(Project.creator_id == obj.user.id).filter_by( + namespace=obj.user.username).count() + return projects_count > 0 + + class Meta: + model = UserProfile + + +class UserSchema(ma.ModelSchema): + profile = fields.Nested(UserProfileSchema, exclude=("user", )) + account = fields.Method("get_account", dump_only=True) + + def get_account(self, obj): + from ..models.schemas import AccountSchema + from ..models.db_models import Namespace, Account, Project + + account = Account.query.filter_by(type='user', owner_id=obj.id).first() + return AccountSchema().dump(account) + + class Meta: + model = User + fields = ('id', 'username', 'email', 'active', 'is_admin', 'profile', 'account', 'verified_email') + + +class UserSearchSchema(ma.ModelSchema): + profile = fields.Nested(UserProfileSchema, only=( + 'first_name', 'last_name')) + + class Meta: + model = User + fields = ('id', 'username', 'profile') + + +class AccountSearchSchema(ma.ModelSchema): + id = fields.Integer() + type = fields.String() + name = fields.Method("get_name") + + def get_name(self, obj): + if obj.type == "user": + return obj.username + else: + return obj.name + diff --git a/server/src/auth/templates/_formhelpers.html b/server/src/auth/templates/_formhelpers.html new file mode 100644 index 00000000..9c033d40 --- /dev/null +++ b/server/src/auth/templates/_formhelpers.html @@ -0,0 +1,17 @@ +{% macro render_field(field) %} +
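+ {# render_field: renders one labeled input row; the "i" badge and its hover
+    text are styled by the .info/.description rules in form.css #}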
+  {{ field.label }}
+  {{ field(**kwargs)|safe }}
+  {% if field.description %}
+  <span class="info">i</span>
+  <span class="description">{{ field.description|safe }}</span>
+  {% endif %}
+</div>
+{% if field.errors %}
+<ul class="errorlist">
+  {% for error in field.errors %}
+  <li>{{ error }}</li>
+  {% endfor %}
+</ul>
+ {% endif %} +{% endmacro %} \ No newline at end of file diff --git a/server/src/auth/templates/base.css b/server/src/auth/templates/base.css new file mode 100644 index 00000000..09ed6ad3 --- /dev/null +++ b/server/src/auth/templates/base.css @@ -0,0 +1,47 @@ +body { + padding: 1em; + margin: 0; + font-family: Helvetica; + font-family: "Roboto","Helvetica","Lucida Grande","DejaVu Sans","Bitstream Vera Sans",Verdana,Arial; +} +.container { + max-width: 550px; + margin: 0 auto; +} +.center { + text-align: center; +} +.note { + font-weight: normal; + opacity: 0.5; + padding: 6px 0; + font-size: 15px; +} +.error { + color: #F4511E; +} +.md-button { + display: inline-block; + height: 36px; + line-height: 36px; + border: none; + padding: 0 16px; + text-transform: uppercase; + text-decoration: none; + font-size: 14px; + font-weight: 500; + border-radius: 2px; + cursor: pointer; + white-space: nowrap; + margin: 4px 8px; +} +.md-button.md-raised:not([disabled]) { + box-shadow: 0 1px 5px rgba(0,0,0,.2),0 2px 2px rgba(0,0,0,.14),0 3px 1px -2px rgba(0,0,0,.12); +} +.md-button.md-primary:not([disabled]) { + background-color: #2196f3; + color: rgba(255, 255, 255, .87); +} +.md-button.md-primary:not([disabled]):hover { + background-color: #1e88e5; +} diff --git a/server/src/auth/templates/base.html b/server/src/auth/templates/base.html new file mode 100644 index 00000000..808f558c --- /dev/null +++ b/server/src/auth/templates/base.html @@ -0,0 +1,27 @@ + + + + + + + + + {% block header %} + {{ title }} + {% endblock %} + + + {% block extrastyle %} + {% endblock %} + + + + + {% block content %} +
+
+ {% endblock %} + + \ No newline at end of file diff --git a/server/src/auth/templates/change_password_form.html b/server/src/auth/templates/change_password_form.html new file mode 100644 index 00000000..3ed8c02a --- /dev/null +++ b/server/src/auth/templates/change_password_form.html @@ -0,0 +1,35 @@ +{% extends 'base.html' %} +{% from "_formhelpers.html" import render_field %} + +{% block header %} + Activate account +{% endblock %} + +{% block extrastyle %} + +{% endblock %} + +{% block content %} +
+

{{ title }}

+ +

Please enter your password.

+ +
+ + + {{ render_field(form.password) }} + {{ render_field(form.confirm) }} + +

+ +

+
+ +
+{% endblock %} diff --git a/server/src/auth/templates/email_verified.html b/server/src/auth/templates/email_verified.html new file mode 100644 index 00000000..d489da89 --- /dev/null +++ b/server/src/auth/templates/email_verified.html @@ -0,0 +1,15 @@ +{% extends 'base.html' %} + +{% block title %} Email address verified {% endblock %} + +{% block content %} +
+

+ Your email address has been verified. +

+
+

+ Continue +

+
+{% endblock %} diff --git a/server/src/auth/templates/form.css b/server/src/auth/templates/form.css new file mode 100644 index 00000000..f7f94f7e --- /dev/null +++ b/server/src/auth/templates/form.css @@ -0,0 +1,94 @@ +form { + padding: 16px 16px 16px 0; + display: flex; + flex-direction: column; +} +.md-input-container { + /*margin: 24px 0;*/ + margin-top: 24px; + display: flex; + justify-content: space-between; + align-items: center; + position: relative; +} + +.md-input-container label { + min-width: 140px; + display: inline-block; + font-size: 15px; + opacity: 0.75; + line-height: 32px; +} +input { + display: inline-block; + border-color: #ccc; + border-width: 0 0 1px 0; + font-size: 17px; + outline: none; + height: 32px; + transition: all .4s cubic-bezier(.25,.8,.25,1); + transition-property: border-color; + padding-bottom: 1px; + box-sizing: border-box; + background-color: transparent; + flex: 1; + max-width: 360px; + background-color: #f9f9f9; +} +input:focus { + border-color: #1e88e5; + padding-bottom: 0; + border-width: 0 0 2px 0; +} +.md-button[type="submit"] { + margin: 12px; + float: right; +} +ul.errorlist { + color: #F4511E; + padding-left: 0; + list-style: none; + font-size: 14px; + margin: 4px 0; +} +.info { + position: absolute; + display: block; + width: 16px; + height: 16px; + font-size: 14px; + line-height: 16px; + border-radius: 50%; + right: -24px; + bottom: 6px; + color: orange; + border: 1px solid currentColor; + text-align: center; + user-select: none; +} +.description { + position: absolute; + background-color: #fff; + border: 1px solid #aaa; + z-index: 100; + right: -20px; + bottom: 32px; + max-width: 300px; + min-width: 300px; + border-radius: 3px; +} +.info:not(:hover) + .description { + display: none; +} + +@media (max-width: 500px) { + .md-input-container { + flex-direction: column; + justify-content: flex-end; + align-items: flex-start; + } + .md-input-container input { + width: 100%; + max-width: initial; + } +} \ No newline at end of file diff --git a/server/src/auth/templates/password_reset_complete.html b/server/src/auth/templates/password_reset_complete.html new file mode 100644 index 00000000..622aa855 --- /dev/null +++ b/server/src/auth/templates/password_reset_complete.html @@ -0,0 +1,16 @@ +{% extends 'base.html' %} + +{% block title %} Password Changed {% endblock %} + +{% block content %} +
+

Password Changed!

+

+ Your password was changed. You can now Sign In +

+
+

+ Continue +

+
+{% endblock %} diff --git a/server/src/auth/templates/user_registration_form.html b/server/src/auth/templates/user_registration_form.html new file mode 100644 index 00000000..6ef418d5 --- /dev/null +++ b/server/src/auth/templates/user_registration_form.html @@ -0,0 +1,52 @@ +{% extends 'base.html' %} +{% from "_formhelpers.html" import render_field %} + +{% block header %} + Activate account +{% endblock %} + +{% block extrastyle %} + +{% endblock %} + +{% block content %} +
+

{{ title }}

+ +

Please complete the registration form:

+ +
+ + + {{ render_field(form.username) }} + {{ render_field(form.email) }} + {{ render_field(form.password) }} + {{ render_field(form.confirm) }} + +
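+ {# the fields above come from UserRegistrationForm in auth/forms.py,
+    i.e. RegisterUserForm combined with UserPasswordForm #}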
+ + By registering you agree to the Mergin Terms of Service. + + +

+ +

+
+ + + + {% if error %} +

{{ error }}

+ {% endif %} + + {% if msg %} +

{{ msg }}

+ {% endif %} + +
+{% endblock %} diff --git a/server/src/celery.py b/server/src/celery.py new file mode 100644 index 00000000..ddcff993 --- /dev/null +++ b/server/src/celery.py @@ -0,0 +1,139 @@ +# Copyright (C) 2020 Lutra Consulting Limited. All rights reserved. +# Do not distribute without the express permission of the author. +import shutil +import os +from datetime import datetime, timedelta +from celery import Celery +from flask_mail import Message +from smtplib import SMTPException, SMTPServerDisconnected +from celery.schedules import crontab +from sqlalchemy import and_, false + +from .organisation import Organisation +from .auth import User +from .models.db_models import RemovedProject, Account, Project +from .config import Configuration +from . import mail, db +from .storages.disk import move_to_tmp + + +# create on flask app independent object +# we need this for defining tasks, and celery is then configured in run_celery.py +celery = Celery(__name__, broker=Configuration.CELERY_BROKER_URL, backend=Configuration.CELERY_RESULT_BACKEND) + + +@celery.on_after_configure.connect +def setup_periodic_tasks(sender, **kwargs): + sender.add_periodic_task(crontab(hour=2, minute=0), remove_temp_files, name='clean temp files') + sender.add_periodic_task(crontab(hour=2, minute=0), remove_projects_backups, name='remove old project backups') + sender.add_periodic_task(crontab(hour=1, minute=0), remove_accounts_data, name='remove personal data of inactive users') + + +@celery.task( + autoretry_for=(SMTPException, SMTPServerDisconnected, ), + retry_kwargs={'max_retries': 3, 'default_retry_delay': 300}, + ignore_result=True) +def send_email_async(**kwargs): + """ + Send flask mail (application context needed). + + :param email_data: content for flask mail Message + :param email_data: dict + """ + return send_email(**kwargs) + + +def send_email(**kwargs): + """ + Send flask mail (application context needed). + + :param email_data: content for flask mail Message + :param email_data: dict + """ + msg = Message(**kwargs) + # let's add default sender to BCC on production/staging server to make sure emails are in inbox + if not Configuration.MERGIN_TESTING: + msg.bcc.append(Configuration.MAIL_DEFAULT_SENDER) + mail.send(msg) + + +@celery.task +def remove_temp_files(): + """ Remove old temp folders in mergin temp directory. + This is clean up for storages.disk.move_to_tmp() function. + """ + for dir in os.listdir(Configuration.TEMP_DIR): + # ignore folder with apple notifications receipts which we want (temporarily) to maintain + if dir == 'notifications': + continue + path = os.path.join(Configuration.TEMP_DIR, dir) + if datetime.fromtimestamp(os.path.getmtime(path)) < datetime.utcnow() - timedelta(days=Configuration.TEMP_EXPIRATION): + try: + shutil.rmtree(path) + except OSError as e: + print(f"Unable to remove {path}: {str(e)}") + + +@celery.task +def remove_projects_backups(): + """ Permanently remove deleted projects. 
All data is lost, and projects cannot be restored anymore. """
+    projects = RemovedProject.query.filter(
+        RemovedProject.timestamp < datetime.utcnow() - timedelta(days=Configuration.DELETED_PROJECT_EXPIRATION)
+    ).all()
+
+    for p in projects:
+        p_dir = os.path.abspath(os.path.join(Configuration.LOCAL_PROJECTS, p.properties["storage_params"]["location"]))
+        if os.path.exists(p_dir):
+            move_to_tmp(p_dir)
+        db.session.delete(p)
+    db.session.commit()
+
+
+@celery.task
+def remove_accounts_data():
+    before_expiration = datetime.today() - timedelta(days=Configuration.CLOSED_ACCOUNT_EXPIRATION)
+
+    # regex condition on account name to avoid processing already-deleted accounts multiple times
+    subquery = db.session.query(User.id).filter(User.active == false(), User.inactive_since <= before_expiration, User.username.op("~")('^(?!deleted_\d{13})')).subquery()
+    subquery2 = db.session.query(Organisation.id).filter(Organisation.active == false(), Organisation.inactive_since <= before_expiration, Organisation.name.op("~")('^(?!deleted_\d{13})')).subquery()
+    accounts = Account.query.filter(and_(Account.owner_id.in_(subquery), Account.type == "user") | and_(Account.owner_id.in_(subquery2), Account.type == "organisation"))
+
+    for account in accounts:
+        timestamp = round(datetime.now().timestamp() * 1000)
+        user = None
+        organisation = None
+        if account.type == 'user':
+            user = User.query.get(account.owner_id)
+            user.username = f"deleted_{timestamp}"
+            user.email = f"deleted_{timestamp}"
+            user.verified_email = False
+            user.assign_password(f"deleted_{timestamp}")
+            user.profile.first_name = ""
+            user.profile.last_name = ""
+        else:
+            organisation = Organisation.query.get(account.owner_id)
+            organisation.name = f"deleted_{timestamp}"
+            organisation.description = ""
+
+        # delete account's projects
+        projects = Project.query.filter_by(namespace=account.namespace.name).all()
+        for p in projects:
+            p_dir = p.storage.project_dir
+            if os.path.exists(p_dir):
+                move_to_tmp(p_dir)
+            db.session.delete(p)
+
+        # delete account's removed projects
+        projects = RemovedProject.query.filter_by(namespace=account.namespace.name).all()
+        for p in projects:
+            p_dir = os.path.abspath(os.path.join(Configuration.LOCAL_PROJECTS, p.properties["storage_params"]["location"]))
+            if os.path.exists(p_dir):
+                move_to_tmp(p_dir)
+            db.session.delete(p)
+
+        db.session.commit()
+        account.namespace.name = f"deleted_{timestamp}"
+
+    db.session.commit()
diff --git a/server/src/config.py b/server/src/config.py
new file mode 100644
index 00000000..7b739d9f
--- /dev/null
+++ b/server/src/config.py
@@ -0,0 +1,88 @@
+# Copyright (C) 2018 Lutra Consulting Limited. All rights reserved.
+# Do not distribute without the express permission of the author.
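+
+# NOTE: a minimal sketch of the expected environment. Every value below is
+# read with python-decouple's config(), which resolves os.environ first and
+# then falls back to a .env/settings file. Only options declared without a
+# default are mandatory; the values shown here are placeholders:
+#
+#   SECRET_KEY=change-me
+#   SECURITY_PASSWORD_SALT=change-me-too
+#   MAIL_DEFAULT_SENDER=mergin@example.com
+#   MAIL_USERNAME=mailer
+#   MAIL_PASSWORD=secret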
+
+import os
+from tempfile import gettempdir
+from .version import get_version
+from decouple import config, Csv
+
+config_dir = os.path.abspath(os.path.dirname(__file__))
+
+
+class Configuration(object):
+    DEBUG = config('FLASK_DEBUG', default=False, cast=bool)
+    TESTING = config('TESTING', default=False, cast=bool)
+    SECRET_KEY = config('SECRET_KEY')
+    PROXY_FIX = config('PROXY_FIX', default=True, cast=bool)
+    SWAGGER_UI = config('SWAGGER_UI', default=False, cast=bool)  # to enable swagger UI console (for testing only)
+    VERSION = config('VERSION', default=get_version())
+    PUBLIC_DIR = config('PUBLIC_DIR', default=os.path.join(config_dir, os.pardir, 'build', 'static'))
+    # for local storage type
+    LOCAL_PROJECTS = config('LOCAL_PROJECTS', default=os.path.join(config_dir, os.pardir, os.pardir, 'projects'))
+
+    # Mergin DB related
+    SQLALCHEMY_TRACK_MODIFICATIONS = config('SQLALCHEMY_TRACK_MODIFICATIONS', default=False, cast=bool)
+    SQLALCHEMY_ENGINE_OPTIONS = {
+        'pool_size': 2,
+        'pool_timeout': 300
+    }
+    DB_USER = config('DB_USER', default='postgres')
+    DB_PASSWORD = config('DB_PASSWORD', default='postgres')
+    DB_HOST = config('DB_HOST', default='localhost')
+    DB_PORT = config('DB_PORT', default=5002, cast=int)
+    DB_DATABASE = config('DB_DATABASE', default='postgres')
+    DB_APPLICATION_NAME = config('DB_APPLICATION_NAME', default='mergin')
+    SQLALCHEMY_DATABASE_URI = os.getenv('SQLALCHEMY_DATABASE_URI', f'postgresql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_DATABASE}?application_name={DB_APPLICATION_NAME}')
+
+    # auth related
+    SECURITY_PASSWORD_SALT = config('SECURITY_PASSWORD_SALT')
+    WTF_CSRF_TIME_LIMIT = config('WTF_CSRF_TIME_LIMIT', default=3600 * 24, cast=int)  # in seconds
+    BEARER_TOKEN_EXPIRATION = config('BEARER_TOKEN_EXPIRATION', default=3600 * 12, cast=int)  # in seconds
+    WTF_CSRF_ENABLED = config('WTF_CSRF_ENABLED', default=True, cast=bool)
+
+    # for flask mail
+    MAIL_SERVER = config('MAIL_SERVER', default='localhost')
+    MAIL_PORT = config('MAIL_PORT', default=587, cast=int)
+    MAIL_USE_TLS = config('MAIL_USE_TLS', default=True, cast=bool)
+    MAIL_DEFAULT_SENDER = config('MAIL_DEFAULT_SENDER')
+    MAIL_USERNAME = config('MAIL_USERNAME')
+    MAIL_PASSWORD = config('MAIL_PASSWORD')
+    MAIL_DEBUG = config('MAIL_DEBUG', default=False, cast=bool)
+    MAIL_SUPPRESS_SEND = config('MAIL_SUPPRESS_SEND', default=True, cast=bool)
+
+    USER_SELF_REGISTRATION = config('USER_SELF_REGISTRATION', default=True, cast=bool)
+
+    # locking file when backups are created
+    MAINTENANCE_FILE = config('MAINTENANCE_FILE', default=os.path.join(LOCAL_PROJECTS, 'MAINTENANCE'))
+
+    # data sync
+    LOCKFILE_EXPIRATION = config('LOCKFILE_EXPIRATION', default=300, cast=int)  # in seconds
+    MAX_CHUNK_SIZE = config('MAX_CHUNK_SIZE', default=10 * 1024 * 1024, cast=int)  # in bytes
+    USE_X_ACCEL = config('USE_X_ACCEL', default=False, cast=bool)  # use nginx (in front of gunicorn) to serve files (https://www.nginx.com/resources/wiki/start/topics/examples/x-accel/)
+    FILE_EXPIRATION = config('FILE_EXPIRATION', default=48 * 3600, cast=int)  # clean up of old files onto which diffs were applied, in seconds
+    BLACKLIST = config('BLACKLIST', default='.mergin/, .DS_Store, .directory', cast=Csv())
+
+    # celery
+    CELERY_IMPORTS = config('CELERY_IMPORTS', default="src.celery")
+    CELERY_BROKER_URL = config('CELERY_BROKER_URL', default='redis://172.17.0.1:6379/0')
+    CELERY_RESULT_BACKEND = config('CELERY_RESULT_BACKEND', default='redis://172.17.0.1:6379/0')
+
+    # various lifetimes
+    TRANSFER_EXPIRATION = config('TRANSFER_EXPIRATION', default=7 * 24 * 3600, cast=int)  # in seconds
+    ORGANISATION_INVITATION_EXPIRATION = config('ORGANISATION_INVITATION_EXPIRATION', default=7 * 24 * 3600, cast=int)  # in seconds
+    PROJECT_ACCESS_REQUEST = config('PROJECT_ACCESS_REQUEST', default=7 * 24 * 3600, cast=int)  # in seconds
+
+    TEMP_EXPIRATION = config('TEMP_EXPIRATION', default=7, cast=int)  # time in days after which temp files are permanently deleted
+    CLOSED_ACCOUNT_EXPIRATION = config('CLOSED_ACCOUNT_EXPIRATION', default=5, cast=int)  # time in days after a user closed the account until all projects and files are permanently deleted
+    DELETED_PROJECT_EXPIRATION = config('DELETED_PROJECT_EXPIRATION', default=7, cast=int)  # lifetime of a deleted project in days; expired projects are removed permanently with no possibility of restore
+
+    # trash dir for temp files being cleaned regularly
+    TEMP_DIR = config('TEMP_DIR', default=gettempdir())
+
+    # for links generated in emails
+    MERGIN_BASE_URL = config('MERGIN_BASE_URL', default="http://localhost:5000")
+    # for link to logo in emails
+    MERGIN_LOGO_URL = config('MERGIN_LOGO_URL', default="")
+
+    MERGIN_SUBSCRIPTIONS = config('MERGIN_SUBSCRIPTIONS', default=False, cast=bool)
+    MERGIN_TESTING = config('MERGIN_TESTING', default=False, cast=bool)
diff --git a/server/src/controllers/__init__.py b/server/src/controllers/__init__.py
new file mode 100644
index 00000000..5bb08739
--- /dev/null
+++ b/server/src/controllers/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (C) 2018 Lutra Consulting Limited. All rights reserved.
+# Do not distribute without the express permission of the author.
\ No newline at end of file
diff --git a/server/src/controllers/account_controller.py b/server/src/controllers/account_controller.py
new file mode 100644
index 00000000..da58090b
--- /dev/null
+++ b/server/src/controllers/account_controller.py
@@ -0,0 +1,257 @@
+# Copyright (C) 2021 Lutra Consulting Limited. All rights reserved.
+# Do not distribute without the express permission of the author.
+
+from datetime import datetime
+from operator import and_
+
+from flask import Blueprint, abort, jsonify, request
+from flask_login import current_user
+from sqlalchemy import or_, desc, asc, true
+
+from src.auth import AccountSearchSchema
+from .. import db
+from ..auth import auth_required
+from ..models.db_models import Account, Namespace
+from ..models.schemas import AccountSchema, AccountExtendedSchema
+from ..auth.models import User
+from ..models.db_models import Project, ProjectAccess, ProjectTransfer
+from ..organisation.models import OrganisationInvitation, Organisation
+
+account = Blueprint("account", __name__)
+
+
+@account.route('/accounts/<int:account_id>', methods=['GET'])
+@auth_required(permissions=['admin'])
+def get_account_by_id(account_id):  # pylint: disable=W0613,W0612
+    """ Get account by id.
+    :rtype: Account
+    """
+    accounts = Account.query.get(account_id)
+    data = AccountSchema().dump(accounts)
+    return jsonify(data), 200
+
+
+@account.route('/accounts/<type>', methods=['GET'])
+@auth_required(permissions=['admin'])
+def list_accounts(type):  # pylint: disable=W0613,W0612
+    """ List either user or organisation accounts, paginated, with optional filters and sort.
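+
+    Example (hypothetical request):
+        GET /accounts/user?page=1&per_page=10&order_by=name&descending=true&name=joh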
+
+    :param type: account type, either 'user' or 'organisation'
+    :type type: str
+    :returns: total number of accounts and paginated results for accounts
+    :rtype: dict(total: int, accounts: List[Account])
+    """
+    try:
+        page = int(request.args.get('page', 1))
+        per_page = int(request.args.get('per_page', 10))
+    except ValueError:
+        abort(400, "Invalid query format")
+    order_by = request.args.get('order_by')
+    descending = str(request.args.get('descending', 'false')) == 'true'
+    name = str(request.args.get('name', ''))
+    if type == "user":
+        model = User
+        name_col = User.username
+        active_col = User.active
+    elif type == "organisation":
+        model = Organisation
+        name_col = Organisation.name
+        active_col = Organisation.active
+    else:
+        abort(400, 'Invalid account type')
+
+    query = db.session.query(
+        Account.id,
+        Account.type,
+        name_col.label("name"),
+        active_col.label("active"),
+        Namespace.storage
+    )\
+        .join(model, Account.owner_id == model.id) \
+        .join(Namespace, Namespace.account_id == Account.id) \
+        .filter(Account.type == type)
+
+    if name:
+        query = query.filter(name_col.ilike(f'%{name}%'))
+
+    # sort by some column
+    col = None
+    if order_by:
+        if order_by == 'name':
+            col = name_col
+
+    if col:
+        query = query.order_by(desc(col)) if descending else query.order_by(asc(col))
+
+    paginate = query.paginate(page, per_page, max_per_page=100)
+    result = paginate.items
+    total = paginate.total
+    accounts = AccountExtendedSchema(many=True).dump(result)
+    return jsonify(accounts=accounts, total=total), 200
+
+
+@account.route('/change_account_status/<int:account_id>', methods=['PATCH'])
+@auth_required(permissions=['admin'])
+def change_account_status(account_id):
+    """
+    Change account active status to true or false.
+
+    :param account_id: account id
+    :returns: the changed status
+    """
+    if request.json.get("status") is None:
+        abort(400, "Status is empty")
+    account = Account.query.get_or_404(account_id, "Account not found")
+    owner = account.owner()
+    owner.active = request.json.get("status")
+    owner.inactive_since = datetime.utcnow() if not owner.active else None
+    if request.json.get("status") is False:
+        account.inactivated("deactivate")
+    db.session.commit()
+
+    return jsonify({'status': request.json.get("status")}), 200
+
+
+@account.route('/account/<int:id>', methods=['DELETE'])
+@auth_required
+def close_account(id):  # pylint: disable=W0613,W0612
+    """ Close account.
+    All related objects remain (temporarily) in the database and files stay on disk; the following actions are taken:
+
+    - the account holder is turned inactive
+    - the user's references in 3rd-party integrations are removed (e.g. Stripe)
+    - all references in projects' permissions are removed
+    - all pending project transfers related to the account namespace are removed
+    - all memberships in organisations and pending invitations are removed
+
+    For permanent removal, the account holder object needs to be deleted.
+    """
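+    # Example (hypothetical id): DELETE /account/42, issued by the account
+    # holder or an admin; everything below only detaches references - the data
+    # is purged later by the remove_accounts_data celery task once
+    # CLOSED_ACCOUNT_EXPIRATION passes.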
+ """ + account = Account.query.get_or_404(id, f'Account {id} not found') + if not account.can_edit(current_user.id) and not current_user.is_admin: + abort(403) + + user = None + organisation = None + if account.type == 'user': + user = User.query.get(account.owner_id) + # remove membership in organisations + organisations = Organisation.query.filter(or_( + Organisation.owners.contains([user.id]), + Organisation.admins.contains([user.id]), + Organisation.writers.contains([user.id]), + Organisation.readers.contains([user.id]) + )).all() + + user_organisation = next((o for o in organisations if o.owners == [user.id]), None) + if user_organisation: + abort(400, f"Can not close account because user is the only owner of organisation {user_organisation.name}") + + for org in organisations: + for key in ('owners', 'admins', 'writers', 'readers'): + value = set(getattr(org, key)) + if user.id in value: + value.remove(user.id) + setattr(org, key, list(value)) + db.session.add(org) + + # remove user reference from shared projects + shared_projects = Project.query \ + .filter(Project.namespace != account.namespace.name) \ + .filter(or_( + Project.access.has(ProjectAccess.owners.contains([user.id])), + Project.access.has(ProjectAccess.writers.contains([user.id])), + Project.access.has(ProjectAccess.readers.contains([user.id])) + )).all() + + for p in shared_projects: + for key in ('owners', 'writers', 'readers'): + value = set(getattr(p.access, key)) + if user.id in value: + value.remove(user.id) + setattr(p.access, key, list(value)) + db.session.add(p) + + # remove pending invitations + invitations = OrganisationInvitation.query.filter_by(username=user.username).all() + for i in invitations: + db.session.delete(i) + + else: + organisation = Organisation.query.get(account.owner_id) + invitations = OrganisationInvitation.query.filter_by(org_name=account.name()).all() + for i in invitations: + db.session.delete(i) + + # reset permissions for namespace's projects + projects = Project.query.filter_by(namespace=account.namespace.name).all() + for p in projects: + p.access.owners = [] + p.access.writers = [] + p.access.readers = [] + db.session.add(p) + + # remove pending project transfers (both directions) + transfers = ProjectTransfer.query.filter(or_( + ProjectTransfer.from_ns_name == account.namespace.name, + ProjectTransfer.to_ns_name == account.namespace.name + )).all() + for t in transfers: + db.session.delete(t) + + account.inactivated("delete") + + # inactivate account + owner = account.owner() + owner.active = False + owner.inactive_since = datetime.utcnow() + + db.session.add(account) + db.session.commit() + return '', 200 + +@account.route('/account/change_storage/', methods=['POST']) +@auth_required(permissions=['admin']) +def change_storage(account_id): # pylint: disable=W0613,W0612 + """ Change storage. 
+    namespace = Namespace.query.filter(Namespace.account_id == account_id).first_or_404(f'Namespace for accountId: {account_id} not found')
+    if not request.json.get("storage"):
+        abort(400, "Storage is empty")
+    try:
+        storage = int(request.json.get("storage"))
+    except Exception:
+        abort(400, "Storage is not a number")
+    namespace.storage = storage
+    db.session.commit()
+    return '', 200
+
+
+@account.route('/accounts/search', methods=['GET'])
+@auth_required(permissions=['admin'])
+def search_accounts_by_name():  # pylint: disable=W0613,W0612
+    """
+    Search accounts by the 'like' query parameter. Results are returned in this order:
+    1) match at the start of a word - ordered by id
+    2) match anywhere - ordered by id
+    """
+    from src.models.db_models import Account
+    from src.organisation import Organisation
+
+    query = db.session.query(
+        Account.id,
+        Account.type,
+        Organisation.name,
+        User.username
+    ) \
+        .outerjoin(Organisation, and_(Account.owner_id == Organisation.id, Account.type == "organisation")) \
+        .outerjoin(User, and_(Account.owner_id == User.id, Account.type == "user"))
+
+    like = request.args.get('like')
+    schema = AccountSearchSchema(many=True)
+    accounts = []
+    if like:
+        ilike = "{}%".format(like)
+        accounts = query.filter(and_(User.active == true(), (User.username.ilike(ilike) | User.username.op("~")(f'[\\.|\\-|_| ]{like}.*'))) |
+                                and_(Organisation.active == true(), Organisation.name.ilike(ilike))).limit(10).all()
+    return jsonify(schema.dump(accounts))
diff --git a/server/src/controllers/forms.py b/server/src/controllers/forms.py
new file mode 100644
index 00000000..63f3147c
--- /dev/null
+++ b/server/src/controllers/forms.py
@@ -0,0 +1,10 @@
+# Copyright (C) 2020 Lutra Consulting Limited. All rights reserved.
+# Do not distribute without the express permission of the author.
+
+from ..auth.forms import LoginForm
+
+
+class ApiLoginForm(LoginForm):
+    class Meta:
+        csrf = False
+
diff --git a/server/src/controllers/namespace_controller.py b/server/src/controllers/namespace_controller.py
new file mode 100644
index 00000000..e5eb945f
--- /dev/null
+++ b/server/src/controllers/namespace_controller.py
@@ -0,0 +1,29 @@
+# Copyright (C) 2020 Lutra Consulting Limited. All rights reserved.
+# Do not distribute without the express permission of the author.
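+
+# NOTE: check_access_to_namespace() below implements the write-access rule:
+# a user may target a namespace only if it matches their own username, or if
+# an organisation of that name lists them among its writers; otherwise 403.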
+ +from flask import jsonify, abort + +from ..auth import auth_required +from ..auth.models import User +from ..models.db_models import Namespace, Account +from ..models.schemas import NamespaceSchema +from ..organisation.models import Organisation + + +def check_access_to_namespace(namespace, user): + Namespace.query.filter_by(name=namespace).first_or_404() + org = Organisation.query.filter_by(name=namespace).first() + if user.username != namespace and not (org and user.id in org.writers): + abort(403, "Permission denied.") + + +@auth_required +def search_namespace(namespace_type, q=None): # pylint: disable=W0613,W0612 + """ Search namespace by query """ + namespaces = [] + if namespace_type == "user": + namespaces = Namespace.query.join(Namespace.account).join(User, User.username == Namespace.name).filter(User.active, Account.type == "user", Namespace.name.ilike(f"{q}%")).limit(5).all() if q else [] + elif namespace_type == "organisation": + namespaces = Namespace.query.join(Namespace.account).join(Organisation, Organisation.name == Namespace.name).filter(Organisation.active, Account.type == "organisation", Namespace.name.ilike(f"{q}%")).limit(5).all() if q else [] + data = NamespaceSchema(many=True).dump(namespaces) + return jsonify(data) diff --git a/server/src/controllers/project_controller.py b/server/src/controllers/project_controller.py new file mode 100644 index 00000000..583adb77 --- /dev/null +++ b/server/src/controllers/project_controller.py @@ -0,0 +1,929 @@ +# Copyright (C) 2018 Lutra Consulting Limited. All rights reserved. +# Do not distribute without the express permission of the author. + +import json +import mimetypes +import os +import logging +import copy +from urllib.parse import quote +import uuid +from time import time +from datetime import datetime, timedelta +import psycopg2 +from connexion import NoContent, request +from flask import abort, render_template, current_app, send_from_directory, jsonify, make_response +from pygeodiff import GeoDiffLibError +from sqlalchemy.orm import joinedload +from flask_login import current_user +from sqlalchemy.types import String +from sqlalchemy.dialects.postgresql import ARRAY +from sqlalchemy import cast, and_, or_, desc, asc +from sqlalchemy.exc import IntegrityError +from sqlalchemy.orm.attributes import flag_modified +from binaryornot.check import is_binary +from gevent import sleep + +from .. 
import db, wm, SIG_NEW_PROJECT +from ..auth import auth_required +from ..auth.models import User, UserProfile +from ..models.db_models import Project, ProjectAccess, ProjectVersion, Namespace, Upload, RemovedProject +from ..models.schemas import ProjectSchema, ProjectListSchema, ProjectVersionSchema, FileInfoSchema, ProjectSchemaForVersion +from ..organisation.models import Organisation +from ..storages.storage import FileNotFound, DataSyncError, InitializationError +from ..storages.disk import save_to_file, move_to_tmp +from ..permissions import require_project, projects_query, ProjectPermissions, get_upload +from ..mergin_utils import generate_checksum, Toucher, int_version, is_file_name_blacklisted, get_ip, get_user_agent, \ + generate_location, is_valid_uuid +from ..util import is_name_allowed, mergin_secure_filename, get_path_from_files +from ..celery import send_email_async +from .namespace_controller import check_access_to_namespace + + +def _project_version_files(project, version=None): + if version: + pv = next((v for v in project.versions if v.name == version), None) + if not pv: + abort(404, "Project version does not exist") + return pv.files + return project.files + + +@auth_required +def add_project(namespace, project): # noqa: E501 + """Add a new mergin project. + + # noqa: E501 + + :param project: Project object that needs to be added to the database. + :type project: dict | bytes + + :rtype: None + """ + if not is_name_allowed(project['name']): + abort(400, "Please use only alphanumeric or these -._~()'!*:@,; characters in project name.") + + project['name'] = project['name'].strip() + if request.is_json: + ua = get_user_agent(request) + check_access_to_namespace(namespace, current_user) + + proj = Project.query.filter_by(name=project['name'], namespace=namespace).first() + if proj: + abort(409, "Project {} already exists!".format(project['name'])) + + project['storage_params'] = {"type": "local", "location": generate_location()} + + p = Project(**project, creator=current_user, namespace=namespace) + p.updated = datetime.utcnow() + pa = ProjectAccess(p, public=project.get('public', False)) + + template = project.get('template', None) + ip = get_ip(request) + if template: + template = Project.query.\ + filter(Project.creator.has(username='TEMPLATES')).\ + filter(Project.name == template)\ + .first_or_404() + # create mutable object + temp_copy = copy.deepcopy(template) + temp_files = ProjectSchema(only=('files',)).dump(temp_copy) + changes = {"added": temp_files['files'], "renamed": [], "updated": [], "removed": []} + version = ProjectVersion(p, 'v1', current_user.username, changes, template.files, ip, ua) + p.latest_version = 'v1' + else: + changes = {"added": [], "renamed": [], "updated": [], "removed": []} + version = ProjectVersion(p, 'v0', current_user.username, changes, [], ip, ua) + p.latest_version = 'v0' + try: + p.storage.initialize(template_project=template) + except Exception as exc: + abort(400, f"Failed to initialize project: {str(exc)}") + + p.versions.append(version) + db.session.add(p) + db.session.add(pa) + db.session.commit() + + wm.emit_signal(SIG_NEW_PROJECT, request.path, msg=f'New project *{namespace}/{project["name"]}* has been created') + return NoContent, 200 + + +@auth_required +def delete_project(namespace, project_name): # noqa: E501 + """Delete a project. + + # noqa: E501 + + :param project_name: name of project to delete. 
+ :type project_name: str + + :rtype: None + """ + project = require_project(namespace, project_name, ProjectPermissions.Delete) + rm_project = RemovedProject(project, current_user.username) + db.session.add(rm_project) + db.session.delete(project) + db.session.commit() + return NoContent, 200 + + +def download_project(namespace, project_name, format=None, version=None): # noqa: E501 # pylint: disable=W0622 + """Download dir for single project. + + # noqa: E501 + + :param project_name: name of project to return. + :type project_name: str + :param format: output format + :type format: str [zip] + :param version: version tag name + :type version: str + + :rtype: None + """ + project = require_project(namespace, project_name, ProjectPermissions.Read) + files = _project_version_files(project, version) + try: + return project.storage.download_files(files, format, version=version) + except FileNotFound as e: + abort(404, str(e)) + + +def download_project_file(project_name, namespace, file, version=None, diff=None): # noqa: E501 + """Download project file + + Download individual file or it's diff file from project. # noqa: E501 + + :param project_name: Project name. + :type project_name: str + :param namespace: Namespace for project to look into. + :type namespace: str + :param file: Path to file. + :type file: str + :param version: Version tag. + :type version: str + :param diff: Ask for diff file instead of full one. + :type diff: bool + + :rtype: None + """ + project = require_project(namespace, project_name, ProjectPermissions.Read) + files = _project_version_files(project, version) + + if diff and version: + # get specific version of geodiff file modified in requested version + file_obj = next((f for f in files if f['location'] == os.path.join(version, file)), None) + if not file_obj: + abort(404, file) + if 'diff' not in file_obj: + abort(404, f"No diff in particular file {file} version") + file_path = file_obj['diff']['location'] + elif diff: + abort(400, f"Changeset must be requested for particular file version") + else: + # get latest version of file + file_path = next((f['location'] for f in files if f['path'] == file), None) + + if not file_path: + abort(404, file) + + if version and not diff: + project.storage.restore_versioned_file(file, version) + + if current_app.config['USE_X_ACCEL']: + # encoding for nginx to be able to download file with non-ascii chars + encoded_file_path = quote(file_path.encode("utf-8")) + resp = make_response() + resp.headers['X-Accel-Redirect'] = f"/download/{project.storage_params['location']}/{encoded_file_path}" + resp.headers['X-Accel-Buffering'] = True + resp.headers['X-Accel-Expires'] = 'off' + else: + resp = send_from_directory(project.storage.project_dir, file_path) + abs_path = os.path.join(project.storage.project_dir, file_path) + if not is_binary(abs_path): + mime_type = "text/plain" + else: + mime_type = mimetypes.guess_type(abs_path)[0] + resp.headers['Content-Type'] = mime_type + resp.headers['Content-Disposition'] = 'attachment; filename={}'.format(quote(os.path.basename(file).encode("utf-8"))) + return resp + + +def get_project(project_name, namespace, since='', version=None): # noqa: E501 + """Find project by name. + + Returns a single project with details about files including history for versioned files (diffs) if needed. # noqa: E501 + + :param project_name: Name of project to return. + :type project_name: str + :param namespace: Namespace for project to look into. 
+ :type namespace: str + :param since: Version to look up diff files history from. + :type since: str + :param version: Version to return files details for. + :type version: str + + :rtype: ProjectDetail + """ + project = require_project(namespace, project_name, ProjectPermissions.Read) + + if since and version: + abort(400, "Parameters 'since' and 'version' are mutually exclusive") + elif since: + # append history for versioned files + last_version = ProjectVersion.query.filter_by(project_id=project.id).order_by( + ProjectVersion.created.desc()).first() + for f in project.files: + f['history'] = project.file_history(f['path'], since, last_version.name) + data = ProjectSchema(exclude=['storage_params']).dump(project) + elif version: + # return project info at requested version + version_obj = next((v for v in project.versions if v.name == version), None) + if not version_obj: + abort(404, "Project at requested version does not exist") + data = ProjectSchemaForVersion().dump(version_obj) + else: + # return current project info + data = ProjectSchema(exclude=['storage_params']).dump(project) + return data, 200 + + +def get_project_versions(namespace, project_name, version_id=None): # noqa: E501 + """Get versions (history) of project. + + Returns a list of project versions with changes information. # noqa: E501 + + :param project_name: Name of project to return versions for. + :type project_name: str + :param version_id: + :type version_id: str + + :rtype: List[ProjectVersion] + """ + project = require_project(namespace, project_name, ProjectPermissions.Read) + query = ProjectVersion.query.filter(and_(ProjectVersion.project_id == project.id, ProjectVersion.name != "v0")) + if version_id: + query = ProjectVersion.query.filter_by(project_id=project.id).filter_by(name=version_id) + versions = query.order_by(ProjectVersion.created.desc()).all() + data = ProjectVersionSchema(exclude=['files'], many=True).dump(versions) + return data, 200 + + +def get_projects_by_names(data): # noqa: E501 + """List mergin projects by list of projects namespace and name. 
+ Returns limited list of projects + :rtype: Dict{namespace/projectName: Project] + """ + + list_of_projects = data.get('projects', []) + if len(list_of_projects) > 50: + abort(400, "Too many projects") + results = {} + for project in list_of_projects: + projects = projects_query(ProjectPermissions.Read, as_admin=False) + splitted = project.split("/") + if len(splitted) != 2: + results[project] = {"error": 404} + continue + namespace = splitted[0] + name = splitted[1] + result = projects.filter(Project.namespace == namespace, Project.name == name).first() + if result: + user_ids = result.access.owners + result.access.writers + result.access.readers + users_map = {u.id: u.username for u in User.query.filter(User.id.in_(set(user_ids))).all()} + results[project] = ProjectListSchema(context={'users_map': users_map}).dump(result) + else: + if not current_user or not current_user.is_authenticated: + results[project] = {"error": 401} + else: + results[project] = {"error": 403} + return results, 200 + + +def get_projects_by_uuids(uuids): # noqa: E501 + """Get mergin projects by list of projects ids + + Returns a list of projects filtered by ids # noqa: E501 + :rtype: dict{project.id: project} + """ + proj_ids = [uuid for uuid in uuids.split(',') if is_valid_uuid(uuid)] + if len(proj_ids) > 10: + abort(400, "Too many projects") + + user_ids = [] + projects = projects_query(ProjectPermissions.Read, as_admin=False).filter(Project.id.in_(proj_ids)).all() + for p in projects: + user_ids.extend(p.access.owners+p.access.writers+p.access.readers) + users_map = {u.id: u.username for u in User.query.filter(User.id.in_(set(user_ids))).all()} + data = ProjectListSchema(many=True, context={'users_map': users_map}).dump(projects) + return data, 200 + + +def get_projects(tags=None, q=None, user=None, flag=None, limit=None): # noqa: E501 + """List mergin projects. + + Returns limited list of projects, optionally filtered by tags, search query, username. 
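+
+    Example (hypothetical URL; the query parameters map to the arguments above):
+        GET /projects?tags=valid_qgis&q=survey&user=admin&flag=created&limit=20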
+ + :rtype: List[Project] + """ + projects = projects_query(ProjectPermissions.Read) + + if flag: + user = User.query.filter_by(username=user).first_or_404() if user else current_user + if not user.is_anonymous: + orgs = Organisation.query.with_entities(Organisation.name).filter( + or_(Organisation.admins.contains([user.id]), Organisation.readers.contains([user.id]), + Organisation.writers.contains([user.id]), Organisation.owners.contains([user.id]))) + if flag == 'created': + projects = projects.filter(Project.creator_id == user.id).filter_by(namespace=user.username) + if flag == 'shared': + projects = projects.filter(or_(and_(Project.creator_id != user.id, + Project.access.has(ProjectAccess.readers.contains([user.id]))), Project.namespace.in_(orgs))) + else: + abort(401) + if tags: + projects = projects.filter(Project.tags.contains(cast(tags, ARRAY(String)))) + + if q: + projects = projects.filter(Project.name.ilike('%{}%'.format(q))) + + proj_limit = limit if limit and limit < 100 else 100 + projects = projects.options(joinedload(Project.access)).order_by(Project.namespace, Project.name).limit(proj_limit).all() + # create user map id:username passed to project schema to minimize queries to db + user_ids = [] + for p in projects: + user_ids.extend(p.access.owners+p.access.writers+p.access.readers) + users_map = {u.id: u.username for u in User.query.filter(User.id.in_(set(user_ids))).all()} + sleep(0) # temporary yield to gevent hub until serialization is fully resolved (#317) + data = ProjectListSchema(many=True, context={'users_map': users_map}).dump(projects) + return data, 200 + + +def get_paginated_projects(page, per_page, order_params=None, order_by=None, descending=False, name=None, namespace=None, user=None, flag=None, last_updated_in=None, only_namespace=None, as_admin=False, public=True, only_public=False): # noqa: E501 + """List mergin projects. + + Returns dictionary with paginated list of projects, optionally filtered by tags, project name, username, namespace, updated date. + and number of total filtered projects + + :param page:page number + :param per_page results per page + :param order_by order by field name -deprecated + :param descending order of sort -deprecated + :param order_params fields to sort e.g. name_asc, updated_desc + :param name filter by project name + :param namespace filter by project namespace + :param user Username for 'flag' filter. 
If not provided, in case that is not provided uses logged user + :param flag created or shared + :param: last_updated_in for filter projects by days from last update + :param only_namespace Filter namespace equality to in contrast with namespace attribute which is determinated to search (like) + :param as_admin User access as admin + :param public should return any public project, if false filter only projects where user has explicit read permission to project + :param only_public should return only public projects + + :rtype: Dictionary{ + projects: List[Project], + count: Integer + """ + if only_public: + projects = Project.query.filter(Project.access.has(public=only_public)) + else: + projects = projects_query(ProjectPermissions.Read, as_admin=as_admin, public=public) + + if flag: + user = User.query.filter_by(username=user).first_or_404() if user else current_user + if not user.is_anonymous: + if flag == 'created': + projects = projects.filter(Project.creator_id == user.id).filter_by(namespace=user.username) + if flag == 'shared': + orgs = Organisation.query.with_entities(Organisation.name).filter( + or_(Organisation.admins.contains([user.id]), Organisation.readers.contains([user.id]), + Organisation.writers.contains([user.id]), Organisation.owners.contains([user.id]))) + projects = projects.filter(or_(and_(Project.creator_id != user.id, + Project.access.has(ProjectAccess.readers.contains([user.id]))), Project.namespace.in_(orgs))) + else: + abort(401) + + if name: + projects = projects.filter(Project.name.ilike('%{}%'.format(name)) | Project.namespace.ilike('%{}%'.format(name))) + + if namespace: + projects = projects.filter(Project.namespace.ilike('%{}%'.format(namespace))) + + if only_namespace: + projects = projects.filter(Project.namespace == only_namespace) + + if last_updated_in: + projects = projects.filter(Project.updated >= datetime.utcnow() - timedelta(days=last_updated_in)) + + projects = projects.options(joinedload(Project.access)) + + if order_params: + order_by_params = [] + for p in order_params.split(","): + string_param = p.strip() + if "_asc" in string_param: + string_param = string_param.replace("_asc", "") + order_by_params.append(Project.__table__.c[string_param].asc()) + elif "_desc" in string_param: + string_param = string_param.replace("_desc", "") + order_by_params.append(Project.__table__.c[string_param].desc()) + projects = projects.order_by(*order_by_params) + elif descending and order_by: + projects = projects.order_by(desc(Project.__table__.c[order_by])) + elif not descending and order_by: + projects = projects.order_by(asc(Project.__table__.c[order_by])) + + result = projects.paginate(page, per_page).items + total = projects.paginate(page, per_page).total + + # create user map id:username passed to project schema to minimize queries to db + user_ids = [] + for p in result: + user_ids.extend(p.access.owners+p.access.writers+p.access.readers) + + users_map = {u.id: u.username for u in User.query.filter(User.id.in_(set(user_ids))).all()} + sleep(0) # temporary yield to gevent hub until serialization is fully resolved (#317) + data = ProjectListSchema(many=True, context={'users_map': users_map}).dump(result) + data = {'projects': data, + 'count': total} + return data, 200 + +@auth_required +def update_project(namespace, project_name, data): # noqa: E501 # pylint: disable=W0613 + """Update an existing project. + + # noqa: E501 + + :param project_name: Name of project that need to be updated. + :type project_name: str + :param data: Data to be updated. 
+ :type data: dict | bytes + + :rtype: Project + """ + project = require_project(namespace, project_name, ProjectPermissions.Update) + access = data.get('access', {}) + id_diffs = [] + + #transform usernames from client to ids + if "ownersnames" in access: + owners = User.query.with_entities(User.id).filter(User.username.in_(access['ownersnames'])).all() + access["owners"] = [w.id for w in owners] + if "readersnames" in access: + readers = User.query.with_entities(User.id).filter(User.username.in_(access['readersnames'])).all() + access["readers"] = [w.id for w in readers] + if "writersnames" in access: + writers = User.query.with_entities(User.id).filter(User.username.in_(access['writersnames'])).all() + access["writers"] = [w.id for w in writers] + + # prevent to remove ownership of project creator + if 'owners' in access: + if project.creator_id not in access['owners']: + abort(400, str('Ownership of project creator cannot be removed.')) + + for key, value in access.items(): + if not hasattr(project.access, key): + continue + if isinstance(value, list): + id_diffs.append(set(value) ^ set(getattr(project.access, key))) + setattr(project.access, key, value) + + db.session.add(project) + db.session.commit() + + users_ids = set().union(*id_diffs) + user_profiles = UserProfile.query.filter(UserProfile.user_id.in_(users_ids)).all() + project_path = '/'.join([namespace, project.name]) + web_link = f"{request.url_root.strip('/')}/projects/{project_path}" + for user_profile in user_profiles: + privileges = [] + if user_profile.user.id in project.access.owners: + privileges += ['edit', 'remove'] + if user_profile.user.id in project.access.writers: + privileges.append('upload') + if user_profile.user.id in project.access.readers: + privileges.append('download') + subject = "Project access modified" + if len(privileges): + html = render_template('email/modified_project_access.html', subject=subject, project=project, user=user_profile.user, + privileges=privileges, link=web_link) + else: + html = render_template('email/removed_project_access.html', subject=subject, project=project, user=user_profile.user) + + if not (user_profile.receive_notifications and user_profile.user.verified_email): + continue + email_data = { + 'subject': f'Access to mergin project {project_path} has been modified', + 'html': html, + 'recipients': [user_profile.user.email], + 'sender': current_app.config['MAIL_DEFAULT_SENDER'] + } + send_email_async.delay(**email_data) + + return ProjectSchema().dump(project), 200 + + +@auth_required +def project_push(namespace, project_name, data): + """ + Synchronize project data. + + Apply changes in project if no uploads required. Creates upload transaction for added/modified files. # noqa: E501 + + :param namespace: Namespace for project to look into. + :type namespace: str + :param project_name: Project name. + :type project_name: str + :param data: Description of project changes. 
+ :type data: dict | bytes + + :rtype: None + """ + version = data.get('version') + changes = data['changes'] + project = require_project(namespace, project_name, ProjectPermissions.Upload) + pv = project.versions[0] if project.versions else None + if pv and pv.name != version: + abort(400, 'Version mismatch') + if not pv and version != 'v0': + abort(400, 'First push should be with v0') + + if all(len(changes[key]) == 0 for key in changes.keys()): + abort(400, 'No changes') + + # check if same file is not already uploaded + for item in changes["added"]: + if not all(ele['path'] != item['path'] for ele in project.files): + abort(400, 'File {} has been already uploaded'.format(item["path"])) + + # changes' files must be unique + changes_files = [] + sanitized_files = [] + blacklisted_files = [] + for change in changes.values(): + for f in change: + if is_file_name_blacklisted(f['path'], current_app.config['BLACKLIST']): + blacklisted_files.append(f) + # all file need to be unique after sanitized + f['sanitized_path'] = mergin_secure_filename(f['path']) + if f['sanitized_path'] in sanitized_files: + filename, file_extension = os.path.splitext(f['sanitized_path']) + f['sanitized_path'] = filename + f'.{str(uuid.uuid4())}' + file_extension + sanitized_files.append(f['sanitized_path']) + if 'diff' in f: + f['diff']['sanitized_path'] = mergin_secure_filename(f['diff']['path']) + if f['diff']['sanitized_path'] in sanitized_files: + filename, file_extension = os.path.splitext(f['diff']['sanitized_path']) + f['diff']['sanitized_path'] = filename + f'.{str(uuid.uuid4())}' + file_extension + changes_files.append(f['path']) + if len(set(changes_files)) != len(changes_files): + abort(400, 'Not unique changes') + + # remove blacklisted files from changes + for key, change in changes.items(): + files_to_upload = [f for f in change if f not in blacklisted_files] + changes[key] = files_to_upload + + # Convert datetimes to UTC + for key in changes.keys(): + for f in changes[key]: + f['mtime'] = datetime.utcnow() + + num_version = int_version(version) + + # Check user data limit + updates = [f['path'] for f in changes['updated']] + updated_files = list(filter(lambda i: i['path'] in updates, project.files)) + additional_disk_usage = sum(file['size'] for file in changes['added'] + changes['updated']) - \ + sum(file['size'] for file in updated_files) - sum(file['size'] for file in changes["removed"]) + ns = Namespace.query.filter_by(name=project.namespace).first() + if ns.disk_usage() + additional_disk_usage > ns.storage: + abort(400, 'You have reached a data limit') + + upload = Upload(project, num_version, changes, current_user.id) + db.session.add(upload) + try: + # Creating upload transaction with different project's version is possible. + db.session.commit() + except IntegrityError: + db.session.rollback() + # check and clean dangling uploads or abort + for current_upload in project.uploads.all(): + upload_dir = os.path.join(project.storage.project_dir, 'tmp', current_upload.id) + upload_lockfile = os.path.join(upload_dir, 'lockfile') + if os.path.exists(upload_lockfile): + if time() - os.path.getmtime(upload_lockfile) < current_app.config['LOCKFILE_EXPIRATION']: + abort(400, 'Another process is running. Please try later.') + db.session.delete(current_upload) + db.session.commit() + + + # Try again after cleanup + db.session.add(upload) + try: + db.session.commit() + move_to_tmp(upload_dir) + except IntegrityError: + abort(422, 'Failed to create upload session. 
Please try later.') + + # Create transaction folder and lockfile + folder = os.path.join(project.storage.project_dir, "tmp", upload.id) + os.makedirs(folder) + open(os.path.join(folder, 'lockfile'), 'w').close() + + # Update immediately without uploading of new/modified files, and remove transaction/lockfile + if not(changes['added'] or changes['updated']): + next_version = "v{}".format(num_version + 1) + project.storage.apply_changes(changes, next_version, upload.id) + flag_modified(project, 'files') + project.disk_usage = sum(file['size'] for file in project.files) + user_agent = get_user_agent(request) + pv = ProjectVersion(project, next_version, current_user.username, changes, project.files, get_ip(request), user_agent) + project.latest_version = next_version + db.session.add(pv) + db.session.add(project) + db.session.delete(upload) + db.session.commit() + move_to_tmp(folder) + return jsonify(ProjectSchema().dump(project)), 200 + + return {'transaction': upload.id} + + +@auth_required +def chunk_upload(transaction_id, chunk_id): + """Upload file chunk as defined in upload transaction. + + # noqa: E501 + + :param transaction_id: Transaction id. + :type transaction_id: str + :param chunk_id: Chunk id. + :type chunk_id: str + + :rtype: None + """ + upload, upload_dir = get_upload(transaction_id) + for f in upload.changes["added"] + upload.changes["updated"]: + if "chunks" in f and chunk_id in f["chunks"]: + dest = os.path.join(upload_dir, "chunks", chunk_id) + lockfile = os.path.join(upload_dir, "lockfile") + with Toucher(lockfile, 30): + try: + # we could have used request.data here but it could eventually cause OOM issue + save_to_file(request.stream, dest, current_app.config['MAX_CHUNK_SIZE']) + except IOError: + move_to_tmp(dest, transaction_id) + abort(400, "Too big chunk") + if os.path.exists(dest): + checksum = generate_checksum(dest) + size = os.path.getsize(dest) + return jsonify({ + "checksum": checksum, + "size": size + }), 200 + else: + abort(400, 'Upload was probably canceled') + abort(404) + + +@auth_required +def push_finish(transaction_id): + """Finalize project data upload. + + Steps involved in finalization: + - merge chunks together (if there are some) + - do integrity check comparing uploaded file sizes with what was expected + - move uploaded files to new version dir and applying sync changes (e.g. geodiff apply_changeset) + - bump up version in database + - remove artifacts (chunks, lockfile) by moving them to tmp directory + + # noqa: E501 + + :param transaction_id: Transaction id. 
+    :type transaction_id: str
+
+    :rtype: None
+    """
+    upload, upload_dir = get_upload(transaction_id)
+    changes = upload.changes
+    upload_files = changes["added"] + changes["updated"]
+    project = upload.project
+    project_path = os.path.join(project.namespace, project.name)
+    corrupted_files = []
+
+    for f in upload_files:
+        if "diff" in f:
+            dest_file = os.path.join(
+                upload_dir, "files", get_path_from_files(
+                    upload_files, f["diff"]["path"], is_diff=True))
+            expected_size = f["diff"]["size"]
+        else:
+            dest_file = os.path.join(
+                upload_dir, "files", get_path_from_files(upload_files, f["path"]))
+            expected_size = f["size"]
+        if "chunks" in f:
+            # Concatenate chunks into a single file
+            # TODO we need to move this elsewhere since it can fail for large files (and slow FS)
+            os.makedirs(os.path.dirname(dest_file), exist_ok=True)
+            with open(dest_file, "wb") as dest:
+                try:
+                    for chunk_id in f["chunks"]:
+                        sleep(0)  # to unblock greenlet
+                        chunk_file = os.path.join(upload_dir, "chunks", chunk_id)
+                        with open(chunk_file, "rb") as src:
+                            data = src.read(8192)
+                            while data:
+                                dest.write(data)
+                                data = src.read(8192)
+                except IOError:
+                    logging.exception("Failed to process chunk: %s in project %s" % (chunk_id, project_path))
+                    corrupted_files.append(f["path"])
+                    continue
+
+        if expected_size != os.path.getsize(dest_file):
+            logging.error("Data integrity check has failed on file %s in project %s" % (f["path"], project_path), exc_info=True)
+            corrupted_files.append(f["path"])
+
+    if corrupted_files:
+        move_to_tmp(upload_dir)
+        abort(422, {"corrupted_files": corrupted_files})
+
+    next_version = "v{}".format(upload.version + 1)
+    files_dir = os.path.join(upload_dir, "files")
+    target_dir = os.path.join(project.storage.project_dir, next_version)
+    if os.path.exists(target_dir):
+        pv = project.versions[0] if project.versions else None
+        if pv and pv.name == next_version:
+            abort(409, "There is already a version with the name %s" % next_version)
+        logging.info("Upload transaction: Target directory already exists. Overwriting %s" % target_dir)
+        move_to_tmp(target_dir)
+
+    try:
+        # let's move uploaded files where they are expected to be
+        os.renames(files_dir, target_dir)
+        project.storage.apply_changes(changes, next_version, transaction_id)
+        flag_modified(project, "files")
+        project.disk_usage = sum(file['size'] for file in project.files)
+
+        user_agent = get_user_agent(request)
+        pv = ProjectVersion(project, next_version, current_user.username, changes, project.files, get_ip(request), user_agent)
+        project.latest_version = next_version
+        db.session.add(pv)
+        db.session.add(project)
+        db.session.delete(upload)
+        db.session.commit()
+        # remove artifacts
+        move_to_tmp(upload_dir, transaction_id)
+    except (psycopg2.Error, FileNotFoundError, DataSyncError) as err:
+        move_to_tmp(upload_dir)
+        abort(422, "Failed to create new version: {}".format(str(err)))
+
+    project.storage.optimize_storage()
+    return jsonify(ProjectSchema().dump(project)), 200
+
+
+@auth_required
+def push_cancel(transaction_id):
+    """Cancel upload transaction.
+
+    # noqa: E501
+
+    :param transaction_id: Transaction id.
+    :type transaction_id: str
+
+    :rtype: None
+    """
+    upload, upload_dir = get_upload(transaction_id)
+    db.session.delete(upload)
+    db.session.commit()
+    move_to_tmp(upload_dir)
+    return jsonify({"success": True}), 200
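+
+
+# A typical client sync flow against the endpoints above (illustrative sketch):
+#   1. push_project  - announce changes; returns {'transaction': <id>}, unless there
+#                      are no new/updated files, in which case a new version is
+#                      created immediately.
+#   2. chunk_upload  - upload every chunk listed in the announced changes.
+#   3. push_finish   - assemble chunks, verify sizes and create the new version,
+#      or push_cancel - drop the pending transaction.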
+
+
+@auth_required
+def clone_project(namespace, project_name, destination=None):  # noqa: E501
+    """Clone project.
+
+    Clone project to another namespace. Only the most recent version is copied over; history is lost.
+    Destination namespace and project name can optionally be set in query parameters,
+    otherwise the request user and the name of the cloned project are used. # noqa: E501
+
+    :param namespace: Namespace for project to look into.
+    :type namespace: str
+    :param project_name: Project name.
+    :type project_name: str
+    :param destination: Destination (namespace and project name) where project should be cloned.
+    :type destination: dict | bytes
+
+    :rtype: None
+    """
+    cloned_project = require_project(namespace, project_name, ProjectPermissions.Read)
+    dest_ns = destination.get('namespace', current_user.username).strip()
+    dest_project = destination.get('project', cloned_project.name).strip()
+
+    check_access_to_namespace(dest_ns, current_user)
+
+    _project = Project.query.filter_by(name=dest_project, namespace=dest_ns).first()
+    if _project:
+        abort(409, "Project {}/{} already exists!".format(dest_ns, dest_project))
+
+    p = Project(
+        name=dest_project,
+        storage_params={"type": "local", "location": generate_location()},
+        creator=current_user,
+        namespace=dest_ns
+    )
+    p.updated = datetime.utcnow()
+    pa = ProjectAccess(p, public=False)
+
+    try:
+        p.storage.initialize(template_project=cloned_project)
+    except InitializationError as e:
+        abort(400, f"Failed to clone project: {str(e)}")
+
+    version = "v1" if p.files else "v0"
+    changes = {"added": p.files, "renamed": [], "updated": [], "removed": []}
+    user_agent = get_user_agent(request)
+    p.latest_version = version
+    version = ProjectVersion(p, version, current_user.username, changes, p.files, get_ip(request), user_agent)
+    p.versions.append(version)
+    db.session.add(p)
+    db.session.add(pa)
+    db.session.commit()
+    wm.emit_signal(SIG_NEW_PROJECT, request.path, msg=f'New project *{dest_ns}/{dest_project}* has been cloned')
+    return NoContent, 200
+
+
+def get_resource_history(project_name, namespace, path):  # noqa: E501
+    """History of project resource (file)
+
+    Look up project versions to get the history of changes for a particular file # noqa: E501
+
+    :param project_name: Project name.
+    :type project_name: str
+    :param namespace: Namespace the project belongs to.
+    :type namespace: str
+    :param path: Path to file in project.
+    :type path: str
+
+    :rtype: FileInfo
+    """
+    project = require_project(namespace, project_name, ProjectPermissions.Read)
+    file = next((f for f in project.files if f['path'] == path), None)
+    if not file:
+        abort(404, path)
+
+    last_version = ProjectVersion.query.filter_by(project_id=project.id).order_by(
+        ProjectVersion.created.desc()).first_or_404()
+    file['history'] = project.file_history(file['path'], 'v1', last_version.name)
+    file_info = FileInfoSchema(context={'project_dir': project.storage.project_dir}).dump(file)
+    return file_info, 200
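+
+
+# Listing a changeset via geodiff is a relatively expensive operation, so the result
+# below is cached on disk next to the version file (as '<location>-diff-changeset')
+# and recomputed only when the cache file does not exist yet.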
+
+
+def get_resource_changeset(project_name, namespace, version_id, path):  # noqa: E501
+    """ Changeset of the resource (file)
+
+    Calculate geodiff changeset for a particular file and a particular project version # noqa: E501
+
+    :param project_name: Project name.
+    :type project_name: str
+    :param namespace: Namespace the project belongs to.
+    :type namespace: str
+    :param version_id: Version id of the file.
+    :type version_id: str
+    :param path: Path to file in project.
+    :type path: str
+
+    :rtype: [GeodiffChangeset]
+    """
+    project = require_project(namespace, project_name, ProjectPermissions.Read)
+    if not project:
+        abort(404, f"Project {namespace}/{project_name} not found")
+
+    version = ProjectVersion.query.filter_by(project_id=project.id, name=version_id).first()
+    if not version:
+        abort(404, f"Version {version_id} in project {namespace}/{project_name} not found")
+
+    file = next((f for f in version.files if f['location'] == os.path.join(version_id, path)), None)
+    if not file:
+        abort(404, f"File {path} not found")
+
+    if 'diff' not in file:
+        abort(404, "Diff not found")
+
+    changeset = os.path.join(version.project.storage.project_dir, file['diff']['location'])
+    json_file = os.path.join(version.project.storage.project_dir, file['location'] + '-diff-changeset')
+    if not os.path.exists(json_file):
+        try:
+            version.project.storage.geodiff.list_changes(changeset, json_file)
+        except GeoDiffLibError as e:
+            abort(422, f"Changeset could not be calculated: {str(e)}")
+
+    with open(json_file, 'r') as jf:
+        content = json.load(jf)
+        if 'geodiff' not in content:
+            abort(422, "Expected format does not match response from Geodiff")
+
+    return content['geodiff'], 200
diff --git a/server/src/controllers/project_transfer_controller.py b/server/src/controllers/project_transfer_controller.py
new file mode 100644
index 00000000..fde405f0
--- /dev/null
+++ b/server/src/controllers/project_transfer_controller.py
@@ -0,0 +1,208 @@
+# Copyright (C) 2020 Lutra Consulting Limited. All rights reserved.
+# Do not distribute without the express permission of the author.
+
+from datetime import datetime, timedelta
+from connexion import NoContent, request
+from flask import abort, render_template, current_app
+from flask_login import current_user
+from sqlalchemy.orm.attributes import flag_modified
+from sqlalchemy.orm.session import make_transient
+from sqlalchemy import or_
+
+from .. import db
+from ..models.db_models import Project, ProjectTransfer, Namespace, Account, AccessRequest
+from ..models.schemas import ProjectTransferSchema
+
+from ..auth import auth_required
+from ..auth.models import User
+from ..permissions import require_project, ProjectPermissions, check_namespace_permissions
+from .. import wm, SIG_PROJECT_TRANSFERED
+from ..organisation.models import Organisation
+
+
+@auth_required
+def get_project_transfers(namespace):  # noqa: E501
+    """List project transfers.
+
+    :rtype: List[ProjectTransfer]
+    """
+    if not (check_namespace_permissions(namespace, current_user, 'admin') or current_user.is_admin):
+        abort(403)
+    transfers = ProjectTransfer.query.filter(or_(
+        ProjectTransfer.to_ns_name == namespace,
+        ProjectTransfer.from_ns_name == namespace)
+    ).all()
+    data = ProjectTransferSchema(many=True).dump(transfers)
+    return data, 200
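+
+
+# Illustrative request payload for request_transfer below (the value is hypothetical;
+# only the 'namespace' key is read by the handler):
+#   {"namespace": "target-namespace"}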
+
+
+@auth_required
+def request_transfer(namespace, project_name, data=None):
+    """ Request project transfer.
+
+    Request a transfer of the project to another namespace.
+
+    :param namespace: Namespace for project to look into.
+    :type namespace: str
+    :param project_name: Project name.
+    :type project_name: str
+    :param data: Request payload - destination namespace.
+    :type data: dict | bytes
+    :rtype: None
+    """
+    from ..celery import send_email_async
+
+    project = require_project(namespace, project_name, ProjectPermissions.All)
+    dest_ns = data.get('namespace', None)
+    if not dest_ns:
+        abort(400, "Missing destination namespace")
+    to_ns = Namespace.query.filter_by(name=dest_ns).first_or_404(f"{dest_ns} namespace not found")
+    pt = ProjectTransfer.query.filter_by(project_id=project.id, from_ns_name=project.namespace).first()
+    if pt:
+        abort(409, f"The project {project.namespace}/{project.name} is already in a transfer process")
+    try:
+        transfer = ProjectTransfer(project, to_ns, current_user.id)
+        db.session.add(transfer)
+        db.session.commit()
+
+        if to_ns.account.type == "user":
+            user = User.query.get(to_ns.account.owner_id)
+            users = [user]
+            link = f"{request.url_root.rstrip('/')}/users/{user.username}/projects"
+        else:
+            org = Organisation.query.get(to_ns.account.owner_id)
+            users = User.query.filter(User.id.in_(org.admins)).all()
+            link = f"{request.url_root.rstrip('/')}/organisations/{org.name}/projects"
+        for user in users:
+            body = render_template(
+                'email/project_transfer_request.html',
+                subject="Project transfer requested",
+                username=user.username,
+                project_name=project_name,
+                namescape_to=dest_ns,
+                namespace_from=namespace,
+                link=link,
+                expire=datetime.utcnow() + timedelta(seconds=current_app.config['TRANSFER_EXPIRATION'])
+            )
+            email_data = {
+                'subject': 'Mergin project transfer request',
+                'html': body,
+                'recipients': [user.email],
+                'sender': current_app.config['MAIL_DEFAULT_SENDER']
+            }
+            send_email_async.delay(**email_data)
+
+        return NoContent, 201
+    except ProjectTransfer.TransferError as e:
+        abort(400, str(e))
+
+
+@auth_required
+def delete_transfer_project(id):
+    """ Delete a project transfer request.
+
+    :param id: project transfer id.
+    :type id: str
+
+    :rtype: None
+    """
+    project_transfer = ProjectTransfer.query.filter_by(id=id).first_or_404("Project transfer not found")
+    if not check_namespace_permissions(project_transfer.from_ns_name, current_user, 'admin') and not check_namespace_permissions(project_transfer.to_ns_name, current_user, 'admin'):
+        abort(403, "You don't have permission to transfer this project")
+    db.session.delete(project_transfer)
+    db.session.commit()
+    return NoContent, 200
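+
+
+# Illustrative request payload for execute_transfer_project below (values are
+# hypothetical; both keys are optional and read via data.get()):
+#   {"name": "new-project-name", "transfer_permissions": true}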
+
+
+@auth_required
+def execute_transfer_project(id, data=None):
+    """ Execute a project transfer.
+
+    Only the project namespace/name is modified. Files are saved on disk independently of the project owner, hence they are not touched.
+
+    :param id: project transfer id.
+    :type id: str
+    :param data: payload of post request
+    :type data: dict
+    :rtype: None
+    """
+    project_transfer = ProjectTransfer.query.filter_by(id=id).first_or_404("Project transfer not found")
+    if not check_namespace_permissions(project_transfer.to_ns_name, current_user, 'admin'):
+        abort(403, "You don't have permission to transfer this project")
+
+    # check whether the user chose a new project name
+    old_project_name = project_transfer.project.name
+    old_project_id = project_transfer.project.id
+    old_namespace = project_transfer.from_ns_name
+    new_project_name = data.get('name', project_transfer.project.name)
+    new_namespace = project_transfer.to_ns_name
+    transfer_permission = data.get('transfer_permissions', True)
+
+    # validate that the project does not already exist in the new namespace
+    if Project.query.filter_by(name=new_project_name, namespace=project_transfer.to_ns.name).first():
+        abort(409, f"Project {project_transfer.to_ns.name}/{new_project_name} already exists")
+
+    # check if there is an ongoing upload
+    if project_transfer.project.uploads.first():
+        abort(400, f"There is an ongoing upload for {project_transfer.from_ns_name}/{project_transfer.project.name}. "
+                   f"Please try later")
+
+    # check if expired
+    if project_transfer.is_expired():
+        abort(400, "The request has already expired")
+
+    # check if the new owner has enough disk space to host the project
+    new_ns = Namespace.query.filter_by(name=project_transfer.to_ns_name).first()
+    if new_ns.disk_usage() + project_transfer.project.disk_usage > new_ns.storage:
+        abort(400, "Disk quota reached")
+
+    new_owner = new_ns.account.owner()
+    if isinstance(new_owner, User):
+        new_owner_id = new_owner.id
+    elif isinstance(new_owner, Organisation):
+        owner_user = User.query.filter_by(id=new_owner.owners[0]).first()
+        if not owner_user:
+            abort(400, "Target organisation does not have an owner to accept transfer")
+        new_owner_id = owner_user.id
+    else:
+        assert False
+
+    # all checks passed - let's transfer it
+    # delete ongoing project access requests
+    AccessRequest.query.filter(AccessRequest.namespace == old_namespace, AccessRequest.project_id == old_project_id).delete()
+    db.session.commit()
+
+    # change namespace/name
+    project = project_transfer.project
+    project.name = new_project_name
+    project.namespace = project_transfer.to_ns.name
+
+    # change creator id to the new owner, either a new user or the first owner of the organisation
+    project.creator_id = new_owner_id
+
+    # clean permissions if the new owner decided so, or just append the new owner
+    if not transfer_permission:
+        project.access.owners = [new_owner_id]
+        project.access.readers = [new_owner_id]
+        project.access.writers = [new_owner_id]
+    else:
+        if new_owner_id not in project.access.owners:
+            project.access.owners.append(new_owner_id)
+        if new_owner_id not in project.access.readers:
+            project.access.readers.append(new_owner_id)
+        if new_owner_id not in project.access.writers:
+            project.access.writers.append(new_owner_id)
+
+    db.session.add(project)
+    flag_modified(project.access, "owners")
+    flag_modified(project.access, "writers")
+    flag_modified(project.access, "readers")
+    db.session.commit()
+
+    wm.emit_signal(
+        SIG_PROJECT_TRANSFERED,
+        request.path,
+        msg=f'Project *{old_namespace}/{old_project_name}* has been transferred to *{new_namespace}/{new_project_name}*')
+    return NoContent, 200
diff --git a/server/src/controllers/user_controller.py b/server/src/controllers/user_controller.py
new file mode 100644
index 00000000..fec7e729
--- /dev/null
+++ b/server/src/controllers/user_controller.py
@@ 
-0,0 +1,103 @@ +# Copyright (C) 2018 Lutra Consulting Limited. All rights reserved. +# Do not distribute without the express permission of the author. +import pytz +from datetime import datetime, timedelta +from flask_login import current_user +from flask import current_app, abort, request + +from .forms import ApiLoginForm +from ..models.db_models import Namespace +from ..auth import auth_required, authenticate +from ..auth.bearer import encode_token +from ..auth.models import User, LoginHistory +from ..auth.schemas import UserProfileSchema + + +def user_profile(user, return_all=True): + """ Return user profile in json format + + Will return just some public profile if not return_all + + :param user: User data that will be returned + :type user: User + + :return: extended user profile with mergin service info + :rtype: dict + """ + data = UserProfileSchema().dump(user.profile) + data['username'] = user.username + data['id'] = user.id + + if return_all: + ns = Namespace.query.filter_by(name=user.username).first() + user_disk_space = ns.disk_usage() + data.update({ + "email": user.email, + "disk_usage": user_disk_space, + "storage_limit": ns.storage, + "receive_notifications": user.profile.receive_notifications, + "verified_email": user.verified_email, + "tier": "free", + "registration_date": user.profile.registration_date + }) + return data + + +@auth_required +def get_user(username=None): # noqa: E501 + """ Return user profile """ + return user_profile(current_user, return_all=True) + + +def _extract_first_error(errors): + """ + For now, if the response is plain string, + InputApp displays it in the nice + notification window. Extract first error + and just send that one to client + """ + for key, value in errors.items(): + val = str(value[0]) + if key.lower() in val.lower(): + # e.g. Passwords must contain special character. + # in this case we do not need to add key "password" + # since it is obvious from where it comes from + return val + elif val.startswith("Field"): + # e.g. Field must be longer than 4 characters + # In this case the generic validator use Field + # replace with the key (e.g. "Username") + return val.replace("Field", key.capitalize()) + else: + # show both key and value + return val + "(" + key + ")" + + return "Unknown error in input fields" + + +def login(): # noqa: E501 + form = ApiLoginForm() + if form.validate(): + user = authenticate(form.login.data, form.password.data) + if user and user.active: + expire = datetime.now(pytz.utc) + timedelta(seconds=current_app.config['BEARER_TOKEN_EXPIRATION']) + token_data = { + "user_id": user.id, + "username": user.username, + "email": user.email, + "expire": str(expire) + } + token = encode_token(current_app.config['SECRET_KEY'], token_data) + + data = user_profile(user) + data["session"] = { + "token": token, + "expire": expire + } + LoginHistory.add_record(user.username, request) + return data + elif not user: + abort(401, 'Invalid username or password') + elif not user.active: + abort(401, 'Account is not activated') + abort(400, _extract_first_error(form.errors)) diff --git a/server/src/db_events.py b/server/src/db_events.py new file mode 100644 index 00000000..6a30ba96 --- /dev/null +++ b/server/src/db_events.py @@ -0,0 +1,191 @@ +# Copyright (C) 2020 Lutra Consulting Limited. All rights reserved. +# Do not distribute without the express permission of the author. +import os +from flask import render_template, current_app, abort +from sqlalchemy import event + +from . 
import db
+from .auth.models import User, UserProfile
+from .models.db_models import Namespace, Project, ProjectAccess, Account, RemovedProject
+from .organisation import Organisation, OrganisationInvitation
+from .celery import send_email_async
+from .storages.disk import move_to_tmp
+
+
+def before_namespace_delete(mapper, connection, namespace):  # pylint: disable=W0612
+    """ Remove namespace projects including project files on disk. Also remove project backups kept for restore """
+    projects = Project.query.filter_by(namespace=namespace.name).all()
+    for project in projects:
+        if os.path.exists(project.storage.project_dir):
+            project.storage.delete()
+
+    removed_projects = RemovedProject.query.filter_by(namespace=namespace.name).all()
+    rp_table = RemovedProject.__table__
+    for rp in removed_projects:
+        rp_dir = os.path.abspath(os.path.join(current_app.config['LOCAL_PROJECTS'], rp.properties["storage_params"]["location"]))
+        if os.path.exists(rp_dir):
+            move_to_tmp(rp_dir)
+        connection.execute(rp_table.delete().where(rp_table.c.id == rp.id))
+
+
+def add_user_namespace(mapper, connection, user):  # pylint: disable=W0612
+    ns = Namespace.query.filter_by(name=user.username).first()
+    if ns:
+        abort(400, "Namespace already exists")
+    account_table = Account.__table__
+    connection.execute(account_table.insert().values(owner_id=user.id, type="user"))
+    account = Account.query.filter_by(type='user', owner_id=user.id).first()
+    ns_table = Namespace.__table__
+    connection.execute(ns_table.insert().values(name=user.username, account_id=account.id))
+    # emit signal that account has been created
+    account.created(connection)
+
+
+def remove_user_references(mapper, connection, user):  # pylint: disable=W0612
+    q = Project.access.has(ProjectAccess.owners.contains([user.id])) \
+        | Project.access.has(ProjectAccess.writers.contains([user.id])) \
+        | Project.access.has(ProjectAccess.readers.contains([user.id]))
+    projects = Project.query.filter(q).all()
+
+    def filter_user(ids):
+        return [i for i in ids if i != user.id]
+
+    if projects:
+        pa_table = ProjectAccess.__table__
+        for p in projects:
+            pa = p.access
+            connection.execute(
+                pa_table.update().where(pa_table.c.project_id == p.id),
+                owners=filter_user(pa.owners),
+                writers=filter_user(pa.writers),
+                readers=filter_user(pa.readers)
+            )
+
+    # remove pending invitations for user
+    inv_table = OrganisationInvitation.__table__
+    connection.execute(inv_table.delete().where(inv_table.c.username == user.username))
+
+    # remove from organisations
+    q = Organisation.owners.contains([user.id]) \
+        | Organisation.readers.contains([user.id]) \
+        | Organisation.admins.contains([user.id]) \
+        | Organisation.writers.contains([user.id])
+    organisations = Organisation.query.filter(q).all()
+
+    if organisations:
+        o_table = Organisation.__table__
+        for o in organisations:
+            # if the user is the only owner, remove the whole organisation as well
+            if o.owners == [user.id]:
+                connection.execute(inv_table.delete().where(inv_table.c.org_name == o.name))
+                connection.execute(o_table.delete().where(o_table.c.name == o.name))
+
+            connection.execute(
+                o_table.update().where(o_table.c.name == o.name),
+                owners=filter_user(o.owners),
+                writers=filter_user(o.writers),
+                readers=filter_user(o.readers),
+                admins=filter_user(o.admins)
+            )
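+
+
+# The listeners in this module run inside mapper/session flush events, so any extra
+# writes go through the low-level `connection` rather than db.session, which must
+# not be flushed again while these hooks execute.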
+
+
+def project_post_delete_actions(mapper, connection, project):  # pylint: disable=W0612
+    """
+    After a project is deleted, inform users by sending an email.
+
+    :param project: Project object
+    """
+    if not project.access:
+        return
+
+    users_ids = list(set(project.access.owners + project.access.writers + project.access.readers))
+    users_profiles = UserProfile.query.filter(UserProfile.user_id.in_(users_ids)).all()
+    for profile in users_profiles:
+        if not (profile.receive_notifications and profile.user.verified_email):
+            continue
+
+        email_data = {
+            'subject': f'Mergin project {"/".join([project.namespace, project.name])} has been deleted',
+            'html': render_template('email/removed_project.html', subject="Project deleted", project=project, username=profile.user.username),
+            'recipients': [profile.user.email],
+            'sender': current_app.config['MAIL_DEFAULT_SENDER']
+        }
+        send_email_async.delay(**email_data)
+
+
+def check(session):
+    if os.path.isfile(current_app.config['MAINTENANCE_FILE']):
+        abort(503, "Service unavailable due to maintenance, please try later")
+
+
+def before_user_profile_updated(mapper, connection, target):
+    """
+    When the profile is updated, inform the user by email about what has changed.
+    Send the email only if the user wants to receive notifications.
+    """
+    if target.receive_notifications and target.user.verified_email:
+        state = db.inspect(target)
+        changes = {}
+
+        for attr in state.attrs:
+            hist = attr.load_history()
+            if not hist.has_changes():
+                continue
+
+            before = hist.deleted[0]
+            after = hist.added[0]
+            field = attr.key
+
+            # if boolean, show Yes or No
+            if before is not None and isinstance(before, bool):
+                before = 'Yes' if before is True else 'No'
+            if after is not None and isinstance(after, bool):
+                after = 'Yes' if after is True else 'No'
+
+            profile_key = field.title().replace('_', ' ')
+            changes[profile_key] = {
+                'before': before,
+                'after': after
+            }
+
+        # inform user
+        if changes:
+            email_data = {
+                'subject': 'Profile has been changed',
+                'html': render_template('email/profile_changed.html', subject="Profile update", user=target.user, changes=changes),
+                'recipients': [target.user.email],
+                'sender': current_app.config['MAIL_DEFAULT_SENDER']
+            }
+            send_email_async.delay(**email_data)
+
+
+def add_org_namespace(mapper, connection, organisation):  # pylint: disable=W0612
+    ns = Namespace.query.filter_by(name=organisation.name).first()
+    if ns:
+        abort(400, "Namespace already exists")
+    account_table = Account.__table__
+    connection.execute(account_table.insert().values(owner_id=organisation.id, type="organisation"))
+    account = Account.query.filter_by(type='organisation', owner_id=organisation.id).first()
+    ns_table = Namespace.__table__
+    connection.execute(ns_table.insert().values(name=organisation.name, account_id=account.id))
+    account.created(connection)
+
+
+def register_events():
+    event.listen(User, "after_insert", add_user_namespace)
+    event.listen(User, "before_delete", remove_user_references)
+    event.listen(Project, "after_delete", project_post_delete_actions)
+    event.listen(db.session, 'before_commit', check)
+    event.listen(UserProfile, 'after_update', before_user_profile_updated)
+    event.listen(Namespace, "before_delete", before_namespace_delete)
+    event.listen(Organisation, "after_insert", add_org_namespace)
+
+
+def remove_events():
+    event.remove(User, "after_insert", add_user_namespace)
+    event.remove(User, "before_delete", remove_user_references)
+    event.remove(Project, "after_delete", project_post_delete_actions)
+    event.remove(db.session, 'before_commit', check)
+    event.remove(UserProfile, 'after_update', before_user_profile_updated)
+    event.remove(Namespace, "before_delete", 
before_namespace_delete) + event.remove(Organisation, "after_insert", add_org_namespace) diff --git a/server/src/encoder.py b/server/src/encoder.py new file mode 100644 index 00000000..67d3c698 --- /dev/null +++ b/server/src/encoder.py @@ -0,0 +1,20 @@ +from connexion.apps.flask_app import FlaskJSONEncoder +import six + +from .models.base_model_ import Model + + +class JSONEncoder(FlaskJSONEncoder): + include_nulls = False + + def default(self, o): + if isinstance(o, Model): + dikt = {} + for attr, _ in six.iteritems(o.swagger_types): + value = getattr(o, attr) + if value is None and not self.include_nulls: + continue + attr = o.attribute_map[attr] + dikt[attr] = value + return dikt + return FlaskJSONEncoder.default(self, o) diff --git a/server/src/forms.py b/server/src/forms.py new file mode 100644 index 00000000..e479a8ee --- /dev/null +++ b/server/src/forms.py @@ -0,0 +1,31 @@ +# Copyright (C) 2018 Lutra Consulting Limited. All rights reserved. +# Do not distribute without the express permission of the author. + +from wtforms import StringField, Field, SelectField +from wtforms.validators import DataRequired, Length, ValidationError +from flask_wtf import FlaskForm +from .util import is_name_allowed + + +def namespace_validation(form, field): + if field.data and (not is_name_allowed(field.data) or '@' in field.data): + raise ValidationError("Please use only alphanumeric or these -._~()'!*:,; characters in {}.".format(field.name)) + + +class IntegerListField(Field): + def _value(self): + return self.data + + def process_formdata(self, valuelist): + self.data = valuelist + + +class SendEmailForm(FlaskForm): + users = IntegerListField() # FieldList(IntegerField()) was not working + subject = StringField(validators=[DataRequired(), Length(max=50)]) + message = StringField(validators=[DataRequired()]) + + +class AccessPermissionForm(FlaskForm): + permissions = SelectField("permissions", [DataRequired()], choices=[ + ('read', 'read'), ('write', 'write'), ('owner', 'owner')]) diff --git a/server/src/mergin_utils.py b/server/src/mergin_utils.py new file mode 100644 index 00000000..5d6b1d08 --- /dev/null +++ b/server/src/mergin_utils.py @@ -0,0 +1,166 @@ +# Copyright (C) 2018 Lutra Consulting Limited. All rights reserved. +# Do not distribute without the express permission of the author. +import os +import hashlib +import re +import secrets +from threading import Timer +from uuid import UUID + +from gevent import sleep + + +def generate_checksum(file, chunk_size=4096): + """ + Generate checksum for file from chunks. + + :param file: file to calculate checksum + :param chunk_size: size of chunk + :return: sha1 checksum + """ + checksum = hashlib.sha1() + with open(file, 'rb') as f: + while True: + chunk = f.read(chunk_size) + sleep(0) # to unblock greenlet + if not chunk: + return checksum.hexdigest() + checksum.update(chunk) + + +class Toucher: + """ + Helper class to periodically update modification time of file during + execution of longer lasting task. 
+
+    Example of usage:
+    -----------------
+    with Toucher(file, interval):
+        do_something_slow
+
+    """
+    def __init__(self, lockfile, interval):
+        self.lockfile = lockfile
+        self.interval = interval
+        self.running = False
+        self.timer = None
+
+    def __enter__(self):
+        self.acquire()
+
+    def __exit__(self, type, value, tb):  # pylint: disable=W0612,W0622
+        self.release()
+
+    def release(self):
+        self.running = False
+        if self.timer:
+            self.timer.cancel()
+            self.timer = None
+
+    def acquire(self):
+        self.running = True
+        self.touch_lockfile()
+
+    def touch_lockfile(self):
+        # do an NFS ACCESS procedure request to clear the attribute cache (for various pods to actually see the file)
+        # https://docs.aws.amazon.com/efs/latest/ug/troubleshooting-efs-general.html#custom-nfs-settings-write-delays
+        os.access(self.lockfile, os.W_OK)
+        with open(self.lockfile, 'a'):
+            os.utime(self.lockfile, None)
+        if self.running:
+            self.timer = Timer(self.interval, self.touch_lockfile)
+            self.timer.start()
+
+
+def resolve_tags(files):
+    def _is_qgis(filename):
+        _, ext = os.path.splitext(filename)
+        return ext in ['.qgs', '.qgz']
+
+    tags = []
+    qgis_count = 0
+    for f in files:
+        if _is_qgis(f['path']):
+            qgis_count += 1
+    # TODO add some rules for input validity and mapping validity
+    if qgis_count == 1:
+        tags.extend(['valid_qgis', 'input_use'])
+    return tags
+
+
+def int_version(version):
+    """ Convert a v-prefixed version (e.g. v3) to its integer representation. """
+    return int(version.lstrip('v')) if re.match(r'v\d', version) else None
+
+
+def is_versioned_file(file):
+    """ Check if file is compatible with geodiff lib and hence suitable for versioning. """
+    diff_extensions = ['.gpkg', '.sqlite']
+    f_extension = os.path.splitext(file)[1]
+    return f_extension in diff_extensions
+
+
+def is_file_name_blacklisted(path, blacklist):
+    blacklisted_dirs = get_blacklisted_dirs(blacklist)
+    blacklisted_files = get_blacklisted_files(blacklist)
+    if blacklisted_dirs:
+        regexp_dirs = re.compile(r'({})'.format('|'.join(".*" + re.escape(x) + ".*" for x in blacklisted_dirs)))
+        if regexp_dirs.search(os.path.dirname(path)):
+            return True
+    if blacklisted_files:
+        regexp_files = re.compile(r'({})'.format('|'.join(".*" + re.escape(x) + ".*" for x in blacklisted_files)))
+        if regexp_files.search(os.path.basename(path)):
+            return True
+
+    return False
+
+
+def get_blacklisted_dirs(blacklist):
+    return [p.replace("/", "") for p in blacklist if p.endswith("/")]
+
+
+def get_blacklisted_files(blacklist):
+    return [p for p in blacklist if not p.endswith("/")]
+
+
+def get_user_agent(request):
+    """ Return user agent from request headers
+
+    In case of a browser client, a version parsed by werkzeug utils is returned, otherwise the raw header value. 
+ """ + if request.user_agent.browser and request.user_agent.platform: + client = request.user_agent.browser.capitalize() + version = request.user_agent.version + system = request.user_agent.platform.capitalize() + return f"{client}/{version} ({system})" + else: + return request.user_agent.string + + +def get_ip(request): + """ Returns request's IP address based on X_FORWARDED_FOR header + from proxy webserver (which should always be the case) + """ + forwarded_ips = request.environ.get('HTTP_X_FORWARDED_FOR', request.environ.get('REMOTE_ADDR', 'untrackable')) + # seems like we get list of IP addresses from AWS infra (beginning with external IP address of client, followed by some internal IP) + ip = forwarded_ips.split(",")[0] + return ip + + +def generate_location(): + """ Return random location where project is saved on disk + + Example: + >>> generate_location() + '1c/624c6af4d6d2710bbfe1c128e8ca267b' + """ + return os.path.join(secrets.token_hex(1), secrets.token_hex(16)) + + +def is_valid_uuid(uuid): + """ Check object can be parse as valid UUID """ + try: + UUID(uuid) + return True + except (ValueError, AttributeError): + return False diff --git a/server/src/models/__init__.py b/server/src/models/__init__.py new file mode 100644 index 00000000..9bba2f8f --- /dev/null +++ b/server/src/models/__init__.py @@ -0,0 +1,4 @@ +# coding: utf-8 + +# flake8: noqa +from __future__ import absolute_import diff --git a/server/src/models/base_model_.py b/server/src/models/base_model_.py new file mode 100644 index 00000000..a61cf285 --- /dev/null +++ b/server/src/models/base_model_.py @@ -0,0 +1,68 @@ +import pprint +import typing +import six + +from .. import util + +T = typing.TypeVar('T') + + +class Model(object): + # swaggerTypes: The key is attribute name and the + # value is attribute type. + swagger_types = {} + + # attributeMap: The key is attribute name and the + # value is json key in definition. + attribute_map = {} + + @classmethod + def from_dict(cls: typing.Type[T], dikt) -> T: + """Returns the dict as a model""" + return util.deserialize_model(dikt, cls) + + def to_dict(self): + """Returns the model properties as a dict + + :rtype: dict + """ + result = {} + + for attr, _ in six.iteritems(self.swagger_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model + + :rtype: str + """ + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Returns true if both objects are not equal""" + return not self == other diff --git a/server/src/models/db_models.py b/server/src/models/db_models.py new file mode 100644 index 00000000..6d890548 --- /dev/null +++ b/server/src/models/db_models.py @@ -0,0 +1,465 @@ +# Copyright (C) 2018 Lutra Consulting Limited. All rights reserved. +# Do not distribute without the express permission of the author. 
+
+import json
+import os
+import uuid
+from datetime import datetime, timedelta
+from blinker import signal
+from sqlalchemy.dialects.postgresql import ARRAY, BIGINT, ENUM, UUID
+from sqlalchemy.types import String
+from collections import OrderedDict
+from pygeodiff.geodifflib import GeoDiffLibError
+
+from .. import current_app, db
+from ..storages import DiskStorage
+from ..auth.models import User  # pylint: disable=W0611
+from ..mergin_utils import int_version, is_versioned_file
+
+Storages = {
+    "local": DiskStorage
+}
+
+account_created = signal('account_created')
+account_inactivated = signal('account_inactivated')
+
+
+class Project(db.Model):
+    id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
+    name = db.Column(db.String, index=True)
+    storage_params = db.Column(db.JSON)
+    created = db.Column(db.DateTime, default=datetime.utcnow, index=True)
+    creator_id = db.Column(db.Integer, db.ForeignKey("user.id"), nullable=True, index=True)
+    updated = db.Column(db.DateTime, onupdate=datetime.utcnow)
+    # metadata for project files (see also FileInfoSchema)
+    files = db.Column(db.JSON, default=[])
+    tags = db.Column(ARRAY(String), server_default="{}")
+    disk_usage = db.Column(BIGINT, nullable=False, default=0)
+    latest_version = db.Column(db.String, index=True)
+
+    creator = db.relationship("User", uselist=False, backref=db.backref("projects", cascade="all,delete"))
+    namespace = db.Column(db.String, db.ForeignKey("namespace.name", ondelete="CASCADE"), index=True)
+    __table_args__ = (db.UniqueConstraint('name', 'namespace'),)
+
+    def __init__(self, name, storage_params, creator, namespace, **kwargs):  # pylint: disable=W0613
+        self.name = name
+        self.storage_params = storage_params
+        self.creator = creator
+        self.namespace = namespace
+        self.latest_version = "v0"
+
+    @property
+    def storage(self):
+        if not hasattr(self, '_storage'):  # best approach, seriously
+            StorageBackend = Storages[self.storage_params['type']]
+            self._storage = StorageBackend(self)  # pylint: disable=W0201
+        return self._storage
+
+    def file_history(self, file, since, to):
+        """
+        Look up in project versions for history of versioned file.
+        Returns ordered (from latest) dict with versions where some change happened and corresponding metadata.
+
+        :Example:
+
+        >>> self.file_history('mergin/base.gpkg', 'v1', 'v2')
+        {'v2': {'checksum': '08b0e8caddafe74bf5c11a45f65cedf974210fed', 'location': 'v2/base.gpkg', 'path': 'base.gpkg',
+        'size': 2793, 'change': 'updated'}, 'v1': {'checksum': '89469a6482267de394c7c7270cb7ffafe694ea76',
+        'location': 'v1/base.gpkg', 'mtime': '2019-07-18T07:52:38.770113Z', 'path': 'base.gpkg', 'size': 98304,
+        'change': 'added'}}
+
+        :param file: file path
+        :type file: str
+        :param since: start version for history (e.g. v1)
+        :type since: str
+        :param to: end version for history (e.g. 
v2) + :type to: str + :returns: changes metadata for versions where some file change happened + :rtype: dict + """ + since = int_version(since) + to = int_version(to) + if not (is_versioned_file(file) and since is not None and to is not None): + return {} + + history = OrderedDict() + versions = sorted(self.versions, key=lambda v: int_version(v.name)) + # version v0 was added as initial version later and some older projects may not have it + if versions[0].name == "v0": + to = to + 1 + since = since + 1 + + for version in reversed(versions[since-1:to]): + f_change = version.find_file_change(file) + if not f_change: + continue + # make sure we find with correct filename next time + if f_change['change'] == 'renamed': + file = f_change['path'] + history[version.name] = f_change + # end of file history + if f_change['change'] in ['added', 'removed']: + break + + return history + + +class ProjectAccess(db.Model): + project_id = db.Column(UUID(as_uuid=True), db.ForeignKey("project.id", ondelete="CASCADE"), primary_key=True, index=True) + public = db.Column(db.Boolean, default=False, index=True) + owners = db.Column(ARRAY(db.Integer), server_default="{}") + readers = db.Column(ARRAY(db.Integer), server_default="{}") + writers = db.Column(ARRAY(db.Integer), server_default="{}") + + project = db.relationship("Project", + uselist=False, + backref=db.backref("access", single_parent=True, uselist=False, cascade="all,delete", lazy='joined')) + + __table_args__ = (db.Index('ix_project_access_owners', owners, postgresql_using="gin"), + db.Index('ix_project_access_readers', readers, postgresql_using="gin"), + db.Index('ix_project_access_writers', writers, postgresql_using="gin"),) + + def __init__(self, project, public=False): + self.project = project + self.owners = [project.creator.id] + self.writers = [project.creator.id] + self.readers = [project.creator.id] + self.project_id = project.id + self.public = public + + +class ProjectVersion(db.Model): + id = db.Column(db.Integer, primary_key=True, autoincrement=True) + name = db.Column(db.String, index=True) + project_id = db.Column(UUID(as_uuid=True), db.ForeignKey("project.id", ondelete="CASCADE"), index=True) + created = db.Column(db.DateTime, default=datetime.utcnow, index=True) + author = db.Column(db.String, index=True) + # metadata with files changes + # {"added": [{"checksum": "c9a4fd2afd513a97aba19d450396a4c9df8b2ba4", "path": "test.qgs", "size": 31980}], + # "removed": [], "renamed": [], "updated": []} + changes = db.Column(db.JSON) + # metadata (see also FileInfoSchema) for files in actual version + files = db.Column(db.JSON) + user_agent = db.Column(db.String, index=True) + ip_address = db.Column(db.String, index=True) + ip_geolocation_country = db.Column(db.String, index=True) # geolocation country derived from IP (with celery job) + project_size = db.Column(BIGINT, nullable=False, default=0, index=True) # size of project at current version (incl. 
files from older versions)
+
+    project = db.relationship(
+        "Project",
+        uselist=False,
+        backref=db.backref("versions", single_parent=True, lazy='subquery', cascade="all,delete", order_by="desc(ProjectVersion.created)")
+    )
+
+    def __init__(self, project, name, author, changes, files, ip, user_agent=None):
+        self.project_id = project.id
+        self.name = name
+        self.author = author
+        self.changes = changes
+        self.files = files
+        self.user_agent = user_agent
+        self.ip_address = ip
+        self.project_size = sum(f["size"] for f in self.files) if self.files else 0
+
+    def find_file_change(self, file):
+        """
+        Browse version changes and return requested file change metadata (if any). Append type of change.
+
+        :Example:
+
+        >>> self.find_file_change('data/test.gpkg')
+        {'checksum': '89469a6482267de394c7c7270cb7ffafe694ea76', 'location': 'v1/data/test.gpkg',
+        'mtime': '2019-07-18T07:52:38.770113Z', 'path': 'data/test.gpkg', 'size': 98304, 'change': 'added'}
+
+        :param file: file path
+        :type file: str
+        :returns: change metadata
+        :rtype: dict
+        """
+        for k, v in self.changes.items():
+            match_key = 'new_path' if k == 'renamed' else 'path'
+            changed_item = next((item for item in v if item.get(match_key) == file), None)
+            if changed_item:
+                changed_item['change'] = k
+                changed_item['location'] = next((f['location'] for f in self.files if f['path'] == changed_item[match_key]), None)
+                # append location of diff file
+                if 'diff' in changed_item:
+                    changed_item['diff']['location'] = next(
+                        (f['diff']['location'] for f in self.files if f['path'] == changed_item[match_key]), None)
+                return changed_item
+
+    def diff_summary(self):
+        """ Calculate diff summary for versioned files updated with geodiff
+
+        :Example:
+
+        >>> self.diff_summary()
+        {
+            'base.gpkg': {
+                'summary': [
+                    {'table': 'gpkg_contents', 'insert': 0, 'update': 1, 'delete': 0},
+                    {'table': 'simple', 'insert': 2, 'update': 0, 'delete': 0}
+                ],
+                'size': 278
+            },
+            'fail.gpkg': {
+                'error': 'some geodiff error',
+                'size': 278
+            }
+        }
+
+        :return: diffs' summaries for all updated files
+        :rtype: dict
+        """
+        output = {}
+        for f in self.changes["updated"]:
+            if 'diff' not in f:
+                continue
+            json_file = os.path.join(self.project.storage.project_dir, f['location'] + '-diff-summary')
+            changeset = os.path.join(self.project.storage.project_dir, f['diff']['location'])
+            if not os.path.exists(json_file):
+                try:
+                    self.project.storage.geodiff.list_changes_summary(changeset, json_file)
+                except GeoDiffLibError as e:
+                    output[f['path']] = {
+                        "error": str(e),
+                        "size": f['diff']['size']
+                    }
+                    continue
+
+            with open(json_file, 'r') as jf:
+                content = json.load(jf)
+                if 'geodiff_summary' not in content:
+                    continue
+
+                output[f['path']] = {
+                    "summary": content["geodiff_summary"],
+                    "size": f['diff']['size']
+                }
+
+        return output
+
+
+class Namespace(db.Model):
+    name = db.Column(db.String, primary_key=True)
+    account_id = db.Column(db.Integer, db.ForeignKey("account.id", ondelete="CASCADE"))
+    storage = db.Column(BIGINT, nullable=False, default=os.environ.get('DEFAULT_STORAGE_SIZE', 100 * 1024 * 1024))
+
+    account = db.relationship("Account", uselist=False, backref=db.backref("namespace", single_parent=True, uselist=False, cascade="all,delete"))
+
+    def __init__(self, name, account_id):
+        self.name = name
+        self.account_id = account_id
+
+    def projects(self):
+        return Project.query.filter_by(namespace=self.name).all()
+
+    def owner(self):
+        return self.account.owner()
+
+    def disk_usage(self):
+        return sum(p.disk_usage for p in self.projects())
+
+
+class 
Upload(db.Model): + id = db.Column(db.String, primary_key=True) + project_id = db.Column(UUID(as_uuid=True), db.ForeignKey("project.id", ondelete="CASCADE"), index=True) + version = db.Column(db.Integer, index=True) + changes = db.Column(db.JSON) + user_id = db.Column(db.Integer, db.ForeignKey("user.id", ondelete="CASCADE"), nullable=True) + created = db.Column(db.DateTime, default=datetime.utcnow) + + user = db.relationship("User") + project = db.relationship( + "Project", + uselist=False, + backref=db.backref("uploads", single_parent=True, lazy='dynamic', cascade="all,delete") + ) + __table_args__ = ( + db.UniqueConstraint('project_id', 'version'), + ) + + def __init__(self, project, version, changes, user_id): + self.id = str(uuid.uuid4()) + self.project_id = project.id + self.version = version + self.changes = changes + self.user_id = user_id + + +class ProjectTransfer(db.Model): + id = db.Column(db.String, primary_key=True) + project_id = db.Column(UUID(as_uuid=True), db.ForeignKey("project.id", ondelete="CASCADE"), index=True) + from_ns_name = db.Column(db.String, nullable=False, index=True) # cached value for easier lookups + to_ns_name = db.Column(db.String, db.ForeignKey("namespace.name", ondelete="CASCADE"), nullable=False, index=True) + requested_by = db.Column(db.Integer, db.ForeignKey("user.id", ondelete="CASCADE"), nullable=True) + expire = db.Column(db.DateTime) + + project = db.relationship( + "Project", + uselist=False, + backref=db.backref("transfers", single_parent=True, lazy='dynamic', cascade="all,delete") + ) + to_ns = db.relationship( + "Namespace", + backref=db.backref("transfers", single_parent=True, lazy='dynamic', cascade="all,delete") + ) + user = db.relationship("User") + + __table_args__ = (db.UniqueConstraint('project_id'),) + + class TransferError(Exception): + def __init__(self, reason=None): + error = 'Project transfer failed' + if reason: + error = '{} : {}'.format(error, reason) + self.errors = error + + def __init__(self, project, to_namespace, requested_by): + """ Initiate project transfer to different namespace + + :param project: project to be transferred + :type project: Project + + :param to_namespace: the namespace for project to be transferred + :type to_namespace: Namespace + + :param requested_by: requested by + :type requested_by: User.id + """ + self.id = str(uuid.uuid4()) + self.project_id = project.id + self.from_ns_name = project.namespace + self.to_ns_name = to_namespace.name + self.requested_by = requested_by + self.expire = datetime.utcnow() + timedelta(seconds=current_app.config['TRANSFER_EXPIRATION']) + + if to_namespace.name == project.namespace: + raise self.TransferError('origin and destination namespaces are the same') + + def is_expired(self): + """ Check if transfer request is expired + :rtype: bool + """ + return datetime.utcnow() > self.expire + + +class Account(db.Model): + """ Reference class to claim service ownership either by user or organisation """ + id = db.Column(db.Integer, primary_key=True, autoincrement=True) + type = db.Column(ENUM("user", "organisation", name="account_type"), nullable=False, index=True) + owner_id = db.Column(db.Integer, nullable=False, index=True) + + def __init__(self, type, owner_id): + self.type = type + self.owner_id = owner_id + + def owner(self): + from ..organisation.models import Organisation + + if self.type == 'organisation': + return Organisation.query.get(self.owner_id) + elif self.type == 'user': + return User.query.get(self.owner_id) + else: + return + + def can_edit(self, 
user_id): + from ..organisation.models import Organisation + owner = self.owner() + if isinstance(owner, User): + return owner.id == user_id + elif isinstance(owner, Organisation): + return user_id in owner.owners + else: + return False + + def email(self): + from ..organisation.models import Organisation + owner = self.owner() + + if isinstance(owner, User): + return owner.email + elif isinstance(owner, Organisation): + owner_id = owner.owners[0] + user = User.query.get(owner_id) + return user.email + else: + return '' + + def name(self): + from ..organisation.models import Organisation + + owner = self.owner() + if isinstance(owner, User): + return owner.username + elif isinstance(owner, Organisation): + return owner.name + else: + return '' + + def created(self, connection=None): + """ Emit blinker.signal event that account has been created """ + account_created.send(self, connection=connection) + + def inactivated(self, action): + account_inactivated.send(self, action=action) + + +class AccessRequest(db.Model): + id = db.Column(db.Integer, primary_key=True, autoincrement=True) + user_id = db.Column(db.Integer, db.ForeignKey("user.id", ondelete="CASCADE"), index=True) + project_id = db.Column(UUID(as_uuid=True), db.ForeignKey("project.id", ondelete="CASCADE"), index=True) + namespace = db.Column(db.String, nullable=False, index=True) # cached value for easier lookups + expire = db.Column(db.DateTime) + + user = db.relationship("User", uselist=False) + + project = db.relationship( + "Project", + uselist=False, + backref=db.backref("access_requests", single_parent=True, cascade="all,delete") + ) + + def __init__(self, project, user_id): + self.project_id = project.id + self.namespace = project.namespace + self.user_id = user_id + self.expire = datetime.utcnow() + timedelta(seconds=current_app.config['PROJECT_ACCESS_REQUEST']) + + def accept(self, permissions): + """ The accept to project access request + """ + # user = User.query.filter(User.username == self.username) + project_access = self.project.access + readers = project_access.readers.copy() + writers = project_access.writers.copy() + owners = project_access.owners.copy() + readers.append(self.user_id) + project_access.readers = readers + if permissions == "write" or permissions == "owner": + writers.append(self.user_id) + project_access.writers = writers + if permissions == "owner": + owners.append(self.user_id) + project_access.owners = owners + + db.session.delete(self) + db.session.commit() + + +class RemovedProject(db.Model): + id = db.Column(db.Integer, primary_key=True, autoincrement=True) + name = db.Column(db.String, nullable=False, index=True) + namespace = db.Column(db.String, nullable=False, index=True) + properties = db.Column(db.JSON, nullable=False) + timestamp = db.Column(db.DateTime, default=datetime.utcnow, index=True) + removed_by = db.Column(db.String, nullable=False) + + def __init__(self, project, removed_by): + from .schemas import ProjectSchemaForDelete + + self.name = project.name + self.namespace = project.namespace + self.properties = ProjectSchemaForDelete().dump(project) + self.removed_by = removed_by diff --git a/server/src/models/schemas.py b/server/src/models/schemas.py new file mode 100644 index 00000000..655d7dac --- /dev/null +++ b/server/src/models/schemas.py @@ -0,0 +1,267 @@ +# Copyright (C) 2018 Lutra Consulting Limited. All rights reserved. +# Do not distribute without the express permission of the author. 
+import copy +import os +import re +from datetime import datetime +from marshmallow import fields, pre_dump, post_dump +from flask_login import current_user +from flask import current_app + +from ..auth.schemas import UserSchema +from .. import ma +from ..mergin_utils import resolve_tags +from ..permissions import ProjectPermissions +from .db_models import Project, ProjectVersion, ProjectTransfer, Namespace, Account, AccessRequest, RemovedProject +from ..auth.models import User + + +class DateTimeWithZ(fields.DateTime): + def __init__(self, **kwargs): + super(DateTimeWithZ, self).__init__('%Y-%m-%dT%H:%M:%S%zZ', **kwargs) + + +class ProjectAccessSchema(ma.ModelSchema): + owners = fields.List(fields.Integer()) + writers = fields.List(fields.Integer()) + readers = fields.List(fields.Integer()) + public = fields.Boolean() + + @post_dump + def insert_usernames(self, data, **kwargs): + """ Convert list of user ids in access levels to corresponding usernames + Adds fields 'ownersnames', 'writersnames' and 'readersnames' to serialized data + """ + if 'users_map' in self.context: + # user map can be pass as context to save db query + users_map = self.context['users_map'] + else: + user_ids = data['owners'] + data['writers'] + data['readers'] + users_map = {u.id: u.username for u in User.query.filter(User.id.in_(set(user_ids))).all()} + + for field in ('owners', 'writers', 'readers'): + new_key = field + 'names' + data[new_key] = [] + users_ids = data[field] + for uid in users_ids: + if uid not in users_map: + continue + username = users_map[uid] + data[new_key].append(username) + return data + + +def project_user_permissions(project): + return { + "upload": ProjectPermissions.Upload.check(project, current_user), + "update": ProjectPermissions.Update.check(project, current_user), + "delete": ProjectPermissions.Delete.check(project, current_user) + } + +class FileInfoSchema(ma.ModelSchema): + path = fields.String() + size = fields.Integer() + checksum = fields.String() + location = fields.String(load_only=True) + mtime = fields.String() + diff = fields.Nested('self', required=False, missing={}) + history = fields.Dict(required=False, dump_only=True, missing={}) + + @pre_dump + def patch_history_field(self, data, **kwargs): + """ + Append expiration to materialized versioned files and remove internal server metadata from final response. + This is because history is general dict with yet unknown structure. + #TODO resolve once marshmallow 3.0 is released. 
+ history = fields.Dict(keys=fields.String(), values=fields.Nested('self', exclude=['location', 'chunks'])) + """ + # diff field (self-nested does not contain history) + if 'history' not in data: + return data + + _data = copy.deepcopy(data) # create deep copy to avoid messing around with original object + for item in _data['history'].values(): + if 'diff' in item: + item['diff'].pop('location', None) + item['diff'].pop('sanitized_path', None) + if self.context and 'project_dir' in self.context: + abs_path = os.path.join(self.context['project_dir'], item['location']) + if os.path.exists(abs_path): + expiration = os.path.getmtime(abs_path) + current_app.config['FILE_EXPIRATION'] + item.update(expiration=datetime.utcfromtimestamp(expiration)) + item.pop('location', None) + item.pop('chunks', None) + item.pop('sanitized_path', None) + return _data + + +class ProjectSchemaForVersion(ma.ModelSchema): + """ Equivalent of ProjectSchema when version object is serialized """ + id = fields.UUID() + created = DateTimeWithZ(attribute="project.created") + creator = fields.Int(attribute="project.creator_id") + uploads = fields.Function(lambda obj: obj.project.uploads.all()) + name = fields.Function(lambda obj: obj.project.name) + namespace = fields.Function(lambda obj: obj.project.namespace) + access = fields.Method("_access") + permissions = fields.Method("_permissions") + disk_usage = fields.Method("_disk_usage") + files = fields.Nested(FileInfoSchema(), many=True) + tags = fields.Method("_tags") + updated = DateTimeWithZ(attribute="created") + version = fields.Function(lambda obj: obj.name) + + def _access(self, obj): + return ProjectAccessSchema().dump(obj.project.access) + + def _permissions(self, obj): + return project_user_permissions(obj.project) + + def _disk_usage(self, obj): + return sum(f["size"] for f in obj.files) + + def _tags(self, obj): + return resolve_tags(obj.files) + + +class ProjectAccessRequestSchema(ma.ModelSchema): + user = fields.Nested(UserSchema(), exclude=['profile', 'is_admin', 'email', 'id', 'is_admin', 'verified_email']) + project_name = fields.Function(lambda obj: obj.project.name) + namespace = fields.Str() + expire = DateTimeWithZ() + + class Meta: + model = AccessRequest + + +class ProjectSchema(ma.ModelSchema): + id = fields.UUID() + files = fields.Nested(FileInfoSchema(), many=True) + access = fields.Nested(ProjectAccessSchema()) + access_requests = fields.Nested(ProjectAccessRequestSchema(), many=True, exclude=['project']) + permissions = fields.Function(project_user_permissions) + version = fields.String(attribute='latest_version') + namespace = fields.Str() + created = DateTimeWithZ() + + class Meta: + model = Project + exclude = ['versions', 'transfers', 'latest_version', 'storage_params'] + + +class ProjectListSchema(ma.ModelSchema): + id = fields.UUID() + name = fields.Str() + namespace = fields.Str() + access = fields.Nested(ProjectAccessSchema()) + permissions = fields.Function(project_user_permissions) + version = fields.String(attribute='latest_version') + updated = fields.Method("get_updated") + created = DateTimeWithZ() + creator = fields.Integer(attribute='creator_id') + disk_usage = fields.Integer() + tags = fields.List(fields.Str()) + has_conflict = fields.Method("get_has_conflict") + + + def get_updated(self, obj): + return obj.updated if obj.updated else obj.created + + + def get_has_conflict(self, obj): + # check if there is any conflict file in project + files = obj.files + for file in [f for f in files if '_conflict' in f.get('path')]: + if 
+
+
+class ProjectVersionSchema(ma.ModelSchema):
+    project_name = fields.Function(lambda obj: obj.project.name)
+    namespace = fields.Function(lambda obj: obj.project.namespace)
+    author = fields.String()
+    project = fields.Nested(ProjectSchema())
+    changesets = fields.Method("get_diff_summary")
+    files = fields.String()
+    created = DateTimeWithZ()
+
+    def get_diff_summary(self, obj):
+        return obj.diff_summary()
+
+    class Meta:
+        model = ProjectVersion
+        exclude = ['id', 'ip_address', 'ip_geolocation_country']
+
+
+class NamespaceSchema(ma.ModelSchema):
+    type = fields.Method("namespace_type")
+
+    class Meta:
+        model = Namespace
+        fields = ('name', 'type')
+
+    def namespace_type(self, obj):
+        return obj.account.type
+
+
+class ProjectTransferSchema(ma.ModelSchema):
+    requested_by = fields.Method("requested_by_username")
+    project = fields.Nested(ProjectSchema())
+    project_name = fields.Function(lambda obj: obj.project.name)
+
+    class Meta:
+        model = ProjectTransfer
+        fields = ('id', 'project_name', 'from_ns_name', 'to_ns_name', 'requested_by', 'requested_at', 'project', 'expire')
+
+    def requested_by_username(self, obj):
+        return obj.user.username
+
+
+class AccountSchema(ma.ModelSchema):
+    name = fields.Method('get_owner_name')
+    email = fields.Method('get_owner_email')
+
+    def get_owner_name(self, obj):
+        return obj.name()
+
+    def get_owner_email(self, obj):
+        return obj.email()
+
+    class Meta:
+        model = Account
+        fields = ('id', 'type', 'owner_id', 'name', 'email', )
+
+
+class AccountExtendedSchema(ma.ModelSchema):
+    id = fields.Integer()
+    name = fields.String()
+    type = fields.String()
+    active = fields.Boolean()
+    storage = fields.Integer()
+
+
+class FullVersionSchema(ma.ModelSchema):
+    project_name = fields.Function(lambda obj: obj.project.name)
+    namespace = fields.Function(lambda obj: obj.project.namespace)
+
+    class Meta:
+        model = ProjectVersion
+        exclude = ['id']
+
+
+class ProjectSchemaForDelete(ma.ModelSchema):
+    versions = fields.Nested(FullVersionSchema(), many=True)
+    creator_id = fields.Method("_creator_id")
+
+    def _creator_id(self, obj):
+        return obj.creator_id
+
+    class Meta:
+        model = Project
+        exclude = ['transfers', 'uploads', 'access_requests', 'access']  # these fields will be lost
+
+
+class RemovedProjectSchema(ma.ModelSchema):
+    class Meta:
+        model = RemovedProject
diff --git a/server/src/organisation/__init__.py b/server/src/organisation/__init__.py
new file mode 100644
index 00000000..ea0b05b0
--- /dev/null
+++ b/server/src/organisation/__init__.py
@@ -0,0 +1,325 @@
+# Copyright (C) 2020 Lutra Consulting Limited. All rights reserved.
+# Do not distribute without the express permission of the author.
+
+from flask import Blueprint, jsonify, request, abort, render_template, current_app
+from flask_login import current_user
+from sqlalchemy import or_, func
+
+from ..auth import auth_required
+from ..auth.models import User
+from .forms import UpdateOrganisationForm, AccessForm, CreateOrganisationForm, OwnerAccessForm, OrganisationInvitationForm
+from .models import Organisation, OrganisationInvitation
+from ..models.db_models import Namespace, Account
+from .schemas import OrganisationSchema, OrganisationInvitationSchema
+from .. import db, wm, SIG_NEW_ORGANISATION, SIG_DELETED_ORGANISATION
+
+
+def find_organisations_by_username(username):
+    user = User.query.filter_by(username=username).first_or_404()
+
+    organisations = Organisation.query.filter(
+        or_(
+            Organisation.owners.contains([user.id]),
+            Organisation.admins.contains([user.id]),
+            Organisation.writers.contains([user.id]),
+            Organisation.readers.contains([user.id])
+        )
+    ).filter_by(active=True).all()
+
+    return organisations
+
+
+def init_app(app):
+    organisation = Blueprint("organisation", __name__, template_folder='templates')
+
+    def get_org_by_name(name, only_active=True):
+        query = Organisation.query.filter_by(name=name)
+        if only_active:
+            query = query.filter_by(active=True)
+        return query.first_or_404(f"Organisation {name} not found")
+
+    @organisation.route('/', methods=['GET'])
+    @auth_required
+    def get_organisations():  # noqa: E501
+        """List Mergin organisations the current user has (at least read) access to.
+
+        :rtype: List[Organisation]
+        """
+        organisations = find_organisations_by_username(current_user.username)
+        data = OrganisationSchema(many=True, context={'user': current_user}).dump(organisations)
+        return jsonify(data), 200
+
+    @organisation.route('/', methods=['POST'])
+    @auth_required
+    def create_organisation():  # noqa: E501
+        """ Create a new organisation.
+        :rtype: None
+        """
+        free_orgs = Organisation.query.join(Account, Account.owner_id == Organisation.id).join(Namespace, Namespace.account_id == Account.id)\
+            .filter(Organisation.owners.contains([current_user.id]))\
+            .filter((Namespace.storage == 0))\
+            .filter(Organisation.active)\
+            .count()
+        if free_orgs > 2:
+            abort(400, "Too many free organisations")
+
+        form = CreateOrganisationForm.from_json(request.json)
+        if not form.validate_on_submit():
+            return jsonify(form.errors), 400
+
+        name = form.name.data
+        ns = Namespace.query.filter(func.lower(Namespace.name) == func.lower(name)).first()
+        if ns:
+            abort(409, f"Namespace {name} already exists, please choose another name.")
+
+        org = Organisation(creator_id=current_user.id, **form.data)
+        db.session.add(org)
+        db.session.commit()
+        wm.emit_signal(SIG_NEW_ORGANISATION, request.path, msg=f'New organisation *{name}* has been created')
+        return jsonify({"success": True}), 201
+
+    @organisation.route('/<name>', methods=['GET'])
+    @auth_required
+    def get_organisation_by_name(name):  # noqa: E501
+        """ Return organisation by name.
+
+        :param name: name of organisation
+        :type name: str
+        :rtype: Organisation
+        """
+        org = get_org_by_name(name, only_active=not current_user.is_admin)
+        if current_user.id not in org.readers and not current_user.is_admin:
+            abort(403, "You do not have permissions to get organisation")
+        data = OrganisationSchema(context={'user': current_user}).dump(org)
+        return data, 200
+
+    @organisation.route('/<name>', methods=['PATCH'])
+    @auth_required
+    def update_organisation(name):  # noqa: E501
+        """ Update organisation.
+
+        Information fields (name, description) and owners can be updated only by organisation owners.
+
+        :param name: name of organisation
+        :type name: str
+        :rtype: Organisation
+        """
+        org = get_org_by_name(name)
+        if current_user.id not in org.owners and not current_user.is_admin:
+            abort(403, "You do not have permissions to update organisation")
+
+        form = UpdateOrganisationForm.from_json(request.json)
+        if not form.validate_on_submit():
+            return jsonify(form.errors), 400
+
+        form.populate_obj(org)
+        db.session.add(org)
+        db.session.commit()
+        data = OrganisationSchema(context={'user': current_user}).dump(org)
+        return data, 200
+
+    @organisation.route('/<name>/access', methods=['PATCH'])
+    @auth_required
+    def update_access(name):  # noqa: E501
+        """ Update access fields of organisation.
+
+        Access fields (admins, writers, readers) can be amended by organisation admins.
+
+        :param name: name of organisation
+        :type name: str
+        :rtype: Organisation
+        """
+        if not request.is_json:
+            abort(400, "Payload format should be json")
+
+        org = get_org_by_name(name)
+        usernames = list(
+            set(request.json['owners']) |
+            set(request.json['admins']) |
+            set(request.json['writers']) |
+            set(request.json['readers'])
+        )
+        users = User.query.with_entities(User.username, User.id).filter(User.username.in_(usernames)).all()
+        users_map = {u.username: u.id for u in users}
+        access = {}
+        for key in ('owners', 'admins', 'writers', 'readers'):
+            access[key] = []
+            for username in request.json[key]:
+                if username not in users_map:
+                    continue
+                access[key].append(users_map[username])
+
+        if current_user.id in org.owners or current_user.is_admin:
+            form = OwnerAccessForm().from_json(access)
+        elif current_user.id in org.admins:
+            form = AccessForm().from_json(access)
+        else:
+            abort(403, "You do not have permissions to update organisation members")
+
+        if not form.validate_on_submit():
+            return jsonify(form.errors), 400
+
+        form.populate_obj(org)
+        db.session.add(org)
+        db.session.commit()
+        data = OrganisationSchema(context={"user": current_user}).dump(org)
+        return data, 200
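The endpoint expects the full access lists on every call, given as usernames; the view resolves them to ids before form validation and silently drops unknown names. A hypothetical payload:

```python
# Hypothetical PATCH /orgs/<name>/access payload; all four lists are required
# and unknown usernames are ignored by the view.
payload = {
    "owners": ["alice"],
    "admins": ["alice", "bob"],
    "writers": ["bob"],
    "readers": ["bob", "carol"],
}
# e.g. with a Flask test client and a logged-in owner session (assumed):
# client.patch('/orgs/acme/access', json=payload)
```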
+ """ + from ..celery import send_email_async + + if not request.is_json: + abort(400, "Payload format should be json") + + form = OrganisationInvitationForm.from_json(request.json) + if not form.validate_on_submit(): + return jsonify(form.errors), 400 + + username = form.data.get('username') + org_name = form.data.get('org_name') + invitation = OrganisationInvitation.query.filter_by(username=username, org_name=org_name).first() + if invitation: + abort(409, "Invitation already exist.") + + user = User.query.filter_by(username=username).first_or_404(f"User {username} not found") + organisation = get_org_by_name(org_name) + if current_user.id not in organisation.admins and current_user.id not in organisation.owners: + abort(403, "You do not have permissions to create an invitation.") + + invitation = OrganisationInvitation(org_name=org_name, username=username, role=form.data.get('role')) + db.session.add(invitation) + db.session.commit() + body = render_template( + 'email/organisation_invitation.html', + subject='Organisation invitation', + username=username, + invitation=invitation, + link=f"{request.url_root.rstrip('/')}/users/{username}/organisations" + ) + email_data = { + 'subject': 'Organisation invitation', + 'html': body, + 'recipients': [user.email], + 'sender': current_app.config['MAIL_DEFAULT_SENDER'] + } + send_email_async.delay(**email_data) + return jsonify(OrganisationInvitationSchema().dump(invitation)), 201 + + @organisation.route('/invitations//', methods=['GET']) + @auth_required + def get_invitations(type, name): # noqa: E501 + """ Get invitations of user. + :param name: username or organisation name + :type name: str + :param type: type of subject user or org + :type type: enumerate + """ + data = None + if type == "user": + if current_user.username != name and not current_user.is_admin: + abort(403, "You do not have permissions to list invitations") + data = OrganisationInvitationSchema(many=True).dump(OrganisationInvitation.query.filter_by(username=name).all()) + if not data: + User.query.filter_by(username=name).first_or_404(f"User {name} not found") + elif type == "org": + organisation = get_org_by_name(name) + if (current_user.id not in organisation.admins and current_user.id not in organisation.owners) and not current_user.is_admin: + abort(403, "You do not have permissions to list invitations.") + data = OrganisationInvitationSchema(many=True).dump(OrganisationInvitation.query.filter_by(org_name=name)) + else: + abort(400, "Invalid account type") + + return jsonify(data), 200 + + @organisation.route('/invitation/', methods=['GET']) + @auth_required + def get_invitation(id): # noqa: E501 + """ Get invitation detail. + :param id: invitation id + :type id: int + """ + invitation = OrganisationInvitation.query.filter_by(id=id).first_or_404(f"Invitation {id} not found") + if invitation.username != current_user.username and \ + current_user.id not in invitation.organisation.owners and \ + current_user.id not in invitation.organisation.admins: + abort(403, "You do not have permissions to access invitation") + + data = OrganisationInvitationSchema().dump(invitation) + return jsonify(data), 200 + + @organisation.route('/invitation/confirm/', methods=['POST']) + @auth_required + def accept_invitation(id): # noqa: E501 + """ Accept invitation. 
+
+    @organisation.route('/invitations/<type>/<name>', methods=['GET'])
+    @auth_required
+    def get_invitations(type, name):  # noqa: E501
+        """ Get invitations of user.
+        :param name: username or organisation name
+        :type name: str
+        :param type: type of subject, either 'user' or 'org'
+        :type type: enumerate
+        """
+        data = None
+        if type == "user":
+            if current_user.username != name and not current_user.is_admin:
+                abort(403, "You do not have permissions to list invitations")
+            data = OrganisationInvitationSchema(many=True).dump(OrganisationInvitation.query.filter_by(username=name).all())
+            if not data:
+                User.query.filter_by(username=name).first_or_404(f"User {name} not found")
+        elif type == "org":
+            organisation = get_org_by_name(name)
+            if (current_user.id not in organisation.admins and current_user.id not in organisation.owners) and not current_user.is_admin:
+                abort(403, "You do not have permissions to list invitations.")
+            data = OrganisationInvitationSchema(many=True).dump(OrganisationInvitation.query.filter_by(org_name=name))
+        else:
+            abort(400, "Invalid account type")
+
+        return jsonify(data), 200
+
+    @organisation.route('/invitation/<int:id>', methods=['GET'])
+    @auth_required
+    def get_invitation(id):  # noqa: E501
+        """ Get invitation detail.
+        :param id: invitation id
+        :type id: int
+        """
+        invitation = OrganisationInvitation.query.filter_by(id=id).first_or_404(f"Invitation {id} not found")
+        if invitation.username != current_user.username and \
+                current_user.id not in invitation.organisation.owners and \
+                current_user.id not in invitation.organisation.admins:
+            abort(403, "You do not have permissions to access invitation")
+
+        data = OrganisationInvitationSchema().dump(invitation)
+        return jsonify(data), 200
+
+    @organisation.route('/invitation/confirm/<int:id>', methods=['POST'])
+    @auth_required
+    def accept_invitation(id):  # noqa: E501
+        """ Accept invitation.
+        :param id: invitation id
+        :type id: int
+        """
+        invitation = OrganisationInvitation.query.get_or_404(id, "Invitation does not exist")
+        if invitation.username != current_user.username:
+            abort(403, "You do not have permissions to accept invitation")
+        if invitation.is_expired():
+            abort(400, "This invitation has already expired.")
+
+        invitation.accept()
+        org = OrganisationSchema(context={"user": current_user}).dump(invitation.organisation)
+        return jsonify(org), 200
+
+    @organisation.route('/invitation/<int:id>', methods=['DELETE'])
+    @auth_required
+    def delete_invitation(id):  # noqa: E501
+        """ Delete/reject organisation invitation.
+        :param id: invitation id
+        :type id: int
+        """
+        from ..celery import send_email_async
+
+        invitation = OrganisationInvitation.query.get_or_404(id, "Invitation does not exist")
+        if invitation.username != current_user.username and \
+                current_user.id not in invitation.organisation.owners + invitation.organisation.admins:
+            abort(403, "You do not have permissions to delete invitation")
+
+        db.session.delete(invitation)
+        db.session.commit()
+        user = User.query.filter(User.username == invitation.username).first()
+
+        body = render_template(
+            'email/organisation_invitation_revoke.html',
+            subject='Organisation invitation revoked',
+            username=invitation.username,
+            org_name=invitation.org_name
+        )
+        email_data = {
+            'subject': 'Your organisation invitation has been revoked',
+            'html': body,
+            'recipients': [user.email],
+            'sender': current_app.config['MAIL_DEFAULT_SENDER']
+        }
+        send_email_async.delay(**email_data)
+        return '', 200
+
+    app.register_blueprint(organisation, url_prefix='/orgs')
diff --git a/server/src/organisation/forms.py b/server/src/organisation/forms.py
new file mode 100644
index 00000000..4eff9930
--- /dev/null
+++ b/server/src/organisation/forms.py
@@ -0,0 +1,39 @@
+# Copyright (C) 2020 Lutra Consulting Limited. All rights reserved.
+# Do not distribute without the express permission of the author.
+
+from flask_wtf import FlaskForm
+from wtforms import StringField, validators, IntegerField, SelectField, BooleanField
+from wtforms.validators import Optional, DataRequired
+from ..forms import namespace_validation
+from ..forms import IntegerListField
+
+
+class CreateOrganisationForm(FlaskForm):
+    """ Form to create/update an organisation """
+    name = StringField('Name', [validators.Length(min=4, max=25), namespace_validation])
+    description = StringField('Description', [validators.Length(max=256), Optional()])
+
+
+class AccessForm(FlaskForm):
+    """ Form to update access to organisation up to admin level. """
+    admins = IntegerListField("Admins", [DataRequired()])
+    writers = IntegerListField("Writers", [DataRequired()])
+    readers = IntegerListField("Readers", [DataRequired()])
+
+
+class OwnerAccessForm(AccessForm):
+    owners = IntegerListField("Owners", [DataRequired()])
+
+
+class UpdateOrganisationForm(FlaskForm):
+    """ Form to update an organisation by its owner """
+    description = StringField('Description', [validators.Length(max=256), Optional()])
+
+
+class OrganisationInvitationForm(FlaskForm):
+    """ Form to create/update an organisation invitation.
""" + org_name = StringField('Organisation name', validators=[DataRequired()]) + username = StringField('Username', validators=[DataRequired()]) + role = SelectField('role', choices=[ + ('reader', 'reader'), ('writer', 'writer'), ('admin', 'admin'), ('owner', 'owner')]) + diff --git a/server/src/organisation/models.py b/server/src/organisation/models.py new file mode 100644 index 00000000..2ef2b27e --- /dev/null +++ b/server/src/organisation/models.py @@ -0,0 +1,112 @@ +# Copyright (C) 2020 Lutra Consulting Limited. All rights reserved. +# Do not distribute without the express permission of the author. + +from datetime import datetime, timedelta + +from sqlalchemy import or_ +from sqlalchemy.dialects.postgresql import ARRAY, BIGINT, ENUM +from .. import db, current_app + + +class Organisation(db.Model): + """ Organization db class. + + Organisation is one-to-one with Mergin namespace (which is unique). + + Organization supports tiers, with default 'free' which means organisation is not ready to use. + + Organization access is managed by access list control: + Owners: users who are allowed remove organisation or change billing + Admins: users who can administer users for organisation (except owners) + Writers: writers have read-write access to organisation namespace + Readers: reader have read-only access to organisation namespace + + """ + id = db.Column(db.Integer, primary_key=True, autoincrement=True) + # modified only by owners + name = db.Column(db.String, nullable=False, index=True) + description = db.Column(db.String, nullable=True) + owners = db.Column(ARRAY(db.Integer), server_default="{}") + # access modified also by admins + admins = db.Column(ARRAY(db.Integer), server_default="{}") + writers = db.Column(ARRAY(db.Integer), server_default="{}") + readers = db.Column(ARRAY(db.Integer), server_default="{}") + registration_date = db.Column(db.DateTime(), nullable=True, default=datetime.utcnow) + active = db.Column(db.Boolean, default=True) + inactive_since = db.Column(db.DateTime(), nullable=True, index=True) + + __table_args__ = (db.UniqueConstraint('name'), + db.Index('ix_org_owners', owners, postgresql_using="gin"), + db.Index('ix_org_readers', readers, postgresql_using="gin"), + db.Index('ix_org_writers', writers, postgresql_using="gin"), + db.Index('ix_org_admins', admins, postgresql_using="gin"),) + + def __init__(self, creator_id, name, **kwargs): + self.name = name + self.owners = [creator_id] + self.admins = [creator_id] + self.writers = [creator_id] + self.readers = [creator_id] + self.description = kwargs.get('description', None) + self.active = True + + @staticmethod + def find_by_member_id(user_id): + return Organisation.query.filter( + or_( + Organisation.owners.contains([user_id]), + Organisation.admins.contains([user_id]), + Organisation.writers.contains([user_id]), + Organisation.readers.contains([user_id]) + ) + ).filter_by(active=True).all() + + def get_member_role(self, user_id): + for role in ('owners', 'admins', 'writers', 'readers'): + if user_id not in getattr(self, role): + continue + return role.rstrip('s') + + +class OrganisationInvitation(db.Model): + """ Organization Invitations db class. + + Adding new users to Organization is invitation based with required confirmation. 
+ """ + id = db.Column(db.Integer, primary_key=True) + org_name = db.Column(db.String, db.ForeignKey("organisation.name", ondelete="CASCADE")) + username = db.Column(db.String, db.ForeignKey("user.username", ondelete="CASCADE")) + role = db.Column(ENUM('reader', 'writer', 'admin', 'owner', name='role'), nullable=False) + expire = db.Column(db.DateTime) + + organisation = db.relationship( + "Organisation", + uselist=False, + backref=db.backref("invitations", single_parent=True, uselist=False, cascade="all,delete") + ) + + user = db.relationship("User", uselist=False) + + def __init__(self, org_name, username, role): + self.org_name = org_name + self.username = username + self.role = role + self.expire = datetime.utcnow() + timedelta(seconds=current_app.config['ORGANISATION_INVITATION_EXPIRATION']) + + def accept(self): + """ The invitation accepted + """ + attribute = self.role + 's' + roles = getattr(self.organisation, attribute) + roles.append(self.user.id) + db.session.refresh(self.organisation) + setattr(self.organisation, attribute, roles) + db.session.add(self.organisation) + db.session.delete(self) + db.session.commit() + + def is_expired(self): + """ Check if invitation is expired + :rtype: bool + """ + return datetime.utcnow() > self.expire diff --git a/server/src/organisation/permission.py b/server/src/organisation/permission.py new file mode 100644 index 00000000..fa4821f2 --- /dev/null +++ b/server/src/organisation/permission.py @@ -0,0 +1,42 @@ +# Copyright (C) 2020 Lutra Consulting Limited. All rights reserved. +# Do not distribute without the express permission of the author. + +from flask_login import current_user +from ..organisation import Organisation + + +class OrganisationPermissions: + """ Get or check organisation by permission """ + + @staticmethod + def _query(user, field): + """ return query of organisation """ + if user.is_authenticated and user.is_admin: + return Organisation.query + if not user.is_authenticated: + return Organisation.query.filter(False) + return Organisation.query.filter(field.any(user.id)) + + class Owner: + @staticmethod + def query(user): + return OrganisationPermissions._query(user, Organisation.owners) + + class Admin: + @staticmethod + def query(user): + return OrganisationPermissions._query(user, Organisation.admins) + + class Writer: + @staticmethod + def query(user): + return OrganisationPermissions._query(user, Organisation.writers) + + class Reader: + @staticmethod + def query(user): + return OrganisationPermissions._query(user, Organisation.readers) + + +def organisations_query(permission): + return permission.query(current_user) diff --git a/server/src/organisation/schemas.py b/server/src/organisation/schemas.py new file mode 100644 index 00000000..2d115eb7 --- /dev/null +++ b/server/src/organisation/schemas.py @@ -0,0 +1,80 @@ +# Copyright (C) 2020 Lutra Consulting Limited. All rights reserved. +# Do not distribute without the express permission of the author. + +from marshmallow import fields +from .. 
import ma +from ..models.db_models import Project, Namespace +from ..auth.models import User +from .models import Organisation, OrganisationInvitation + + +class OrganisationSchema(ma.ModelSchema): + name = fields.Str() + disk_usage = fields.Method("get_disk_usage") + project_count = fields.Method("get_project_count") + owners = fields.Method("get_owners") + admins = fields.Method("get_admins") + writers = fields.Method("get_writers") + readers = fields.Method("get_readers") + storage = fields.Method("get_storage") + role = fields.Method("get_role", dump_only=True) + account = fields.Method("get_account", dump_only=True) + + def get_owners(self, obj): + return self.get_access_usernames(obj, 'owners') + + def get_admins(self, obj): + return self.get_access_usernames(obj, 'admins') + + def get_writers(self, obj): + return self.get_access_usernames(obj, 'writers') + + def get_readers(self, obj): + return self.get_access_usernames(obj, 'readers') + + def get_access_usernames(self, obj, role): + users = User.query.filter(User.id.in_(getattr(obj, role))).all() + return [u.username for u in users] + + def get_disk_usage(self, obj): + return sum([p.disk_usage for p in Project.query.filter_by(namespace=obj.name)]) + + def get_project_count(self, obj): + return Project.query.filter_by(namespace=obj.name).count() + + def get_storage(self, obj): + ns = Namespace.query.filter_by(name=obj.name).first() + return ns.storage + + def get_role(self, obj): + if self.context and 'user' in self.context: + return obj.get_member_role(self.context['user'].id) + else: + return "unknown" + + def _is_owner(self, obj): + return self.context and 'user' in self.context and obj.get_member_role(self.context['user'].id) == "owner" + + def _is_mergin_admin(self, obj): + return self.context and 'user' in self.context and self.context['user'].is_admin + + def get_account(self, obj): + from ..models.db_models import Account + from ..models.schemas import AccountSchema + account = Account.query.filter_by(type='organisation', owner_id=obj.id).first() + if self._is_owner(obj) or self._is_mergin_admin(obj): + return AccountSchema().dump(account) + else: + return AccountSchema(only=('email',)).dump(account) # do not send private information + + class Meta: + model = Organisation + exclude = ('invitations', ) + + +class OrganisationInvitationSchema(ma.ModelSchema): + org_name = fields.Str() + username = fields.Str() + + class Meta: + model = OrganisationInvitation diff --git a/server/src/permissions.py b/server/src/permissions.py new file mode 100644 index 00000000..be2cf065 --- /dev/null +++ b/server/src/permissions.py @@ -0,0 +1,146 @@ +# Copyright (C) 2019 Lutra Consulting Limited. All rights reserved. +# Do not distribute without the express permission of the author. 
+
+import os
+from flask import abort
+from flask_login import current_user
+from sqlalchemy import or_
+
+from .auth.models import User
+from .organisation import Organisation
+from .organisation.permission import organisations_query, OrganisationPermissions
+from .models.db_models import ProjectAccess, Project, Upload, Namespace
+
+
+class ProjectPermissions:
+    class Read:
+        @staticmethod
+        def check(project, user):
+            pa = project.access
+            return pa.public or (user.is_authenticated and (user.is_admin or user.id in pa.readers)) or (check_namespace_permissions(project.namespace, user, "read"))
+
+        @staticmethod
+        def query(user, as_admin=True, public=True):
+            if user.is_authenticated and user.is_admin and as_admin:
+                return Project.query
+            query = Project.access.has(public=public)
+            if user.is_authenticated:
+                orgs = Organisation.query.with_entities(Organisation.name).filter(
+                    or_(Organisation.admins.contains([user.id]), Organisation.readers.contains([user.id]),
+                        Organisation.writers.contains([user.id]), Organisation.owners.contains([user.id])))
+                if public:
+                    query = query | Project.access.has(ProjectAccess.readers.contains([user.id]) | Project.namespace.in_(orgs))
+                else:
+                    query = Project.access.has(ProjectAccess.readers.contains([user.id]) | Project.namespace.in_(orgs))
+            return Project.query.filter(query)
+
+    class Upload:
+        @staticmethod
+        def check(project, user):
+            return user.is_authenticated and (user.id in project.access.writers or check_namespace_permissions(project.namespace, user, "write"))
+
+    class Update:
+        @staticmethod
+        def check(project, user):
+            return user.is_authenticated and (user.is_admin or user.id in project.access.owners or user.username in project.access.owners or check_namespace_permissions(project.namespace, user, "write"))
+
+    class Delete:
+        @staticmethod
+        def check(project, user):
+            return user.is_authenticated and (user.is_admin or user.id in project.access.owners or check_namespace_permissions(project.namespace, user, "write"))
+
+    class All:
+        @staticmethod
+        def check(project, user):
+            return user.is_authenticated and (user.is_admin or user.id in project.access.owners or check_namespace_permissions(project.namespace, user, "admin"))
+
+
+def require_project(ns, project_name, permission):
+    project = Project.query.filter_by(name=project_name, namespace=ns).first_or_404()
+    if not permission.check(project, current_user):
+        abort(403, "You do not have permissions for this project")
+    return project
+
+
+def get_upload(transaction_id):
+    upload = Upload.query.get_or_404(transaction_id)
+    if upload.user_id != current_user.id:
+        abort(403, "You do not have permissions for ongoing upload")
+
+    upload_dir = os.path.join(upload.project.storage.project_dir, "tmp", transaction_id)
+    return upload, upload_dir
+
+
+def projects_query(permission, as_admin=True, public=True):
+    return permission.query(current_user, as_admin, public)
+
+
+def check_namespace_permissions(ns, user, permissions):
+    """ Check if user has permission to the namespace, either directly or granted via an organisation.
+
+    :param ns: namespace
+    :type ns: str
+
+    :param user: user
+    :type user: User
+
+    :param permissions: permission level required for the namespace
+    :type permissions: str
+
+    :return: True if the namespace is the user's own, otherwise whether the organisation grants the permission
+    :rtype: bool
+    """
+    if user.is_anonymous:
+        return False
+    if user.username == ns:
+        return True
+    organisation = Organisation.query.filter_by(name=ns).first()
+    if not organisation:
+        return False
+    if permissions == "read":
+        return user.id in organisation.readers
+    elif permissions == "write":
+        return user.id in organisation.writers
+    elif permissions == "admin":
+        return user.id in organisation.admins
+    else:
+        return False
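Taken together, `require_project` and the `ProjectPermissions` classes give views a one-line guard. A minimal sketch of a hypothetical view using them (the function, namespace and project names are illustrative, not part of this changeset, and it must run inside a request context with flask_login set up):

```python
# Hypothetical view built on require_project(); it aborts with 404 if the
# project is missing and 403 if the permission check fails.
from flask import jsonify

def get_project_info(ns, project_name):
    project = require_project(ns, project_name, ProjectPermissions.Read)
    return jsonify(name=project.name, namespace=project.namespace), 200
```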
+
+
+def namespaces_query(permission):
+    return permission.query(current_user)
+
+
+class NamespacePermissions:
+    """ Get or check namespaces by permission """
+
+    @staticmethod
+    def _query(user, permission):
+        """ Return namespaces accessible to the user at the given permission level. """
+        if user.is_authenticated and user.is_admin:
+            return Namespace.query
+        if not user.is_authenticated:
+            return Namespace.query.filter(False)
+        namespaces = [org.name for org in organisations_query(permission)]
+        namespaces.append(user.username)
+        return Namespace.query.filter(Namespace.name.in_(namespaces)).all()
+
+    class Owner:
+        @staticmethod
+        def query(user):
+            return NamespacePermissions._query(user, OrganisationPermissions.Owner)
+
+    class Admin:
+        @staticmethod
+        def query(user):
+            return NamespacePermissions._query(user, OrganisationPermissions.Admin)
+
+    class Writer:
+        @staticmethod
+        def query(user):
+            return NamespacePermissions._query(user, OrganisationPermissions.Writer)
+
+    class Reader:
+        @staticmethod
+        def query(user):
+            return NamespacePermissions._query(user, OrganisationPermissions.Reader)
\ No newline at end of file
diff --git a/server/src/run_celery.py b/server/src/run_celery.py
new file mode 100644
index 00000000..35ab2ffe
--- /dev/null
+++ b/server/src/run_celery.py
@@ -0,0 +1,20 @@
+# Copyright (C) 2020 Lutra Consulting Limited. All rights reserved.
+# Do not distribute without the express permission of the author.
+
+from . import create_app
+from .celery import celery
+
+app = create_app()
+celery.conf.update(app.config)
+
+
+# configure celery with flask context https://flask.palletsprojects.com/en/1.1.x/patterns/celery/
+# e.g. for using flask-mail
+class ContextTask(celery.Task):
+    """ Attach flask app context to celery task """
+    def __call__(self, *args, **kwargs):
+        with app.app_context():
+            return self.run(*args, **kwargs)
+
+
+celery.Task = ContextTask
diff --git a/server/src/storages/__init__.py b/server/src/storages/__init__.py
new file mode 100644
index 00000000..c18440bb
--- /dev/null
+++ b/server/src/storages/__init__.py
@@ -0,0 +1,5 @@
+# Copyright (C) 2018 Lutra Consulting Limited. All rights reserved.
+# Do not distribute without the express permission of the author.
+
+from .disk import DiskStorage
+from .storage import InvalidProject, FileNotFound
diff --git a/server/src/storages/disk.py b/server/src/storages/disk.py
new file mode 100644
index 00000000..ad45d2d4
--- /dev/null
+++ b/server/src/storages/disk.py
@@ -0,0 +1,357 @@
+# Copyright (C) 2018 Lutra Consulting Limited. All rights reserved.
+# Do not distribute without the express permission of the author.
+
+import os
+import io
+import time
+import uuid
+import logging
+from datetime import datetime
+from flask import current_app
+from pygeodiff import GeoDiff, GeoDiffLibError
+from pygeodiff.geodifflib import GeoDiffLibConflictError
+from gevent import sleep
+from .storage import ProjectStorage, FileNotFound, DataSyncError, InitializationError
+from ..mergin_utils import resolve_tags, generate_checksum, int_version, is_versioned_file
+from ..util import mergin_secure_filename
+
+
+def save_to_file(stream, path, max_size=None):
+    """ Save readable object in file while yielding to gevent hub.
+
+    :param stream: object implementing readable interface
+    :param path: destination file path
+    :param max_size: limit for file size
+    """
+    directory = os.path.abspath(os.path.dirname(path))
+    os.makedirs(directory, exist_ok=True)
+    with open(path, 'wb') as output:
+        writer = io.BufferedWriter(output, buffer_size=32768)
+        size = 0
+        while True:
+            part = stream.read(4096)
+            sleep(0)  # to unblock greenlet
+            if part:
+                size += len(part)
+                if max_size and size > max_size:
+                    raise IOError("Saved file exceeds the size limit")
+                writer.write(part)
+            else:
+                writer.flush()
+                break
+
+
+def copy_file(src, dest):
+    """ Custom implementation of copying a file by chunk with yielding to gevent hub.
+
+    see save_to_file
+
+    :param src: abs path to file
+    :type src: str, path-like object
+    :param dest: abs path to destination file
+    :type dest: str, path-like object
+    """
+    if not os.path.isfile(src):
+        raise FileNotFoundError(src)
+    directory = os.path.abspath(os.path.dirname(dest))
+    os.makedirs(directory, exist_ok=True)
+    with open(src, 'rb') as src_file:
+        save_to_file(src_file, dest)
+
+
+def copy_dir(src, dest):
+    """ Custom implementation of recursive copy of a directory with yielding to gevent hub.
+
+    :param src: abs path to dir
+    :type src: str, path-like object
+    :param dest: destination folder
+    :type dest: str, path-like object
+    """
+    if not os.path.isdir(src):
+        raise NotADirectoryError(src)
+    for root, dirs, files in os.walk(src):
+        for file in files:
+            abs_path = os.path.abspath(os.path.join(root, file))
+            rel_path = os.path.relpath(abs_path, start=src)
+            copy_file(abs_path, os.path.join(dest, rel_path))
+
+
+def move_to_tmp(src, dest=None):
+    """ Custom handling of file/directory removal by moving it to a regularly cleaned tmp folder.
+    This is mainly to avoid standard tools which could block the gevent hub for large files.
+
+    :param src: abs path to file/directory
+    :type src: str, path-like object
+    :param dest: subdir in temp folder (e.g. transaction_id), defaults to None
+    :type dest: str, path-like object
+    :returns: path where the file was moved to
+    :rtype: str, path-like object
+    """
+    if not os.path.exists(src):
+        return
+    dest = dest if dest else str(uuid.uuid4())
+    rel_path = os.path.relpath(src, start=current_app.config['LOCAL_PROJECTS'])  # take relative path from parent of all project files
+    temp_path = os.path.join(current_app.config['TEMP_DIR'], dest, rel_path)
+    os.renames(src, temp_path)
+    return temp_path
+
+
+class DiskStorage(ProjectStorage):
+
+    def __init__(self, project):
+        super(DiskStorage, self).__init__(project)
+        self.projects_dir = current_app.config['LOCAL_PROJECTS']
+        self.project_dir = self._project_dir()
+        self.geodiff = GeoDiff()
+
+    def _project_dir(self):
+        project_dir = os.path.abspath(
+            os.path.join(self.projects_dir, self.project.storage_params["location"])
+        )
+        return project_dir
+
+    def initialize(self, template_project=None):
+        if os.path.exists(self.project_dir):
+            raise InitializationError("Project directory already exists: {}".format(self.project_dir))
+
+        os.makedirs(self.project_dir)
+
+        if template_project:
+            from ..models.db_models import Namespace
+            ns = Namespace.query.filter_by(name=self.project.namespace).first()
+            if ns.disk_usage() + template_project.disk_usage > ns.storage:
+                self.delete()
+                raise InitializationError("Disk quota reached")
+            forked_files = []
+
+            for file in template_project.files:
+                forked_file = dict(file)
+                forked_file['location'] = os.path.join('v1/', file['path'])
+                forked_file['mtime'] = datetime.utcnow()
+                forked_files.append(forked_file)
+
+                src = os.path.join(template_project.storage.project_dir, file['location'])
+                dest = os.path.join(self.project_dir, forked_file['location'])
+                try:
+                    copy_file(src, dest)
+                except (FileNotFoundError, IOError):
+                    self.delete()
+                    raise InitializationError("IOError: failed to copy '{}' to '{}'".format(src, dest))
+                except Exception as e:
+                    self.delete()
+                    raise InitializationError(str(e))
+
+            self.project.files = forked_files
+            self.project.tags = template_project.tags
+            self.project.disk_usage = sum([f['size'] for f in self.project.files])
+
+    def file_size(self, file):
+        file_path = os.path.join(self.project_dir, file)
+        if not os.path.exists(file_path):
+            raise FileNotFound("File {} not found.".format(file))
+        return os.path.getsize(file_path)
+
+    def file_path(self, file):
+        path = os.path.join(self.project_dir, file)
+        if not os.path.exists(path):
+            raise FileNotFound("File {} not found.".format(file))
+        return path
+
+    def read_file(self, path, block_size=4096):
+        file_path = os.path.join(self.project_dir, path)
+
+        # do input validation outside generator to execute immediately
+        if not os.path.exists(file_path):
+            raise FileNotFound("File {} not found.".format(path))
+
+        def _generator():
+            with open(file_path, 'rb') as f:
+                while True:
+                    data = f.read(block_size)
+                    sleep(0)
+                    if data:
+                        yield data
+                    else:
+                        break
+
+        return _generator()
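`read_file` returns a generator that yields fixed-size chunks and cooperatively sleeps between them, so it can back a streamed response without blocking the gevent hub. A minimal sketch, with a hypothetical file path:

```python
# Hypothetical streaming endpoint body; DiskStorage.read_file() yields chunks,
# so the file is never loaded into memory at once.
from flask import Response

def stream_file(storage):  # storage: a DiskStorage instance
    gen = storage.read_file('v1/survey.gpkg', block_size=8192)  # path is illustrative
    return Response(gen, mimetype='application/octet-stream')
```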
+    def apply_changes(self, changes, version, transaction_id):
+        sync_errors = {}
+        modified_files = []
+
+        to_remove = [i['path'] for i in changes['removed']]
+        files = list(filter(lambda i: i['path'] not in to_remove, self.project.files))
+        for item in changes['renamed']:
+            renamed = next((i for i in files if i['path'] == item['path']), None)
+            if renamed:
+                renamed['path'] = item['new_path']
+            else:
+                sync_errors[item['new_path']] = "renaming error"
+                continue
+
+        for f in changes['updated']:
+            sleep(0)  # yield to gevent hub since the geodiff action can take some time; prevents worker timeout
+            old_item = next((i for i in files if i["path"] == f["path"]), None)
+            if not old_item:
+                sync_errors[f['path']] = "file not found on server"
+                continue
+            if 'diff' in f:
+                basefile = os.path.join(self.project_dir, old_item["location"])
+                changeset = os.path.join(self.project_dir, version, f['diff']['path'])
+                patchedfile = os.path.join(self.project_dir, version, f['path'])
+                modified_files.append(changeset)
+                modified_files.append(patchedfile)
+                # create copy of basefile which will be updated in next version
+                # TODO this can potentially fail for large files
+                logging.info(f"Apply changes: copying {basefile} to {patchedfile}")
+                start = time.time()
+                copy_file(basefile, patchedfile)
+                logging.info(f"Copying finished in {time.time()-start} s")
+                try:
+                    logging.info(f"Geodiff: apply changeset {changeset} of size {os.path.getsize(changeset)} to {patchedfile}")
+                    start = time.time()
+                    self.geodiff.apply_changeset(patchedfile, changeset)
+                    logging.info(f"Changeset applied in {time.time() - start} s")
+                except (GeoDiffLibError, GeoDiffLibConflictError) as err:
+                    sync_errors[f["path"]] = f"project: {self.project.namespace}/{self.project.name}, geodiff error {str(err)}"
+                    continue
+
+                f["diff"]["location"] = os.path.join(
+                    version, f['diff']['sanitized_path'] if 'sanitized_path' in f['diff'] else mergin_secure_filename(f['diff']['path']))
+
+                # we can now replace old basefile metadata with the new one (patchedfile)
+                # TODO this can potentially fail for large files
+                logging.info(f"Apply changes: calculating checksum of {patchedfile}")
+                start = time.time()
+                f['checksum'] = generate_checksum(patchedfile)
+                logging.info(f"Checksum calculated in {time.time() - start} s")
+                f['size'] = os.path.getsize(patchedfile)
+            else:
+                old_item.pop("diff", None)
+
+            if 'chunks' in f:
+                f.pop("chunks")
+            f['location'] = os.path.join(
+                version,
+                f['sanitized_path'] if 'sanitized_path' in f else mergin_secure_filename(f['path']))
+            if not sync_errors:
+                old_item.update(f)
+
+        if sync_errors:
+            for file in modified_files:
+                move_to_tmp(file, transaction_id)
+            msg = ""
+            for key, value in sync_errors.items():
+                msg += key + " error=" + value + "\n"
+            raise DataSyncError(msg)
+
+        for item in changes['added']:
+            files.append({
+                'path': item['path'],
+                'size': item['size'],
+                'checksum': item['checksum'],
+                'mtime': item['mtime'],
+                'location': os.path.join(
+                    version,
+                    item['sanitized_path'] if 'sanitized_path' in item else mergin_secure_filename(item['path']))
+            })
+
+        self.project.files = files
+        self.project.tags = resolve_tags(files)
+
+    def delete(self):
+        move_to_tmp(self.project_dir)
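The diff branch of `apply_changes` boils down to copying the previous basefile and patching it with the uploaded changeset. The same flow reduced to the bare pygeodiff calls, with hypothetical paths (the sketch uses plain `shutil` instead of the gevent-aware `copy_file`):

```python
# Sketch of the basefile + changeset flow above; paths are illustrative.
from shutil import copyfile
from pygeodiff import GeoDiff

geodiff = GeoDiff()
copyfile('v1/survey.gpkg', 'v2/survey.gpkg')  # start from the previous version's basefile
geodiff.apply_changeset('v2/survey.gpkg', 'v2/survey.gpkg-diff')  # patch it in place
```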
+ """ + files = [f for f in self.project.files if 'diff' in f.keys()] + last_version = sorted(self.project.versions, key=lambda ver: int_version(ver.name))[-1] + for f in files: + f_history = self.project.file_history(f['path'], 'v1', last_version.name) + if not f_history: + continue + for item in f_history.values(): + if 'diff' in item: + if item['location'] == f['location']: + continue # skip latest file version + abs_path = os.path.join(self.project_dir, item['location']) + if not os.path.exists(abs_path): + continue # already removed + age = time.time() - os.path.getmtime(abs_path) + if age > current_app.config['FILE_EXPIRATION']: + move_to_tmp(abs_path) + + def restore_versioned_file(self, file, version): + """ + For removed versioned files tries to restore full file in particular project version + using file diffs history (latest basefile and sequence of diffs). + + :param file: path of file in project to recover + :type file: str + :param version: project version (e.g. v2) + :type version: str + """ + if not is_versioned_file(file): + return + + # if project version is not found, return it + project_version = next((v for v in self.project.versions if v.name == version), None) + if not project_version: + return + + # check actual file from the version files + file_found = next((i for i in project_version.files if i['path'] == file), None) + + # check the location that we found on the file + if not file_found or os.path.exists(os.path.join(self.project_dir, file_found['location'])): + return + + basefile_meta = {} + diffs = [] + f_history = self.project.file_history(file, 'v1', version) + if not f_history: + return + # history starts from the latest change, we stop when reaching basefile + for item in f_history.values(): + if item['change'] in ['added', 'updated']: + if 'diff' in item: + diffs.append(item['diff']) + else: + basefile_meta = item + break + else: + continue + + if not (basefile_meta and diffs): + return + + basefile = os.path.join(self.project_dir, basefile_meta['location']) + tmp_dir = os.path.join(current_app.config['TEMP_DIR'], str(uuid.uuid4())) + os.mkdir(tmp_dir) + restored_file = os.path.join(tmp_dir, os.path.basename(basefile)) # this is final restored file + logging.info(f"Restore file: copying {basefile} to {restored_file}") + start = time.time() + copy_file(basefile, restored_file) + logging.info(f"File copied in {time.time() - start} s") + logging.info(f"Restoring gpkg file with {len(diffs)} diffs") + for diff in reversed(diffs): + sleep(0) # yield to gevent hub since geodiff action can take some time, and in case of a lot of diffs it could time out + changeset = os.path.join(self.project_dir, diff['location']) + try: + logging.info(f"Geodiff: apply changeset {changeset} of size {os.path.getsize(changeset)}") + start = time.time() + self.geodiff.apply_changeset(restored_file, changeset) + logging.info(f"Changeset applied in {time.time() - start} s") + except (GeoDiffLibError, GeoDiffLibConflictError) as e: + logging.exception(f"Failed to restore file: {str(e)} from project {self.project.namespace}/{self.project.name}") + return + # move final restored file to place where it is expected (only after it is successfully created) + logging.info(f"Copying restored file to expected location {file_found['location']}") + start = time.time() + copy_file(restored_file, os.path.join(self.project_dir, file_found['location'])) + logging.info(f"File copied in {time.time() - start} s") diff --git a/server/src/storages/storage.py b/server/src/storages/storage.py new file mode 
100644 index 00000000..3dd9b5c2 --- /dev/null +++ b/server/src/storages/storage.py @@ -0,0 +1,108 @@ +# Copyright (C) 2018 Lutra Consulting Limited. All rights reserved. +# Do not distribute without the express permission of the author. +from urllib.parse import quote + +from flask import Response +from requests_toolbelt import MultipartEncoder +from gevent import sleep +import zipfly + + +class InvalidProject(Exception): + pass + + +class FileNotFound(Exception): + pass + + +class DataSyncError(Exception): + pass + + +class InitializationError(Exception): + pass + + +class StorageFile(object): + def __init__(self, storage, file): + self.storage = storage + self.file = file + self.fp = 0 + self._stream = None + + @property + def len(self): + if not hasattr(self, '_total_len'): + self._total_len = self.storage.file_size(self.file) + return self._total_len - self.fp + + def read(self, chunk_size): + if not self._stream: + self._preload = b'' + self._stream = self.storage.read_file(self.file, chunk_size) + + data = self._preload + while len(data) < chunk_size: + try: + chunk = next(self._stream) + except StopIteration: + chunk = None + if not chunk: + self._preload = b'' + self.fp += len(data) + return data + data += chunk + + self._preload = data[chunk_size:] + data = data[:chunk_size] + self.fp += len(data) + return data + + +class ProjectStorage: + + def __init__(self, project): + self.project = project + + def read_file(self, path, block_size=4096): + raise NotImplementedError + + def file_size(self, file): + raise NotImplementedError + + def file_path(self, file): + raise NotImplementedError + + def restore_versioned_file(self, file, version): + raise NotImplementedError + + def download_files(self, files, files_format=None, version=None): + """ Download files + :type files: list of dict + """ + if version: + for f in files: + self.restore_versioned_file(f['path'], version) + if files_format == 'zip': + paths = [{'fs': self.file_path(f['location']), 'n': f['path']} for f in files] + z = zipfly.ZipFly(mode='w', paths=paths) + response = Response(z.generator(), mimetype='application/zip') + response.headers['Content-Disposition'] = 'attachment; filename={}{}.zip'.format( + quote(self.project.name.encode("utf-8")), '-' + version if version else '') + return response + files_dict = {} + for f in files: + path = f['path'] + files_dict[path] = (path, StorageFile(self, f['location'])) + encoder = MultipartEncoder(files_dict) + + def _generator(): + while True: + data = encoder.read(4096) + sleep(0) + if data: + yield data + else: + break + return Response(_generator(), mimetype=encoder.content_type) diff --git a/server/src/templates/email/account_has_been_closed_warning.html b/server/src/templates/email/account_has_been_closed_warning.html new file mode 100644 index 00000000..f28a90bd --- /dev/null +++ b/server/src/templates/email/account_has_been_closed_warning.html @@ -0,0 +1,8 @@ +{% extends "email/components/content.html" %} +{% block html %} +

Dear {{ account_name }},


+

{% if account_type == "user" %} We're sorry you're leaving us. {% endif %} Your account has now been deactivated and will be permanently deleted in {{ days }} days. + Should you change your mind in the meantime, please contact us at {{ contact_email }} and we can reactivate your account for you. + After {{ days }} days your account will have been deleted and will not be recoverable.

+ +{% endblock %} \ No newline at end of file diff --git a/server/src/templates/email/components/base.html b/server/src/templates/email/components/base.html new file mode 100644 index 00000000..fd97a251 --- /dev/null +++ b/server/src/templates/email/components/base.html @@ -0,0 +1,258 @@ +{% set base_url = config['MERGIN_BASE_URL'] or "https://public.cloudmergin.com" %} +{% set logo_url = config['MERGIN_LOGO_URL'] %} + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+<!-- [base.html continues with the responsive table layout of the email:
+      a header cell with the Mergin logo (a commented-out include of
+      email/components/logo.html), an optional {{ subject }} heading, the
+      default {% block content %} with placeholder copy ("Dear user," /
+      lorem ipsum / "Let us know if you have more questions." /
+      "Kind regards, The Mergin team"), and a footer note:
+      "You can disable notifications in your profile." — the raw table
+      markup was lost in extraction and is omitted here] -->
+ + \ No newline at end of file diff --git a/server/src/templates/email/components/content.html b/server/src/templates/email/components/content.html new file mode 100644 index 00000000..50faf29c --- /dev/null +++ b/server/src/templates/email/components/content.html @@ -0,0 +1,8 @@ +{% extends "email/components/base.html" %} +{% import "email/components/content_row.html" as content_row with context %} + +{% block content %} + {% call content_row.content() %} + {% block html %} {% endblock %} + {% endcall %} +{% endblock %} \ No newline at end of file diff --git a/server/src/templates/email/components/content_row.html b/server/src/templates/email/components/content_row.html new file mode 100644 index 00000000..0a283eee --- /dev/null +++ b/server/src/templates/email/components/content_row.html @@ -0,0 +1,9 @@ +{% macro content() -%} + + +
+ {{ caller() }} +
+ + +{%- endmacro %} \ No newline at end of file diff --git a/server/src/templates/email/components/logo.html b/server/src/templates/email/components/logo.html new file mode 100644 index 00000000..d65966a1 --- /dev/null +++ b/server/src/templates/email/components/logo.html @@ -0,0 +1,470 @@ +Mergin \ No newline at end of file diff --git a/server/src/templates/email/email_confirmation.html b/server/src/templates/email/email_confirmation.html new file mode 100644 index 00000000..74cbc154 --- /dev/null +++ b/server/src/templates/email/email_confirmation.html @@ -0,0 +1,6 @@ +{% extends "email/components/content.html" %} +{% block html %} +

Dear {{ user.username }},


+

To verify your email address please follow this link:

+

{{ confirm_url }}

+{% endblock %} diff --git a/server/src/templates/email/modified_project_access.html b/server/src/templates/email/modified_project_access.html new file mode 100644 index 00000000..32e46d92 --- /dev/null +++ b/server/src/templates/email/modified_project_access.html @@ -0,0 +1,11 @@ +{% extends "email/components/content.html" %} +{% block html %} +

Dear {{ user.username }},


+

Your access privileges to Mergin project {{ project.namespace }}/{{ project.name }} have been modified.

+

You can now + {% for item in privileges %} + {{ item }}{% if not loop.last %}, {% endif %} + {% endfor %} + the project. +

+{% endblock %} diff --git a/server/src/templates/email/notification_error.html b/server/src/templates/email/notification_error.html new file mode 100644 index 00000000..5985b944 --- /dev/null +++ b/server/src/templates/email/notification_error.html @@ -0,0 +1,18 @@ +{% extends "email/components/content.html" %} +{% block html %} +

Inconsistent webhook event detected.

+

Event ID: {{ event.id }} +

Event type: {{ event.type }} +

Status: {{ event.status }} +

Reason: {{ event.msg }} +

Account: {{ account }}

+ +
+ {% if event.source == 'stripe' %} +

Please check both Stripe and Mergin dashboards to resolve the issue.

+ {% else %} +

Receipt original transaction id: {{ original_id }}

+

Please check the Mergin dashboard and contact the user to resolve the issue.

+ {% endif %} +{% endblock %} + diff --git a/server/src/templates/email/organisation_invitation.html b/server/src/templates/email/organisation_invitation.html new file mode 100644 index 00000000..2b735477 --- /dev/null +++ b/server/src/templates/email/organisation_invitation.html @@ -0,0 +1,15 @@ +{% extends "email/components/content.html" %} +{% block html %} +

Dear {{ username }},

+
+

You have been invited to join the organisation {{ invitation.org_name }} as + {% if invitation.role in ['owner', 'admin'] %} + an {{ invitation.role }}. + {% else %} + a {{ invitation.role }}. + {% endif %} + + The invitation will expire on {{ invitation.expire.strftime('%Y-%m-%d %H:%M') }}. + You can manage your invitations here. +

+{% endblock %} diff --git a/server/src/templates/email/organisation_invitation_revoke.html b/server/src/templates/email/organisation_invitation_revoke.html new file mode 100644 index 00000000..1e6ae237 --- /dev/null +++ b/server/src/templates/email/organisation_invitation_revoke.html @@ -0,0 +1,6 @@ +{% extends "email/components/content.html" %} +{% block html %} +

Dear {{ username }},

+
+

Your invitation to organisation {{ org_name }} has been revoked by its sender.

+{% endblock %} diff --git a/server/src/templates/email/password_reset.html b/server/src/templates/email/password_reset.html new file mode 100644 index 00000000..4dba00e2 --- /dev/null +++ b/server/src/templates/email/password_reset.html @@ -0,0 +1,6 @@ +{% extends "email/components/content.html" %} +{% block html %} +

Dear {{ user.username }},


+

Please follow this link to reset your password:

+

{{ confirm_url }}

+{% endblock %} diff --git a/server/src/templates/email/profile_changed.html b/server/src/templates/email/profile_changed.html new file mode 100644 index 00000000..8307eab8 --- /dev/null +++ b/server/src/templates/email/profile_changed.html @@ -0,0 +1,20 @@ +{% extends "email/components/content.html" %} +{% block html %} +

Dear {{ user.username }},


+

Your profile has been updated as follows:

+

+

    + {% for key, value in changes.items() %} +
•   {{ key }} :
+        {{ value['before'] }} →
+        {% if value['after'] == None %}
+        N/A
+        {% else %}
+        {{ value['after'] }}
+        {% endif %}
+      {% endfor %}
+

+{% endblock %} diff --git a/server/src/templates/email/project_access_request.html b/server/src/templates/email/project_access_request.html new file mode 100644 index 00000000..68ee8fbd --- /dev/null +++ b/server/src/templates/email/project_access_request.html @@ -0,0 +1,7 @@ +{% extends "email/components/content.html" %} +{% block html %} +

Dear {{ username }},


+

A user {{ user }} has requested access to the project {{ project_name }}. + This request will expire on {{ expire.strftime('%Y-%m-%d %H:%M') }}. You can manage your access requests here. +

+{% endblock %} diff --git a/server/src/templates/email/project_transfer_request.html b/server/src/templates/email/project_transfer_request.html new file mode 100644 index 00000000..6bfb1445 --- /dev/null +++ b/server/src/templates/email/project_transfer_request.html @@ -0,0 +1,7 @@ +{% extends "email/components/content.html" %} +{% block html %} +

Dear {{ username }},


+

A request has been made to transfer the project {{ project_name }} from namespace {{ namespace_from }} to namespace {{ namescape_to }}. + This project transfer request will expire on {{ expire.strftime('%Y-%m-%d %H:%M') }}. You can manage your projects transfers here. +

+{% endblock %} diff --git a/server/src/templates/email/removed_project.html b/server/src/templates/email/removed_project.html new file mode 100644 index 00000000..d230a692 --- /dev/null +++ b/server/src/templates/email/removed_project.html @@ -0,0 +1,5 @@ +{% extends "email/components/content.html" %} +{% block html %} +

Dear {{ username }},


+

Project {{ project.namespace }}/{{ project.name }} which you had access to has now been removed.

+{% endblock %} diff --git a/server/src/templates/email/removed_project_access.html b/server/src/templates/email/removed_project_access.html new file mode 100644 index 00000000..deb8731b --- /dev/null +++ b/server/src/templates/email/removed_project_access.html @@ -0,0 +1,5 @@ +{% extends "email/components/content.html" %} +{% block html %} +

Dear {{ user.username }},


+

Your access to the Mergin project {{ project.namespace }}/{{ project.name }} has been removed.

+{% endblock %} diff --git a/server/src/templates/email/simple_template.html b/server/src/templates/email/simple_template.html new file mode 100644 index 00000000..08c3a1ad --- /dev/null +++ b/server/src/templates/email/simple_template.html @@ -0,0 +1,5 @@ +{% extends "email/components/content.html" %} +{% block html %} +

Dear {{ username }},


+

{{ message }}

+{% endblock %} diff --git a/server/src/templates/email/user_registration.html b/server/src/templates/email/user_registration.html new file mode 100644 index 00000000..67aad4b1 --- /dev/null +++ b/server/src/templates/email/user_registration.html @@ -0,0 +1,6 @@ +{% extends "email/components/content.html" %} +{% block html %} +

Dear {{ user.username }},


+

Thank you! You have successfully registered with Mergin. To verify your email address please follow this link:

+

{{ confirm_url }}

+{% endblock %} diff --git a/server/src/templates/email/welcome_email.html b/server/src/templates/email/welcome_email.html new file mode 100644 index 00000000..7b024a0b --- /dev/null +++ b/server/src/templates/email/welcome_email.html @@ -0,0 +1,16 @@ +{% extends "email/components/content.html" %} +{% block html %} +

+ Not sure where to start? Have a look at our quick start guide: + https://help.cloudmergin.com/quick-start.html +


+

+ For some of the common questions, see:
+ https://help.cloudmergin.com/faq.html
+ https://help.inputapp.io/faq

+{% endblock %} \ No newline at end of file diff --git a/server/src/util.py b/server/src/util.py new file mode 100644 index 00000000..eb3e861b --- /dev/null +++ b/server/src/util.py @@ -0,0 +1,236 @@ +import datetime +import math +import os +import re +import six +import typing +from pathvalidate import sanitize_filename + + +def _deserialize(data, klass): + """Deserializes dict, list or str into an object. + + :param data: dict, list or str. + :param klass: class literal, or string of class name. + + :return: object. + """ + if data is None: + return None + + if klass in six.integer_types or klass in (float, str, bool): + return _deserialize_primitive(data, klass) + elif klass == object: + return _deserialize_object(data) + elif klass == datetime.date: + return deserialize_date(data) + elif klass == datetime.datetime: + return deserialize_datetime(data) + elif type(klass) == typing.GenericMeta: + if klass.__extra__ == list: + return _deserialize_list(data, klass.__args__[0]) + if klass.__extra__ == dict: + return _deserialize_dict(data, klass.__args__[1]) + else: + return deserialize_model(data, klass) + + +def _deserialize_primitive(data, klass): + """Deserializes to primitive type. + + :param data: data to deserialize. + :param klass: class literal. + + :return: int, long, float, str, bool. + :rtype: int | long | float | str | bool + """ + try: + value = klass(data) + except UnicodeEncodeError: + value = six.u(data) + except TypeError: + value = data + return value + + +def _deserialize_object(value): + """Return the original value unchanged. + + :return: object. + """ + return value + + +def deserialize_date(string): + """Deserializes string to date. + + :param string: str. + :type string: str + :return: date. + :rtype: date + """ + try: + from dateutil.parser import parse + return parse(string).date() + except ImportError: + return string + + +def deserialize_datetime(string): + """Deserializes string to datetime. + + The string should be in iso8601 datetime format. + + :param string: str. + :type string: str + :return: datetime. + :rtype: datetime + """ + try: + from dateutil.parser import parse + return parse(string) + except ImportError: + return string + + +def deserialize_model(data, klass): + """Deserializes list or dict to model. + + :param data: dict, list. + :type data: dict | list + :param klass: class literal. + :return: model object. + """ + instance = klass() + + if not instance.swagger_types: + return data + + for attr, attr_type in six.iteritems(instance.swagger_types): + if data is not None \ + and instance.attribute_map[attr] in data \ + and isinstance(data, (list, dict)): + value = data[instance.attribute_map[attr]] + setattr(instance, attr, _deserialize(value, attr_type)) + + return instance + + +def _deserialize_list(data, boxed_type): + """Deserializes a list and its elements. + + :param data: list to deserialize. + :type data: list + :param boxed_type: class literal. + + :return: deserialized list. + :rtype: list + """ + return [_deserialize(sub_data, boxed_type) + for sub_data in data] + + +def _deserialize_dict(data, boxed_type): + """Deserializes a dict and its elements. + + :param data: dict to deserialize. + :type data: dict + :param boxed_type: class literal. + + :return: deserialized dict. + :rtype: dict + """ + return {k: _deserialize(v, boxed_type) + for k, v in six.iteritems(data)} + + +def get_byte_string(size_bytes): + """ Return a human-readable string for a size in bytes. + + :param size_bytes: size in bytes. + :type size_bytes: int + + :return: size formatted with a binary unit suffix.
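+ Example (illustration): get_byte_string(2048) returns "2.0 KB" and get_byte_string(0) returns "0B".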
+ :rtype: str + """ + + if size_bytes == 0: + return "0B" + size_name = ("B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB") + i = int(math.floor(math.log(size_bytes, 1024))) + power = math.pow(1024, i) + size = round(size_bytes / power, 2) + return "%s %s" % (size, size_name[i]) + + +def convert_byte(size_bytes, unit): + """ Convert a size in bytes to another unit. + + :param size_bytes: size in bytes. + :type size_bytes: int + + :param unit: target unit, e.g. "MB"; an unknown unit falls back to bytes. + :type unit: str + + :return: size in the target unit (the string "0B" for zero input). + :rtype: float + """ + + if size_bytes == 0: + return "0B" + units = ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"] + i = 0 + try: + i = units.index(unit.upper()) + except ValueError: + pass + if i > 0: + power = math.pow(1024, i) + size_bytes = round(size_bytes / power, 2) + return size_bytes + + +def is_name_allowed(string): + """ Check that the string contains only whitelisted characters. + + :param string: string to be checked. + :type string: str + + :return: truthy match object if the name is allowed, None otherwise. + :rtype: re.Match | None + """ + return re.match(r'^[\w ._\-~()\'!*:@,;]+$', string) + + +def mergin_secure_filename(filename): + """ Sanitize a path, securing each of its components. + + :param filename: path to be sanitized. + :type filename: str + + :return: secured filename + :rtype: str + """ + filename = os.path.normpath(filename) + return os.path.join(*[sanitize_filename(path, replacement_text="_") for path in filename.split(os.sep)]) + + +def get_path_from_files(files, path, is_diff=False): + """ Return the stored sanitized path for a file if present, otherwise its plain path (or a newly secured one). + + :param files: list of files + :type files: list + + :param path: path that will be checked + :type path: str + + :return: secured filename + :rtype: str + """ + for file in files: + if file['path'] == path: + if is_diff: + return file['diff']['sanitized_path'] if 'sanitized_path' in file['diff'] else file['diff']['path'] + else: + return file['sanitized_path'] if 'sanitized_path' in file else file['path'] + return mergin_secure_filename(path) diff --git a/server/src/webhooks/__init__.py b/server/src/webhooks/__init__.py new file mode 100644 index 00000000..13b28cd2 --- /dev/null +++ b/server/src/webhooks/__init__.py @@ -0,0 +1,4 @@ +# Copyright (C) 2019 Lutra Consulting Limited. All rights reserved. +# Do not distribute without the express permission of the author. + +from .webhook import Webhook, WebhookManager \ No newline at end of file diff --git a/server/src/webhooks/webhook.py b/server/src/webhooks/webhook.py new file mode 100644 index 00000000..d73dd61e --- /dev/null +++ b/server/src/webhooks/webhook.py @@ -0,0 +1,82 @@ +# Copyright (C) 2019 Lutra Consulting Limited. All rights reserved. +# Do not distribute without the express permission of the author. + +import requests +import logging +from urllib.parse import urlparse +import json +import functools +from blinker import Namespace +import concurrent.futures + + +def is_valid_url(url): + parsed = urlparse(url) + return parsed.scheme and parsed.netloc + + +class Webhook: + """ + Base class for a webhook. + + Either subclass this and override format_data() to match the format expected + by the target service, or make sure the data you pass in is already correct.
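+
+ A minimal subclass sketch (hypothetical service expecting a Slack-style payload):
+
+     class SlackHook(Webhook):
+         def format_data(self, data):
+             return {'text': str(data)}
+
+ send() then posts format_data(msg) as JSON to self.url from a worker thread.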
+ """ + def __init__(self, name, url): + self.name = name + self.url = url + + def format_data(self, data): + return data # to be overwritten + + def request(self, data): + return requests.post(self.url, data=json.dumps(data), headers={'Content-Type': 'application/json'}) + + def send(self, sender, **kwargs): + if not is_valid_url(self.url): + logging.warning(f'Invalid url {self.url}, webhook {self.name} from sender {sender} failed: {str(kwargs)}') + return + data = kwargs.get('msg', '') + msg = self.format_data(data) + with concurrent.futures.ThreadPoolExecutor() as executor: + future = executor.submit(self.request, msg) + resp = future.result() + if not resp.ok: + logging.warning(f'Webhook {self.name} from sender {sender} failed: {resp.text}') + return resp + + +class WebhookManager: + """ Base class to handle (blinker) signals connected to Webhook handlers. """ + def __init__(self): + self.signals = Namespace() + + def check_signal(func): + @functools.wraps(func) + def wrapper(self, *args, **kwargs): + if args[0] not in self.signals: + return + return func(self, *args, **kwargs) + return wrapper + + def register_signal(self, signal): + self.signals.signal(signal) + + @check_signal + def remove_signal(self, signal): + self.signals.pop(signal) + + @check_signal + def connect_handler(self, signal, handler): + if isinstance(handler, Webhook): + self.signals[signal].connect(handler.send, weak=False) + + @check_signal + def disconnect_handler(self, signal, handler): + self.signals[signal].disconnect(handler.send) + + @check_signal + def emit_signal(self, signal, sender, **kwargs): + return self.signals[signal].send(sender, **kwargs) + + check_signal = staticmethod(check_signal) diff --git a/server/swagger.yaml b/server/swagger.yaml new file mode 100644 index 00000000..68896e33 --- /dev/null +++ b/server/swagger.yaml @@ -0,0 +1,1372 @@ +swagger: '2.0' +info: + description: Mergin API to synchronize your GIS data. 
+ version: '0.6' + title: Mergin API +basePath: /v1 +tags: + - name: project + description: Mergin project +schemes: + - https + - http + +definitions: + Project: + type: object + required: + - name + properties: + name: + type: string + example: mergin + created: + type: string + format: date-time + example: '2018-11-30T08:47:58.636074Z' + FileInfo: + type: object + required: + - path + - size + - checksum + properties: + path: + type: string + example: media/favicon.ico + checksum: + description: sha1 hash + type: string + example: '9adb76bf81a34880209040ffe5ee262a090b62ab' + # will be removed in future + mtime: + type: string + format: date-time + example: '2018-11-30T08:47:58.636074Z' + size: + type: integer + format: int64 + example: 1024 + HistoryFileInfo: + type: object + properties: + path: + type: string + example: data/survey.gpkg + checksum: + description: sha1 hash + type: string + example: '9adb76bf81a34880209040ffe5ee262a090b62ab' + mtime: + type: string + format: date-time + example: '2019-01-30T08:47:58.636074Z' + size: + type: integer + format: int64 + example: 102450086 + history: + type: object + additionalProperties: + type: object + $ref: '#/definitions/UpdateFileInfo' + UploadFileInfo: + allOf: + - $ref: '#/definitions/FileInfo' + - type: object + properties: + chunks: + type: array + items: + type: string + example: "d17a60eb-6581-431c-adfc-3451231455bb" + UpdateFileInfo: + allOf: + - $ref: '#/definitions/UploadFileInfo' + - type: object + properties: + diff: + type: object + required: + - path + - checksum + properties: + path: + type: string + description: unique diff filename + example: 'survey.gpkg-diff-15eqn2q' + checksum: + type: string + example: '45dfdfbf81a34asdf209040ffe5fasdf2a090bfa' + size: + type: integer + example: 512 + ProjectListItem: + allOf: + - $ref: '#/definitions/Project' + - type: object + properties: + name: + type: string + example: mergin + namespace: + type: string + example: mergin + creator: + type: integer + example: 1 + description: project creator ID + disk_usage: + type: integer + example: 25324373 + description: project size in bytes + permissions: + type: object + properties: + delete: + type: boolean + example: false + update: + type: boolean + example: false + upload: + type: boolean + example: true + tags: + type: array + items: + $ref: '#/definitions/MerginTag' + updated: + type: string + format: date-time + example: '2018-11-30T08:47:58.636074Z' + description: last modified + version: + type: string + example: 'v2' + description: last project version + uploads: + type: array + items: + type: string + example: "669b838e-a30b-4338-b2b6-3da144742a82" + description: uuid for ongoing upload + access: + $ref: '#/definitions/Access' + ProjectDetail: + allOf: + - $ref: '#/definitions/ProjectListItem' + - type: object + properties: + files: + type: array + items: + allOf: + - $ref: '#/definitions/FileInfo' + - $ref: '#/definitions/HistoryFileInfo' + MerginTag: + type: string + enum: &MERGIN_TAG + - valid_qgis + - mappin_use + - input_use + PaginatedProjects: + type: object + properties: + projects: + type: array + items: + $ref: '#/definitions/ProjectListItem' + count: + type: integer + Access: + type: object + properties: + owners: + type: array + items: + type: integer + example: [] + writers: + type: array + items: + type: integer + example: [] + readers: + type: array + items: + type: integer + ownersnames: + type: array + items: + type: string + example: [] + writersnames: + type: array + items: + type: string + example: [] + 
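+      # annotation: owners/writers/readers hold numeric user ids, while the
+      # parallel *names lists carry the corresponding usernames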
readersnames: + type: array + items: + type: string + example: [] + public: + type: boolean + example: true + ProjectVersion: + type: object + properties: + name: + type: string + example: 'v_1' + author: + type: string + example: 'mergin' + created: + type: string + format: date-time + example: '2018-11-30T08:47:58.636074Z' + changes: + type: object + properties: + added: + type: array + items: + type: string + example: media/favicon.ico + removed: + type: array + items: + type: string + example: [] + updated: + type: array + items: + type: string + example: [] + renamed: + type: array + items: + type: string + example: [] + project: + type: object + properties: + name: + type: string + example: 'test' + namespace: + type: string + example: 'mergin' + user_agent: + type: string + example: 'Python-client/0.5.3 Plugin/2020.5.2 QGIS/3.14.0 (Linux/Ubuntu)' + changesets: + type: object + additionalProperties: + type: object + properties: + summary: + type: object + size: + type: integer + UserDetail: + type: object + properties: + id: + type: integer + format: int64 + example: 1 + username: + type: string + example: 'johnny' + disk_usage: + type: integer + format: int64 + example: 64052 + storage_limit: + type: integer + format: int64 + example: 104857600 + + LoginResponse: + allOf: + - $ref: '#/definitions/UserDetail' + - type: object + properties: + token: + type: string + example: '.eJyrVkrNTczMUbJSyk0sSk7NccgpLSlKTM7PKy7NKcnMS9dLztcrzVbSUSotTi2Kz0xRsjKCsPMSc1PhupRqAbKNGXw.XMvndw.XeJ1F7ch2zQvNVEz_zvIPgj4iWY' + expire: + type: string + format: date-time + example: '2019-05-04T14:21:56.695035Z' + GeodiffChangeset: + type: object + properties: + changes: + type: array + items: + type: object + properties: + column: + type: integer + new: + type: string + old: + type: string + example: + column: 1 + new: 'Point (-0.5031055901 0.1639751553)' + old: 'null' + table: + type: string + example: trees + type: + type: string + example: 'insert' +paths: + /project/paginated: + get: + tags: + - project + summary: List mergin projects. + description: Returns limited list of projects, optionally filtered by tags, search query, username. + operationId: get_paginated_projects + produces: + - application/json + parameters: + - name: page + in: query + description: page number + required: true + type: integer + format: int64 + - name: order_params + in: query + description: sorting fields e.g. name_asc,updated_desc + required: false + type: string + - name: order_by + in: query + description: order by field + required: false + type: string + - name: descending + in: query + description: order of sorting + required: false + type: boolean + - name: per_page + in: query + description: number of results per page + required: true + type: integer + maximum: 100 + format: int64 + - name: namespace + in: query + description: filter projects by namespace (LIKE match) + required: false + type: string + - name: only_namespace + in: query + description: Filter by exact namespace match, in contrast to the namespace parameter which performs a LIKE search + required: false + type: string + - name: name + in: query + description: filter projects by name or namespace (LIKE match) + required: false + type: string + - name: user + in: query + description: Username for 'flag' filter. If not provided, it means user executing request.
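+      # Example (illustration, not part of the spec): GET /v1/project/paginated?page=1&per_page=50&flag=created
+      # returns {"projects": [...], "count": N} as described by PaginatedProjects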
+ required: false + type: string + - name: last_updated_in + in: query + description: for filter projects by days from last update + required: false + type: integer + - name: flag + in: query + description: Predefined filter flag. + required: false + type: string + enum: + - created + - shared + - name: as_admin + in: query + description: User access as admin + required: false + type: boolean + - name: public + in: query + description: should return any public project + required: false + type: boolean + - name: only_public + in: query + description: should return only public projects + required: false + type: boolean + responses: + '200': + description: Success. + schema: + type: object + $ref: '#/definitions/PaginatedProjects' + '400': + $ref: '#/responses/BadStatusResp' + '401': + $ref: '#/responses/UnauthorizedError' + '404': + $ref: '#/responses/NotFoundResp' + x-swagger-router-controller: src.controllers.project_controller + /project: + get: + tags: + - project + summary: List mergin projects. + description: Returns limited list of projects, optionally filtered by tags, search query, username. + operationId: get_projects + produces: + - application/json + parameters: + - name: tags + in: query + description: Filter by mergin tags. + required: false + type: array + items: + type: string + enum: *MERGIN_TAG + - name: q + in: query + description: Search query string. + required: false + type: string + - name: user + in: query + description: Username for 'flag' filter. If not provided, it means user executing request. + required: false + type: string + - name: flag + in: query + description: Predefined filter flag. + required: false + type: string + enum: + - created + - shared + - name: limit + in: query + description: Maximum number of returned projects. + required: false + type: integer + minimum: 1 + responses: + '200': + description: Success. + schema: + type: array + items: + $ref: '#/definitions/ProjectListItem' + '400': + $ref: '#/responses/BadStatusResp' + '401': + $ref: '#/responses/UnauthorizedError' + '404': + $ref: '#/responses/NotFoundResp' + x-swagger-router-controller: src.controllers.project_controller + '/project/{namespace}': + post: + tags: + - project + summary: Add a new mergin project. + description: Add new project to database and create empty project directory. Project name is project dir at the same time. + operationId: add_project + consumes: + - application/json + produces: + - application/json + parameters: + - name: namespace + in: path + description: Namespace for project to look into. + required: true + type: string + - in: body + name: project + description: Project object that needs to be added to the database. + required: true + schema: + type: object + required: + - name + properties: + name: + type: string + example: mergin + public: + type: boolean + example: true + template: + type: string + example: Template + responses: + '200': + $ref: '#/responses/Success' + '401': + $ref: '#/responses/UnauthorizedError' + '405': + description: Invalid input + '422': + $ref: '#/responses/InvalidDataResp' + x-swagger-router-controller: src.controllers.project_controller + '/project/{namespace}/{project_name}': + get: + tags: + - project + summary: Find project by name. + description: Returns a single project of specified version with details about files including history for versioned files (diffs) if needed. + operationId: get_project + produces: + - application/json + parameters: + - name: project_name + in: path + description: Name of project to return. 
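+      # Example (illustration): GET /v1/project/mergin/test?since=v3 returns
+      # ProjectDetail including per-file diff history accumulated since version v3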
+ required: true + type: string + - name: namespace + in: path + description: Namespace for project to look into. + required: true + type: string + - name: since + in: query + description: Version to look up diff files history from. + required: false + type: string + - name: version + in: query + description: Project version. + required: false + type: string + responses: + '200': + description: Success. + schema: + $ref: '#/definitions/ProjectDetail' + '400': + $ref: '#/responses/BadStatusResp' + '401': + $ref: '#/responses/UnauthorizedError' + '404': + $ref: '#/responses/NotFoundResp' + x-swagger-router-controller: src.controllers.project_controller + put: + tags: + - project + summary: Update an existing project. + description: Updates 'public' flag and access list for project. + operationId: update_project + consumes: + - application/json + produces: + - application/json + parameters: + - name: project_name + in: path + description: Name of project that need to be updated. + required: true + type: string + - name: namespace + in: path + description: Namespace for project to look into. + required: true + type: string + - in: body + name: data + description: Data to be updated. + required: true + schema: + type: object + properties: + access: + type: object + $ref: '#/definitions/Access' + responses: + '200': + $ref: '#/responses/Success' + '400': + $ref: '#/responses/BadStatusResp' + '403': + $ref: '#/responses/Forbidden' + '404': + $ref: '#/responses/NotFoundResp' + '405': + description: Validation exception. + x-swagger-router-controller: src.controllers.project_controller + delete: + tags: + - project + summary: Delete a project. + description: Remove project from database and project directory with all files. + operationId: delete_project + produces: + - application/json + parameters: + - name: project_name + in: path + description: Name of project to delete. + required: true + type: string + - name: namespace + in: path + description: Namespace for project to look into. + required: true + type: string + responses: + '200': + $ref: '#/responses/Success' + '400': + $ref: '#/responses/BadStatusResp' + '401': + $ref: '#/responses/UnauthorizedError' + '404': + $ref: '#/responses/NotFoundResp' + x-swagger-router-controller: src.controllers.project_controller + '/project/by_names': + post: + tags: + - project + summary: Find projects by name. + description: Returns list of requested projects + operationId: get_projects_by_names + produces: + - application/json + parameters: + - in: body + name: data + description: List of requested projects. + required: false + schema: + type: object + properties: + projects: + type: array + items: + type: string + example: + projects: [mergin_namespace/project_name] + responses: + '200': + description: Success. + schema: + type: object + additionalProperties: + type: object + $ref: '#/definitions/ProjectListItem' + '400': + $ref: '#/responses/BadStatusResp' + '404': + $ref: '#/responses/NotFoundResp' + x-swagger-router-controller: src.controllers.project_controller + '/project/by_uuids': + get: + tags: + - project + summary: Find projects by ids. + description: Returns list of requested projects + operationId: get_projects_by_uuids + produces: + - application/json + parameters: + - in: query + name: uuids + description: List of requested projects uuids. + required: true + type: string + responses: + '200': + description: Success. 
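+          # annotation: like by_names above, the response maps each requested
+          # identifier to a ProjectListItem, e.g. {"mergin/test": {...}} (illustrative key)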
+ schema: + type: object + additionalProperties: + type: object + $ref: '#/definitions/ProjectListItem' + '400': + $ref: '#/responses/BadStatusResp' + x-swagger-router-controller: src.controllers.project_controller + '/project/version/{namespace}/{project_name}': + get: + tags: + - project + summary: Get versions (history) of project. + description: Returns a list of project versions with changes information. + operationId: get_project_versions + produces: + - application/json + parameters: + - name: project_name + in: path + description: Name of project to return versions for. + required: true + type: string + - name: namespace + in: path + description: Namespace for project to look into. + required: true + type: string + - name: version_id + in: query + required: false + type: string + responses: + '200': + description: Success. + schema: + type: array + items: + $ref: '#/definitions/ProjectVersion' + '400': + $ref: '#/responses/BadStatusResp' + '403': + $ref: '#/responses/Forbidden' + '404': + $ref: '#/responses/NotFoundResp' + x-swagger-router-controller: src.controllers.project_controller + '/project/download/{namespace}/{project_name}': + get: + tags: + - project + summary: Download dir for single project. + description: Download whole project folder as zip file or produces stream to parse. + operationId: download_project + produces: + - application/zip + - multipart/form-data + parameters: + - name: project_name + in: path + description: Name of project to download. + required: true + type: string + - name: namespace + in: path + description: Namespace for project to look into. + required: true + type: string + - name: format + in: query + description: Output format (only zip available). + required: false + type: string + enum: + - zip + - name: version + in: query + description: Version tag. + required: false + type: string + responses: + '200': + $ref: '#/responses/Success' + '400': + $ref: '#/responses/BadStatusResp' + '401': + $ref: '#/responses/UnauthorizedError' + '404': + $ref: '#/responses/NotFoundResp' + '422': + $ref: '#/responses/InvalidDataResp' + x-swagger-router-controller: src.controllers.project_controller + '/project/raw/{namespace}/{project_name}': + get: + tags: + - project + summary: Download project file + description: Download individual file or it's diff file from project. + operationId: download_project_file + produces: + - application/octet-stream + parameters: + - name: project_name + in: path + description: Project name. + required: true + type: string + - name: namespace + in: path + description: Namespace for project to look into. + required: true + type: string + - name: file + in: query + description: Path to file. + required: true + type: string + - name: version + in: query + description: Version tag. + required: false + type: string + - name: diff + in: query + description: Ask for diff file instead of full one. + required: false + type: boolean + responses: + '200': + $ref: '#/responses/Success' + '400': + $ref: '#/responses/BadStatusResp' + '401': + $ref: '#/responses/UnauthorizedError' + '404': + $ref: '#/responses/NotFoundResp' + '422': + $ref: '#/responses/InvalidDataResp' + x-swagger-router-controller: src.controllers.project_controller + /project/push/{namespace}/{project_name}: + post: + tags: + - project + summary: Synchronize project data. + description: Apply changes in project if no uploads required. Creates upload transaction for added/modified files. 
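+      # Sync flow sketch (inferred from the endpoints below, not normative): the client
+      # POSTs version + changes; when file content must be uploaded the server replies
+      # with a transaction id, chunks are sent, then push/finish/{transaction_id}
+      # commits the new version (push/cancel/{transaction_id} aborts it)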
+ operationId: project_push + consumes: + - application/json + produces: + - application/json + parameters: + - name: namespace + in: path + description: Namespace for project to look into. + required: true + type: string + - name: project_name + in: path + description: Project name. + required: true + type: string + - in: body + name: data + required: true + description: Description of project changes. + schema: + type: object + required: + - version + - changes + properties: + version: + type: string + example: v1 + changes: + type: object + required: + - added + - updated + - renamed + - removed + properties: + added: + type: array + items: + $ref: '#/definitions/UploadFileInfo' + updated: + type: array + items: + $ref: '#/definitions/UpdateFileInfo' + renamed: + type: array + items: + $ref: '#/definitions/FileInfo' + removed: + type: array + items: + $ref: '#/definitions/FileInfo' + responses: + '200': + $ref: '#/responses/Success' + '400': + $ref: '#/responses/BadStatusResp' + '401': + $ref: '#/responses/UnauthorizedError' + '404': + $ref: '#/responses/NotFoundResp' + x-swagger-router-controller: src.controllers.project_controller + /project/clone/{namespace}/{project_name}: + post: + tags: + - project + summary: Clone project. + description: Clone project to another namespace. Only recent version is copied over and history is lost. Destination namespace and project name are optionally set in query parameters othewise request user is used with the same project name as cloned project. + operationId: clone_project + consumes: + - application/json + parameters: + - name: namespace + in: path + description: Namespace for project to look into. + required: true + type: string + - name: project_name + in: path + description: Project name. + required: true + type: string + - in: body + name: destination + description: Destination (namespace and project name) where project should be cloned. + required: false + schema: + type: object + properties: + namespace: + type: string + example: mergin + project: + type: string + example: mergin + responses: + '200': + $ref: '#/responses/Success' + '400': + $ref: '#/responses/BadStatusResp' + '401': + $ref: '#/responses/UnauthorizedError' + '403': + $ref: '#/responses/Forbidden' + '404': + $ref: '#/responses/NotFoundResp' + x-swagger-router-controller: src.controllers.project_controller + + #not implemented in connexion, going directly to flask endpoint +# /project/push/chunk/{transaction_id}/{chunk_id}: +# post: +# tags: +# - project +# summary: Upload file chunk as defined in upload transaction. +# operationId: chunk_upload +# consumes: +# - application/octet-stream +# produces: +# - application/json +# parameters: +# - name: transaction_id +# in: path +# description: Transaction id. +# required: true +# type: string +# - name: chunk_id +# in: path +# description: Chunk id. +# required: true +# type: string +# responses: +# '200': +# $ref: '#/responses/Success' +# '400': +# $ref: '#/responses/BadStatusResp' +# '401': +# $ref: '#/responses/UnauthorizedError' +# '403': +# $ref: '#/responses/Forbidden' +# '404': +# $ref: '#/responses/NotFoundResp' +# x-swagger-router-controller: src.controllers.project_controller + + /project/push/finish/{transaction_id}: + post: + tags: + - project + summary: Finalize project data upload. + operationId: push_finish + parameters: + - name: transaction_id + in: path + description: Transaction id. 
+ required: true + type: string + responses: + '200': + $ref: '#/responses/Success' + '400': + $ref: '#/responses/BadStatusResp' + '401': + $ref: '#/responses/UnauthorizedError' + '403': + $ref: '#/responses/Forbidden' + '404': + $ref: '#/responses/NotFoundResp' + x-swagger-router-controller: src.controllers.project_controller + /project/push/cancel/{transaction_id}: + post: + tags: + - project + summary: Cancel upload transaction. + operationId: push_cancel + parameters: + - name: transaction_id + in: path + description: Transaction id. + required: true + type: string + responses: + '200': + $ref: '#/responses/Success' + '400': + $ref: '#/responses/BadStatusResp' + '401': + $ref: '#/responses/UnauthorizedError' + '403': + $ref: '#/responses/Forbidden' + '404': + $ref: '#/responses/NotFoundResp' + x-swagger-router-controller: src.controllers.project_controller + /project/transfer/{namespace}: + get: + tags: + - Project transfers for namespace + summary: List of incoming/outcoming project transfers for namespace. + operationId: get_project_transfers + produces: + - application/json + parameters: + - name: namespace + in: path + description: mergin namespace to filter transfers for + required: true + type: string + responses: + '200': + $ref: '#/responses/Success' + '400': + $ref: '#/responses/BadStatusResp' + '401': + $ref: '#/responses/UnauthorizedError' + '403': + $ref: '#/responses/Forbidden' + x-swagger-router-controller: src.controllers.project_transfer_controller + /project/transfer/{namespace}/{project_name}: + post: + tags: + - project transfer + summary: Transfer project. + description: Transfer project to another namespace. + operationId: request_transfer + consumes: + - application/json + parameters: + - name: namespace + in: path + description: Namespace for project to look into. + required: true + type: string + - name: project_name + in: path + description: Project name. + required: true + type: string + - in: body + name: data + description: Request payload - destination namespace. + required: true + schema: + type: object + properties: + namespace: + type: string + example: mergin + responses: + '200': + $ref: '#/responses/Success' + '400': + $ref: '#/responses/BadStatusResp' + '401': + $ref: '#/responses/UnauthorizedError' + '403': + $ref: '#/responses/Forbidden' + '404': + $ref: '#/responses/NotFoundResp' + x-swagger-router-controller: src.controllers.project_transfer_controller + /project/transfer/{id}: + post: + tags: + - project transfer + summary: Execute transfer project. + description: Execute transfer project + operationId: execute_transfer_project + consumes: + - application/json + parameters: + - name: id + in: path + description: ID of transfer project data. + required: true + type: string + - in: body + name: data + description: Destination project name and whether to keep permissions. + required: false + schema: + type: object + properties: + name: + type: string + example: mergin + transfer_permission: + type: boolean + example: True + responses: + '200': + $ref: '#/responses/Success' + '400': + $ref: '#/responses/BadStatusResp' + '401': + $ref: '#/responses/UnauthorizedError' + '403': + $ref: '#/responses/Forbidden' + '404': + $ref: '#/responses/NotFoundResp' + x-swagger-router-controller: src.controllers.project_transfer_controller + delete: + tags: + - project transfer + summary: Delete transfer project. + description: Remove transfer project from database. 
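+      # Transfer lifecycle (annotation): POST /project/transfer/{namespace}/{project_name}
+      # creates a pending request, POST /project/transfer/{id} executes it, and the
+      # DELETE below declines or cancels it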
+ operationId: delete_transfer_project + produces: + - application/json + parameters: + - name: id + in: path + description: ID of transfer project data. + required: true + type: string + responses: + '200': + $ref: '#/responses/Success' + '400': + $ref: '#/responses/BadStatusResp' + '401': + $ref: '#/responses/UnauthorizedError' + '404': + $ref: '#/responses/NotFoundResp' + x-swagger-router-controller: src.controllers.project_transfer_controller + + /user/{username}: + get: + deprecated: true + tags: + - user + summary: User profile info. + description: Returns user profile info. + operationId: get_user + parameters: + - name: username + in: path + description: Username. + required: true + type: string + produces: + - application/json + responses: + '200': + description: Success. + '400': + $ref: '#/responses/BadStatusResp' + '401': + $ref: '#/responses/UnauthorizedError' + '404': + $ref: '#/responses/NotFoundResp' + x-swagger-router-controller: src.controllers.user_controller + /auth/login: + post: + deprecated: true + tags: + - login + summary: Login user. + description: Returns session token, expiration time and user profile info. + operationId: login + parameters: + - name: credentials + in: body + description: User credentials. + required: true + schema: + type: object + required: + - login + - password + properties: + login: + type: string + example: johnny + password: + type: string + example: pass123 + consumes: + - application/json + produces: + - application/json + responses: + '200': + description: Success. + schema: + $ref: '#/definitions/LoginResponse' + '400': + $ref: '#/responses/BadStatusResp' + '401': + $ref: '#/responses/UnauthorizedError' + '404': + $ref: '#/responses/NotFoundResp' + '415': + $ref: '#/responses/UnsupportedMediaType' + x-swagger-router-controller: src.controllers.user_controller + /resource/history/{namespace}/{project_name}: + get: + tags: + - resource + summary: History of project resource (file) + description: Lookup in project versions to get history of changes for particular file + operationId: get_resource_history + produces: + - application/json + parameters: + - name: project_name + in: path + description: Project name. + required: true + type: string + - name: namespace + in: path + description: Namespace project belong to. + required: true + type: string + - name: path + in: query + description: Path to file in project. + required: true + type: string + responses: + '200': + $ref: '#/responses/Success' + #$ref: '#/definitions/HistoryFileInfo' need upgrade to OpenAPI v3? + '400': + $ref: '#/responses/BadStatusResp' + '403': + $ref: '#/responses/Forbidden' + '404': + $ref: '#/responses/NotFoundResp' + '422': + $ref: '#/responses/InvalidDataResp' + x-swagger-router-controller: src.controllers.project_controller + /resource/changesets/{namespace}/{project_name}/{version_id}: + get: + tags: + - resource + summary: Changeset of the resource (file) + description: Calculate geodiff changeset for particular file and particular project version + operationId: get_resource_changeset + produces: + - application/json + parameters: + - name: project_name + in: path + description: Project name. + required: true + type: string + - name: namespace + in: path + description: Namespace project belong to. + required: true + type: string + - name: version_id + in: path + description: Version id of the file. + required: true + type: string + - name: path + in: query + description: Path to file in project. 
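+      # Example (illustration): GET /v1/resource/changesets/mergin/test/v9?path=test.gpkg
+      # would return the GeodiffChangeset entries for that file at that version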
+ required: true + type: string + responses: + '200': + description: A list of geodiff changesets for versioned file + schema: + type: array + items: + $ref: '#/definitions/GeodiffChangeset' + '400': + $ref: '#/responses/BadStatusResp' + '403': + $ref: '#/responses/Forbidden' + '404': + $ref: '#/responses/NotFoundResp' + '422': + $ref: '#/responses/InvalidDataResp' + x-swagger-router-controller: src.controllers.project_controller + '/namespaces/{namespace_type}': + get: + tags: + - namespace + summary: Find namespace by query. + description: Returns list of namespaces. + operationId: search_namespace + produces: + - application/json + parameters: + - name: namespace_type + in: path + description: Type of namespace (organisation or user) + required: true + type: string + enum: + - user + - organisation + - name: q + in: query + description: Search query string. + required: false + type: string + responses: + '200': + description: Success. + schema: + $ref: '#/responses/Success' + '400': + $ref: '#/responses/BadStatusResp' + '401': + $ref: '#/responses/UnauthorizedError' + '404': + $ref: '#/responses/NotFoundResp' + '422': + $ref: '#/responses/InvalidDataResp' + x-swagger-router-controller: src.controllers.namespace_controller +responses: + UnauthorizedError: + description: Authentication information is missing or invalid. + headers: + WWW_Authenticate: + type: string + NotFoundResp: + description: Project not found. + BadStatusResp: + description: Invalid request. + InvalidDataResp: + description: Invalid/unprocessable data. + Success: + description: Success. + Forbidden: + description: Access is denied. + UnsupportedMediaType: + description: Payload format is in an unsupported format. diff --git a/server/test/__init__.py b/server/test/__init__.py new file mode 100644 index 00000000..c5a7ce94 --- /dev/null +++ b/server/test/__init__.py @@ -0,0 +1,14 @@ +# Copyright (C) 2018 Lutra Consulting Limited. All rights reserved. +# Do not distribute without the express permission of the author. + +import os +import tempfile + +# constants +test_namespace = 'mergin' +test_project = 'test' +test_project_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'test_projects', test_project) +json_headers = {'Content-Type': 'application/json', 'Accept': 'application/json'} +DEFAULT_USER = ('mergin', 'ilovemergin') # username, password - is a super user +TMP_DIR = tempfile.gettempdir() +TEST_ORG = "mergin.org" diff --git a/server/test/conftest.py b/server/test/conftest.py new file mode 100644 index 00000000..48d5a07d --- /dev/null +++ b/server/test/conftest.py @@ -0,0 +1 @@ +pytest_plugins = ["test.fixtures"] diff --git a/server/test/fixtures.py b/server/test/fixtures.py new file mode 100644 index 00000000..652ff6bb --- /dev/null +++ b/server/test/fixtures.py @@ -0,0 +1,189 @@ +import os +import sys +from shutil import copy, move +from sqlalchemy.orm.attributes import flag_modified +from pygeodiff import GeoDiff +import pytest + + +from src import db, create_app +from src.db_events import remove_events +from src.models.db_models import (Project, Upload, ProjectVersion, ProjectAccess, ProjectTransfer, + Namespace, Account, RemovedProject) +from src.mergin_utils import generate_checksum, is_versioned_file, resolve_tags +from src.auth.models import User, UserProfile +from src.organisation.models import Organisation + +from . 
import test_project, test_namespace, test_project_dir, TMP_DIR, TEST_ORG, DEFAULT_USER +from .test_project_controller import _file_info, create_diff_meta +from .utils import login_as_admin, add_user, initialize, cleanup + +thisdir = os.path.dirname(os.path.realpath(__file__)) +sys.path.append(os.path.join(thisdir, os.pardir)) + + +@pytest.fixture(scope='function') +def flask_app(request): + """ Flask app with fresh db and initialized empty tables """ + application = create_app() + application.config['TEST_DIR'] = os.path.join(thisdir, 'test_projects') + application.config['SERVER_NAME'] = 'localhost.localdomain' + app_context = application.app_context() + app_context.push() + + with app_context: + db.create_all() + + def teardown(): + # clean up db + db.session.remove() + db.drop_all() + db.engine.dispose() + + app_context.pop() + # detach db hooks + remove_events() + + request.addfinalizer(teardown) + return application + + +@pytest.fixture(scope='function') +def app(flask_app, request): + """ Flask app with testing objects created """ + with flask_app.app_context(): + initialize() + + def teardown(): + # remove all project files + with flask_app.app_context(): + dirs = [p.storage.project_dir for p in Project.query.all()] + print(dirs) + cleanup(flask_app.test_client(), dirs) + + request.addfinalizer(teardown) + return flask_app + + +@pytest.fixture(scope='function') +def client(app): + """ Flask app test client with already logged-in superuser """ + client = app.test_client() + login_as_admin(client) + return client + + +@pytest.fixture(scope='function') +def diff_project(app): + """ Modify testing project to contain some history with diffs. Geodiff lib is used to handle changes. + Files are copied to location where server would expect it. Corresponding changes metadata and project versions + are created and stored in db. 
+ + Following changes are applied to base.gpkg in test project (v1): + v2: removed file -> previous version is lost (unless requested explicitly) + v3: uploaded again + v4: patched with changes from inserted_1_A.gpkg (1 inserted feature) + v5: replaced with original file base.gpkg (mimic of force update) + v6: patched with changes from modified_1_geom.gpkg (translated feature) + v7: renamed to test.gpkg + v8: nothing happened (although officially forbidden here it mimics no changes to file of interest) + v9: test.gpkg is patched with changes from inserted_1_B.gpkg (1 inserted feature), final state is modified_1_geom.gpkg + inserted_1_B.gpkg + v10: nothing happened, just to ensure last diff is not last version of project file + """ + geodiff = GeoDiff() + project = Project.query.filter_by(name=test_project, namespace=test_namespace).first() + update_meta = _file_info(test_project_dir, 'base.gpkg') + diff_meta_A = create_diff_meta('base.gpkg', 'inserted_1_A.gpkg', test_project_dir) + diff_meta_mod = create_diff_meta('base.gpkg', 'modified_1_geom.gpkg', test_project_dir) + rename_meta = { + **_file_info(test_project_dir, 'base.gpkg'), + 'new_path': 'test.gpkg', + } + patch = os.path.join(TMP_DIR, 'patch') + basefile = os.path.join(test_project_dir, 'base.gpkg') + copy(basefile, patch) + geodiff.apply_changeset(patch, os.path.join(TMP_DIR, diff_meta_mod['diff']['path'])) + diff_meta_B = create_diff_meta(patch, 'inserted_1_B.gpkg', test_project_dir) + diff_meta_B['path'] = 'test.gpkg' + + # construct project versions + changes = [ + {'added': [], 'removed': [_file_info(test_project_dir, 'base.gpkg')], 'renamed': [], 'updated': []}, + {'added': [_file_info(test_project_dir, 'base.gpkg')], 'removed': [], 'renamed': [], 'updated': []}, + {'added': [], 'removed': [], 'renamed': [], 'updated': [diff_meta_A]}, + {'added': [], 'removed': [], 'renamed': [], 'updated': [update_meta]}, # force update with full file + {'added': [], 'removed': [], 'renamed': [], 'updated': [diff_meta_mod]}, + {'added': [], 'removed': [], 'renamed': [rename_meta], 'updated': []}, # file renamed, will be tracked with different name + {'added': [], 'removed': [], 'renamed': [], 'updated': []}, + {'added': [], 'removed': [], 'renamed': [], 'updated': [diff_meta_B]}, + {'added': [], 'removed': [], 'renamed': [], 'updated': []}, + ] + version_files = project.files + for i, change in enumerate(changes): + ver = 'v{}'.format(i + 2) + if change['added']: + meta = change['added'][0] + meta['location'] = os.path.join(ver, meta['path']) + new_file = os.path.join(project.storage.project_dir, meta['location']) + os.makedirs(os.path.dirname(new_file), exist_ok=True) + copy(os.path.join(test_project_dir, meta['path']), new_file) + version_files.append(meta) + elif change['updated']: + meta = change['updated'][0] + f_updated = next(f for f in version_files if f['path'] == meta['path']) + new_location = os.path.join(ver, f_updated['path']) + patchedfile = os.path.join(project.storage.project_dir, new_location) + os.makedirs(os.path.dirname(patchedfile), exist_ok=True) + if 'diff' in meta.keys(): + basefile = os.path.join(project.storage.project_dir, f_updated['location']) + changefile = os.path.join(TMP_DIR, meta['diff']['path']) + copy(basefile, patchedfile) + geodiff.apply_changeset(patchedfile, changefile) + meta['diff']['location'] = os.path.join(ver, meta['diff']['path']) + move(changefile, os.path.join(project.storage.project_dir, meta['diff']['location'])) + else: + copy(os.path.join(test_project_dir, f_updated['path']), patchedfile) 
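+                # drop any stale 'diff' metadata from the previous version before merging
+                # in the new meta below: a force update with a full file must not keep the old diff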
+ f_updated.pop('diff', None) + meta['location'] = new_location + f_updated.update(meta) + elif change['renamed']: + f_renamed = next(f for f in version_files if f['path'] == change['renamed'][0]['path']) + f_renamed['path'] = change['renamed'][0]['new_path'] + elif change['removed']: + f_removed = next(f for f in version_files if f['path'] == change['removed'][0]['path']) + version_files.remove(f_removed) + else: + pass + + pv = ProjectVersion(project, ver, project.creator.username, change, version_files, '127.0.0.1') + db.session.add(pv) + db.session.commit() + version_files = pv.files + assert pv.project_size == sum(file['size'] for file in version_files) + + project.files = version_files + project.disk_usage = sum(file['size'] for file in project.files) + project.tags = resolve_tags(version_files) + project.latest_version = project.versions[0].name + db.session.add(project) + flag_modified(project, "files") + db.session.commit() + return project + + +@pytest.fixture(scope='function') +def test_organisation(client): + """ Test organisation """ + user = User.query.filter_by(username=DEFAULT_USER[0]).first() + org = Organisation(name=TEST_ORG, creator_id=user.id) + org_owner = add_user("owner", "owner") + org.owners.append(org_owner.id) + org_admin = add_user("admin", "admin") + org.admins.extend([org_owner.id, org_admin.id]) + db.session.add(org) + db.session.commit() + # create a free organisation -> assign zero storage + account = Account.query.filter_by(type="organisation", owner_id=org.id).first() + account.namespace.storage = 0 + db.session.commit() + return org diff --git a/server/test/test_auth.py b/server/test/test_auth.py new file mode 100644 index 00000000..41a83c14 --- /dev/null +++ b/server/test/test_auth.py @@ -0,0 +1,522 @@ +# Copyright (C) 2018 Lutra Consulting Limited. All rights reserved. +# Do not distribute without the express permission of the author. +from datetime import datetime, timedelta +import pytest +import json +from flask import url_for +from itsdangerous import URLSafeTimedSerializer +from sqlalchemy import desc +from sqlalchemy.orm.attributes import flag_modified +from unittest.mock import patch + +from src.auth.models import User, UserProfile, LoginHistory +from src import db +from src.models.db_models import Namespace, Account, Project +from . 
import json_headers +from .utils import add_user, login_as_admin, login + + +@pytest.fixture(scope='function') +def client(app): + client = app.test_client() + return client + + +# login test: success, success with email login, invalid password, missing password, wrong headers +test_login_data = [ + ({'login': 'mergin', 'password': 'ilovemergin'}, json_headers, 200), + ({'login': 'mergin@mergin.com', 'password': 'ilovemergin'}, json_headers, 200), + ({'login': 'mergin', 'password': 'ilovemergi'}, json_headers, 401), + ({'login': 'mergin'}, json_headers, 401), + ({'login': 'mergin', 'password': 'ilovemergin'}, {}, 401), +] + + +@pytest.mark.parametrize("data,headers,expected", test_login_data) +def test_login(client, data, headers, expected): + resp = client.post(url_for('auth.login'), data=json.dumps(data), headers=headers) + assert resp.status_code == expected + if expected == 200: + login_history = LoginHistory.query.filter_by(username='mergin').order_by(desc(LoginHistory.timestamp)).first() + assert login_history + + +def test_logout(client): + login_as_admin(client) + resp = client.get(url_for('auth.logout')) + assert resp.status_code == 200 + + +# user registration test +test_user_reg_data = [ + ('test', 'test@test.com', 200), # success + ('TesTUser', 'test@test.com', 200), # test with upper case, but user is not exist + ('TesTUser2', 'test2@test.com', 200), # test with upper case, but user is not exist + ('bob', 'test@test.com', 400), # invalid (short) username + ('test', 'test.com', 400), # invalid email + ('mergin', 'test@test.com', 400), # existing user + ('MerGin', 'test@test.com', 400), # test with upper case but mergin already exist + (' mergin ', 'test@test.com', 400), # test with blank spaces, but mergin user already exists + ('XmerginX', ' test@test.com ', 200), # test with blank spaces, whitespace to be removed + ('XmerginX', ' mergin@mergin.com ', 400) # test with blank spaces, but email already exists +] + + +@pytest.mark.parametrize("username,email,expected", test_user_reg_data) +def test_user_register(client, username, email, expected): + login_as_admin(client) + url = url_for('auth.register_user') + resp = client.post(url, data=json.dumps({'username': username, 'email': email}), headers=json_headers) + assert resp.status_code == expected + if expected == 200: + user = User.query.filter_by(username=username).first() + assert user + assert user.active + assert not user.verified_email + ns = Namespace.query.filter_by(name=username).first() + assert ns + account = Account.query.filter_by(type='user', owner_id=user.id).first() + assert account + + +@patch('src.celery.send_email_async.apply_async') +def test_confirm_email(send_email_mock, app, client): + serializer = URLSafeTimedSerializer(app.config['SECRET_KEY']) + token = serializer.dumps('mergin@mergin.com', salt=app.config['SECURITY_PASSWORD_SALT']) + resp = client.get(url_for('auth.confirm_email', token=token)) + assert resp.status_code == 200 + assert b'Your email address has been verified.' in resp.data + assert not send_email_mock.called + + user = User.query.filter_by(username='mergin').first() + # test with old registered user + user.verified_email = False + user.profile.registration_date = datetime.utcnow() - timedelta(days=1) + db.session.commit() + resp = client.get(url_for('auth.confirm_email', token=token)) + assert resp.status_code == 200 + assert b'Your email address has been verified.' 
in resp.data + assert not send_email_mock.called + + # try again with freshly registered user + user.verified_email = False + user.profile.registration_date = datetime.utcnow() + db.session.add(user) + db.session.commit() + resp = client.get(url_for('auth.confirm_email', token=token)) + assert resp.status_code == 200 + assert b'Your email address has been verified.' in resp.data + assert send_email_mock.called + + # try again + resp = client.get(url_for('auth.confirm_email', token=token)) + assert resp.status_code == 200 + assert b'Your email address has been verified.' in resp.data + + # invalid token + resp = client.get(url_for('auth.confirm_email', token='token')) + assert resp.status_code == 400 + + # not-existing user + resp = client.get(url_for('auth.confirm_email', + token=serializer.dumps('test@mergin.com', salt=app.config['SECURITY_PASSWORD_SALT']))) + assert resp.status_code == 404 + + +def test_confirm_password(app, client): + serializer = URLSafeTimedSerializer(app.config['SECRET_KEY']) + token = serializer.dumps('mergin@mergin.com', salt=app.config['SECURITY_PASSWORD_SALT']) + + resp = client.get(url_for('auth.confirm_new_password', token=token)) + assert resp.status_code == 200 + assert b'Activate account' in resp.data + + form_data = {'password': 'ilovemergin#0', 'confirm': 'ilovemergin#0'} + resp = client.post(url_for('auth.confirm_new_password', token=token), data=json.dumps(form_data), headers=json_headers) + assert resp.status_code == 302 + + # invalid token + resp = client.get(url_for('auth.confirm_new_password', token='token')) + assert resp.status_code == 400 + + # not-existing user + resp = client.get(url_for('auth.confirm_new_password', + token=serializer.dumps('test@mergin.com', salt=app.config['SECURITY_PASSWORD_SALT']))) + assert resp.status_code == 404 + + # add inactive user + user = User(username='test', passwd='testuser', is_admin=True, email='test@mergin.com') + user.active = False + db.session.add(user) + db.session.commit() + resp = client.get(url_for('auth.confirm_new_password', + token=serializer.dumps('test@mergin.com', salt=app.config['SECURITY_PASSWORD_SALT']))) + assert resp.status_code == 400 + + +# reset password test: success, no email, not-existing user +test_reset_data = [ + ({'email': 'mergin@mergin.com'}, 200), + ({'email': 'Mergin@mergin.com'}, 200), # case insensitive + ({}, 404), + ({'email': 'test@mergin.com'}, 404) +] + + +@pytest.mark.parametrize("data,expected", test_reset_data) +def test_reset_password(client, data, expected): + resp = client.post(url_for('auth.password_reset'), data=json.dumps(data), headers=json_headers) + assert resp.status_code == expected + if expected == 200: + assert resp.json['success'] + + +def test_change_password(client): + username = 'user_test' + old_password = 'user_password' + new_password = 'Test#test' + + user = User(username=username, passwd=old_password, is_admin=True, email='user_test@mergin.com') + user.active = True + user.profile = UserProfile() + db.session.add(user) + db.session.commit() + + resp = client.post( + url_for('auth.login'), + data=json.dumps({'login': username, 'password': old_password}), + headers=json_headers + ) + assert resp.status_code == 200 + + # test old password incorrect + resp = client.post( + url_for('auth.change_password'), + data=json.dumps( + {'old_password': 'old_password_incorrect', 'password': new_password, 'confirm': new_password}), + headers=json_headers + ) + assert resp.status_code == 400 + + # test correct old password + resp = client.post( + 
url_for('auth.change_password'), + data=json.dumps( + {'old_password': old_password, 'password': new_password, 'confirm': new_password}), + headers=json_headers + ) + assert resp.status_code == 200 + + # test login with old password + resp = client.post( + url_for('auth.login'), + data=json.dumps({'login': username, 'password': old_password}), + headers=json_headers + ) + assert resp.status_code == 401 + + # test login with new password + resp = client.post( + url_for('auth.login'), + data=json.dumps({'login': username, 'password': new_password}), + headers=json_headers + ) + assert resp.status_code == 200 + + +def test_remove_user(client): + login_as_admin(client) + user = add_user('test', 'test') + + resp = client.delete(url_for('auth.delete_user', username=user.username)) + assert resp.status_code == 200 + assert resp.json['success'] + assert not User.query.filter_by(username='test').count() + assert not Namespace.query.filter_by(name='test').count() + + resp = client.delete(url_for('auth.delete_user', username=user.username)) + assert resp.status_code == 404 + + +def test_delete_account(client): + username = 'user_test' + password = 'user_password' + user = add_user(username, password) + ns = Namespace.query.filter_by(name=username).first() + user_id = user.id + + # share project of default user + project = Project.query.filter_by(namespace='mergin', name='test').first() + assert project + project.access.owners.append(user_id) + project.access.writers.append(user_id) + project.access.readers.append(user_id) + db.session.add(project) + flag_modified(project.access, 'owners') + flag_modified(project.access, 'writers') + flag_modified(project.access, 'readers') + db.session.commit() + assert user_id in project.access.owners + + login(client, username, password) + + # delete account + resp = client.delete(url_for('auth.delete_account')) + assert resp.status_code == 200 + assert resp.json['success'] + # check nothing is left in database + user = User.query.filter_by(username=username).first() + account = Account.query.filter_by(type='user', owner_id=user_id).first() + assert not account + assert not user + ns = Namespace.query.filter_by(name=username).first() + assert not ns + assert user_id not in project.access.owners + assert user_id not in project.access.writers + assert user_id not in project.access.readers + + # try relogin but should be not found + resp = client.post( + url_for('auth.login'), + data=json.dumps({'login': username, 'password': password}), + headers=json_headers + ) + assert resp.status_code == 401 + + +def test_self_registration(client): + resp = client.get(url_for('auth.self_register_user')) + assert resp.status_code == 405 + + form_data = {'username': 'test', 'email': 'test@test.com', 'password': 'Test#test', 'confirm': 'Test#test'} + resp = client.post(url_for('auth.self_register_user'), data=json.dumps(form_data), headers=json_headers) + assert resp.status_code == 200 + assert resp.json['username'] == 'test' + + +# self registrations: success, invalid username +test_self_registration_validation_data = [ + ({'username': 'test', 'email': 'test@test.com', 'password': 'Test#test', 'confirm': 'Test#test'}, json_headers, 200), + ({'username': 'test!@%$', 'email': 'test@test.com', 'password': 'Test#test', 'confirm': 'Test#test'}, json_headers, 400) +] + + +@pytest.mark.parametrize("data,headers,expected", test_self_registration_validation_data) +def test_self_registration_validation(client, data, headers, expected): + resp = client.post(url_for( + 
'auth.self_register_user'), data=json.dumps(data), headers=headers) + assert resp.status_code == expected + + +# login test: success, success with email login, invalid password, missing password, wrong headers +test_api_login_data = [ + ({'login': 'mergin', 'password': 'ilovemergin'}, json_headers, 200), + ({'login': 'mergin@mergin.com', 'password': 'ilovemergin'}, json_headers, 200), + ({'login': 'mergin', 'password': 'ilovemergi'}, json_headers, 401), + ({'login': 'mergin'}, json_headers, 400), + ({'login': 'mergin', 'password': 'ilovemergin'}, {}, 415), +] + + +@pytest.mark.parametrize("data,headers,expected", test_api_login_data) +def test_api_login(client, data, headers, expected): + resp = client.post('/v1/auth/login', data=json.dumps(data), headers=headers) + assert resp.status_code == expected + if expected == 200: + login_history = LoginHistory.query.filter_by(username='mergin').order_by(desc(LoginHistory.timestamp)).first() + assert login_history + + +def test_api_login_from_urllib(client): + with patch('src.auth.models.get_user_agent') as mock: + mock.return_value = "DB-sync/0.1" + resp = client.post('/v1/auth/login', data=json.dumps({'login': 'mergin', 'password': 'ilovemergin'}), headers=json_headers) + assert resp.status_code == 200 + login_history = LoginHistory.query.filter_by(username='mergin').order_by(desc(LoginHistory.timestamp)).first() + assert not login_history + + +def test_api_user_profile(client): + """ test public API endpoint to get user details """ + resp = client.get('/v1/user/mergin') + assert resp.status_code == 401 + + login_as_admin(client) + user = User.query.filter_by(username="mergin").first() + resp = client.get('/v1/user/mergin') + assert resp.status_code == 200 + assert resp.json['username'] == 'mergin' + assert resp.json['email'] == user.email + assert resp.json['disk_usage'] == 0 + assert resp.json['storage_limit'] == 104857600 + + # requesting another username still returns the profile of the logged-in user + resp = client.get('/v1/user/somebody_else') + assert resp.status_code == 200 + assert resp.json['username'] == 'mergin' + + +# test_seeing_other_profile, invalid username +test_other_user_profile_data = [ + ({'username': 'test1', 'email': 'test@test.com', 'passwd': 'Test#test', 'is_admin': True}, True), + ({'username': 'test2', 'email': 'test@test.com', 'passwd': 'Test#test', 'is_admin': False}, False) +] + + +def test_update_user(client): + login_as_admin(client) + user = User.query.filter_by(username='mergin').first() + data = {"active": False} + resp = client.post('/auth/user/{}'.format(user.username), data=json.dumps(data), headers=json_headers) + assert resp.status_code == 200 + assert not user.active + + user.is_admin = False + db.session.add(user) + db.session.commit() + resp = client.post('/auth/user/{}'.format(user.id), data=json.dumps(data), headers=json_headers) + assert resp.status_code == 403 + + +def test_update_user_profile(client): + login_as_admin(client) + user = User.query.filter_by(username='mergin').first() + user.verified_email = True + + user2 = User("test", "m@m.com", "testspass", False) + db.session.add(user2) + db.session.commit() + assert user.profile.receive_notifications + assert user.verified_email + + # try to update the profile to an email that is already taken + resp = client.post( + url_for('auth.update_user_profile'), + data=json.dumps( + { + 'email': 'm@m.com' + } + ), + headers=json_headers + ) + assert resp.status_code == 400 + assert "Email already exists" in resp.json.get("email") + + # update profile + resp = client.post( + url_for('auth.update_user_profile'), +
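# note: surrounding whitespace in the submitted fields is expected to be stripped by the server +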
data=json.dumps({ + 'first_name': ' John', + 'last_name': 'Doe ', + 'email': 'john@doe.com ' + }), + headers=json_headers + ) + assert resp.status_code == 200 + assert user.email == 'john@doe.com' + assert user.profile.first_name == 'John' + assert user.profile.last_name == 'Doe' + + # update profile + resp = client.post( + url_for('auth.update_user_profile'), + data=json.dumps( + { + 'receive_notifications': False, + 'email': 'changed_email@mergin.co.uk' + } + ), + headers=json_headers + ) + assert resp.status_code == 200 + + user = User.query.filter_by(username='mergin').first() + assert not user.profile.receive_notifications + assert not user.verified_email + assert user.email == 'changed_email@mergin.co.uk' + + +def test_search_user(client): + user = User.query.filter_by(username='mergin').first() + + resp = client.get('/auth/user/search') + assert resp.status_code == 401 + + login_as_admin(client) + resp = client.get('/auth/user/search') + assert resp.status_code == 200 + assert list(resp.json[0].keys()) == ['id', 'profile', 'username'] + + add_user('fero.mrkva', 'test') + add_user('palomrmrkva', 'test') + add_user('mrkvajozef', 'test') + resp = client.get('/auth/user/search?like=erg') + assert 'mer' in resp.json[0]['username'] + + resp = client.get('/auth/user/search?like=mrk') + assert 'palomrmrkva' in resp.json[2]['username'] + assert 'mrkvajozef' in resp.json[1]['username'] + assert 'fero.mrkva' in resp.json[0]['username'] + assert 3 == len(resp.json) + + resp = client.get('/auth/user/search?like=.mrk') + assert 'fero.mrkva' in resp.json[0]['username'] + assert 1 == len(resp.json) + + + resp = client.get('/auth/user/search?id={}'.format(user.id)) + assert resp.json[0]['username'] == user.username + + resp = client.get('/auth/user/search?names={}'.format(user.username)) + assert resp.json[0]['username'] == user.username + + # no such user + resp = client.get('/auth/user/search?like=test') + assert not resp.json + + # invalid query par + resp = client.get('/auth/user/search?id=1,a') + assert resp.json + + +def test_get_accounts(client): + resp = client.get(url_for('account.list_accounts', type='user')) + assert resp.status_code == 401 + + login_as_admin(client) + resp = client.get(url_for('account.list_accounts', type='user')) + + assert resp.status_code == 200 + assert resp.json['total'] == 1 + account = resp.json['accounts'][0] + assert account['type'] == 'user' + assert account['name'] == 'mergin' + + # filter by type + resp = client.get(url_for('account.list_accounts', type='foo')) + assert resp.status_code == 400 + + resp = client.get(url_for('account.list_accounts', type='organisation')) + assert resp.status_code == 200 + assert resp.json['total'] == 0 + + resp = client.get(url_for('account.list_accounts', type='user', page=2)) + assert resp.status_code == 404 + + resp = client.get(url_for('account.list_accounts', type='user', name='merg')) + assert resp.status_code == 200 + assert resp.json['total'] == 1 + + resp = client.get(url_for('account.list_accounts', type='user', name='foo')) + assert resp.status_code == 200 + assert resp.json['total'] == 0 + + add_user('foo', 'bar') + resp = client.get(url_for('account.list_accounts', type='user', name='foo')) + assert resp.status_code == 200 + assert resp.json['total'] == 1 + + resp = client.get(url_for('account.list_accounts', type='user', order_by='name', descending='true')) + assert resp.status_code == 200 + assert resp.json['accounts'][0]['name'] == 'mergin' + assert resp.json['accounts'][1]['name'] == 'foo' diff --git 
a/server/test/test_celery.py b/server/test/test_celery.py new file mode 100644 index 00000000..78ce4e9f --- /dev/null +++ b/server/test/test_celery.py @@ -0,0 +1,154 @@ +import os +from datetime import datetime, timedelta +from flask import current_app +from flask_mail import Mail +from unittest.mock import patch +from sqlalchemy.orm.attributes import flag_modified + +from src import db +from src.config import Configuration +from src.organisation import Organisation +from src.auth.models import User, LoginHistory +from src.models.db_models import Project, RemovedProject, Namespace +from src.celery import send_email_async, remove_temp_files, remove_projects_backups, remove_accounts_data +from src.storages.disk import move_to_tmp +from . import test_project, test_namespace +from .utils import create_project, cleanup, add_user, create_organisation + + +def test_send_email(app): + """ Test celery is actually sending emails. """ + mail = Mail() + email_data = { + 'subject': 'test', + 'html': 'test', + 'recipients': ['foo@bar.com'], + 'sender': 'no_reply@cloudmergin.com' + } + with mail.record_messages() as outbox: + Configuration.MERGIN_TESTING = True + task = send_email_async.s(**email_data).apply() + assert len(outbox) == 1 + assert task.status == 'SUCCESS' + assert outbox[0].sender == 'no_reply@cloudmergin.com' + assert outbox[0].html == 'test' + assert outbox[0].subject == 'test' + assert current_app.config['MAIL_DEFAULT_SENDER'] not in outbox[0].bcc + assert 'foo@bar.com' in outbox[0].send_to + + # turn off testing mode + Configuration.MERGIN_TESTING = False + task = send_email_async.s(**email_data).apply() + assert len(outbox) == 2 + assert task.status == 'SUCCESS' + assert current_app.config['MAIL_DEFAULT_SENDER'] in outbox[1].bcc + + Configuration.MERGIN_TESTING = True + del email_data['recipients'] + task = send_email_async.s(**email_data).apply() + assert task.status == 'FAILURE' + assert 'No recipients have been added' in task.traceback + + +@patch('src.celery.send_email_async.apply_async') +def test_send_email_from_flask(send_email_mock, client): + """ Test correct data are passed to celery task which is called from endpoint.
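The task itself is mocked out, so only the kwargs handed to apply_async are inspected.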
""" + project = Project.query.filter_by(namespace='mergin', name='test').first() + email_data = { + 'subject': 'Mergin project mergin/test has been deleted', + 'recipients': [project.creator.email], + 'sender': current_app.config['MAIL_DEFAULT_SENDER'] + } + resp = client.delete('/v1/project/{}/{}'.format('mergin', 'test')) + assert resp.status_code == 200 + # cleanup files + cleanup(client, [project.storage.project_dir]) + assert send_email_mock.called + call_args, _ = send_email_mock.call_args + _, kwargs = call_args + del kwargs['html'] + assert email_data == kwargs + + +def test_clean_temp_files(app): + project = Project.query.filter_by(name=test_project, namespace=test_namespace).first() + # pretend project has been removed + path = move_to_tmp(project.storage.project_dir) + assert os.path.exists(path) + # try with default value, dir is still not marked for removal + remove_temp_files() + assert os.path.exists(path) + # patch modification time of parent dir + t = datetime.utcnow() - timedelta(days=(app.config['TEMP_EXPIRATION']+1)) + parent_dir = os.path.dirname(os.path.dirname(path)) + os.utime(parent_dir, (datetime.timestamp(t), datetime.timestamp(t))) + remove_temp_files() + assert not os.path.exists(path) + + +def test_remove_deleted_project_backups(client): + resp = client.delete('/v1/project/{}/{}'.format(test_namespace, test_project)) + assert resp.status_code == 200 + rp = RemovedProject.query.filter_by(namespace=test_namespace, name=test_project).first() + rp.timestamp = datetime.utcnow() - timedelta(days=(client.application.config['DELETED_PROJECT_EXPIRATION']+1)) + rp_dir = os.path.abspath( + os.path.join(client.application.config["LOCAL_PROJECTS"], rp.properties["storage_params"]["location"])) + assert os.path.exists(rp_dir) + remove_projects_backups() + assert not RemovedProject.query.filter_by(namespace=test_namespace, name=test_project).count() + assert not os.path.exists(rp_dir) + + +def test_remove_accounts_data(client): + usr = add_user("test1", "test") + usr2 = add_user("test2", "test") + usr2_id = usr2.id + org = create_organisation("test_organisation", usr) + org_id = org.id + p = create_project("test1", "test1", usr) + p3 = create_project("test_delete", "test1", usr) + p2 = create_project("test_organisation_project", "test_organisation", usr) + project_location = p.storage.project_dir + project2_location = p2.storage.project_dir + project3_location = p3.storage.project_dir + + resp = client.delete('/v1/project/{}/{}'.format("test1", "test_delete")) + assert resp.status_code == 200 + assert 1 == RemovedProject.query.filter_by(namespace="test1", name="test_delete").count() + assert os.path.exists(project3_location) + + usr.inactive_since = datetime.today() - timedelta(days=Configuration.CLOSED_ACCOUNT_EXPIRATION + 0.5) + usr.active = False + usr_id = usr.id + org.owners.append(usr2.id) + flag_modified(org, "owners") + db.session.add(org) + db.session.commit() + + remove_accounts_data() + + usr = User.query.get(usr_id) + assert "deleted" in usr.username + assert not os.path.exists(project_location) + namespace = Namespace.query.filter(Namespace.name == usr.username).first() + assert namespace + assert 0 == RemovedProject.query.filter_by(namespace="test1", name="test_delete").count() + assert not os.path.exists(project3_location) + + org = Organisation.query.get(org_id) + org.inactive_since = datetime.today() - timedelta(days=Configuration.CLOSED_ACCOUNT_EXPIRATION + 0.5) + org.active = False + db.session.commit() + + remove_accounts_data() + org = 
Organisation.query.get(org_id) + assert "deleted" in org.name + namespace = Namespace.query.filter(Namespace.name == org.name).first() + assert namespace + usr2 = User.query.get(usr2_id) + assert usr2.username == "test2" + assert "test1" not in org.owners + assert not os.path.exists(project2_location) + project = Project.query.filter(Project.name == "test_organisation_project").first() + assert not project diff --git a/server/test/test_db_hooks.py b/server/test/test_db_hooks.py new file mode 100644 index 00000000..c1581080 --- /dev/null +++ b/server/test/test_db_hooks.py @@ -0,0 +1,153 @@ +# Copyright (C) 2020 Lutra Consulting Limited. All rights reserved. +# Do not distribute without the express permission of the author. +import os +from pathlib import Path + +from src.organisation.models import Organisation, OrganisationInvitation +from src.models.db_models import Namespace, ProjectTransfer, Project, ProjectVersion, Upload, ProjectAccess, Account +from src.auth.models import User, UserProfile +from src import db +from . import DEFAULT_USER, TEST_ORG +from .utils import add_user, create_project, cleanup + + +def test_remove_organisation(client, diff_project): + """ Test fully set up organisation is successfully removed incl: + - organisation project + - pending transfer + - pending invitation + - namespace + - associated files + """ + org_project = 'test' + cleanup(client, [os.path.join(TEST_ORG, org_project)]) + + # set up + user = User.query.filter_by(username=DEFAULT_USER[0]).first() + org = Organisation(name=TEST_ORG, creator_id=user.id) + add_user('user', 'user') + invitation = OrganisationInvitation(TEST_ORG, 'user', 'reader') + db.session.add(org) + db.session.add(invitation) + db.session.commit() + project = create_project(org_project, TEST_ORG, user) + proj_dir = Path(project.storage.project_dir) + ns = Namespace.query.filter_by(name=org.name).first() + project_transfer = ProjectTransfer(diff_project, ns, user.id) + db.session.add(project_transfer) + + # delete organisation and thus associated projects and other objects + account = Account.query.filter_by(type='organisation', owner_id=org.id).first() + db.session.delete(account) + db.session.commit() + db.session.delete(org) + db.session.commit() + assert not Organisation.query.filter_by(name=TEST_ORG).count() + assert not Namespace.query.filter_by(name=TEST_ORG).count() + assert not Project.query.filter_by(name=org_project, namespace=TEST_ORG).count() + assert not OrganisationInvitation.query.filter_by(org_name=TEST_ORG).count() # handled as backreference + assert not ProjectTransfer.query.filter_by(to_ns_name=TEST_ORG).count() + assert not proj_dir.exists() + + +def test_close_user_account(client, diff_project, test_organisation): + """ Test fully set up and active user is successfully removed incl: + - user profile + - user project + - pending transfer + - pending invitation + - namespace + - associated files + - membership in organisation + - project access to foreign projects + """ + user_project = 'user_proj' + cleanup(client, [os.path.join('user', user_project)]) + # set up + mergin_user = User.query.filter_by(username=DEFAULT_USER[0]).first() + user = add_user('user', 'user') + user_id = user.id + user_ns = Namespace.query.filter_by(name=user.username).first() + # user invited to TEST_ORG + invitation = OrganisationInvitation(test_organisation.name, 'user', 'reader') + db.session.add(invitation) + # user is member of another organisation + test_org = TEST_ORG + '2' + org = Organisation(name=test_org, 
creator_id=mergin_user.id) + org.owners.append(user.id) + db.session.add(org) + # user has access to mergin user's diff_project + diff_project.access.writers.append(user.id) + # user contributed to another user's project so he is listed in the project history + change = {'added': [], 'removed': [], 'renamed': [], 'updated': []} + pv = ProjectVersion(diff_project, 'v11', user.username, change, diff_project.files, '127.0.0.1') + db.session.add(pv) + db.session.add(diff_project) + # user has his own project + p = create_project(user_project, user_ns.name, user) + # user requested transfer of his project to org + org_ns = Namespace.query.filter_by(name=org.name).first() + project_transfer_out = ProjectTransfer(p, org_ns, user.id) + db.session.add(project_transfer_out) + # create pending transfer to user's namespace + project_transfer_in = ProjectTransfer(diff_project, user_ns, mergin_user.id) + db.session.add(project_transfer_in) + # create user's own organisation to be closed with his account (since he is the only owner) + user_org = Organisation(name='user.org', creator_id=user.id) + db.session.add(user_org) + db.session.commit() + + # now remove user + account = Account.query.filter_by(type="user", owner_id=user.id).first() + db.session.delete(account) + db.session.commit() + db.session.delete(user) + db.session.commit() + assert not User.query.filter_by(username='user').count() + assert not UserProfile.query.filter_by(user_id=user_id).count() # handled as backreference + assert not Namespace.query.filter_by(name='user').count() + assert not Project.query.filter_by(name=user_project, namespace='user').count() + assert not OrganisationInvitation.query.filter_by(username='user').count() + assert not ProjectTransfer.query.filter_by(requested_by=user_id).count() + assert not ProjectTransfer.query.filter_by(to_ns_name='user').count() + assert user_id not in org.owners + assert user_id not in diff_project.access.writers + assert not Organisation.query.filter_by(name='user.org').count() + # user remains referenced in existing project version he created (as read-only ref) + assert diff_project.versions[0].author == 'user' + + +def test_remove_project(client, diff_project, test_organisation): + """ Test active project is successfully removed incl: + - pending transfer + - pending upload + - project access + - project versions + - associated files + """ + # set up + mergin_user = User.query.filter_by(username=DEFAULT_USER[0]).first() + project_dir = Path(diff_project.storage.project_dir) + project_name = diff_project.name + ns = Namespace.query.filter_by(name=test_organisation.name).first() + project_transfer = ProjectTransfer(diff_project, ns, mergin_user.id) + changes = {'added': [], 'removed': [], 'renamed': [], 'updated': []} + upload = Upload(diff_project, 10, changes, mergin_user.id) + db.session.add(project_transfer) + db.session.add(upload) + db.session.commit() + project_id = diff_project.id + + # remove project + db.session.delete(diff_project) + db.session.commit() + assert not Project.query.filter_by(id=project_id).count() + assert not Upload.query.filter_by(project_id=project_id).count() + assert not ProjectVersion.query.filter_by(project_id=project_id).count() + assert not ProjectAccess.query.filter_by(project_id=project_id).count() + assert not ProjectTransfer.query.filter_by(project_id=project_id).count() + # files need to be deleted manually + assert project_dir.exists() + cleanup(client, [project_dir]) + diff --git a/server/test/test_disk_utils.py 
b/server/test/test_disk_utils.py new file mode 100644 index 00000000..f4c5f573 --- /dev/null +++ b/server/test/test_disk_utils.py @@ -0,0 +1,77 @@ +# Copyright (C) 2020 Lutra Consulting Limited. All rights reserved. +# Do not distribute without the express permission of the author. + +import os +import tempfile +import shutil +import pytest +from src.storages.disk import copy_file, copy_dir, move_to_tmp +from src.mergin_utils import generate_checksum +from . import test_project_dir + + +def test_copy_remove_file(app): + f_old = os.path.join(test_project_dir, 'base.gpkg') + f_new = os.path.join(tempfile.gettempdir(), 'base.gpkg') + # clean up + if os.path.exists(f_new): + os.remove(f_new) + + copy_file(f_old, f_new) + assert os.path.exists(f_new) + f_new_hash = generate_checksum(f_new) + assert f_new_hash == generate_checksum(f_old) + + f_temp = move_to_tmp(f_new) + assert not os.path.exists(f_new) + assert os.path.exists(f_temp) + assert generate_checksum(f_temp) == f_new_hash + + +def test_copy_remove_dir(app): + d_old = test_project_dir + d_new = os.path.join(tempfile.gettempdir(), 'new_dir') + # clean up + if os.path.exists(d_new): + shutil.rmtree(d_new) + + copy_dir(d_old, d_new) + assert os.path.exists(d_new) + copied_files = [] + for root, dirs, files in os.walk(d_old): + for file in files: + abs_path = os.path.abspath(os.path.join(root, file)) + rel_path = os.path.relpath(abs_path, start=d_old) + f_copy = os.path.join(d_new, rel_path) + assert os.path.exists(f_copy) + assert generate_checksum(abs_path) == generate_checksum(f_copy) + copied_files.append(rel_path) + + d_temp = move_to_tmp(d_new) + assert not os.path.exists(d_new) + assert os.path.exists(d_temp) + for f in copied_files: + assert os.path.exists(os.path.join(d_temp, f)) + + # try again to remove + assert not move_to_tmp(d_new) + + +def test_failures(): + # file copy source/destination is not a file type + with pytest.raises(IsADirectoryError): + copy_file(os.path.join(test_project_dir, 'base.gpkg'), tempfile.gettempdir()) + + with pytest.raises(FileNotFoundError): + copy_file(test_project_dir, os.path.join(tempfile.gettempdir(), 'base.gpkg')) + + # src file does not exist + with pytest.raises(FileNotFoundError): + copy_file(os.path.join(test_project_dir, 'not-found.gpkg'), os.path.join(tempfile.gettempdir(), 'base.gpkg')) + + # src directory is not valid + with pytest.raises(NotADirectoryError): + copy_dir(os.path.join(test_project_dir, 'base.gpkg'), os.path.join(tempfile.gettempdir(), 'new_dir')) + + with pytest.raises(NotADirectoryError): + copy_dir(os.path.join(test_project_dir, 'not_found'), os.path.join(tempfile.gettempdir(), 'new_dir')) diff --git a/server/test/test_namespace_controller.py b/server/test/test_namespace_controller.py new file mode 100644 index 00000000..c5226269 --- /dev/null +++ b/server/test/test_namespace_controller.py @@ -0,0 +1,28 @@ +# Copyright (C) 2020 Lutra Consulting Limited. All rights reserved. +# Do not distribute without the express permission of the author. + +import pytest +from .utils import add_user +from . 
import json_headers, TEST_ORG + + +@pytest.fixture(scope='function') +def test_namespace(client): + add_user('user', 'user') + add_user('user2', 'user2') + + +namespace_data = [ + ('user', 'user', 2), # if query=user + ('user', 'user2', 1), # if query=user2 + ('organisation', TEST_ORG, 1), # if query=TEST_ORG + ('user', 'user3', 0), # if query=user3, no namespace found +] + + +@pytest.mark.parametrize("namespace_type, query, expected", namespace_data) +def test_get_namespaces(client, test_organisation, test_namespace, namespace_type, query, expected): + resp = client.get(f'/v1/namespaces/{namespace_type}?q={query}', headers=json_headers) + assert len(resp.json) == expected + if expected == 1: + assert resp.json[0]['name'] == query diff --git a/server/test/test_organisations.py b/server/test/test_organisations.py new file mode 100644 index 00000000..3cbf8275 --- /dev/null +++ b/server/test/test_organisations.py @@ -0,0 +1,339 @@ +# Copyright (C) 2020 Lutra Consulting Limited. All rights reserved. +# Do not distribute without the express permission of the author. + +import json +import os +import shutil +import pytest +from datetime import timedelta +from flask import url_for +from src.models.db_models import Namespace, Project, Account +from src.organisation import Organisation, OrganisationInvitation +from src.auth.models import User +from src import db + +from .utils import add_user, create_project, DateTimeEncoder, login +from .test_project_controller import _get_changes +from . import json_headers, TEST_ORG, test_project_dir, DEFAULT_USER + +add_organisation_data = [ + ({"name": TEST_ORG}, 201), + ({"name": TEST_ORG, "description": "test"}, 201), + ({"description": "test"}, 400), # missing required field + ({"name": "#$&^*"}, 400) # invalid field +] + + +@pytest.mark.parametrize("data,expected", add_organisation_data) +def test_create_organisation(client, data, expected): + owner = User.query.filter_by(username='mergin').first() + resp = client.post(url_for('organisation.create_organisation'), data=json.dumps(data), headers=json_headers) + assert resp.status_code == expected + if expected == 201: + assert Organisation.query.filter_by(name=TEST_ORG).count() == 1 + assert Namespace.query.filter_by(name=TEST_ORG).count() == 1 + org = Organisation.query.filter_by(name=TEST_ORG).first() + assert org.owners == [owner.id] + # call user profile endpoint to check its organisations + resp = client.get('/v1/user/owner') + assert resp.json['organisations'][org.name] == 'owner' + + +def test_add_existing_org(client, test_organisation): + resp = client.post(url_for('organisation.create_organisation'), data=json.dumps({"name": TEST_ORG}), headers=json_headers) + assert resp.status_code == 409 + resp = client.post(url_for('organisation.create_organisation'), data=json.dumps({"name": TEST_ORG.upper()}), headers=json_headers) + assert resp.status_code == 409 + + # test user with the same name as organisation can not be created + resp = client.post( + url_for('auth.register_user'), + data=json.dumps({'username': TEST_ORG, 'email': 'test@test.com'}), + headers=json_headers + ) + assert resp.status_code == 400 + + +def test_free_orgs_limit(client, test_organisation): + url = url_for('organisation.create_organisation') + client.post(url, data=json.dumps({"name": "org2"}), headers=json_headers) + Namespace.query.filter_by(name="org2").update({"storage": 0}) + client.post(url, data=json.dumps({"name": "org3"}), headers=json_headers) + Namespace.query.filter_by(name="org3").update({"storage": 0}) + 
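# zeroing the namespace storage presumably marks org2 and org3 as free organisations for the limit below +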
db.session.commit() + resp = client.post(url, data=json.dumps({"name": "org4"}), headers=json_headers) + assert resp.status_code == 400 + assert resp.json["detail"] == "Too many free organisations" + + +test_organisation_data = [ + ('foo', 404), + (TEST_ORG, 200), +] + + +def test_list_organisations(client, test_organisation): + user = add_user('bob', 'foo') + login(client, 'bob', 'foo') + resp = client.get(url_for('organisation.get_organisations')) + assert resp.status_code == 200 + assert len(json.loads(resp.data)) == 0 + + org = Organisation(name='bob-org', creator_id=user.id) + db.session.add(org) + db.session.commit() + + resp = client.get(url_for('organisation.get_organisations')) + assert resp.status_code == 200 + resp_data = json.loads(resp.data) + assert len(resp_data) == 1 + assert resp_data[0]["name"] == 'bob-org' + + # log in as someone else (even admin) + login(client, 'mergin', 'ilovemergin') + resp = client.get(url_for('organisation.get_organisations')) + assert resp.status_code == 200 + assert len(json.loads(resp.data)) == 1 + assert json.loads(resp.data)[0]["name"] != 'bob-org' + + +@pytest.mark.parametrize("name, status_code", test_organisation_data) +def test_get_organisation(client, test_organisation, name, status_code): + resp = client.get(url_for('organisation.get_organisation_by_name', name=name)) + assert resp.status_code == status_code + if resp.status_code == 200: + resp_data = json.loads(resp.data) + assert resp_data["name"] == name + assert DEFAULT_USER[0] in resp_data["owners"] + + +def test_get_org_by_non_member(client, test_organisation): + add_user("test", "test") + login(client, "test", "test") + resp = client.get(url_for('organisation.get_organisation_by_name', name=TEST_ORG)) + assert resp.status_code == 403 + + +def test_delete_organisation(client, test_organisation): + url = url_for('organisation.delete_organisation', name=TEST_ORG) + login(client, "admin", "admin") + resp = client.delete(url) + assert resp.status_code == 403 + + # create some project + owner = User.query.filter_by(username="owner").first() + project = create_project("test1", test_organisation.name, owner) + project_dir = project.storage.project_dir + assert os.path.exists(project_dir) + + login(client, "owner", "owner") + resp = client.delete(url) + assert resp.status_code == 200 + assert not Organisation.query.filter_by(name=TEST_ORG).count() + assert not Namespace.query.filter_by(name=TEST_ORG).count() + assert not Project.query.filter_by(namespace=TEST_ORG).count() + assert not os.path.exists(project_dir) + + +def test_update_org(client, test_organisation): + url = url_for('organisation.update_organisation', name=TEST_ORG) + # login as owner + login(client, "owner", "owner") + data = {"description": "Foo"} + + resp = client.patch(url, data=json.dumps(data), headers=json_headers) + assert resp.status_code == 200 + org = Organisation.query.filter_by(name=TEST_ORG).first() + assert org.description == "Foo" + + # log in as a non-privileged user + login(client, "admin", "admin") + resp = client.patch(url, data=json.dumps(data), headers=json_headers) + assert resp.status_code == 403 + + +def test_update_members(client, test_organisation): + url = url_for('organisation.update_access', name=TEST_ORG) + new_writer = add_user("writer", "writer") + login(client, "writer", "writer") + # check no access + resp = client.get(url_for('organisation.get_organisations', username='writer')) + assert not json.loads(resp.data) + + # log in as org admin + login(client, "admin", "admin") + data = { +
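# note: the lists below appear to replace the organisation's stored access lists wholesale +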
"owners": ["owner"], + "admins": ["admin", "owner"], + "writers": ["admin", "writer"], + "readers": ["admin", "writer"] + } + + resp = client.patch(url, data=json.dumps(data), headers=json_headers) + assert resp.status_code == 200 + org = Organisation.query.filter_by(name=TEST_ORG).first() + assert new_writer.id in org.writers + assert new_writer.id in org.readers + + # login with not-privileged user + login(client, "writer", "writer") + resp = client.patch(url, data=json.dumps(data), headers=json_headers) + assert resp.status_code == 403 + + # try to remove all members + login(client, "owner", "owner") + data = {"owners": [], "admins": [], "writers": [], "readers": []} + resp = client.patch(url, data=json.dumps(data), headers=json_headers) + assert resp.status_code == 400 + + +@pytest.fixture(scope='function') +def test_invitation(test_organisation): + # default organisation + add_user("user", "user") + add_user("user1", "user1") + + invitation = OrganisationInvitation(TEST_ORG, 'user', 'admin') + db.session.add(invitation) + db.session.commit() + + +create_invitation_data = [ + ({"username": 'user1', "org_name": TEST_ORG, "role": 'admin'}, 'admin', 'admin', 201), # admin create invitation + ({"username": 'user1', "org_name": TEST_ORG, "role": 'admin'}, 'owner', 'owner', 201), # owner create invitation + ({"username": 'user1', "org_name": TEST_ORG, "role": 'admin'}, 'user', 'user', 403), # user can't create invitation + ({"username": 'user', "org_name": TEST_ORG, "role": 'admin'}, 'admin', 'admin', 409), # already exist + ({"username": 'user', "org_name": "org", "role": 'admin'}, 'admin', 'admin', 404), # organisation not found + ({"username": 'user_test', "org_name": TEST_ORG, "role": 'admin'}, 'admin', 'admin', 404), # user not found + ({"username": 'user', "org_name": TEST_ORG, "role": 'test'}, 'admin', 'admin', 400), # wrong role +] + + +@pytest.mark.parametrize("data, username, password, expected", create_invitation_data) +def test_create_invitation(client, test_invitation, data, username, password, expected): + login(client, username, password) + url = url_for('organisation.create_invitation') + resp = client.post(url, data=json.dumps(data), headers=json_headers) + assert resp.status_code == expected + if expected == 201: + login(client, "user1", "user1") + url = url_for('organisation.get_invitations', type="user", name="user1") + resp = client.get(url, headers=json_headers) + assert resp.status_code == 200 + assert len(resp.json) == 1 + invitation = resp.json[0] + for key, value in data.items(): + assert key in invitation.keys() + assert value == invitation[key] + + +get_invitation_data = [ + ('user', 1, 200), # user can see invitation 1 (target) + ('admin', 1, 200), # admin can see invitation 1 (admin of org) + ('user1', 1, 403), # user1 can't see invitation 1 + ('user', 2, 404), # invitation 1 is not exist +] + + +@pytest.mark.parametrize("user, id, expected", get_invitation_data) +def test_get_invitation(client, test_invitation, user, id, expected): + url = url_for('organisation.get_invitation', id=id) + login(client, user, user) + resp = client.get(url, headers=json_headers) + assert resp.status_code == expected + + +@pytest.mark.parametrize("user, id, expected", get_invitation_data) +def test_delete_invitation(client, test_invitation, user, id, expected): + url = url_for('organisation.delete_invitation', id=id) + login(client, user, user) + resp = client.delete(url, headers=json_headers) + assert resp.status_code == expected + if expected == 200: + url = 
url_for('organisation.get_invitation', id=id) + resp = client.get(url, headers=json_headers) + assert resp.status_code == 404 + + +test_accept_invitation_data = [ + ('user', 1, False, 200), # user can accept 1 (target) + ('user', 1, True, 400), # user can accept 1 (target) but already expired + ('admin', 1, False, 403), # admin can see invitation 1 (admin of org) but not the target + ('user1', 1, False, 403), # user1 can't accept invitation 1 + ('user', 2, False, 404), # invitation 2 does not exist +] + + +@pytest.mark.parametrize("user, id, expired, expected", test_accept_invitation_data) +def test_accept_invitation(client, test_invitation, user, id, expired, expected): + if expired: + invitation = OrganisationInvitation.query.first() + invitation.expire = invitation.expire - timedelta( + seconds=client.application.config['ORGANISATION_INVITATION_EXPIRATION']) + db.session.add(invitation) + db.session.commit() + + url = url_for('organisation.accept_invitation', id=id) + login(client, user, user) + resp = client.post(url, headers=json_headers) + assert resp.status_code == expected + if expected == 200: + organisation = Organisation.query.first() + user = User.query.filter_by(username=user).first() + assert user.id in organisation.admins + + +def test_upload_to_free_org(client, diff_project, test_organisation): + # clean up + proj_data = os.path.join(client.application.config['LOCAL_PROJECTS'], TEST_ORG, 'empty') + if os.path.exists(proj_data): + shutil.rmtree(proj_data) + + login(client, 'mergin', 'ilovemergin') + # try to clone into a free organisation + data = { + 'namespace': TEST_ORG, + 'project': 'clone' + } + resp = client.post(f'/v1/project/clone/{DEFAULT_USER[0]}/test', data=json.dumps(data), headers=json_headers) + assert resp.status_code == 400 + assert 'Disk quota reached' in resp.json['detail'] + + # create an empty project and then push to it + data = {'name': 'empty'} + resp = client.post(f'/v1/project/{TEST_ORG}', data=json.dumps(data), headers=json_headers) + assert resp.status_code == 200 + + data = {'version': 'v0', 'changes': _get_changes(test_project_dir)} + resp = client.post(f'/v1/project/push/{TEST_ORG}/empty', data=json.dumps(data, cls=DateTimeEncoder).encode("utf-8"), headers=json_headers) + assert resp.status_code == 400 + assert 'You have reached a data limit' in resp.json['detail'] + + +def test_fetch_organisation_projects(client, test_organisation): + login(client, 'mergin', 'ilovemergin') + resp = client.post(f'/v1/project/{TEST_ORG}', data=json.dumps({"name": "org_project"}), headers=json_headers) + assert resp.status_code == 200 + resp = client.post(f'/v1/project/{DEFAULT_USER[0]}', data=json.dumps({"name": "usr_project"}), headers=json_headers) + assert resp.status_code == 200 + url = '/v1/project?flag=shared' + resp = client.get(url, headers=json_headers) + assert resp.status_code == 200 + resp_data = json.loads(resp.data) + assert len(resp_data) == 1 + + login(client, "admin", "admin") + url = '/v1/project?flag=shared' + resp = client.get(url, headers=json_headers) + assert resp.status_code == 200 + resp_data = json.loads(resp.data) + assert len(resp_data) == 1 + + add_user("random", "random") + login(client, "random", "random") + url = '/v1/project?flag=shared' + resp = client.get(url, headers=json_headers) + assert resp.status_code == 200 + resp_data = json.loads(resp.data) + assert not len(resp_data) diff --git a/server/test/test_permissions.py new file mode 100644 index 00000000..1035c6ee --- /dev/null +++ 
b/server/test/test_permissions.py @@ -0,0 +1,120 @@ +import json +from flask import url_for +from flask_login import AnonymousUserMixin +from sqlalchemy.orm.attributes import flag_modified + +from src.permissions import require_project, ProjectPermissions +from src.models.db_models import Namespace, Project, ProjectAccess +from src.auth.models import User +from src import db +from .utils import login, add_user, create_project +from . import json_headers, TEST_ORG, DEFAULT_USER + + +def test_organisation_permissions(client, test_organisation): + user = add_user("random", "random") + admin = User.query.filter_by(username=DEFAULT_USER[0]).first() + project = create_project("foo", TEST_ORG, admin) + + assert not ProjectPermissions.Upload.check(project, user) + assert not ProjectPermissions.Delete.check(project, user) + assert not ProjectPermissions.Read.check(project, user) + assert not ProjectPermissions.Update.check(project, user) + + test_organisation.readers.append(user.id) + flag_modified(test_organisation, "readers") + db.session.commit() + + assert ProjectPermissions.Read.check(project, user) + assert not ProjectPermissions.Upload.check(project, user) + assert not ProjectPermissions.Delete.check(project, user) + assert not ProjectPermissions.Update.check(project, user) + + test_organisation.writers.append(user.id) + flag_modified(test_organisation, "writers") + db.session.commit() + + assert ProjectPermissions.Read.check(project, user) + assert ProjectPermissions.Upload.check(project, user) + assert ProjectPermissions.Delete.check(project, user) + assert ProjectPermissions.Update.check(project, user) + + +def test_project_permissions(client): + data = {"name": 'foo'} + resp = client.post('/v1/project/{}'.format("mergin"), data=json.dumps(data), headers=json_headers) + + user = add_user("random", "random") + project = Project.query.filter_by(name="foo", namespace="mergin").first() + + assert not ProjectPermissions.Upload.check(project, user) + assert not ProjectPermissions.Delete.check(project, user) + assert not ProjectPermissions.Read.check(project, user) + assert not ProjectPermissions.Update.check(project, user) + + pa = project.access + pa.readers.append(user.id) + flag_modified(pa, "readers") + db.session.commit() + + assert ProjectPermissions.Read.check(project, user) + assert not ProjectPermissions.Upload.check(project, user) + assert not ProjectPermissions.Delete.check(project, user) + assert not ProjectPermissions.Update.check(project, user) + + pa.writers.append(user.id) + flag_modified(pa, "writers") + db.session.commit() + + assert ProjectPermissions.Read.check(project, user) + assert ProjectPermissions.Upload.check(project, user) + assert not ProjectPermissions.Delete.check(project, user) + assert not ProjectPermissions.Update.check(project, user) + + # when the user is removed from readers, they can no longer read the project but can still write into it + pa.readers.clear() + flag_modified(pa, "readers") + db.session.commit() + + assert not ProjectPermissions.Read.check(project, user) + assert ProjectPermissions.Upload.check(project, user) + + # testing owner permissions + user = User.query.filter_by(username='mergin').first() + + assert ProjectPermissions.Read.check(project, user) + assert ProjectPermissions.Upload.check(project, user) + assert ProjectPermissions.Delete.check(project, user) + assert ProjectPermissions.Update.check(project, user) + + # test AnonymousUserMixin + client.get(url_for('auth.logout')) + user = AnonymousUserMixin() + assert not 
ProjectPermissions.Read.check(project, user) + project.access.public = True + db.session.commit() + assert ProjectPermissions.Read.check(project, user) + assert not ProjectPermissions.Update.check(project, user) + assert not ProjectPermissions.Upload.check(project, user) + + +def test_permission_to_create_project(client, test_organisation): + user = add_user("random", "random") + login(client, "random", "random") + resp = client.post('/v1/project/{}'.format(TEST_ORG), data=json.dumps({"name": "foo"}), headers=json_headers) + assert resp.status_code == 403 + + test_organisation.readers.append(user.id) + flag_modified(test_organisation, "readers") + db.session.commit() + resp = client.post('/v1/project/{}'.format(TEST_ORG), data=json.dumps({"name": "foo"}), headers=json_headers) + assert resp.status_code == 403 + + test_organisation.writers.append(user.id) + flag_modified(test_organisation, "writers") + db.session.commit() + resp = client.post('/v1/project/{}'.format(TEST_ORG), data=json.dumps({"name": "foo"}), headers=json_headers) + assert resp.status_code == 200 + + resp = client.post('/v1/project/{}'.format("mergin"), data=json.dumps({"name": "foo"}), headers=json_headers) + assert resp.status_code == 403 diff --git a/server/test/test_project_controller.py b/server/test/test_project_controller.py new file mode 100644 index 00000000..6ee719f1 --- /dev/null +++ b/server/test/test_project_controller.py @@ -0,0 +1,1191 @@ +# Copyright (C) 2018 Lutra Consulting Limited. All rights reserved. +# Do not distribute without the express permission of the author. + +import os +import pytest +import json +import uuid +import math +import time +import hashlib +import shutil +from datetime import datetime +from dateutil.tz import tzlocal +from pygeodiff import GeoDiff +from flask import url_for, current_app + +from src import db +from src.models.db_models import (Project, Upload, ProjectVersion, ProjectAccess, ProjectTransfer, + Namespace, Account, RemovedProject) +from src.models.schemas import ProjectSchema +from src.mergin_utils import generate_checksum, is_versioned_file, resolve_tags +from src.auth.models import User, UserProfile + +from . 
import test_project, test_namespace, test_project_dir, json_headers, TMP_DIR +from .utils import add_user, create_project, DateTimeEncoder, initialize + +CHUNK_SIZE = 1024 + + +def test_file_history(client, diff_project): + resp = client.get('/v1/resource/history/{}/{}?path={}'.format(test_namespace, test_project, 'test.gpkg')) + history = resp.json['history'] + assert resp.status_code == 200 + assert 'v2' not in history + assert 'v8' not in history + assert history['v3']['change'] == 'added' + assert history['v9']['change'] == 'updated' + assert 'location' not in history['v9'] + assert 'expiration' in history['v9'] + + resp = client.get('/v1/project/{}/{}?since=v1'.format(test_namespace, test_project)) + files = resp.json["files"] + history = files[-1]["history"] + assert files[-1]["path"] == 'test.gpkg' + assert resp.status_code == 200 + assert 'v2' not in history + assert 'v8' not in history + assert history['v3']['change'] == 'added' + assert history['v9']['change'] == 'updated' + + resp = client.get('/v1/project/{}/{}?since=v4'.format(test_namespace, test_project)) + files = resp.json["files"] + history = files[-1]["history"] + assert files[-1]["path"] == 'test.gpkg' + assert resp.status_code == 200 + assert 'v3' not in history + assert history['v9']['change'] == 'updated' + + # check geodiff changeset in project version object + resp = client.get('/v1/project/version/{}/{}?version_id=v9'.format(test_namespace, test_project)) + version_info = resp.json[0] + assert "changesets" in version_info + # the only diff update in version v9 is test.gpkg + assert len(version_info["changesets"].keys()) == 1 + assert "test.gpkg" in version_info["changesets"] + assert "summary" in version_info["changesets"]["test.gpkg"] + assert "size" in version_info["changesets"]["test.gpkg"] + # test when no diffs were applied + resp = client.get('/v1/project/version/{}/{}?version_id=v10'.format(test_namespace, test_project)) + assert not resp.json[0]["changesets"] + + # not geodiff file -> empty history + resp = client.get('/v1/resource/history/{}/{}?path={}'.format(test_namespace, test_project, 'test_dir/test2.txt')) + assert resp.status_code == 200 + assert not resp.json['history'] + + # not existing file + resp = client.get('/v1/resource/history/{}/{}?path={}'.format(test_namespace, test_project, 'not_existing.txt')) + assert resp.status_code == 404 + + # test to delete user and account with all of depended entries + # user delete -> profile + do account delete -> namespace -> project -> (version, upload, transfer, access) + account = Account.query.filter_by(type="user", owner_id=1).first() + db.session.delete(account) + User.query.filter_by(username="mergin").delete() + db.session.commit() + project = Project.query.first() + upload = Upload.query.first() + ns = Namespace.query.first() + assert not project + assert not upload + assert not ns + + +def test_get_projects(app): + app.config['BEARER_TOKEN_EXPIRATION'] = 4 + client = app.test_client() + response = client.post("/v1/auth/login", data=json.dumps({'login': 'mergin', 'password': 'ilovemergin'}), + headers=json_headers) + data = json.loads(response.data) + session = data["session"] + token = session["token"] + resp = client.get('/v1/project') + assert resp.status_code == 200 + resp_data = json.loads(resp.data) + assert len(resp_data) + assert test_project in resp_data[0]["name"] + time.sleep(5) + url = '/v1/project?flag=created' + resp = client.get(url, headers={"Authorization": f"Bearer {token}"}) + assert resp.status_code == 401 + + +def 
test_get_paginated_projects(client): + user = User.query.filter_by(username="mergin").first() + for i in range(14): + create_project('foo' + str(i), test_namespace, user) + + resp = client.get('/v1/project/paginated?page=1&per_page=10&as_admin=true') + assert resp.status_code == 200 + resp_data = json.loads(resp.data) + assert len(resp_data.get("projects")) == 10 + assert resp_data.get("count") == 15 + assert "foo8" in resp_data.get("projects")[9]["name"] + assert "v0" == resp_data.get("projects")[9]["version"] + + resp = client.get('/v1/project/paginated?page=1&per_page=10&order_params=updated_desc') + resp_data = json.loads(resp.data) + assert test_project in resp_data.get("projects")[0]["name"] + + resp = client.get('/v1/project/paginated?page=2&per_page=10&order_params=namespace_asc,updated_desc') + resp_data = json.loads(resp.data) + assert len(resp_data.get("projects")) == 5 + assert resp_data.get("count") == 15 + assert "foo0" in resp_data.get("projects")[4]["name"] + + resp = client.get('/v1/project/paginated?page=1&per_page=10&order_params=updated_desc&namespace=foo1') + resp_data = json.loads(resp.data) + assert resp_data.get("count") == 0 + + resp = client.get('/v1/project/paginated?page=1&per_page=10&name=foo1') + resp_data = json.loads(resp.data) + assert resp_data.get("count") == 5 + + resp = client.get('/v1/project/paginated?page=1&per_page=101&name=foo1') + assert resp.status_code == 400 + assert '101 is greater than the maximum of 100' in resp.json.get('detail') + + add_user('user2', 'ilovemergin') + user2 = User.query.filter_by(username="user2").first() + create_project('foo_a', 'user2', user2) + + resp = client.get('/v1/project/paginated?page=1&per_page=10&only_namespace=user2&as_admin=true') + resp_data = json.loads(resp.data) + assert resp.status_code == 200 + assert len(resp_data.get("projects")) == 1 + assert resp_data.get("count") == 1 + assert "foo_a" in resp_data.get("projects")[0]["name"] + + resp = client.get('/v1/project/paginated?page=1&per_page=10&only_namespace=user2') + resp_data = json.loads(resp.data) + assert resp.status_code == 200 + assert len(resp_data.get("projects")) == 0 + assert resp_data.get("count") == 0 + + project = Project.query.filter(Project.name == "foo_a").first() + readers = project.access.readers.copy() + readers.append(user.id) + project.access.readers = readers + # flag_modified(project.access, "owners") + db.session.commit() + + resp = client.get('/v1/project/paginated?page=1&per_page=10&only_namespace=user2') + resp_data = json.loads(resp.data) + assert resp.status_code == 200 + assert len(resp_data.get("projects")) == 1 + assert resp_data.get("count") == 1 + + project = Project.query.filter(Project.name == "foo_a").first() + readers = project.access.readers.copy() + readers.remove(user.id) + project.access.readers = readers + project.access.public = True + project.updated = datetime.utcnow() + db.session.commit() + + resp = client.get('/v1/project/paginated?page=1&per_page=10&name=foo_a') + resp_data = json.loads(resp.data) + assert resp.status_code == 200 + assert resp_data.get("projects")[0]["name"] == "foo_a" + + # searching also in namespace + resp = client.get('/v1/project/paginated?page=1&per_page=10&name=user') + resp_data = json.loads(resp.data) + assert resp.status_code == 200 + assert resp_data.get("count") == 1 + + resp = client.get('/v1/project/paginated?page=1&per_page=10&only_public=true') + resp_data = json.loads(resp.data) + assert resp.status_code == 200 + assert resp_data.get("count") == 2 + + resp = 
client.get('/v1/project/paginated?page=1&per_page=10&name=foo_a&public=false') + resp_data = json.loads(resp.data) + assert resp.status_code == 200 + assert len(resp_data.get("projects")) == 0 + + resp = client.get('/v1/project/paginated?page=1&per_page=15&name=test') + resp_data = json.loads(resp.data) + assert resp.status_code == 200 + assert resp_data.get("count") == 1 + assert not resp_data.get("projects")[0].get("has_conflict") + + # tests if project contains conflict files + project = Project.query.filter(Project.name == "test").first() + files = project.files.copy() + files.append({ + 'checksum': '89469a6482267de394c7c7270cb7ffafe694ea76', + 'location': 'v1/base.gpkg_rebase_conflicts', + 'mtime': '2021-04-14T17:33:32.766731Z', + 'path': 'base.gpkg_rebase_conflicts', + 'size': 98304 + }) + project.files = files + db.session.commit() + + resp = client.get('/v1/project/paginated?page=1&per_page=15&name=test') + resp_data = json.loads(resp.data) + assert resp.status_code == 200 + assert resp_data.get("count") == 1 + assert resp_data.get("projects")[0].get("has_conflict") + + files = project.files.copy() + files.remove([f for f in files if f.get('path') == 'base.gpkg'][0]) + project.files = files + db.session.commit() + + resp = client.get('/v1/project/paginated?page=1&per_page=15&name=test') + resp_data = json.loads(resp.data) + assert resp.status_code == 200 + assert resp_data.get("count") == 1 + assert not resp_data.get("projects")[0].get("has_conflict") + + +def test_get_projects_by_names(client): + user = User.query.filter_by(username="mergin").first() + create_project('foo', test_namespace, user) + add_user('user2', 'ilovemergin') + user2 = User.query.filter_by(username="user2").first() + create_project('foo', 'user2', user2) + create_project('other', 'user2', user2) + + data = {"projects": [ + "mergin/foo", + "user2/foo", + "user2/other", + "something" + ]} + resp = client.post('/v1/project/by_names', data=json.dumps(data), headers=json_headers) + assert resp.status_code == 200 + resp_data = json.loads(resp.data) + assert resp_data.get("mergin/foo").get("name") == "foo" + assert resp_data.get("user2/foo").get("error") == 403 + assert resp_data.get("user2/other").get("error") == 403 + assert resp_data.get("something").get("error") == 404 + + +add_project_data = [ + ({"name": ' foo ', "template": test_project}, 200), # valid project name, whitespace will be removed + ({"name": 'foo/bar', "template": test_project}, 400), # invalid project name + ({"name": 'ba%r', "template": test_project}, 400), # invalid project name + ({"name": 'bar*', "template": test_project}, 200), # valid project name + ({"name": test_project}, 409), +] + + +@pytest.mark.parametrize("data,expected", add_project_data) +def test_add_project(client, app, data, expected): + # add TEMPLATES user and make him creator of test_project (to become template) + user = User(username='TEMPLATES', passwd='templates', is_admin=False, email='templates@mergin.com') + user.active = True + db.session.add(user) + template = Project.query.filter_by(name=test_project, namespace=test_namespace).first() + template.creator = user + db.session.commit() + + resp = client.post('/v1/project/{}'.format(test_namespace), data=json.dumps(data), headers=json_headers) + assert resp.status_code == expected + if expected == 200: + project = Project.query.filter_by(name=data['name'].strip(), namespace=test_namespace).first() + assert not any(x['checksum'] != y['checksum'] and x['path'] != y['path'] for x, y in zip(project.files, template.files)) + 
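# the initial project version should also record the client's user agent +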
assert project.versions[0].user_agent is not None + shutil.rmtree(os.path.join(app.config['LOCAL_PROJECTS'], project.storage.project_dir)) # cleanup + + +def test_versioning(client): + # test if blank project has version set up to v0 + resp = client.post('/v1/project/{}'.format(test_namespace), data=json.dumps({"name": "version_test"}), headers=json_headers) + assert resp.status_code == 200 + project = Project.query.filter_by(name="version_test", namespace=test_namespace).first() + assert project.versions[0].name == "v0" + assert project.versions[0].project_size == 0 + + # testing if versions related to same project name and different namespace is not deleted + user = User(username='version', passwd='version', is_admin=True, email='version@mergin.com') + user.active = True + db.session.add(user) + db.session.commit() + client.get(url_for('auth.logout')) + client.post(url_for('auth.login'), data=json.dumps({'login': "version", 'password': 'version'}), + headers=json_headers) + + resp = client.post('/v1/project/{}'.format("version"), data=json.dumps({"name": "version_test"}), + headers=json_headers) + client.delete('/v1/project/{}/{}'.format("version", "version_test")) + assert resp.status_code == 200 + project = Project.query.filter_by(name="version_test", namespace=test_namespace).first() + assert len(project.versions) == 1 + + +def test_delete_project(client): + project = Project.query.filter_by(namespace=test_namespace, name=test_project).first() + creator_id = project.creator_id + files = project.files + changes = project.versions[0].changes + project_dir = project.storage.project_dir + assert os.path.exists(project_dir) + assert not RemovedProject.query.filter_by(namespace=test_namespace, name=test_project).count() + resp = client.delete('/v1/project/{}/{}'.format(test_namespace, test_project)) + assert resp.status_code == 200 + assert not Project.query.filter_by(namespace=test_namespace, name=test_project).count() + rp = RemovedProject.query.filter_by(namespace=test_namespace, name=test_project).first() + assert rp.properties["creator_id"] == creator_id + assert rp.properties["files"] == files + assert rp.properties["versions"][0]["changes"] == changes + assert os.path.exists(project_dir) # files not deleted yet, since there is possibility of restore + + # do permanent delete by removing backup + resp = client.delete(url_for('retire_removed_project', id=rp.id)) + assert resp.status_code == 204 + assert not RemovedProject.query.filter_by(namespace=test_namespace, name=test_project).count() + assert not os.path.exists(project_dir) + + +def test_restore_project(client): + project = Project.query.filter_by(namespace=test_namespace, name=test_project).first() + creator_id = project.creator_id + files = project.files + changes = project.versions[0].changes + project_dir = project.storage.project_dir + project_info = ProjectSchema(exclude=("permissions", "access", )).dump(project) + client.delete(f"/v1/project/{test_namespace}/{test_project}") + + # test listing + resp = client.get(url_for("paginate_removed_projects")) + assert resp.json["count"] == 1 + rp = resp.json["projects"][0] + resp = client.post(url_for("restore_project", id=rp["id"])) + assert resp.status_code == 201 + + restored_project = Project.query.filter_by(namespace=test_namespace, name=test_project).first() + assert restored_project.creator_id == creator_id + assert restored_project.files == files + assert restored_project.versions[0].changes == changes + assert restored_project.access.owners[0] == creator_id + assert 
os.path.exists(project_dir) + assert ProjectSchema(exclude=("permissions", "access", )).dump(restored_project) == project_info + + +test_project_data = [ + ({"storage_params": {"type": "local", "location": "some_test"}, "name": test_project}, 200), + ({"storage_params": {"type": "local", "location": 'foo'}, "name": 'bar'}, 404), +] + + +@pytest.mark.parametrize("data,expected", test_project_data) +def test_get_project(client, data, expected): + resp = client.get('/v1/project/{}/{}'.format(test_namespace, data["name"])) + assert resp.status_code == expected + if expected == 200: + resp_data = json.loads(resp.data) + assert test_project in resp_data["name"] + assert len(resp_data["access"]["owners"]) + owner = User.query.get(resp_data["access"]["owners"][0]) + assert resp_data["access"]["ownersnames"][0] == owner.username + + +test_history_data = [ + ('v9', {'basefile': {}, 'versions': ['v9']}), + ('v5', {'basefile': {}, 'versions': ['v9', 'v7', 'v6', 'v5']}), + ('v4', {'basefile': {'path': 'base.gpkg', 'version': 'v5'}, 'versions': ['v9', 'v7', 'v6', 'v5', 'v4']}), + ('v1', {'basefile': {'path': 'base.gpkg', 'version': 'v5'}, 'versions': ['v9', 'v7', 'v6', 'v5', 'v4', 'v3']}) # after file removal we can't go any further back in history +] + + +@pytest.mark.parametrize("version,expected", test_history_data) +def test_get_project_with_history(client, diff_project, version, expected): + resp = client.get('/v1/project/{}/{}?since={}'.format(test_namespace, test_project, version)) + assert resp.status_code == 200 + history = next(item['history'] for item in resp.json['files'] if item['path'] == 'test.gpkg') + assert set(expected['versions']) == set(history.keys()) + if expected['basefile']: + ver = expected['basefile']['version'] + assert history[ver]['path'] == expected['basefile']['path'] + assert 'diff' not in history[ver] + + +def test_get_project_at_version(client, diff_project): + resp = client.get(f'/v1/project/{test_namespace}/{test_project}') + latest_project = resp.json + version = 'v5' + resp2 = client.get(f'/v1/project/{test_namespace}/{test_project}?version={version}') + info = resp2.json + # check version non-specific data + for key in ['created', 'creator', 'uploads', 'name', 'namespace', 'access', 'permissions']: + assert info[key] == latest_project[key] + assert info['version'] == version + version_obj = next(v for v in diff_project.versions if v.name == version) + assert len(info['files']) == len(version_obj.files) + assert info['updated'] == version_obj.created.strftime('%Y-%m-%dT%H:%M:%S%zZ') + assert info['tags'] == ['valid_qgis', 'input_use'] + assert info['disk_usage'] == sum(f["size"] for f in version_obj.files) + + # compare with most recent version + version = 'v10' + resp3 = client.get(f'/v1/project/{test_namespace}/{test_project}?version={version}') + for key, value in latest_project.items(): + # skip 'updated' (it would differ slightly due to delay between project and version object update) and fields which are not version specific + if key in ('updated', 'access_requests', 'latest_version'): + continue + assert value == resp3.json[key] + + resp4 = client.get(f'/v1/project/{test_namespace}/{test_project}?version=v100') + assert resp4.status_code == 404 + + resp5 = client.get(f'/v1/project/{test_namespace}/{test_project}?version=v1&since=v1') + assert resp5.status_code == 400 + + +def test_update_project(client): + project = Project.query.filter_by(name=test_project, namespace=test_namespace).first() + # test needs a private project + project.access.public = False + db.session.add(project) + # add some
tester + test_user = User(username='tester', passwd='tester', is_admin=False, email='tester@mergin.com') + test_user.active = True + test_user.profile = UserProfile() + db.session.add(test_user) + db.session.commit() + + # add test user as reader to project + data = {"access": {"readers": project.access.readers + [test_user.id]}} + resp = client.put('/v1/project/{}/{}'.format(test_namespace, test_project), data=json.dumps(data), headers=json_headers) + assert resp.status_code == 200 + assert test_user.id in project.access.readers + + # try to remove project creator from owners + data = {"access": {"owners": [test_user.id]}} + resp = client.put('/v1/project/{}/{}'.format(test_namespace, test_project), data=json.dumps(data), headers=json_headers) + assert resp.status_code == 400 + + +test_download_proj_data = [ + (test_project, None, 200, None), + (test_project, "zip", 200, None), + (test_project, "foo", 400, None), + ('bar', None, 404, None), + (test_project, None, 200, 'v1'), + (test_project, "zip", 200, 'v1'), + (test_project, "foo", 400, 'v1'), + ('bar', None, 404, 'v99'), + (test_project, None, 404, 'v100'), + (test_project, "zip", 404, 'v100'), + (test_project, "foo", 400, 'v100'), + ('bar', None, 404, 'v100') +] + + +@pytest.mark.parametrize("proj_name,out_format,expected,version", test_download_proj_data) +def test_download_project(client, proj_name, out_format, expected, version): + if out_format: + resp = client.get('/v1/project/download/{}/{}?{}format={}'.format( + test_namespace, proj_name, + 'version={}&'.format(version) if version else '', + out_format + )) + if expected == 200: + header = 'attachment; filename={}{}.zip'.format( + proj_name, '-' + version if version else '') + assert header in resp.headers[1][1] + else: + resp = client.get('/v1/project/download/{}/{}{}'.format( + test_namespace, proj_name, + '?version={}'.format(version) if version else '')) + if expected == 200: + assert 'multipart/form-data' in resp.headers[0][1] + + assert resp.status_code == expected + + +test_download_file_data = [ + (test_project, 'test.txt', 'text/plain', 200), + (test_project, 'logo.pdf', 'application/pdf', 200), + (test_project, 'logo.jpeg', 'image/jpeg', 200), + (test_project, 'base.gpkg', 'None', 200), + (test_project, 'json.json', 'text/plain', 200), + (test_project, 'foo.txt', None, 404), + ('bar', 'test.txt', None, 404) +] + + +@pytest.mark.parametrize("proj_name,file_path,mimetype,expected", test_download_file_data) +def test_download_file(client, proj_name, file_path, mimetype, expected): + resp = client.get('/v1/project/raw/{}/{}?file={}'.format(test_namespace, proj_name, file_path)) + assert resp.status_code == expected + if resp.status_code == 200: + assert resp.headers["content-type"] == mimetype + + +test_download_file_version_data = [ + (test_project, 'v8', 'base.gpkg', 404), # version does not have base.gpkg (renamed to test.gpkg) + (test_project, 'v8', 'test.gpkg', 200), # version has test.gpkg (file was renamed but nothing uploaded) + (test_project, 'v9', 'test.gpkg', 200), # actual change happened (update with diff) + (test_project, 'v10', 'test.gpkg', 200), # again, file has not changed + (test_project, 'v1', 'test.txt', 200), # initial file (ordinary text file) + (test_project, 'v10', 'test.txt', 200), # unmodified file (ordinary text file) +] + + +@pytest.mark.parametrize("proj_name,version,file_path,expected", test_download_file_version_data) +def test_download_file_by_version(client, diff_project, proj_name, version, file_path, expected): + project = diff_project + +
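# versioned file copies may be absent from disk (e.g. removed by storage optimization); the server is expected to restore them from diffs on demand +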
project_version = next((v for v in project.versions if v.name == version), None) + for file in project_version.files: + if not is_versioned_file(file['path']): + continue + + # let's delete the file, so it can be restored + if file['path'] == file_path: + file_location = os.path.join(project.storage.project_dir, file['location']) + os.remove(file_location) + + # download whole files, no diffs + resp = client.get('/v1/project/raw/{}/{}?file={}&version={}'.format(test_namespace, proj_name, file_path, version)) + assert resp.status_code == expected + +test_download_file_diffs_data = [ + (test_project, '', 'base.gpkg', 400), # no version specified + (test_project, 'v3', 'base.gpkg', 404), # upload + (test_project, 'v4', 'base.gpkg', 200), # update with diff + (test_project, 'v5', 'base.gpkg', 404), # forced update without diff + (test_project, 'v10', 'test.gpkg', 404), # nothing changed + (test_project, 'v1', 'test.txt', 404), # ordinary text file +] + +@pytest.mark.parametrize("proj_name,version,file_path,expected", test_download_file_diffs_data) +def test_download_file_version_diffs(client, diff_project, proj_name, version, file_path, expected): + # download only diffs + resp = client.get(f'/v1/project/raw/{test_namespace}/{proj_name}?file={file_path}&version={version}&diff=True') + assert resp.status_code == expected + + +def test_download_diff_file(client, diff_project): + test_file = 'base.gpkg' + # download version of file with force update (no diff) + resp = client.get('/v1/project/raw/{}/{}?file={}&diff=true&version=v5'.format(test_namespace, test_project, test_file)) + assert resp.status_code == 404 + + # updated with diff based on 'inserted_1_A.gpkg' + pv_2 = next((v for v in diff_project.versions if v.name == 'v4'), None) + file_meta = pv_2.changes['updated'][0] + resp = client.get('/v1/project/raw/{}/{}?file={}&diff=true&version=v4'.format(test_namespace, test_project, test_file)) + assert resp.status_code == 200 + # check we get the same file with diff that we created (uploaded) + downloaded_file = os.path.join(TMP_DIR, 'download' + str(uuid.uuid4())) + with open(downloaded_file, 'wb') as f: + f.write(resp.data) + assert file_meta['diff']['checksum'] == generate_checksum(downloaded_file) + patched_file = os.path.join(TMP_DIR, 'patched' + str(uuid.uuid4())) + geodiff = GeoDiff() + basefile = os.path.join(test_project_dir, test_file) + shutil.copy(basefile, patched_file) + geodiff.apply_changeset(patched_file, downloaded_file) + changes = os.path.join(TMP_DIR, 'changeset' + str(uuid.uuid4())) + geodiff.create_changeset(patched_file, os.path.join(test_project_dir, 'inserted_1_A.gpkg'), changes) + assert not geodiff.has_changes(changes) + + # download full version after file was removed + os.remove(os.path.join(diff_project.storage.project_dir, file_meta['location'])) + resp = client.get('/v1/project/raw/{}/{}?file={}&version=v4'.format(test_namespace, test_project, test_file)) + assert resp.status_code == 200 + + +def test_download_fail(app, client): + # remove project files to mimic mismatch with db + os.remove(os.path.join(app.config['LOCAL_PROJECTS'], test_namespace, test_project, 'v1', 'test.txt')) + resp = client.get('/v1/project/raw/{}/{}?file={}'.format(test_namespace, test_project, 'test.txt')) + assert resp.status_code == 404 + + shutil.rmtree(os.path.join(app.config['LOCAL_PROJECTS'], test_namespace, test_project)) + + resp = client.get('/v1/project/download/{}/{}'.format(test_namespace, test_project)) + assert resp.status_code == 404 + + resp = 
client.get('/v1/project/raw/{}/{}?file={}'.format(test_namespace, test_project, 'test.txt')) + assert resp.status_code == 404 + + p = Project.query.filter_by(name=test_project, namespace=test_namespace).first() + db.session.delete(p) + db.session.commit() + resp = client.get('/v1/project/raw/{}/{}?file={}'.format(test_namespace, test_project, 'test.txt')) + assert resp.status_code == 404 + + +def _file_info(project_dir, path): + abs_path = os.path.join(project_dir, path) + f_size = os.path.getsize(abs_path) + return { + "path": path, + "checksum": generate_checksum(abs_path), + "size": f_size, + "mtime": datetime.fromtimestamp(os.path.getmtime(abs_path), tzlocal()), + "chunks": [str(uuid.uuid4()) for i in range(math.ceil(f_size / CHUNK_SIZE))], + } + + +def create_diff_meta(base, modified, project_dir): + """Create diff metadata for updating files.""" + geodiff = GeoDiff() + diff_id = str(uuid.uuid4()) + diff_name = base + '-diff-' + diff_id + basefile = os.path.join(project_dir, base) + modfile = os.path.join(project_dir, modified) + changeset = os.path.join(TMP_DIR, diff_name) + geodiff.create_changeset(basefile, modfile, changeset) + + diff_meta = { + **_file_info(project_dir, base), + "chunks": [str(uuid.uuid4()) for i in range(math.ceil(_file_info(TMP_DIR, diff_name)["size"] / CHUNK_SIZE))], + "diff": { + "path": diff_name, + "checksum": generate_checksum(changeset), + "size": os.path.getsize(changeset) + } + } + diff_meta["path"] = base + return diff_meta + + +def _get_changes(project_dir, diff=False): + changes = { + "added": [ + { + **_file_info(project_dir, "test_dir/test4.txt"), + "chunks": [str(uuid.uuid4()) for i in range(math.ceil(_file_info(project_dir, "test_dir/test4.txt")["size"] / CHUNK_SIZE))] + } + ], + "renamed": [ + { + **_file_info(project_dir, "test_dir/test2.txt"), + "new_path": "test_dir/renamed.txt", + } + ], + "updated": [ + { + **_file_info(project_dir, "test.txt"), + "chunks": [str(uuid.uuid4()) for i in + range(math.ceil(_file_info(project_dir, "test.txt")["size"] / CHUNK_SIZE))] + } + ], + "removed": [ + _file_info(project_dir, "test3.txt") + ] + } + return changes + + +def _get_changes_without_added(project_dir): + changes = _get_changes(project_dir) + changes["added"] = [] + return changes + + +def _get_changes_without_mtime(project_dir): + changes = _get_changes_without_added(project_dir) + del changes['updated'][0]['mtime'] + return changes + + +def _get_changes_with_broken_mtime(project_dir): + changes = _get_changes_without_added(project_dir) + changes["renamed"] = [] + changes["removed"] = [] + changes['updated'][0]['mtime'] = "frfr" + return changes + + +def _get_changes_with_diff(project_dir): + changes = _get_changes_without_added(project_dir) + # add some updates using diff file + diff_meta = create_diff_meta('base.gpkg', 'inserted_1_A.gpkg', project_dir) + changes['updated'].append(diff_meta) + return changes + +test_push_data = [ + ({'version': 'v1', 'changes': _get_changes_without_added(test_project_dir)}, 200), # success + ({'version': 'v1', 'changes': _get_changes_with_diff(test_project_dir)}, 200), # with diff, success + ({'version': 'v1', 'changes': _get_changes(test_project_dir)}, 400), # contains already uploaded file + ({'version': 'v0', 'changes': _get_changes_without_added(test_project_dir)}, 400), # version mismatch + ({'version': 'v1', 'changes': {}}, 400), # wrong changes format + ({'version': 'v1', 'changes': {'added': [], 'removed': [], 'updated': [], 'renamed': []}}, 400), # no changes requested + ({'version': 'v1', 'changes': 
{'added': [{'path': 'test.txt'}], 'removed': [], 'updated': [{'path': 'test.txt'}], 'renamed': []}}, 400), # inconsistent changes + ({'changes': _get_changes_without_added(test_project_dir)}, 400) # missing version (required parameter) +] + + +@pytest.mark.parametrize("data,expected", test_push_data) +def test_push_project_start(client, data, expected): + url = '/v1/project/push/{}/{}'.format(test_namespace, test_project) + resp = client.post(url, data=json.dumps(data, cls=DateTimeEncoder).encode("utf-8"), headers=json_headers) + assert resp.status_code == expected + if expected == 200: + assert 'transaction' in resp.json.keys() + + +def test_push_to_new_project(client): + # create blank project + p = Project.query.filter_by(name=test_project, namespace=test_namespace).first() + project = Project('blank', p.storage_params, p.creator, p.namespace, files=[]) + db.session.add(project) + pa = ProjectAccess(project, True) + db.session.add(pa) + db.session.commit() + + current_app.config['BLACKLIST'] = ["test4"] + url = '/v1/project/push/{}/{}'.format(test_namespace, 'blank') + data = {'version': 'v0', 'changes': _get_changes(test_project_dir)} + resp = client.post(url, data=json.dumps(data, cls=DateTimeEncoder).encode("utf-8"), headers=json_headers) + assert resp.status_code == 200 + + upload_id = resp.json['transaction'] + upload = Upload.query.filter_by(id=upload_id).first() + blacklisted_file = all(added['path'] != 'test_dir/test4.txt' for added in upload.changes['added']) + assert blacklisted_file + + data = {'version': 'v1', 'changes': _get_changes(test_project_dir)} + resp = client.post(url, data=json.dumps(data, cls=DateTimeEncoder).encode("utf-8"), headers=json_headers) + assert resp.status_code == 400 + + data = {'version': 'v100', 'changes': _get_changes(test_project_dir)} + resp = client.post(url, data=json.dumps(data, cls=DateTimeEncoder).encode("utf-8"), headers=json_headers) + assert resp.status_code == 400 + assert resp.json['detail'] == 'First push should be with v0' + + +def test_sync_no_upload(client): + # test project sync that does not require data upload (e.g. 
renaming) + url = '/v1/project/push/{}/{}'.format(test_namespace, test_project) + changes = _get_changes(test_project_dir) + changes['added'] = changes['removed'] = changes['updated'] = [] + data = {'version': 'v1', 'changes': changes} + resp = client.post(url, data=json.dumps(data, cls=DateTimeEncoder).encode("utf-8"), headers=json_headers) + project = Project.query.filter_by(name=test_project, namespace=test_namespace).first() + renamed = next((i for i in project.files if i['path'] == 'test_dir/renamed.txt'), None) + assert resp.status_code == 200 + assert renamed is not None + assert not project.uploads.all() + + # check project version update after successful sync + resp_2 = client.get('/v1/project/version/{}/{}'.format(test_namespace, test_project)) + assert resp_2.status_code == 200 + assert len(resp_2.json) == 2 + assert resp_2.json[0]['name'] == 'v2' + assert resp_2.json[0]['author'] == 'mergin' + assert resp_2.json[0]['changes']['renamed'][0]['new_path'] == changes['renamed'][0]['new_path'] + + +def test_push_integrity_error(client, app): + app.config['LOCKFILE_EXPIRATION'] = 5 + url = '/v1/project/push/{}/{}'.format(test_namespace, test_project) + changes = _get_changes_without_added(test_project_dir) + data = {'version': 'v1', 'changes': changes} + resp = client.post(url, data=json.dumps(data, cls=DateTimeEncoder).encode("utf-8"), headers=json_headers) + assert resp.status_code == 200 + + # try another request for transaction + resp2 = client.post(url, data=json.dumps(data, cls=DateTimeEncoder).encode("utf-8"), headers=json_headers) + assert resp2.status_code == 400 + assert resp2.json['detail'] == 'Another process is running. Please try later.' + + # try immediate project sync without transaction (no upload) + changes['added'] = changes['removed'] = changes['updated'] = [] + data = {'version': 'v1', 'changes': changes} + resp3 = client.post(url, data=json.dumps(data, cls=DateTimeEncoder).encode("utf-8"), headers=json_headers) + assert resp3.status_code == 400 + + time.sleep(5) + # try another request for transaction + resp4 = client.post(url, data=json.dumps(data, cls=DateTimeEncoder).encode("utf-8"), headers=json_headers) + assert resp4.status_code == 200 + + +def test_exceed_data_limit(client): + project = Project.query.filter_by(name=test_project, namespace=test_namespace).first() + user_disk_space = sum(p.disk_usage for p in project.creator.projects) + ns = Namespace.query.filter_by(name=project.creator.username).first() + # set storage limit so that it is already fully used + ns.storage = user_disk_space + db.session.add(ns) + db.session.commit() + + url = '/v1/project/push/{}/{}'.format(test_namespace, test_project) + changes = _get_changes(test_project_dir) + changes['renamed'] = changes['removed'] = changes['updated'] = [] + changes["added"][0]["path"] = "xxx.txt" + data = {'version': 'v1', 'changes': changes} + resp = client.post(url, data=json.dumps(data, cls=DateTimeEncoder).encode("utf-8"), headers=json_headers) + assert resp.status_code == 400 + assert resp.json['detail'] == 'You have reached a data limit' + + # try to make some space just by removing a file + changes["added"] = [] + changes["removed"] = [project.files[0]] + data = {'version': 'v1', 'changes': changes} + resp = client.post(url, data=json.dumps(data, cls=DateTimeEncoder).encode("utf-8"), headers=json_headers) + assert resp.status_code == 200 + + # tighten the limit again + ns.storage = sum(p.disk_usage for p in project.creator.projects) + db.session.commit() + + changes['renamed'] = changes['removed'] =
changes['updated'] = [] + changes['added'] = [{ + **_file_info(test_project_dir, "test_dir/test2.txt"), + "chunks": [str(uuid.uuid4()) for i in range(math.ceil(_file_info(test_project_dir, "test_dir/test2.txt")["size"] / CHUNK_SIZE))] + }] + changes["added"][0]["path"] = "xxx.txt" + data = {'version': 'v2', 'changes': changes} + resp = client.post(url, data=json.dumps(data, cls=DateTimeEncoder).encode("utf-8"), headers=json_headers) + assert resp.status_code == 400 + assert resp.json['detail'] == 'You have reached a data limit' + + # try to upload while removing some other files, test4.txt being larger than test2.txt + changes["removed"] = [{**_file_info(test_project_dir, "test_dir/test4.txt")}] + data = {'version': 'v2', 'changes': changes} + resp = client.post(url, data=json.dumps(data, cls=DateTimeEncoder).encode("utf-8"), headers=json_headers) + assert resp.status_code == 200 + + +def create_transaction(username, changes, version=1): + """Create a transaction in the mergin/test project for an update to a particular version by the specified user.""" + user = User.query.filter_by(username=username).first() + project = Project.query.filter_by(name=test_project, namespace='mergin').first() + upload = Upload(project, version, changes, user.id) + db.session.add(upload) + db.session.commit() + upload_dir = os.path.join(upload.project.storage.project_dir, "tmp", upload.id) + os.makedirs(upload_dir) + open(os.path.join(upload_dir, 'lockfile'), 'w').close() + return upload, upload_dir + + +def remove_transaction(transaction_id): + """Remove a transaction from the db together with related files; used to clean up after upload failure.""" + upload = Upload.query.get(transaction_id) + upload_dir = os.path.join(upload.project.storage.project_dir, "tmp", transaction_id) + db.session.delete(upload) + db.session.commit() + shutil.rmtree(upload_dir, ignore_errors=True) + + +def test_chunk_upload(client, app): + changes = _get_changes(test_project_dir) + upload, upload_dir = create_transaction('mergin', changes) + chunk_id = upload.changes['added'][0]['chunks'][0] + url = '/v1/project/push/chunk/{}/{}'.format(upload.id, chunk_id) + with open(os.path.join(test_project_dir, 'test_dir', 'test4.txt'), 'rb') as file: + data = file.read(CHUNK_SIZE) + checksum = hashlib.sha1() + checksum.update(data) + headers = {"Content-Type": "application/octet-stream"} + resp = client.post(url, data=data, headers=headers) + assert resp.status_code == 200 + assert resp.json['checksum'] == checksum.hexdigest() + + # test sending a bigger chunk than allowed + app.config['MAX_CHUNK_SIZE'] = 10 * CHUNK_SIZE + with open(os.path.join(test_project_dir, 'test_dir', 'test4.txt'), 'rb') as file: + data = file.read(11 * CHUNK_SIZE) + headers = {"Content-Type": "application/octet-stream"} + resp = client.post(url, data=data, headers=headers) + assert resp.status_code == 400 + assert resp.json['detail'] == 'Too big chunk' + + # test with transaction with no uploads expected + changes = _get_changes(test_project_dir) + changes['added'] = changes['removed'] = changes['updated'] = [] + upload.changes = changes + db.session.add(upload) + db.session.commit() + resp2 = client.post(url, data=data, headers=headers) + assert resp2.status_code == 404 + + # cleanup + shutil.rmtree(upload_dir) + + +def upload_chunks(upload_dir, changes): + """Mimic that the chunks needed to finish the upload were already uploaded.""" + os.makedirs(os.path.join(upload_dir, 'chunks')) + for f in changes['added'] + changes['updated']: + source = os.path.join(TMP_DIR, f["diff"]["path"]) if "diff" in f else
os.path.join(test_project_dir, f["path"]) + with open(source, 'rb') as in_file: + for chunk in f["chunks"]: + with open(os.path.join(upload_dir, 'chunks', chunk), 'wb') as out_file: + out_file.write(in_file.read(CHUNK_SIZE)) + + +def test_push_finish(client): + changes = _get_changes(test_project_dir) + upload, upload_dir = create_transaction('mergin', changes) + url = '/v1/project/push/finish/{}'.format(upload.id) + + resp = client.post(url) + assert resp.status_code == 422 + assert 'corrupted_files' in resp.json['detail'].keys() + assert not os.path.exists(os.path.join(upload_dir, "files", "test.txt")) + + os.mkdir(os.path.join(upload.project.storage.project_dir, 'v2')) + # mimic that chunks were already uploaded + os.makedirs(os.path.join(upload_dir, 'chunks')) + for f in upload.changes['added'] + upload.changes['updated']: + with open(os.path.join(test_project_dir, f["path"]), 'rb') as in_file: + for chunk in f["chunks"]: + with open(os.path.join(upload_dir, 'chunks', chunk), 'wb') as out_file: + out_file.write(in_file.read(CHUNK_SIZE)) + + resp2 = client.post(url) + assert resp2.status_code == 200 + assert not os.path.exists(upload_dir) + assert upload.project.versions[0].user_agent is not None + + # test basic failures + resp3 = client.post('/v1/project/push/finish/not-existing') + assert resp3.status_code == 404 + + # create new user with permission to do uploads + user = User(username='tester', passwd='test', is_admin=False, email='tester@mergin.com') + user.active = True + db.session.add(user) + project = Project.query.filter_by(name=test_project, namespace=test_namespace).first() + project.access.owners.append(user.id) + db.session.commit() + + upload, upload_dir = create_transaction(user.username, changes) + url = '/v1/project/push/finish/{}'.format(upload.id) + db.session.add(upload) + db.session.commit() + # still logged in as mergin user, who did not start this transaction + resp4 = client.post(url) + assert resp4.status_code == 403 + + +def test_push_close(client): + changes = _get_changes(test_project_dir) + upload, upload_dir = create_transaction('mergin', changes) + url = '/v1/project/push/cancel/{}'.format(upload.id) + resp = client.post(url) + assert resp.status_code == 200 + + +def test_whole_push_process(client): + """ Test the whole transactional upload, from start through uploading chunks to finish. + Uploaded file names also contain non-ASCII characters.
+ """ + test_dir = os.path.join(TMP_DIR, 'test_uploaded_files') + if os.path.exists(test_dir): + shutil.rmtree(test_dir) + uploaded_files = ['テスト.txt', '£¥§.txt', 'name_1_1.txt', 'name\\1\\1.txt'] + # prepare dummy files + os.mkdir(test_dir) + for file in uploaded_files: + with open(os.path.join(test_dir, file), 'w') as f: + f.write("Hello Mergin") + + # push start: create upload transaction + changes = { + "added": [_file_info(test_dir, filename) for filename in uploaded_files], + "renamed": [], + "updated": [], + "removed": [] + } + resp = client.post(f'/v1/project/push/{test_namespace}/{test_project}', data=json.dumps({ + 'version': 'v1', + 'changes': changes + }, cls=DateTimeEncoder).encode("utf-8"), headers=json_headers) + + assert resp.status_code == 200 + assert 'transaction' in resp.json.keys() + upload = Upload.query.get(resp.json['transaction']) + assert upload + + # push upload: upload file chunks + for file in changes["added"]: + for chunk_id in file["chunks"]: + url = '/v1/project/push/chunk/{}/{}'.format(upload.id, chunk_id) + with open(os.path.join(test_dir, file['path']), 'rb') as f: + data = f.read(CHUNK_SIZE) + checksum = hashlib.sha1() + checksum.update(data) + resp = client.post(url, data=data, headers={"Content-Type": "application/octet-stream"}) + assert resp.status_code == 200 + assert resp.json['checksum'] == checksum.hexdigest() + + # push finish: call server to concatenate chunks and finish upload + resp = client.post(f'/v1/project/push/finish/{upload.id}') + assert resp.status_code == 200 + project = Project.query.filter_by(name=test_project, namespace=test_namespace).first() + for file in project.files: + if file['path'] not in uploaded_files: + continue + file_location = os.path.join(project.storage.project_dir, file['location']) + file_before_upload = os.path.join(test_dir, file['path']) + assert os.path.exists(file_location) + assert open(file_before_upload, 'r').read() == open(file_location, 'r').read() + + +def test_push_diff_finish(client): + # success + changes = _get_changes_with_diff(test_project_dir) + upload, upload_dir = create_transaction('mergin', changes) + upload_chunks(upload_dir, upload.changes) + resp = client.post('/v1/project/push/finish/{}'.format(upload.id)) + assert resp.status_code == 200 + # check there are not any changes between local modified file and server patched file (using geodiff) + geodiff = GeoDiff() + changeset = os.path.join(TMP_DIR, 'test_changeset') + modfile = os.path.join(test_project_dir, 'inserted_1_A.gpkg') + patchedfile = os.path.join(upload.project.storage.project_dir, 'v2', 'base.gpkg') + geodiff.create_changeset(patchedfile, modfile, changeset) + assert not geodiff.has_changes(changeset) + os.remove(changeset) + + # try with valid update metadata but with conflicting diff (rebase was not done) + upload, upload_dir = create_transaction('mergin', changes, 2) + upload_chunks(upload_dir, upload.changes) + + resp = client.post('/v1/project/push/finish/{}'.format(upload.id)) + assert resp.status_code == 422 + assert 'base.gpkg error=project: mergin/test, geodiff error' in resp.json['detail'] + + +clone_project_data = [ + ({"project": " clone "}, 'mergin', 200), # clone own project + ({}, 'mergin', 409), # fail to clone own project into the same one + ({"namespace": "foo"}, 'foo', 200), # public project cloned by another user + ({"namespace": "foo"}, 'foo', 403), # fail to clone private project with no permissions granted +] + + +@pytest.mark.parametrize("data,username,expected", clone_project_data) +def 
test_clone_project(client, data, username, expected): + endpoint = '/v1/project/clone/{}/{}'.format(test_namespace, test_project) + # this case needs a private project + if expected == 403: + p = Project.query.filter_by(name=test_project, namespace=test_namespace).first() + p.access.public = False + db.session.add(p) + db.session.commit() + # add some user to test clone across namespaces + if username != 'mergin': + user = add_user(username, 'bar') + # switch default user + client.get(url_for('auth.logout')) + client.post(url_for('auth.login'), data=json.dumps({'login': user.username, 'password': 'bar'}), headers=json_headers) + + resp = client.post(endpoint, data=json.dumps(data), headers=json_headers) + assert resp.status_code == expected + if expected == 200: + ns = data.get('namespace', test_namespace) + proj = data.get('project', test_project).strip() + template = Project.query.filter_by(name=test_project, namespace=test_namespace).first() + project = Project.query.filter_by(name=proj, namespace=ns).first() + assert not any(x['checksum'] != y['checksum'] and x['path'] != y['path'] for x, y in zip(project.files, template.files)) + assert os.path.exists(os.path.join(project.storage.project_dir, project.files[0]['location'])) + assert not project.access.public + # cleanup + shutil.rmtree(project.storage.project_dir) + + +def test_optimize_storage(app, diff_project): + file_v2_base = os.path.join(diff_project.storage.project_dir, 'v4', 'base.gpkg') + file_v4_base = os.path.join(diff_project.storage.project_dir, 'v6', 'base.gpkg') + basefile_v1 = os.path.join(diff_project.storage.project_dir, 'v3', 'base.gpkg') + basefile_v3 = os.path.join(diff_project.storage.project_dir, 'v5', 'base.gpkg') + latest = os.path.join(diff_project.storage.project_dir, 'v9', 'test.gpkg') + + diff_project.storage.optimize_storage() + # nothing removed since files were created recently + assert os.path.exists(file_v2_base) and os.path.exists(file_v4_base) + + app.config['FILE_EXPIRATION'] = 0 + diff_project.storage.optimize_storage() + assert not (os.path.exists(file_v2_base) and os.path.exists(file_v4_base)) + # we keep latest file, basefiles must stay (either very first one, or any other with forced update) + assert os.path.exists(latest) and os.path.exists(basefile_v1) and os.path.exists(basefile_v3) + + # try again, nothing expected if files already removed + diff_project.storage.optimize_storage() + assert not os.path.exists(file_v2_base) + + +def test_version_file_restore(diff_project): + test_file = os.path.join(diff_project.storage.project_dir, 'v4', 'base.gpkg') + os.remove(test_file) + diff_project.storage.restore_versioned_file('base.gpkg', 'v4') + assert os.path.exists(test_file) + + # we can restore latest version (composed from multiple diffs) + test_file = os.path.join(diff_project.storage.project_dir, 'v9', 'test.gpkg') + os.remove(test_file) + diff_project.storage.restore_versioned_file('test.gpkg', 'v9') + assert os.path.exists(test_file) + + # basefile cannot be restored + test_file = os.path.join(diff_project.storage.project_dir, 'v5', 'base.gpkg') + os.remove(test_file) + diff_project.storage.restore_versioned_file('base.gpkg', 'v5') + assert not os.path.exists(test_file) + + # non-geodiff files cannot be restored + test_file = os.path.join(diff_project.storage.project_dir, 'v1', 'test.txt') + os.remove(test_file) + diff_project.storage.restore_versioned_file('test.txt', 'v1') + assert not os.path.exists(test_file) + + +changeset_data = [ + ('v1', 'test.gpkg', 404), + ('v1', 'test.txt', 404), + ('v9',
'test.gpkg', 200), # diff update in v9 version + ('v10', 'test.gpkg', 404), # no change of the file in v10 version +] + + +@pytest.mark.parametrize("version, path, expected", changeset_data) +def test_changeset_file(client, diff_project, version, path, expected): + url = '/v1/resource/changesets/{}/{}/{}?path={}'.format( + test_namespace, test_project, version, path) + resp = client.get(url) + assert resp.status_code == expected + + if resp.status_code == 200: + version = ProjectVersion.query.filter_by(project_id=diff_project.id, name=version).first() + file = next((f for f in version.files if f['path'] == path), None) + changeset = os.path.join(version.project.storage.project_dir, file['diff']['location']) + json_file = 'changeset' + + # manually create the list of changes + version.project.storage.geodiff.list_changes( + changeset, json_file + ) + list_changes = json.loads(open(json_file, 'r').read()) + os.remove(json_file) + + # compare the manually created list with data from the request + assert json.loads(resp.data) == list_changes['geodiff'] + + +def test_get_projects_by_uuids(client): + user = User.query.filter_by(username="mergin").first() + p1 = create_project('foo', test_namespace, user) + user2 = add_user('user2', 'ilovemergin') + p2 = create_project('foo', 'user2', user2) + uuids = ",".join([str(p1.id), str(p2.id), "1234"]) + + resp = client.get(f'/v1/project/by_uuids?uuids={uuids}') + assert resp.status_code == 200 + resp_data = [str(project["id"]) for project in json.loads(resp.data)] + assert str(p1.id) in resp_data # user has access to it + assert str(p2.id) not in resp_data # belongs to user2, and user does not have access + assert "1234" not in resp_data # invalid id + + uuids = ",".join([str(uuid.uuid4()) for _ in range(0, 11)]) + resp = client.get(f'/v1/project/by_uuids?uuids={uuids}') + assert resp.status_code == 400 + + resp = client.get(f'/v1/project/by_uuids') + assert resp.status_code == 400 diff --git a/server/test/test_project_transfer_controller.py b/server/test/test_project_transfer_controller.py new file mode 100644 index 00000000..53062e9e --- /dev/null +++ b/server/test/test_project_transfer_controller.py @@ -0,0 +1,261 @@ +# Copyright (C) 2020 Lutra Consulting Limited. All rights reserved. +# Do not distribute without the express permission of the author. + +import os +import shutil +import pytest +import json +from datetime import timedelta +from flask import current_app, url_for +from src import db +from src.auth.models import User +from src.models.db_models import ( + Project, + ProjectTransfer, + Namespace, + Upload, + ProjectAccess, + ProjectVersion, + AccessRequest) + +from .
import test_project, test_namespace, json_headers, TMP_DIR, DEFAULT_USER, test_project_dir, TEST_ORG +from .utils import add_user, login, create_project + + +test_uploaded_file_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'test_uploaded_files') +CHUNK_SIZE = 1024 + +add_project_transfer_data = [ + (DEFAULT_USER, test_namespace, test_project, {'namespace': 'user'}, 201), # success + (DEFAULT_USER, test_namespace, test_project, {'namespace': 'user'}, 409), # try to transfer a project that is already being transferred + (DEFAULT_USER, test_namespace, test_project, {'namespace': test_namespace}, 400), # origin and destination namespaces are the same + (DEFAULT_USER, test_namespace, test_project, {'namespace': 'user2'}, 404), # destination namespace does not exist + (DEFAULT_USER, test_namespace, 'test_error', {'namespace': 'user'}, 404), # source project does not exist + (DEFAULT_USER, test_namespace, test_project, {'user': 'user'}, 400), # missing data['namespace'] + (('user', 'user'), test_namespace, test_project, {'namespace': 'user'}, 403), # try with unprivileged user +] + + +@pytest.mark.parametrize("user, namespace, project_name, data, expected", add_project_transfer_data) +def test_create_transfer_project(client, user, namespace, project_name, data, expected): + """ Test creating a project transfer from the mergin namespace to a different namespace """ + add_user('user', 'user') + login(client, user[0], user[1]) + url = '/v1/project/transfer/{}/{}'.format(namespace, project_name) + resp = client.post(url, data=json.dumps(data), headers=json_headers) + if expected == 409: + resp = client.post(url, data=json.dumps(data), headers=json_headers) + assert resp.status_code == expected + if resp.status_code == 201: + u = User.query.filter_by(username=user[0]).first() + project = Project.query.filter_by(name=project_name, namespace=namespace).first() + transfer = ProjectTransfer.query.first() + assert transfer.project_id == project.id + assert transfer.from_ns_name == namespace + assert transfer.to_ns_name == data['namespace'] + assert transfer.requested_by == u.id + + +@pytest.fixture(scope='function') +def project_transfer_init(diff_project): + """ Create a testing ProjectTransfer fixture of mergin/test with history (diff_project) to the 'user' namespace """ + user = add_user('user', 'user') + add_user('user2', 'user2') # dummy user for permission checks + namespace = Namespace.query.filter_by(name='user').first() + mergin_user = User.query.filter_by(username='mergin').first() + project_transfer = ProjectTransfer(diff_project, namespace, mergin_user.id) + db.session.add(project_transfer) + return project_transfer + + +test_get_transfer_project_data = [ + (('user', 'user'), 'user', 'incoming', 1, 200), # user has 1 transfer request (target) + (DEFAULT_USER, 'mergin', 'outgoing', 1, 200), # mergin has 1 transfer request (creator) + (('user2', 'user2'), 'user2', 'incoming', 0, 200), # user2 doesn't have any transfer request + (('user2', 'user2'), 'user', 'incoming', 0, 403), # user2 cannot check other users' transfers +] + + +@pytest.mark.parametrize("user, namespace, direction, count, expected", test_get_transfer_project_data) +def test_get_transfer_project(client, project_transfer_init, user, namespace, direction, count, expected): + """ Test listing of existing project transfers """ + login(client, user[0], user[1]) + resp = client.get(f'/v1/project/transfer/{namespace}', headers=json_headers) + assert resp.status_code == expected + if expected == 200: + assert len(resp.json) == count + if count: + key =
'to_ns_name' if direction == 'incoming' else 'from_ns_name' + assert resp.json[0][key] == namespace + + +test_delete_transfer_project_data = [ + (None, DEFAULT_USER, 200), # success for creator + (None, ('user', 'user'), 200), # accepting user can also remove a transfer + (None, ('user2', 'user2'), 403), # unrelated user cannot remove transfer + (1, DEFAULT_USER, 404) # transfer with given id does not exist +] + + +@pytest.mark.parametrize("id, user, expected", test_delete_transfer_project_data) +def test_delete_transfer_project(client, project_transfer_init, id, user, expected): + """ Test deleting a project transfer by various users """ + login(client, user[0], user[1]) + id = id if id else project_transfer_init.id + resp = client.delete(f'/v1/project/transfer/{id}', headers=json_headers) + assert resp.status_code == expected + if resp.status_code == 200: + assert not ProjectTransfer.query.count() + + +test_execute_transfer_project_data = [ + (None, ('user2', 'user2'), 'Test', True, 403), # unprivileged user + (None, DEFAULT_USER, 'Test', True, 403), # creator of transfer cannot accept it + (1, ('user', 'user'), 'Test', True, 404), # transfer project does not exist + (None, ('user', 'user'), 'Test', True, 200), # transfer project using the original name + (None, ('user', 'user'), 'Test', False, 200), # transfer project using the original name, with no permissions transferred + (None, ('user', 'user'), 'My Test', True, 200), # transfer project using a new name +] + + +@pytest.mark.parametrize("id, user, project_name, transfer_permission, expected", test_execute_transfer_project_data) +def test_execute_transfer_project_to_user(client, project_transfer_init, id, user, project_name, transfer_permission, expected): + """ Test acceptance of project transfer """ + # cleanup after previous tests + transferred_data = os.path.join(current_app.config['LOCAL_PROJECTS'], 'user', project_name) + if os.path.exists(transferred_data): + shutil.rmtree(transferred_data) + + id = id if id else project_transfer_init.id + # keep information before original project gets deleted + original_project = Project.query.filter_by(namespace=test_namespace, name=test_project).first() + original_proj_id = original_project.id + original_owner_id = original_project.creator.id + original_data_dir = original_project.storage.project_dir + + login(client, user[0], user[1]) + + # test if all project access requests are deleted during project transfer + if expected == 200: + url = url_for('create_project_access_request', namespace=test_namespace, project_name=test_project) + resp = client.post(url, headers=json_headers) + assert resp.status_code == 200 + access_request = AccessRequest.query.filter(AccessRequest.namespace == test_namespace, + AccessRequest.project_id == original_project.id).first() + assert access_request + + data = { + 'name': project_name, + 'transfer_permissions': transfer_permission + } + resp = client.post(f'/v1/project/transfer/{id}', data=json.dumps(data), headers=json_headers) + assert resp.status_code == expected + if resp.status_code == 200: + transferred_project = Project.query.filter_by(namespace=user[0], name=project_name).first() + assert transferred_project + assert transferred_project.creator.username == user[0] + # check original project has been modified (changed namespace) and files on disk were not touched + assert os.path.exists(original_data_dir) + assert Project.query.filter_by(id=original_proj_id).first().id == transferred_project.id + assert not
Project.query.filter_by(namespace=test_namespace, name=test_project).first() + assert not AccessRequest.query.filter(AccessRequest.namespace == test_namespace, + AccessRequest.project_id == original_proj_id).first() + + # check the permission transfer + for key in ['owners', 'readers', 'writers']: + attr = getattr(transferred_project.access, key) + if transfer_permission: + assert len(attr) == 2 and original_owner_id in attr + else: + assert len(attr) == 1 and attr[0] == transferred_project.creator.id + + +def test_transfer_failures(client, project_transfer_init): + """ Test failing scenarios for accepting project transfer """ + # cleanup after previous tests + proj_data = os.path.join(current_app.config['LOCAL_PROJECTS'], 'user', 'foo') + if os.path.exists(proj_data): + shutil.rmtree(proj_data) + + login(client, 'user', 'user') + data = {'name': 'Test', 'transfer_permissions': True} + user = User.query.filter_by(username='user').first() + + # decrease limit + ns = Namespace.query.filter_by(name=user.username).first() + ns.storage = 0 + db.session.add(ns) + db.session.commit() + resp = client.post(f'/v1/project/transfer/{project_transfer_init.id}', data=json.dumps(data), headers=json_headers) + assert resp.status_code == 400 + assert resp.json['detail'] == 'Disk quota reached' + + # create upload for project in transfer + ns.storage = 100000 + db.session.add(ns) + upload = Upload( + project_transfer_init.project, + 10, + {}, + project_transfer_init.project.creator_id + ) + db.session.add(upload) + db.session.commit() + resp = client.post(f'/v1/project/transfer/{project_transfer_init.id}', data=json.dumps(data), headers=json_headers) + assert resp.status_code == 400 + assert 'There is ongoing upload' in resp.json['detail'] + + # make transfer expired + db.session.delete(upload) + project_transfer_init.expire = project_transfer_init.expire - timedelta(seconds=client.application.config['TRANSFER_EXPIRATION']) + db.session.add(project_transfer_init) + db.session.commit() + resp = client.post(f'/v1/project/transfer/{project_transfer_init.id}', data=json.dumps(data), headers=json_headers) + assert resp.status_code == 400 + assert resp.json['detail'] == 'The request is already expired' + + project_transfer_init.expire = project_transfer_init.expire + timedelta(seconds=client.application.config['TRANSFER_EXPIRATION']) + db.session.add(project_transfer_init) + db.session.commit() + + # create conflict project + create_project('foo', 'user', user) + data = {'name': 'foo', 'transfer_permissions': True} + resp = client.post(f'/v1/project/transfer/{project_transfer_init.id}', data=json.dumps(data), headers=json_headers) + assert resp.status_code == 409 + assert resp.json['detail'] == "Project user/foo already exists" + + +def test_transfer_to_org(client, test_organisation): + """ Test project transfer from user namespace to organisation (with him as owner) """ + # clean up + proj_data = os.path.join(current_app.config['LOCAL_PROJECTS'], TEST_ORG, test_project) + if os.path.exists(proj_data): + shutil.rmtree(proj_data) + + namespace = Namespace.query.filter_by(name=TEST_ORG).first() + mergin_user = User.query.filter_by(username=DEFAULT_USER[0]).first() + proj = Project.query.filter_by(namespace=DEFAULT_USER[0], name=test_project).first() + project_transfer = ProjectTransfer(proj, namespace, mergin_user.id) + db.session.add(project_transfer) + + org_reader = add_user('reader', 'reader') + test_organisation.readers.append(org_reader.id) + db.session.add(test_organisation) + db.session.commit() + + 
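# 'reader' has only read access in the organisation, so accepting the transfer below must be rejected +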
login(client, 'reader', 'reader') + data = {'name': test_project, 'transfer_permissions': True} + resp = client.post(f'/v1/project/transfer/{project_transfer.id}', data=json.dumps(data), headers=json_headers) + assert resp.status_code == 403 + + login(client, DEFAULT_USER[0], DEFAULT_USER[1]) + resp = client.post(f'/v1/project/transfer/{project_transfer.id}', data=json.dumps(data), headers=json_headers) + assert resp.status_code == 200 + transferred_project = Project.query.filter_by(namespace=TEST_ORG, name=test_project).first() + assert transferred_project + assert transferred_project.creator.username == DEFAULT_USER[0] + # mergin was already an owner of the original project, so no extra permissions are added + for key in ['owners', 'readers', 'writers']: + attr = getattr(transferred_project.access, key) + assert len(attr) == 1 and attr[0] == transferred_project.creator.id diff --git a/server/test/test_projects/test/base.gpkg b/server/test/test_projects/test/base.gpkg new file mode 100644 index 00000000..a152f6d4 Binary files /dev/null and b/server/test/test_projects/test/base.gpkg differ diff --git a/server/test/test_projects/test/inserted_1_A.gpkg b/server/test/test_projects/test/inserted_1_A.gpkg new file mode 100644 index 00000000..7b23f56d Binary files /dev/null and b/server/test/test_projects/test/inserted_1_A.gpkg differ diff --git a/server/test/test_projects/test/inserted_1_B.gpkg b/server/test/test_projects/test/inserted_1_B.gpkg new file mode 100644 index 00000000..08e0a743 Binary files /dev/null and b/server/test/test_projects/test/inserted_1_B.gpkg differ diff --git a/server/test/test_projects/test/json.json b/server/test/test_projects/test/json.json new file mode 100644 index 00000000..d5ca56d1 --- /dev/null +++ b/server/test/test_projects/test/json.json @@ -0,0 +1,22 @@ +{ + "glossary": { + "title": "example glossary", + "GlossDiv": { + "title": "S", + "GlossList": { + "GlossEntry": { + "ID": "SGML", + "SortAs": "SGML", + "GlossTerm": "Standard Generalized Markup Language", + "Acronym": "SGML", + "Abbrev": "ISO 8879:1986", + "GlossDef": { + "para": "A meta-markup language, used to create markup languages such as DocBook.", + "GlossSeeAlso": ["GML", "XML"] + }, + "GlossSee": "markup" + } + } + } + } +} \ No newline at end of file diff --git a/server/test/test_projects/test/logo.jpeg b/server/test/test_projects/test/logo.jpeg new file mode 100644 index 00000000..03effbef Binary files /dev/null and b/server/test/test_projects/test/logo.jpeg differ diff --git a/server/test/test_projects/test/logo.pdf b/server/test/test_projects/test/logo.pdf new file mode 100644 index 00000000..bdd80949 Binary files /dev/null and b/server/test/test_projects/test/logo.pdf differ diff --git a/server/test/test_projects/test/modified_1_geom.gpkg b/server/test/test_projects/test/modified_1_geom.gpkg new file mode 100644 index 00000000..705d8cc4 Binary files /dev/null and b/server/test/test_projects/test/modified_1_geom.gpkg differ diff --git a/server/test/test_projects/test/test.qgs b/server/test/test_projects/test/test.qgs new file mode 100644 index 00000000..e69de29b diff --git a/server/test/test_projects/test/test.txt b/server/test/test_projects/test/test.txt new file mode 100644 index 00000000..fe1b5438 --- /dev/null +++ b/server/test/test_projects/test/test.txt @@ -0,0 +1 @@ +Some content.
\ No newline at end of file diff --git a/server/test/test_projects/test/test3.txt b/server/test/test_projects/test/test3.txt new file mode 100644 index 00000000..fe1b5438 --- /dev/null +++ b/server/test/test_projects/test/test3.txt @@ -0,0 +1 @@ +Some content. \ No newline at end of file diff --git a/server/test/test_projects/test/test_dir/test2.txt b/server/test/test_projects/test/test_dir/test2.txt new file mode 100644 index 00000000..ed82dfec --- /dev/null +++ b/server/test/test_projects/test/test_dir/test2.txt @@ -0,0 +1 @@ +To be renamed. \ No newline at end of file diff --git a/server/test/test_projects/test/test_dir/test4.txt b/server/test/test_projects/test/test_dir/test4.txt new file mode 100644 index 00000000..b870d87a --- /dev/null +++ b/server/test/test_projects/test/test_dir/test4.txt @@ -0,0 +1,604 @@ +[604 lines: contents of a QGIS project (.qgs) used as a large text fixture — EPSG:27700 / British National Grid CRS definitions, layer extents and layout settings; the XML markup was lost in extraction and is not reproduced here]
\ No newline at end of file diff --git a/server/test/test_user.py b/server/test/test_user.py new file mode 100644 index 00000000..6948920c --- /dev/null +++ b/server/test/test_user.py @@ -0,0 +1,50 @@ +import json +from unittest.mock import patch + +from src.auth.models import User, UserProfile +from src.models.db_models import Project +from src import db +from src.celery import send_email_async +from . import test_project, test_namespace, json_headers + + +@patch('src.celery.send_email_async.apply_async') +def test_mail_notifications(send_email_mock, client): + project = Project.query.filter_by(name=test_project, namespace=test_namespace).first() + # need for private project + project.access.public = False + db.session.add(project) + # add some tester + test_user = User(username='tester', passwd='tester', is_admin=False, email='tester@mergin.com') + test_user.verified_email = True + test_user.profile = UserProfile() + db.session.add(test_user) + test_user2 = User(username='tester2', passwd='tester2', is_admin=False, email='tester2@mergin.com') + test_user2.active = True + test_user2.verified_email = True + test_user2.profile = UserProfile() + db.session.add(test_user2) + db.session.commit() + + # add test user as reader to project + data = {"access": {"readers": project.access.readers + [test_user.id]}} + resp = client.put('/v1/project/{}/{}'.format(test_namespace, test_project), data=json.dumps(data), headers=json_headers) + assert resp.status_code == 200 + assert test_user.id in project.access.readers + call_args, _ = send_email_mock.call_args + _, email_data = call_args + assert test_user.email in email_data['recipients'] + + # disable notifications for test_user, and promote test_user and test_user2 to writers + user_profile = UserProfile.query.filter_by(user_id=test_user.id).first() + user_profile.receive_notifications = False + data = {"access": {"writers": project.access.readers + [test_user2.id]}} + resp = client.put('/v1/project/{}/{}'.format(test_namespace, test_project), data=json.dumps(data), headers=json_headers) + assert resp.status_code == 200 + assert test_user.id in project.access.writers + assert test_user2.id in project.access.writers + call_args, _ = send_email_mock.call_args + _, email_data = call_args + # only test_user2 receives notification + assert test_user.email not in email_data['recipients'] + assert test_user2.email in email_data['recipients'] diff --git a/server/test/test_utils.py b/server/test/test_utils.py new file mode 100644 index 00000000..f9386968 --- /dev/null +++ b/server/test/test_utils.py @@ -0,0 +1,52 @@ +# Copyright (C) 2020 Lutra Consulting Limited. All rights reserved. +# Do not distribute without the express permission of the author. +import json +import os +import pytest +from flask import url_for, current_app +from sqlalchemy import desc + +from src.auth.models import LoginHistory +from .
+from .utils import login
+
+
+@pytest.fixture(scope='function')
+def client(app):
+    client = app.test_client()
+    return client
+
+
+def test_maintenance_mode(client):
+    main_file = current_app.config['MAINTENANCE_FILE']
+    try:
+        # create maintenance file and close the handle right away
+        open(main_file, 'w+').close()
+        login(client, "mergin", "ilovemergin")
+
+        login_history = LoginHistory.query.filter_by(username='mergin').order_by(
+            desc(LoginHistory.timestamp)).first()
+        # no login history was created because the server is in maintenance mode
+        assert not login_history
+
+        resp = client.post('/v1/project/{}'.format('mergin'), data=json.dumps({"name": ' foo '}), headers=json_headers)
+        assert resp.status_code == 503
+
+        # logout is still allowed in maintenance mode
+        resp = client.get(url_for('auth.logout'))
+        assert resp.status_code == 200
+
+        # delete maintenance file to restore normal operation
+        os.remove(main_file)
+
+        login(client, "mergin", "ilovemergin")
+        login_history = LoginHistory.query.filter_by(username='mergin').order_by(
+            desc(LoginHistory.timestamp)).first()
+        assert login_history
+
+        resp = client.post('/v1/project/{}'.format('mergin'), data=json.dumps({"name": ' foo '}), headers=json_headers)
+        assert resp.status_code == 200
+    finally:
+        # remove maintenance file even if the test failed
+        if os.path.isfile(main_file):
+            os.remove(main_file)
diff --git a/server/test/test_web.py b/server/test/test_web.py
new file mode 100644
index 00000000..19d7ab2f
--- /dev/null
+++ b/server/test/test_web.py
@@ -0,0 +1,97 @@
+# Copyright (C) 2018 Lutra Consulting Limited. All rights reserved.
+# Do not distribute without the express permission of the author.
+
+import json
+from flask import url_for
+from unittest.mock import patch
+
+from src.models.db_models import AccessRequest, Project
+from src import db
+from src.auth.models import User
+from .utils import login, create_project, add_user
+from . import json_headers
+
+
+@patch('src.celery.send_email_async.apply_async')
+def test_email_notifications(send_email_mock, client):
+    url = url_for('send_email_notification')
+    users = User.query.all()
+    for user in users:
+        user.verified_email = True
+        db.session.add(user)
+    db.session.commit()
+    data = {
+        'users': [u.username for u in users],
+        'subject': 'Test',
+        'message': '<p>Greetings,</p><p>this is a test message</p>
' + } + resp = client.post(url, data=json.dumps(data), headers=json_headers) + assert resp.status_code == 200 + call_args, _ = send_email_mock.call_args + _, email_data = call_args + assert data['subject'] == email_data['subject'] + assert data['message'] == email_data['html'] + assert [u.email for u in users] == email_data['bcc'] + + +def test_project_access_request(client): + user = User.query.filter(User.username == 'mergin').first() + p = create_project("testx", "mergin", user) + + user2 = add_user("test_user", "ilovemergin") + login(client, 'test_user', 'ilovemergin') + + url = url_for('create_project_access_request', namespace="mergin", project_name="testx") + + user2.active = False + db.session.commit() + + # inactive user + resp = client.post(url, headers=json_headers) + assert resp.status_code == 409 + + user2.active = True + db.session.commit() + + resp = client.post(url, headers=json_headers) + access_request = AccessRequest.query.filter(AccessRequest.namespace == "mergin", AccessRequest.project_id == p.id).first() + assert resp.status_code == 200 + assert access_request.user.username == "test_user" + + # already exists + resp = client.post(url, headers=json_headers) + assert resp.status_code == 409 + + url2 = url_for('get_project_access_requests') + resp = client.get(url2, headers=json_headers) + assert resp.status_code == 200 + resp_data = json.loads(resp.data) + assert resp_data[0]['user']["username"] == "test_user" + + url2 = url_for('delete_project_access_request', request_id=access_request.id) + resp = client.delete(url2, headers=json_headers) + assert resp.status_code == 200 + access_request = AccessRequest.query.filter(AccessRequest.namespace == "mergin", AccessRequest.project_id == p.id).first() + assert access_request is None + + resp = client.post(url, headers=json_headers) + assert resp.status_code == 200 + access_request = AccessRequest.query.filter(AccessRequest.namespace == "mergin", + AccessRequest.project_id == p.id).first() + assert access_request.user.username == "test_user" + + + url = url_for('accept_project_access_request', request_id=access_request.id) + data = {"permissions": "write"} + resp = client.post(url, headers=json_headers, data=json.dumps(data)) + assert resp.status_code == 403 + + login(client, 'mergin', 'ilovemergin') + resp = client.post(url, headers=json_headers, data=json.dumps(data)) + assert resp.status_code == 200 + access_request = AccessRequest.query.filter(AccessRequest.namespace == "mergin", AccessRequest.project_id == p.id).first() + assert access_request is None + project = Project.query.filter(Project.name == "testx", Project.namespace == "mergin").first() + assert user2.id in project.access.readers + assert user2.id in project.access.writers + assert user2.id not in project.access.owners diff --git a/server/test/test_webhook.py b/server/test/test_webhook.py new file mode 100644 index 00000000..2daa90da --- /dev/null +++ b/server/test/test_webhook.py @@ -0,0 +1,41 @@ +# Copyright (C) 2019 Lutra Consulting Limited. All rights reserved. +# Do not distribute without the express permission of the author. 
+ +import pytest +import responses +from blinker.base import NamedSignal +from src.webhooks import WebhookManager, Webhook + + +@pytest.fixture +def mocked_resp(): + with responses.RequestsMock() as resp: + yield resp + + +def test_webhook_manager(mocked_resp, caplog): + wm = WebhookManager() + wm.register_signal('test') + assert 'test' in wm.signals + assert isinstance(wm.signals['test'], NamedSignal) + + wm.connect_handler('test', 'handler') + assert not wm.signals['test'].receivers + handler = Webhook('Handler', 'unknown_url') + wm.connect_handler('test', handler) + assert wm.signals['test'].receivers + + wm.emit_signal('test', 'Sender', foo='bar') + assert caplog.record_tuples[0][2] == 'Invalid url unknown_url, webhook Handler from sender Sender failed: {\'foo\': \'bar\'}' + + wm.disconnect_handler('test', handler) + assert not wm.signals['test'].receivers + + mocked_resp.add(responses.POST, 'http://webhook-test.com', body='{}', status=200, content_type='application/json') + handler = Webhook('Handler', 'http://webhook-test.com') + wm.connect_handler('test', handler) + resp = wm.emit_signal('test', 'Sender', foo='bar')[0][1] + assert resp.ok + + wm.remove_signal('test') + assert not wm.signals diff --git a/server/test/utils.py b/server/test/utils.py new file mode 100644 index 00000000..820e0776 --- /dev/null +++ b/server/test/utils.py @@ -0,0 +1,182 @@ +# Copyright (C) 2020 Lutra Consulting Limited. All rights reserved. +# Do not distribute without the express permission of the author. + +import json +import shutil +from datetime import datetime +from flask import url_for, current_app +import os +from sqlalchemy import or_ +from sqlalchemy.orm.attributes import flag_modified + +from src.auth.models import User, UserProfile +from src.mergin_utils import generate_location, generate_checksum +from src.models.db_models import Project, ProjectAccess, ProjectVersion, Namespace, ProjectTransfer, Account +from src.organisation.models import Organisation +from src import db +from . import json_headers, DEFAULT_USER, test_project, test_project_dir + + +def add_user(username, password, is_admin=False): + """ Helper function to create not-privileged user. + Associated user namespace is created with db hook. 
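+    Created user is active, has a verified email and is committed to the database.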
+ + :param username: username + :type username: str + :param password: password + :type password: str + :param is_admin: whether user is mergin admin + :type is_admin: bool + :returns: User + """ + user = User(username=username, passwd=password, is_admin=is_admin, email=f"{username}@mergin.com") + user.active = True + user.verified_email = True + user.profile = UserProfile() + db.session.add(user) + db.session.commit() + return user + + +def login(client, username, password): + resp = client.post( + url_for('auth.login'), + data=json.dumps({'login': username, 'password': password}), + headers=json_headers + ) + assert resp.status_code == 200 + + +def create_project(name, namespace, user, **kwargs): + default_project = {"storage_params": {"type": "local", "location": generate_location()}, "name": name} + project_params = dict(default_project) + project_params['creator'] = user + project_params['namespace'] = namespace + + p = Project(**project_params, **kwargs) + p.updated = datetime.utcnow() + db.session.add(p) + + public = kwargs.get("public", False) + pa = ProjectAccess(p, public) + db.session.add(pa) + + changes = {"added": [], "renamed": [], "updated": [], "removed": []} + pv = ProjectVersion(p, 'v0', user.username, changes, p.files, '127.0.0.1') + p.versions.append(pv) + db.session.commit() + + os.makedirs(p.storage.project_dir, exist_ok=True) + return p + + +def cleanup(client, projects_dirs): + """ Clean up project files created at various test scenarios """ + for d in projects_dirs: + path = os.path.join(client.application.config['LOCAL_PROJECTS'], d) + if os.path.exists(path): + shutil.rmtree(path) + + +def login_as_admin(client): + login(client, 'mergin', 'ilovemergin') + + +def share_project(project, user): + project.access.owners.append(user.id) + project.access.writers.append(user.id) + project.access.readers.append(user.id) + flag_modified(project.access, "owners") + flag_modified(project.access, "writers") + flag_modified(project.access, "readers") + db.session.add(project) + db.session.commit() + + +def transfer_project(project, to_namespace): + project_transfer = ProjectTransfer(project, to_namespace, project.creator_id) + db.session.add(project_transfer) + db.session.commit() + return project_transfer + + +def get_shared_projects(user): + projects = Project.query.filter(Project.namespace != user.username).filter( + or_(Project.access.has(ProjectAccess.owners.contains([user.id])), + Project.access.has(ProjectAccess.writers.contains([user.id])), + Project.access.has(ProjectAccess.readers.contains([user.id]))) + ).all() + return projects + + +def create_organisation(name, owner): + org = Organisation(name=name, creator_id=owner.id) + db.session.add(org) + db.session.commit() + return org + + +class Response: + """ Simple mock of requests.response object. 
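+    Implements only the pieces the tests rely on: the ok flag, text and the json() method.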
""" + def __init__(self, ok, json): + self.ok = ok + self._json = json + self.text = f'{json}' + + def json(self): + return self._json + + +class DateTimeEncoder(json.JSONEncoder): + def default(self, obj): + if isinstance(obj, datetime): + return obj.isoformat() + + return super().default(obj) + + +def initialize(): + # clean up (in case of previous failures) + proj_dir = os.path.join(current_app.config['LOCAL_PROJECTS'], DEFAULT_USER[0]) + if os.path.exists(proj_dir): + shutil.rmtree(proj_dir) + + # add default user/super admin + user = add_user(DEFAULT_USER[0], DEFAULT_USER[1], is_admin=True) + + # add default test project for later use + project_params = { + "storage_params": {"type": "local", "location": os.path.join(DEFAULT_USER[0], test_project)}, + "name": test_project, + "creator": user, + "namespace": user.username + } + + p = Project(**project_params) + p.files = [] + for root, dirs, files in os.walk(test_project_dir, topdown=True): # pylint: disable=W0612 + for f in files: + abs_path = os.path.join(root, f) + p.files.append({ + 'path': abs_path.replace(test_project_dir, '').lstrip('/'), + 'location': os.path.join('v1', abs_path.replace(test_project_dir, '').lstrip('/')), + 'size': os.path.getsize(abs_path), + 'checksum': generate_checksum(abs_path), + 'mtime': datetime.fromtimestamp(os.path.getmtime(abs_path)) + }) + p.latest_version = "v1" + db.session.add(p) + + # add default project permissions + pa = ProjectAccess(p, True) + db.session.add(pa) + db.session.commit() + + changes = {"added": p.files, "renamed": [], "updated": [], "removed": []} + pv = ProjectVersion(p, 'v1', user.username, changes, p.files, '127.0.0.1') + db.session.add(pv) + db.session.commit() + + # mimic files were uploaded + shutil.copytree(os.path.join(current_app.config['TEST_DIR'], test_project), + os.path.join(proj_dir, test_project, 'v1')) diff --git a/web-app/.browserslistrc b/web-app/.browserslistrc new file mode 100644 index 00000000..9dee6464 --- /dev/null +++ b/web-app/.browserslistrc @@ -0,0 +1,3 @@ +> 1% +last 2 versions +not ie <= 8 diff --git a/web-app/.eslintrc.js b/web-app/.eslintrc.js new file mode 100644 index 00000000..6c7acb98 --- /dev/null +++ b/web-app/.eslintrc.js @@ -0,0 +1,18 @@ +module.exports = { + root: true, + env: { + node: true + }, + extends: [ + 'plugin:vue/essential', + '@vue/standard' + ], + rules: { + 'no-console': process.env.NODE_ENV === 'production' ? 'error' : 'off', + 'no-debugger': process.env.NODE_ENV === 'production' ? 
'error' : 'off', + 'no-multiple-empty-lines': ['warn', { max: 2 }] + }, + parserOptions: { + parser: 'babel-eslint' + } +} diff --git a/web-app/.gitignore b/web-app/.gitignore new file mode 100644 index 00000000..ea575e25 --- /dev/null +++ b/web-app/.gitignore @@ -0,0 +1,23 @@ +.DS_Store +node_modules +/dist +package-lock.json + +# local env files +.env.local +.env.*.local + +# Log files +npm-debug.log* +yarn-debug.log* +yarn-error.log* +*.log + +# Editor directories and files +.idea +.vscode +*.suo +*.ntvs* +*.njsproj +*.sln +*.sw* diff --git a/web-app/babel.config.js b/web-app/babel.config.js new file mode 100644 index 00000000..ba179669 --- /dev/null +++ b/web-app/babel.config.js @@ -0,0 +1,5 @@ +module.exports = { + presets: [ + '@vue/app' + ] +} diff --git a/web-app/package.json b/web-app/package.json new file mode 100644 index 00000000..1246c586 --- /dev/null +++ b/web-app/package.json @@ -0,0 +1,68 @@ +{ + "name": "mergin", + "version": "0.1.0", + "private": true, + "scripts": { + "serve": "vue-cli-service serve", + "build": "vue-cli-service build", + "lint": "vue-cli-service lint --fix", + "i18n:report": "vue-cli-service i18n:report --src './src/admin/**/*.?(js|vue)' --locales './src/admin/locales/**/*.json'" + }, + "dependencies": { + "@gtm-support/vue2-gtm": "^1.0.0", + "@mdi/font": "^5.3.45", + "axios": "^0.21.1", + "axios-retry": "^3.1.2", + "core-js": "^3.6.5", + "country-region-data": "^1.6.0", + "crypto-js": "^3.1.9-1", + "date-fns": "^1.29.0", + "file-saver": "^2.0.5", + "lodash": "^4.17.15", + "material-icons": "^0.2.3", + "mdi-vue": "^1.6.3", + "node-sass": "^4.9.4", + "pdfjs-dist": "2.5.207", + "portal-vue": "^2.1.7", + "sass-loader": "^8.0.0", + "stylus-loader": "^3.0.2", + "tiptap": "^1.19.2", + "tiptap-extensions": "^1.19.2", + "vue": "^2.6.11", + "vue-country-region-select": "^2.0.14", + "vue-i18n": "^8.15.3", + "vue-material-design-icons": "^4.7.1", + "vue-meta": "^2.4.0", + "vue-pdf": "^4.1.0", + "vue-router": "^3.5.1", + "vue-tel-input": "~4.3.0", + "vue-wysiwyg": "^1.7.2", + "vuetify": "2.2.11", + "vuex": "^3.1.2" + }, + "devDependencies": { + "@vue/cli-plugin-babel": "^4.1.2", + "@vue/cli-plugin-e2e-cypress": "^4.1.2", + "@vue/cli-plugin-eslint": "^4.1.2", + "@vue/cli-plugin-unit-jest": "^4.1.2", + "@vue/cli-service": "^4.1.2", + "@vue/eslint-config-standard": "^5.0.1", + "@vue/test-utils": "1.0.0-beta.30", + "babel-core": "7.0.0-bridge.0", + "babel-eslint": "^10.0.3", + "babel-jest": "^24.9.0", + "eslint": "^6.8.0", + "eslint-config-vuetify": "^0.4.1", + "eslint-plugin-vue": "^6.1.2", + "@fortawesome/fontawesome-free": "^5.9.0", + "svg-inline-loader": "^0.8.0", + "vue-template-compiler": "^2.5.17", + "sass": "^1.24.3", + "vee-validate": "^3.2.2", + "vue-chartist": "^2.2.1", + "vue-cli-plugin-i18n": "^0.6.0", + "vue-cli-plugin-vuetify": "^2.0.3", + "vue-world-map": "^0.1.1", + "vuetify-loader": "^1.4.3" + } +} diff --git a/web-app/postcss.config.js b/web-app/postcss.config.js new file mode 100644 index 00000000..961986e2 --- /dev/null +++ b/web-app/postcss.config.js @@ -0,0 +1,5 @@ +module.exports = { + plugins: { + autoprefixer: {} + } +} diff --git a/web-app/public/favicon.ico b/web-app/public/favicon.ico new file mode 100644 index 00000000..5204da91 Binary files /dev/null and b/web-app/public/favicon.ico differ diff --git a/web-app/public/index.html b/web-app/public/index.html new file mode 100644 index 00000000..05bb1ebd --- /dev/null +++ b/web-app/public/index.html @@ -0,0 +1,19 @@ + + + + + + + + + Mergin + + + +
+ + + + diff --git a/web-app/public/index_main.html b/web-app/public/index_main.html new file mode 100644 index 00000000..fa2ca8ef --- /dev/null +++ b/web-app/public/index_main.html @@ -0,0 +1,20 @@ + + + + + + + + Mergin + + + +
+ + + + diff --git a/web-app/src/App.vue b/web-app/src/App.vue new file mode 100644 index 00000000..1becd266 --- /dev/null +++ b/web-app/src/App.vue @@ -0,0 +1,254 @@ +# Copyright (C) 2018 Lutra Consulting Limited. All rights reserved. +# Do not distribute without the express permission of the author. + + + + + + + + diff --git a/web-app/src/admin/components/base/Card.vue b/web-app/src/admin/components/base/Card.vue new file mode 100644 index 00000000..16ec4919 --- /dev/null +++ b/web-app/src/admin/components/base/Card.vue @@ -0,0 +1,9 @@ + diff --git a/web-app/src/admin/components/base/Item.vue b/web-app/src/admin/components/base/Item.vue new file mode 100644 index 00000000..1d86be28 --- /dev/null +++ b/web-app/src/admin/components/base/Item.vue @@ -0,0 +1,77 @@ + + + diff --git a/web-app/src/admin/components/base/ItemGroup.vue b/web-app/src/admin/components/base/ItemGroup.vue new file mode 100644 index 00000000..10183154 --- /dev/null +++ b/web-app/src/admin/components/base/ItemGroup.vue @@ -0,0 +1,134 @@ + + + + + diff --git a/web-app/src/admin/components/base/ItemSubGroup.vue b/web-app/src/admin/components/base/ItemSubGroup.vue new file mode 100644 index 00000000..6feadc39 --- /dev/null +++ b/web-app/src/admin/components/base/ItemSubGroup.vue @@ -0,0 +1,25 @@ + + + diff --git a/web-app/src/admin/components/base/MaterialAlert.vue b/web-app/src/admin/components/base/MaterialAlert.vue new file mode 100644 index 00000000..694c372e --- /dev/null +++ b/web-app/src/admin/components/base/MaterialAlert.vue @@ -0,0 +1,60 @@ + + + + + diff --git a/web-app/src/admin/components/base/MaterialCard.vue b/web-app/src/admin/components/base/MaterialCard.vue new file mode 100644 index 00000000..8e7f8f6d --- /dev/null +++ b/web-app/src/admin/components/base/MaterialCard.vue @@ -0,0 +1,148 @@ + + + + + diff --git a/web-app/src/admin/components/base/MaterialChartCard.vue b/web-app/src/admin/components/base/MaterialChartCard.vue new file mode 100644 index 00000000..56344d25 --- /dev/null +++ b/web-app/src/admin/components/base/MaterialChartCard.vue @@ -0,0 +1,95 @@ + + + + + diff --git a/web-app/src/admin/components/base/MaterialSnackbar.vue b/web-app/src/admin/components/base/MaterialSnackbar.vue new file mode 100644 index 00000000..b5712ab9 --- /dev/null +++ b/web-app/src/admin/components/base/MaterialSnackbar.vue @@ -0,0 +1,71 @@ + + + + diff --git a/web-app/src/admin/components/base/MaterialStatsCard.vue b/web-app/src/admin/components/base/MaterialStatsCard.vue new file mode 100644 index 00000000..85c3f319 --- /dev/null +++ b/web-app/src/admin/components/base/MaterialStatsCard.vue @@ -0,0 +1,113 @@ + + + + + diff --git a/web-app/src/admin/components/base/MaterialTabs.vue b/web-app/src/admin/components/base/MaterialTabs.vue new file mode 100644 index 00000000..de7d7d93 --- /dev/null +++ b/web-app/src/admin/components/base/MaterialTabs.vue @@ -0,0 +1,43 @@ + + + + + diff --git a/web-app/src/admin/components/base/MaterialTestimony.vue b/web-app/src/admin/components/base/MaterialTestimony.vue new file mode 100644 index 00000000..6c0cc410 --- /dev/null +++ b/web-app/src/admin/components/base/MaterialTestimony.vue @@ -0,0 +1,76 @@ + + + + + diff --git a/web-app/src/admin/components/base/Subheading.vue b/web-app/src/admin/components/base/Subheading.vue new file mode 100644 index 00000000..34fcba8d --- /dev/null +++ b/web-app/src/admin/components/base/Subheading.vue @@ -0,0 +1,34 @@ + + + diff --git a/web-app/src/admin/components/base/VComponent.vue b/web-app/src/admin/components/base/VComponent.vue new file mode 
100644 index 00000000..d3eafc56 --- /dev/null +++ b/web-app/src/admin/components/base/VComponent.vue @@ -0,0 +1,40 @@ + + + diff --git a/web-app/src/admin/i18n.js b/web-app/src/admin/i18n.js new file mode 100644 index 00000000..0abbb339 --- /dev/null +++ b/web-app/src/admin/i18n.js @@ -0,0 +1,19 @@ +import Vue from 'vue' +import VueI18n from 'vue-i18n' + +import en from 'vuetify/lib/locale/en' + +Vue.use(VueI18n) + +const messages = { + en: { + ...require('./locales/en.json'), + $vuetify: en + } +} + +export default new VueI18n({ + locale: process.env.VUE_APP_I18N_LOCALE || 'en', + fallbackLocale: process.env.VUE_APP_I18N_FALLBACK_LOCALE || 'en', + messages +}) diff --git a/web-app/src/admin/locales/en.json b/web-app/src/admin/locales/en.json new file mode 100644 index 00000000..2a8e7309 --- /dev/null +++ b/web-app/src/admin/locales/en.json @@ -0,0 +1,45 @@ +{ + "avatar": "Vuetify MD", + "buttons": "Buttons", + "calendar": "Calendar", + "charts": "Charts", + "components": "Components", + "ct": "CT", + "dashboard": "Dashboard", + "dtables": "Data Tables", + "eforms": "Extended Forms", + "error": "Error Page", + "etables": "Extended Tables", + "example": "Example", + "forms": "Forms", + "fullscreen": "Full Screen Map", + "google": "Google Maps", + "grid": "Grid System", + "icons": "Icons", + "lock": "Lock Screen Page", + "login": "Login Page", + "maps": "Maps", + "multi": "Multi Level Collapse", + "notifications": "Notifications", + "pages": "Pages", + "plan": "Choose Plan", + "pricing": "Pricing", + "my-profile": "My Profile", + "edit-profile": "Edit Profile", + "register": "Register Page", + "rforms": "Regular Forms", + "rtables": "Regular Tables", + "rtl": "RTL Support", + "search": "Search", + "settings": "Settings", + "tables": "Tables", + "tabs": "Tabs", + "tim": "Creative Tim", + "timeline": "Timeline", + "typography": "Typography", + "upgrade": "Upgrade To PRO", + "user": "User Profile", + "vforms": "Validation Forms", + "widgets": "Widgets", + "wizard": "Wizard" +} diff --git a/web-app/src/admin/mixins/CommonAPI.js b/web-app/src/admin/mixins/CommonAPI.js new file mode 100644 index 00000000..3a9e041d --- /dev/null +++ b/web-app/src/admin/mixins/CommonAPI.js @@ -0,0 +1,97 @@ +// Copyright (C) 2020 Lutra Consulting Limited. All rights reserved. +// Do not distribute without the express permission of the author. + +import { waitCursor } from '../../util' +import http from '@/http' + +export default { + methods: { + fetchUserProfileByName (username) { + waitCursor(true) + this.$http.get(`/auth/user_profile_by_name/${username}?random=${Math.random()}`) + .then(resp => { + this.$store.commit('admin/userAdminProfile', resp.data) + }) + .catch(() => this.$notification.error('Failed to fetch user profile')) + .finally(() => { + waitCursor(false) + }) + }, + closeAccount (accountId) { + waitCursor(true) + const promise = this.$http.delete(`/app/account/${accountId}`, { 'axios-retry': { retries: 5 } }) + Promise.resolve(promise).then(() => { + location.href = '/admin/accounts' + this.$router.push({ name: 'Accounts' }) + }).catch(err => { + const msg = (err.response && err.response.data.detail) ? 
err.response.data.detail : 'Unable to close account' + this.$notification.error(msg) + }).finally(() => { waitCursor(false) }) + }, + /** + * Fetch a list of removed projects based on pagination params + * @param params (Object) pagination parameters + * @return Result promise with either data or error + */ + async fetchRemovedProjects (params) { + let result = {} + try { + const resp = await http.get('/app/removed-project', { params }) + result = { ...resp.data, success: true } + } catch (e) { + result.success = false + result.message = e.response.data.detail || 'Failed to fetch list of removed projects' + } + return new Promise((resolve) => { resolve(result) }) + }, + /** + * Permanently remove project + * @param id (Int) removed project id + * @return Result promise + */ + async removeProject (id) { + const result = {} + try { + await this.$http.delete(`/app/removed-project/${id}`, { 'axios-retry': { retries: 5 } }) + result.success = true + } catch (e) { + result.success = false + result.message = e.response.data.detail || 'Unable to remove project' + } + return new Promise((resolve) => { resolve(result) }) + }, + /** + * Restore removed project + * @param id (Int) removed project id + * @return Result promise + */ + async restoreProject (id) { + const result = {} + try { + await this.$http.post(`/app/removed-project/restore/${id}`, null, { 'axios-retry': { retries: 5 } }) + result.success = true + } catch (e) { + result.success = false + result.message = e.response.data.detail || 'Unable to restore project' + } + return new Promise((resolve) => { resolve(result) }) + }, + /** + * Update account storage + * @param accountId (Int) edited account + * @param storage (Int) new storage + * @return Result promise + */ + async updateAccountStorage (accountId, storage) { + const result = {} + try { + await this.$http.post(`/app/account/change_storage/${accountId}`, { storage: storage }, { 'axios-retry': { retries: 5 } }) + result.success = true + } catch (e) { + result.success = false + result.message = e.response.data.detail || 'Unable to update storage' + } + return new Promise((resolve) => { resolve(result) }) + } + } +} diff --git a/web-app/src/admin/module.js b/web-app/src/admin/module.js new file mode 100644 index 00000000..4ba1a91f --- /dev/null +++ b/web-app/src/admin/module.js @@ -0,0 +1,16 @@ +// Copyright (C) 2021 Lutra Consulting Limited. All rights reserved. +import routes from './module/routes' +import store from './module/store' +import router from '@/router' +export default { + name: 'admin', + routes: routes, + store: store, + addRoutes: () => { + // add routes to router + routes.forEach(route => { + router.addRoute(route) + }) + } + +} diff --git a/web-app/src/admin/module/routes.js b/web-app/src/admin/module/routes.js new file mode 100644 index 00000000..3905e873 --- /dev/null +++ b/web-app/src/admin/module/routes.js @@ -0,0 +1,195 @@ +// Copyright (C) 2021 Lutra Consulting Limited. All rights reserved. 
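+// Admin dashboard routing: the top-level /admin route's beforeEnter guard
+// requires store.state.app.user.is_admin, which also covers all child routes.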
+import store from '../../store'
+import Index from '../views/dashboard/Index'
+import Account from '../views/dashboard/pages/Account'
+import AdminProfile from '../views/dashboard/pages/Profile'
+import AdminOrganisationProfile from '../views/dashboard/pages/OrganisationProfile'
+import Projects from '../views/dashboard/pages/Projects'
+import Project from '@/views/Project'
+import AppHeader from '@/components/AppHeader'
+import FileBrowser from '@/views/FileBrowser'
+import FileDetail from '@/views/FileDetail'
+import ProjectSettings from '@/views/ProjectSettings'
+import FileVersionDetail from '@/views/FileVersionDetail'
+import VersionDetail from '@/views/VersionDetail'
+import ProjectVersions from '@/views/ProjectVersions'
+import { parseError } from '@/mergin'
+
+import HTTP from '@/http'
+
+export default [
+  {
+    path: '/admin',
+    name: 'admin',
+    components: {
+      default: Index
+    },
+    beforeEnter: (to, from, next) => {
+      if (store.state.app.user && store.state.app.user.is_admin) next()
+      else next('/')
+    },
+    props: {
+      default: true
+    },
+    children: [
+      {
+        path: 'accounts',
+        name: 'accounts',
+        component: Account,
+        props: true
+      },
+      {
+        path: 'accounts/:account_id',
+        name: 'account',
+        component: Account,
+        props: true,
+        beforeEnter: (to, from, next) => {
+          HTTP.get(`/app/accounts/${to.params.account_id}`)
+            .then(resp => {
+              if (resp.data.type === 'user') {
+                next({ name: 'admin-profile', params: { username: resp.data.name } })
+              } else {
+                next({ name: 'admin-organisation', params: { username: resp.data.name } })
+              }
+            })
+            .catch(() => next(from))
+        }
+      },
+      {
+        path: 'user/:username',
+        name: 'admin-profile',
+        component: AdminProfile,
+        props: true,
+        beforeEnter: (to, from, next) => {
+          store.commit('admin/userAdminProfile', null)
+          store.dispatch('admin/fetchUserAdminProfile', to.params.username)
+            .then(() => next())
+            .catch((e) => { next(Error(parseError(e, 'Failed to fetch user profile'))) })
+        }
+      },
+      {
+        path: 'organisation/:name',
+        name: 'admin-organisation',
+        component: AdminOrganisationProfile,
+        props: true,
+        beforeEnter: (to, from, next) => {
+          store.commit('organisation', null)
+          store.dispatch('setOrganisation', to.params.name)
+            .then(() => next())
+            .catch((e) => { next(Error(parseError(e, 'Failed to get organisation'))) })
+        }
+      },
+      {
+        path: 'projects',
+        name: 'admin-projects',
+        component: Projects,
+        props: true,
+        children: [
+          {
+            path: ':namespace',
+            name: 'admin-namespace-projects',
+            component: Projects,
+            props: true
+          }
+        ]
+      },
+      {
+        path: 'projects/:namespace/:projectName',
+        name: 'admin-project',
+        components: {
+          default: Project,
+          header: AppHeader
+        },
+        props: {
+          default: route => ({
+            namespace: route.params.namespace,
+            projectName: route.params.projectName,
+            asAdmin: true
+          })
+        },
+        redirect: { name: 'admin-project-tree' },
+        children: [
+          {
+            path: 'blob/:location*',
+            name: 'admin-blob',
+            component: FileDetail,
+            props (route) {
+              return {
+                asAdmin: true,
+                namespace: route.params.namespace,
+                projectName: route.params.projectName,
+                location: route.params.location
+              }
+            }
+          },
+          {
+            path: 'tree/:location*',
+            name: 'admin-project-tree',
+            component: FileBrowser,
+            props (route) {
+              return {
+                asAdmin: true,
+                namespace: route.params.namespace,
+                projectName: route.params.projectName,
+                location: route.params.location
+              }
+            }
+          },
+          {
+            path: 'settings',
+            name: 'admin-project-settings',
+            component: ProjectSettings,
+            props (route) {
+              return {
+                asAdmin: true,
+                namespace: route.params.namespace,
+                projectName: 
route.params.projectName + } + } + }, + { + path: 'history', + name: 'admin-project-versions', + component: ProjectVersions, + props (route) { + return { + asAdmin: true, + namespace: route.params.namespace, + projectName: route.params.projectName + } + } + }, + { + path: 'history/:version_id', + name: 'admin-project-versions-detail', + component: VersionDetail, + props (route) { + return { + asAdmin: true, + namespace: route.params.namespace, + projectName: route.params.projectName, + version_id: route.params.version_id + } + } + }, + { + path: 'history/:version_id/:path', + name: 'admin-file-version-detail', + component: FileVersionDetail, + props (route) { + return { + asAdmin: true, + namespace: route.params.namespace, + projectName: route.params.projectName, + version_id: route.params.version_id, + path: route.params.path + } + } + } + ] + } + ] + } +] diff --git a/web-app/src/admin/module/store.js b/web-app/src/admin/module/store.js new file mode 100644 index 00000000..ce7819e5 --- /dev/null +++ b/web-app/src/admin/module/store.js @@ -0,0 +1,48 @@ +// Copyright (C) 2020 Lutra Consulting Limited. All rights reserved. +import http from '@/http' + +export default { + namespaced: true, + state: { + loading: false, + error: '', + accounts: { + items: [], + count: 0 + }, + userAdminProfile: null + }, + mutations: { + loading (state, value) { + state.loading = value + }, + error (state, message) { + state.error = message + }, + accounts (state, data) { + state.accounts.count = data.total + state.accounts.items = data.accounts + }, + userAdminProfile (state, userAdminProfile) { + state.userAdminProfile = userAdminProfile + } + }, + actions: { + fetchAccounts ({ commit }, { type, params }) { + commit('loading', true) + http.get(`/app/accounts/${type}`, { params }) + .then(resp => { + commit('accounts', resp.data) + commit('error', '') + }) + .catch((e) => { + commit('error', e.response.data.detail || e.message) + }) + .finally(() => commit('loading', false)) + }, + async fetchUserAdminProfile ({ commit }, username) { + const resp = await http.get(`/auth/user_profile_by_name/${username}?random=${Math.random()}`) + commit('userAdminProfile', resp.data) + } + } +} diff --git a/web-app/src/admin/plugins/base.js b/web-app/src/admin/plugins/base.js new file mode 100644 index 00000000..72aea18d --- /dev/null +++ b/web-app/src/admin/plugins/base.js @@ -0,0 +1,17 @@ +import Vue from 'vue' +import upperFirst from 'lodash/upperFirst' +import camelCase from 'lodash/camelCase' + +const requireComponent = require.context( + '@/admin/components/base', true, /\.vue$/ +) + +requireComponent.keys().forEach(fileName => { + const componentConfig = requireComponent(fileName) + + const componentName = upperFirst( + camelCase(fileName.replace(/^\.\//, '').replace(/\.\w+$/, '')) + ) + + Vue.component(`Base${componentName}`, componentConfig.default || componentConfig) +}) diff --git a/web-app/src/admin/plugins/chartist.js b/web-app/src/admin/plugins/chartist.js new file mode 100644 index 00000000..2c8bb2d9 --- /dev/null +++ b/web-app/src/admin/plugins/chartist.js @@ -0,0 +1,4 @@ +import Vue from 'vue' +import 'chartist/dist/chartist.min.css' + +Vue.use(require('vue-chartist')) diff --git a/web-app/src/admin/plugins/vee-validate.js b/web-app/src/admin/plugins/vee-validate.js new file mode 100644 index 00000000..555a404e --- /dev/null +++ b/web-app/src/admin/plugins/vee-validate.js @@ -0,0 +1,22 @@ +import Vue from 'vue' +import { + extend, + ValidationObserver, + ValidationProvider +} from 'vee-validate' +import { + 
email, + max, + min, + numeric, + required +} from 'vee-validate/dist/rules' + +extend('email', email) +extend('max', max) +extend('min', min) +extend('numeric', numeric) +extend('required', required) + +Vue.component('validation-provider', ValidationProvider) +Vue.component('validation-observer', ValidationObserver) diff --git a/web-app/src/admin/plugins/vuetify.js b/web-app/src/admin/plugins/vuetify.js new file mode 100644 index 00000000..10f60cbf --- /dev/null +++ b/web-app/src/admin/plugins/vuetify.js @@ -0,0 +1,26 @@ +import Vue from 'vue' +import Vuetify from 'vuetify/lib' +import i18n from '../i18n' +// import '../sass/overrides.sass' + +Vue.use(Vuetify) + +const theme = { + // primary: '#4CAF50', + primary: '#2d4470', + secondary: '#9C27b0', + accent: '#9C27b0', + info: '#00CAE3' +} + +export default new Vuetify({ + lang: { + t: (key, ...params) => i18n.t(key, params) + }, + theme: { + themes: { + dark: theme, + light: theme + } + } +}) diff --git a/web-app/src/admin/sass/overrides.sass b/web-app/src/admin/sass/overrides.sass new file mode 100644 index 00000000..8df80789 --- /dev/null +++ b/web-app/src/admin/sass/overrides.sass @@ -0,0 +1,49 @@ +// ========================================================= +// * Vuetify Material Dashboard - v2.1.0 +// ========================================================= +// +// * Product Page: https://www.creative-tim.com/product/vuetify-material-dashboard +// * Copyright 2019 Creative Tim (https://www.creative-tim.com) +// +// * Coded by Creative Tim +// +// ========================================================= +// +// * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +// Creative Tim refine style code +@import vuetify-material/sidebar +@import vuetify-material/appbar +@import vuetify-material/buttons +@import vuetify-material/pagination +@import vuetify-material/footer +@import vuetify-material/view +@import vuetify-material/settings +@import vuetify-material/card +@import vuetify-material/table +@import vuetify-material/tab +@import vuetify-material/notification +@import vuetify-material/modal +@import vuetify-material/map +@import vuetify-material/chip +@import 'variables.scss' + + +//.v-btn.v-size--default, +//.v-btn.v-size--large +// &:not(.v-btn--icon):not(.v-btn--fab) +// padding: 0 30px !important + +.theme--light.v-list-item .v-list-item__action-text, +.theme--light.v-list-item .v-list-item__subtitle + color: #999 + +.theme--light.v-text-field>.v-input__control>.v-input__slot:before + border-color: #d2d2d2 + +.v-label.v-label, +.v-alert.v-alert + font-size: $font-size-root + +.theme--light .v-content + background-color: #eee diff --git a/web-app/src/admin/sass/variables.scss b/web-app/src/admin/sass/variables.scss new file mode 100644 index 00000000..052be8cd --- /dev/null +++ b/web-app/src/admin/sass/variables.scss @@ -0,0 +1,34 @@ +$font-size-root: 12px; +$sheet-border-radius: 4px; +$list-item-title-font-size: 0.929rem; +$list-item-dense-title-font-size: 0.929rem; +$list-item-dense-title-font-weight: initial; +$fab-icon-sizes: ( small: 20 ); +$btn-font-sizes: ( default: 1rem, large: 1rem ); +$btn-sizes: ( default: 41, large: 54 ); +$btn-letter-spacing: 0; +$btn-font-weight: 400; +$card-text-font-size: 12px; + +$headings: ( + //'h1': ( + // 'size': 3.3125rem, + // 'line-height': 1.15em + //), + //'h2': ( + // 'size': 2.25rem, + // 'line-height': 1.5em + //), + //'h3': ( + // 'size': 1.5625rem, + // 'line-height': 1.4em + //), + //'h4': ( + // 'size': 
1.125rem, + // 'line-height': 1.4em + //), + //'h5': ( 'size': 1.0625rem ), + //'h6': ( 'size': .75rem ), + 'subtitle-2': ( 'size': 1rem ), + 'overline': ( 'letter-spacing': 0 ) +); diff --git a/web-app/src/admin/sass/vuetify-material/_appbar.sass b/web-app/src/admin/sass/vuetify-material/_appbar.sass new file mode 100644 index 00000000..f94ad7c8 --- /dev/null +++ b/web-app/src/admin/sass/vuetify-material/_appbar.sass @@ -0,0 +1,54 @@ +#app-bar + .v-badge__badge + font-size: 9px + padding: 5px 6px + +// ----------------------- +.v-toolbar__content, +.v-toolbar__extension + padding: 0px 15px 0 31px + +.v-sheet + .v-toolbar__content + .v-btn.v-size--default:not(.v-btn--icon):not(.v-btn--fab), + .v-btn.v-size--large:not(.v-btn--icon):not(.v-btn--fab) + margin-bottom: 5px + padding: 10px 15px !important + .theme--light.v-btn:not(.v-btn--flat):not(.v-btn--text):not(.v-btn--outlined) + background-color: #fff + .v-icon + color: #999 + +.theme--light.v-btn:not(.v-btn--flat):not(.v-btn--text):not(.v-btn--outlined) + background-color: #fff + margin-right: 17px + margin-bottom: 2px + +.theme--light.v-btn:not(.v-btn--flat):not(.v-btn--text):not(.v-btn--outlined):hover + background-color: #fff + +.v-toolbar__content + height: 75px + +.v-toolbar__content .v-btn--flat + .v-icon + margin-right: 3px + +.theme--light.v-label + color: rgba(0, 0, 0, 0.3) + +.v-menu__content .v-list--nav + padding: .3125rem 0 + border-radius: 4px + .v-list-item + padding: 10px 20px + margin: 0 .3125rem + margin-bottom: 0px !important + min-height: 40px + border-radius: 2px + .v-list-item__title + font-weight: 400 + font-size: 13px + +.v-navigation-drawer .v-icon.v-icon + font-size: 24px diff --git a/web-app/src/admin/sass/vuetify-material/_buttons.sass b/web-app/src/admin/sass/vuetify-material/_buttons.sass new file mode 100644 index 00000000..ce29f477 --- /dev/null +++ b/web-app/src/admin/sass/vuetify-material/_buttons.sass @@ -0,0 +1,65 @@ +.v-btn.v-size--default + font-size: .85rem + +.v-icon.v-icon + font-size: 20px + +.v-btn__content .v-icon--left + margin-right: 4px + +.v-sheet button.v-btn.v-size--default:not(.v-btn--icon):not(.v-btn--fab) + padding: 12px 30px !important + +.theme--light.v-btn:not(.v-btn--flat):not(.v-btn--text):not(.v-btn--outlined) + background-color: #999 + &:hover + background-color: #999 + +.v-btn.white + .v-btn__content + color: #999 + +.v-sheet .v-btn.v-size--large:not(.v-btn--icon):not(.v-btn--fab) + padding: 18px 36px !important + +.v-btn--fab.v-size--small + height: 41px + width: 41px + +.v-btn:not(.v-btn--text):not(.v-btn--outlined):hover:before + opacity: 0 + +.v-btn:not(.v-btn--text):not(.v-btn--outlined):focus:before + opacity: 0 + +.v-btn.v-size--default:not(.v-btn--icon):not(.v-btn--fab), +.v-btn.v-size--large:not(.v-btn--icon):not(.v-btn--fab) + padding: 10px 15px !important + +// Button group + +.v-item-group + .v-btn:not(.v-btn--flat):not(.v-btn--text):not(.v-btn--outlined) + margin-right: 0 + +.v-btn-toggle + .v-btn + opacity: 1 + +.v-btn-toggle > .v-btn.v-size--default + height: inherit + +.theme--light.v-btn-toggle .v-btn.v-btn + border-color: #999 !important + &.primary + border-color: #e91e63 !important + &.secondary + border-color: #9c27b0 !important + &.success + border-color: #4caf50 !important + &.warning + border-color: #fb8c00 !important + &.error + border-color: #ff5252 !important + &.info + border-color: #00cae3 !important diff --git a/web-app/src/admin/sass/vuetify-material/_card.sass b/web-app/src/admin/sass/vuetify-material/_card.sass new file mode 100644 index 
00000000..b1b40671 --- /dev/null +++ b/web-app/src/admin/sass/vuetify-material/_card.sass @@ -0,0 +1,107 @@ +.v-card + border-radius: 6px + margin-top: 30px + margin-bottom: 15px + + .card-title + font-size: 18px + + .v-card--material__heading + top: -30px + + .subtitle-1 + color: hsla(0,0%,100%,.8) + + .display-2 + font-size: 18px !important + + .caption + font-size: 12px !important + letter-spacing: 0 !important + + .v-card__actions + padding-top: 15px + .display-2 + font-size: 18px !important + + .v-divider + border-color: #eee + + .ct-label + font-size: 14px + +.v-card--material-chart .v-card--material__heading .ct-label + font-weight: 300 + + +.v-btn--icon.v-size--default .v-icon, +.v-btn--fab.v-size--default .v-icon + font-size: 18px + +.v-card--material .v-image + .v-image__image + border-radius: 6px + +.v-card__title + font-size: 18px + padding-top: 7px + padding-bottom: 2px + +.theme--light + .v-card > .v-card__text + color: #333 + .card-title + color: #3c4858 + +.theme--dark + .card-title + color: #fff + + +.v-timeline-item .v-card + margin-top: 0 + +.v-card--wizard + .v-tabs-bar + height: 42px + .v-card__actions + .v-btn + margin-right: 0 !important + .v-tabs .v-tab--active:hover::before, .theme--light.v-tabs .v-tab--active::before + opacity: 0 + .v-tabs .v-tab:hover::before + opacity: 0 + +.v-card--plan + .body-2 + font-weight: 500 + letter-spacing: 0 !important + margin-top: 10px + margin-bottom: 8px + .display-2 + margin-top: 30px + + .v-card__text + color: #999 + margin-bottom: 16px + .v-btn + margin-right: 0 !important + .v-avatar + margin-top: 10px + +.v-card--testimony + .v-card__text + color: #999 !important + + .display-2 + font-size: 18px !important + + .body-2 + font-weight: 500 + font-size: 12px !important + + .v-avatar + left: calc(50% - 50px) + +.ct-square:before + float: none diff --git a/web-app/src/admin/sass/vuetify-material/_chip.sass b/web-app/src/admin/sass/vuetify-material/_chip.sass new file mode 100644 index 00000000..b5926ef5 --- /dev/null +++ b/web-app/src/admin/sass/vuetify-material/_chip.sass @@ -0,0 +1,9 @@ +.v-chip.v-size--small + height: 20px + +.v-chip__content + font-size: 10px + font-weight: 500 + .v-chip__close + font-size: 15px + margin-top: -1px diff --git a/web-app/src/admin/sass/vuetify-material/_footer.sass b/web-app/src/admin/sass/vuetify-material/_footer.sass new file mode 100644 index 00000000..9cc178a7 --- /dev/null +++ b/web-app/src/admin/sass/vuetify-material/_footer.sass @@ -0,0 +1,29 @@ +.v-footer + padding: 20px 0 20px 4px + border-top: 1px solid #e7e7e7 !important + position: relative + a + padding: 15px 18px 15px 16px + font-size: 12px !important + .body-1 + font-size: 16px !important + padding-right: 18px + letter-spacing: 0px !important + a + color: #9c27b0 !important + padding: 0 + text-transform: inherit !important + font-size: 16px !important + font-weight: 300 !important + .v-icon + margin-top: -3px + + &.v-footer--absolute + position: absolute !important + +.theme--light.v-footer + background-color: transparent + .body-1 + color: #3c4858 + .v-icon + color: #3c4858 diff --git a/web-app/src/admin/sass/vuetify-material/_map.sass b/web-app/src/admin/sass/vuetify-material/_map.sass new file mode 100644 index 00000000..a8a90f53 --- /dev/null +++ b/web-app/src/admin/sass/vuetify-material/_map.sass @@ -0,0 +1,3 @@ +.mapouter + position: relative !important + height: 100vh !important diff --git a/web-app/src/admin/sass/vuetify-material/_modal.sass b/web-app/src/admin/sass/vuetify-material/_modal.sass new file mode 100644 index 
00000000..cf81cd89 --- /dev/null +++ b/web-app/src/admin/sass/vuetify-material/_modal.sass @@ -0,0 +1,25 @@ +.v-dialog + .v-card + margin: 0 + .v-card__title + font-weight: 300 + font-size: 18px + display: inline-block + text-align: center + width: 100% + padding: 24px 24px 0 + .v-icon + position: absolute + top: 15px + right: 20px + color: #999 + opacity: .5 + font-size: 16px + &:hover + opacity: 1 + +.v-dialog > .v-card > .v-card__text + padding-top: 24px + font-weight: 300 + line-height: 1.75em + letter-spacing: 0 diff --git a/web-app/src/admin/sass/vuetify-material/_notification.sass b/web-app/src/admin/sass/vuetify-material/_notification.sass new file mode 100644 index 00000000..f687f620 --- /dev/null +++ b/web-app/src/admin/sass/vuetify-material/_notification.sass @@ -0,0 +1,12 @@ +.v-alert + padding: 20px 15px + + .v-alert__wrapper + .v-alert__icon + height: 38px + min-width: 38px + .v-alert__content + font-weight: 300 + span + font-size: 12px + font-weight: 500 diff --git a/web-app/src/admin/sass/vuetify-material/_pagination.sass b/web-app/src/admin/sass/vuetify-material/_pagination.sass new file mode 100644 index 00000000..f84bdd5c --- /dev/null +++ b/web-app/src/admin/sass/vuetify-material/_pagination.sass @@ -0,0 +1,5 @@ +.v-pagination + .v-pagination__item, + .v-pagination__navigation + &:focus + outline: none diff --git a/web-app/src/admin/sass/vuetify-material/_settings.sass b/web-app/src/admin/sass/vuetify-material/_settings.sass new file mode 100644 index 00000000..9bb030ee --- /dev/null +++ b/web-app/src/admin/sass/vuetify-material/_settings.sass @@ -0,0 +1,36 @@ +#settings + z-index: 200 + +.v-settings + border-radius: 10px + .v-card + margin-top: 0 + .v-card__text + strong + height: 30px + line-height: 25px + font-size: 12px + font-weight: 600 + text-transform: uppercase + text-align: center + .v-avatar + border-color: #fff + border-radius: 50% !important + cursor: pointer + display: inline-block + height: 23px + margin-right: 12px + position: relative + width: 23px + padding: 8px + + .v-settings__item + border-radius: 10px + .v-image + border-radius: 7px !important + + .v-settings__item:not(.v-settings__item--active) + border-color: #fff !important + + .v-divider.secondary + border-color: rgb(221, 221, 221) !important diff --git a/web-app/src/admin/sass/vuetify-material/_sidebar.sass b/web-app/src/admin/sass/vuetify-material/_sidebar.sass new file mode 100644 index 00000000..0acbd4ec --- /dev/null +++ b/web-app/src/admin/sass/vuetify-material/_sidebar.sass @@ -0,0 +1,68 @@ +.v-application .v-navigation-drawer .v-navigation-drawer__content .v-list-item .v-list-item__content .v-list-item__title.display-2 + font-size: 18px !important + margin-top: 12px + margin-bottom: 12px + +.v-application .v-navigation-drawer .v-navigation-drawer__content .v-list .v-list-group .v-list-group__header .v-list-item__content .v-list-item__title + font-size: 14px + font-weight: 300 + +.v-application--is-ltr .v-list-item__avatar:first-child + margin-right: 11px + +.v-application .v-navigation-drawer .v-navigation-drawer__content .v-list-item__icon.v-list-group__header__append-icon .v-icon + font-size: 19px + +.v-application--is-ltr #core-navigation-drawer div.v-list-item__icon--text, +.v-application--is-ltr #core-navigation-drawer div.v-list-item__icon:first-child + margin-left: 5px !important + margin-right: 18px + opacity: .8 + +.v-application--is-ltr .v-list-item__action:last-of-type:not(:only-child), +.v-application--is-ltr .v-list-item__avatar:last-of-type:not(:only-child), 
+.v-application--is-ltr .v-list-item__icon:last-of-type:not(:only-child) + margin-right: 2px + +.v-list--nav.v-list--dense .v-list-item:not(:last-child):not(:only-child), +.v-list--nav .v-list-item--dense:not(:last-child):not(:only-child), +.v-list--rounded.v-list--dense .v-list-item:not(:last-child):not(:only-child), +.v-list--rounded .v-list-item--dense:not(:last-child):not(:only-child) + margin-bottom: 3px + +.v-list-item .v-list-item__title, .v-list-item .v-list-item__subtitle + line-height: 1.2 + font-weight: 300 + font-size: 14px + +.v-list-group__items .v-list-item + font-size: 13px + margin-bottom: 5px !important + .v-list-item__title + font-size: 13px + .v-list-item__icon + margin-top: 14px + +.v-list-group__items .v-list-group--sub-group .v-list-group__header .v-list-item__icon--text + margin-top: 15px !important + + +.v-list-item__icon + margin: 12px 0 + +.theme--dark.v-list-item--active:hover::before, .theme--dark.v-list-item--active::before + opacity: 0 + +.v-navigation-drawer + .v-list-item__content + transition: all 0.3s linear 0s + +.v-list--nav + padding-left: 15px + padding-right: 15px + +.theme--dark.v-navigation-drawer .v-divider + background-color: rgba(181, 181, 181, 0.2) + border-color: rgba(181, 181, 181, 0.1) + width: calc(100% - 30px) + margin-left: 15px diff --git a/web-app/src/admin/sass/vuetify-material/_tab.sass b/web-app/src/admin/sass/vuetify-material/_tab.sass new file mode 100644 index 00000000..fd08bd23 --- /dev/null +++ b/web-app/src/admin/sass/vuetify-material/_tab.sass @@ -0,0 +1,12 @@ +.v-card--wizard + .v-tabs-bar + .v-slide-group__wrapper + overflow: visible + display: -webkit-inline-box + contain: inherit + + .v-slide-group__content + z-index: 2 + + .v-tab:not(:first-child) + margin-left: 5px diff --git a/web-app/src/admin/sass/vuetify-material/_table.sass b/web-app/src/admin/sass/vuetify-material/_table.sass new file mode 100644 index 00000000..31ca1624 --- /dev/null +++ b/web-app/src/admin/sass/vuetify-material/_table.sass @@ -0,0 +1,22 @@ +.v-data-table td + font-weight: 300 + padding: 12px 8px + +.v-data-table table thead tr th + font-weight: 300 + font-size: 17px + padding: 0px 8px + +.v-data-table table tbody tr td .v-btn + margin-right: 0px !important + +.v-data-table .v-data-table-header__sort-badge + font-size: 10px + +.v-data-table.theme--dark + tr th + color: #fff !important + +.theme--light + .v-data-table table thead tr th + color: #333 diff --git a/web-app/src/admin/sass/vuetify-material/_view.sass b/web-app/src/admin/sass/vuetify-material/_view.sass new file mode 100644 index 00000000..0cf65b1d --- /dev/null +++ b/web-app/src/admin/sass/vuetify-material/_view.sass @@ -0,0 +1,23 @@ +.v-content__wrap + .container--fluid + padding-left: 30px + padding-right: 30px + +.v-application .headline + font-size: 25px !important + padding-bottom: 0 + +.v-application .black--text + color: #333 !important + +.v-application .small + font-weight: 300 + line-height: 2rem + small + font-weight: 400 + +@media(max-width: 960px) + .v-content__wrap + .container--fluid + padding-left: 15px + padding-right: 15px diff --git a/web-app/src/admin/views/dashboard/Dashboard.vue b/web-app/src/admin/views/dashboard/Dashboard.vue new file mode 100644 index 00000000..43cee7c1 --- /dev/null +++ b/web-app/src/admin/views/dashboard/Dashboard.vue @@ -0,0 +1,601 @@ + + + diff --git a/web-app/src/admin/views/dashboard/Index.vue b/web-app/src/admin/views/dashboard/Index.vue new file mode 100644 index 00000000..104ff8aa --- /dev/null +++ 
b/web-app/src/admin/views/dashboard/Index.vue @@ -0,0 +1,33 @@ + + + + diff --git a/web-app/src/admin/views/dashboard/Upgrade.vue b/web-app/src/admin/views/dashboard/Upgrade.vue new file mode 100644 index 00000000..981f362d --- /dev/null +++ b/web-app/src/admin/views/dashboard/Upgrade.vue @@ -0,0 +1,131 @@ + + + + + diff --git a/web-app/src/admin/views/dashboard/component/Buttons.vue b/web-app/src/admin/views/dashboard/component/Buttons.vue new file mode 100644 index 00000000..6f469e8c --- /dev/null +++ b/web-app/src/admin/views/dashboard/component/Buttons.vue @@ -0,0 +1,432 @@ + + + diff --git a/web-app/src/admin/views/dashboard/component/Grid.vue b/web-app/src/admin/views/dashboard/component/Grid.vue new file mode 100644 index 00000000..5dcb0c71 --- /dev/null +++ b/web-app/src/admin/views/dashboard/component/Grid.vue @@ -0,0 +1,300 @@ + + + diff --git a/web-app/src/admin/views/dashboard/component/Icons.vue b/web-app/src/admin/views/dashboard/component/Icons.vue new file mode 100644 index 00000000..e4cc1cbb --- /dev/null +++ b/web-app/src/admin/views/dashboard/component/Icons.vue @@ -0,0 +1,301 @@ + + + diff --git a/web-app/src/admin/views/dashboard/component/Notifications.vue b/web-app/src/admin/views/dashboard/component/Notifications.vue new file mode 100644 index 00000000..4cd3a9da --- /dev/null +++ b/web-app/src/admin/views/dashboard/component/Notifications.vue @@ -0,0 +1,406 @@ + + + diff --git a/web-app/src/admin/views/dashboard/component/Tabs.vue b/web-app/src/admin/views/dashboard/component/Tabs.vue new file mode 100644 index 00000000..fa990a60 --- /dev/null +++ b/web-app/src/admin/views/dashboard/component/Tabs.vue @@ -0,0 +1,418 @@ + + + diff --git a/web-app/src/admin/views/dashboard/component/Typography.vue b/web-app/src/admin/views/dashboard/component/Typography.vue new file mode 100644 index 00000000..d0883c2d --- /dev/null +++ b/web-app/src/admin/views/dashboard/component/Typography.vue @@ -0,0 +1,120 @@ + + + + + diff --git a/web-app/src/admin/views/dashboard/components/AccountDetail.vue b/web-app/src/admin/views/dashboard/components/AccountDetail.vue new file mode 100644 index 00000000..c10f8e8f --- /dev/null +++ b/web-app/src/admin/views/dashboard/components/AccountDetail.vue @@ -0,0 +1,64 @@ +# Copyright (C) 2021 Lutra Consulting Limited. All rights reserved. +# Do not distribute without the express permission of the author. + + + + + diff --git a/web-app/src/admin/views/dashboard/components/AdminProjectPermissions.vue b/web-app/src/admin/views/dashboard/components/AdminProjectPermissions.vue new file mode 100644 index 00000000..15a4545b --- /dev/null +++ b/web-app/src/admin/views/dashboard/components/AdminProjectPermissions.vue @@ -0,0 +1,184 @@ +# Copyright (C) 2020 Lutra Consulting Limited. All rights reserved. +# Do not distribute without the express permission of the author. + + + + + + diff --git a/web-app/src/admin/views/dashboard/components/ChangeStorageDialog.vue b/web-app/src/admin/views/dashboard/components/ChangeStorageDialog.vue new file mode 100644 index 00000000..72356fb6 --- /dev/null +++ b/web-app/src/admin/views/dashboard/components/ChangeStorageDialog.vue @@ -0,0 +1,85 @@ +# Copyright (C) 2021 Lutra Consulting Limited. All rights reserved. +# Do not distribute without the express permission of the author. 
+ + + + + diff --git a/web-app/src/admin/views/dashboard/components/OrganisationPermissions.vue b/web-app/src/admin/views/dashboard/components/OrganisationPermissions.vue new file mode 100644 index 00000000..1f9a5049 --- /dev/null +++ b/web-app/src/admin/views/dashboard/components/OrganisationPermissions.vue @@ -0,0 +1,125 @@ +# Copyright (C) 2020 Lutra Consulting Limited. All rights reserved. +# Do not distribute without the express permission of the author. + + + + + + diff --git a/web-app/src/admin/views/dashboard/components/ProjectsTable.vue b/web-app/src/admin/views/dashboard/components/ProjectsTable.vue new file mode 100644 index 00000000..e2e1f59f --- /dev/null +++ b/web-app/src/admin/views/dashboard/components/ProjectsTable.vue @@ -0,0 +1,501 @@ +# Copyright (C) 2018 Lutra Consulting Limited. All rights reserved. +# Do not distribute without the express permission of the author. + + + + + + diff --git a/web-app/src/admin/views/dashboard/components/RemovedProjectsTable.vue b/web-app/src/admin/views/dashboard/components/RemovedProjectsTable.vue new file mode 100644 index 00000000..a0a4a55b --- /dev/null +++ b/web-app/src/admin/views/dashboard/components/RemovedProjectsTable.vue @@ -0,0 +1,236 @@ +# Copyright (C) 2021 Lutra Consulting Limited. All rights reserved. +# Do not distribute without the express permission of the author. + + + + + + diff --git a/web-app/src/admin/views/dashboard/components/core/AppBar.vue b/web-app/src/admin/views/dashboard/components/core/AppBar.vue new file mode 100644 index 00000000..f68cca2e --- /dev/null +++ b/web-app/src/admin/views/dashboard/components/core/AppBar.vue @@ -0,0 +1,156 @@ + + + diff --git a/web-app/src/admin/views/dashboard/components/core/Drawer.vue b/web-app/src/admin/views/dashboard/components/core/Drawer.vue new file mode 100644 index 00000000..7da578f0 --- /dev/null +++ b/web-app/src/admin/views/dashboard/components/core/Drawer.vue @@ -0,0 +1,204 @@ + + + + + diff --git a/web-app/src/admin/views/dashboard/components/core/Footer.vue b/web-app/src/admin/views/dashboard/components/core/Footer.vue new file mode 100644 index 00000000..cdb7cbfe --- /dev/null +++ b/web-app/src/admin/views/dashboard/components/core/Footer.vue @@ -0,0 +1,78 @@ + + + + + diff --git a/web-app/src/admin/views/dashboard/components/core/Settings.vue b/web-app/src/admin/views/dashboard/components/core/Settings.vue new file mode 100644 index 00000000..93636da3 --- /dev/null +++ b/web-app/src/admin/views/dashboard/components/core/Settings.vue @@ -0,0 +1,263 @@ + + + + + diff --git a/web-app/src/admin/views/dashboard/components/core/View.vue b/web-app/src/admin/views/dashboard/components/core/View.vue new file mode 100644 index 00000000..4bf3e959 --- /dev/null +++ b/web-app/src/admin/views/dashboard/components/core/View.vue @@ -0,0 +1,17 @@ + + + diff --git a/web-app/src/admin/views/dashboard/maps/GoogleMaps.vue b/web-app/src/admin/views/dashboard/maps/GoogleMaps.vue new file mode 100644 index 00000000..5b02446f --- /dev/null +++ b/web-app/src/admin/views/dashboard/maps/GoogleMaps.vue @@ -0,0 +1,78 @@ +