From 6145c7692a132fe43d361b1ac0eee8291d33447f Mon Sep 17 00:00:00 2001 From: ciusji Date: Sat, 23 Apr 2022 11:54:34 +0800 Subject: [PATCH] Feature: add elixir note server --- .github/workflows/check-release.yml | 4 +- .github/workflows/codeql-analysis.yml | 4 +- .github/workflows/linuxjs-flaky-tests.yml | 4 +- .github/workflows/linuxjs-tests.yml | 4 +- .github/workflows/linuxtests.yml | 4 +- .github/workflows/macostests.yml | 4 +- .github/workflows/windowstests.yml | 4 +- README.md | 3 +- .../style/icons/jupyter/jupyter-favicon.svg | 19 +- .../style/icons/jupyter/jupyter.svg | 8 +- .../icons/jupyter/jupyterlab-wordmark.svg | 14 +- server/.babelrc | 3 + server/.eslintignore | 5 + server/.eslintrc.json | 13 + server/.git-blame-ignore-revs | 2 + server/.gitconfig | 2 + server/.gitignore | 46 + server/.gitmodules | 0 server/.mailmap | 149 + server/.pre-commit-config.yaml | 49 + server/.prettierignore | 1 + server/CHANGELOG.md | 0 server/CONTRIBUTING.rst | 0 server/COPYING.md | 0 server/MANIFEST.in | 32 + server/README.md | 50 + server/RELEASE.md | 48 + server/codecov.yml | 9 + server/docs/Makefile | 203 ++ server/docs/README.md | 3 + server/docs/autogen_config.py | 45 + server/docs/doc-requirements.txt | 12 + server/docs/environment.yml | 7 + server/docs/make.bat | 263 ++ server/docs/source/_static/.gitkeep | 0 .../source/_static/jupyter_server_logo.svg | 191 ++ server/docs/source/conf.py | 387 +++ .../docs/source/contributors/contributing.rst | 3 + server/docs/source/contributors/index.rst | 12 + .../source/contributors/team-meetings.rst | 21 + server/docs/source/developers/contents.rst | 289 ++ server/docs/source/developers/dependency.rst | 19 + server/docs/source/developers/extensions.rst | 553 ++++ server/docs/source/developers/index.rst | 16 + server/docs/source/developers/rest-api.rst | 7 + server/docs/source/developers/savehooks.rst | 84 + .../source/developers/websocket-protocols.rst | 155 + server/docs/source/index.rst | 46 + 
.../operators/configuring-extensions.rst | 59 + server/docs/source/operators/index.rst | 15 + .../source/operators/ipython_security.asc | 52 + .../operators/migrate-from-nbserver.rst | 36 + .../source/operators/multiple-extensions.rst | 89 + .../docs/source/operators/public-server.rst | 443 +++ server/docs/source/operators/security.rst | 376 +++ server/docs/source/other/faq.rst | 13 + server/docs/source/other/full-config.rst | 1390 ++++++++ server/docs/source/other/index.rst | 10 + server/docs/source/other/links.rst | 9 + server/docs/source/users/configuration.rst | 68 + server/docs/source/users/help.rst | 8 + server/docs/source/users/index.rst | 15 + server/docs/source/users/installation.rst | 19 + server/docs/source/users/launching.rst | 26 + server/examples/authorization/README.md | 84 + .../jupyter_nbclassic_readonly_config.py | 14 + .../jupyter_nbclassic_rw_config.py | 14 + .../authorization/jupyter_temporary_config.py | 14 + server/examples/simple/README.md | 204 ++ server/examples/simple/conftest.py | 1 + .../jupyter_server_config.d/simple_ext1.json | 7 + .../jupyter_server_config.d/simple_ext11.json | 7 + .../jupyter_server_config.d/simple_ext2.json | 7 + .../examples/simple/jupyter_server_config.py | 6 + .../simple/jupyter_simple_ext11_config.py | 1 + .../simple/jupyter_simple_ext1_config.py | 4 + .../simple/jupyter_simple_ext2_config.py | 1 + server/examples/simple/package.json | 18 + server/examples/simple/pyproject.toml | 3 + server/examples/simple/pytest.ini | 3 + server/examples/simple/setup.py | 58 + .../examples/simple/simple_ext1/__init__.py | 5 + .../examples/simple/simple_ext1/__main__.py | 4 + .../simple/simple_ext1/application.py | 61 + .../examples/simple/simple_ext1/handlers.py | 51 + .../simple/simple_ext1/static/bundle.js | 144 + .../simple/simple_ext1/static/favicon.ico | Bin 0 -> 32038 bytes .../simple/simple_ext1/static/home.html | 1 + .../simple/simple_ext1/static/index.d.ts | 1 + .../simple/simple_ext1/static/index.js | 5 + 
.../simple/simple_ext1/static/test.html | 1 + .../simple/simple_ext1/templates/error.html | 21 + .../simple/simple_ext1/templates/page.html | 20 + .../simple/simple_ext1/templates/simple1.html | 19 + .../simple_ext1/templates/typescript.html | 21 + .../examples/simple/simple_ext11/__init__.py | 5 + .../examples/simple/simple_ext11/__main__.py | 4 + .../simple/simple_ext11/application.py | 75 + .../examples/simple/simple_ext2/__init__.py | 7 + .../examples/simple/simple_ext2/__main__.py | 4 + .../simple/simple_ext2/application.py | 53 + .../examples/simple/simple_ext2/handlers.py | 34 + .../simple/simple_ext2/static/favicon.ico | Bin 0 -> 32038 bytes .../simple/simple_ext2/static/test.html | 1 + .../simple/simple_ext2/templates/error.html | 20 + .../simple/simple_ext2/templates/index.html | 1 + .../simple/simple_ext2/templates/page.html | 20 + .../simple_ext2/templates/simple_ext2.html | 1 + server/examples/simple/tests/test_handlers.py | 20 + server/examples/simple/webpack.config.js | 8 + server/jupyter_server/__init__.py | 29 + server/jupyter_server/__main__.py | 4 + server/jupyter_server/_sysinfo.py | 97 + server/jupyter_server/_tz.py | 49 + server/jupyter_server/_version.py | 6 + server/jupyter_server/auth/__init__.py | 3 + server/jupyter_server/auth/__main__.py | 55 + server/jupyter_server/auth/authorizer.py | 69 + server/jupyter_server/auth/decorator.py | 78 + server/jupyter_server/auth/login.py | 256 ++ server/jupyter_server/auth/logout.py | 18 + server/jupyter_server/auth/security.py | 178 ++ server/jupyter_server/auth/utils.py | 66 + server/jupyter_server/base/__init__.py | 0 server/jupyter_server/base/handlers.py | 987 ++++++ server/jupyter_server/base/zmqhandlers.py | 346 ++ server/jupyter_server/config_manager.py | 138 + server/jupyter_server/conftest.py | 31 + server/jupyter_server/extension/__init__.py | 0 .../jupyter_server/extension/application.py | 583 ++++ server/jupyter_server/extension/config.py | 33 + server/jupyter_server/extension/handler.py | 
119 + server/jupyter_server/extension/manager.py | 401 +++ .../extension/serverextension.py | 384 +++ server/jupyter_server/extension/utils.py | 103 + server/jupyter_server/files/__init__.py | 0 server/jupyter_server/files/handlers.py | 94 + server/jupyter_server/gateway/__init__.py | 0 .../jupyter_server/gateway/gateway_client.py | 438 +++ server/jupyter_server/gateway/handlers.py | 292 ++ server/jupyter_server/gateway/managers.py | 718 +++++ server/jupyter_server/i18n/README.md | 132 + server/jupyter_server/i18n/__init__.py | 101 + server/jupyter_server/i18n/babel_nbui.cfg | 4 + server/jupyter_server/i18n/babel_notebook.cfg | 2 + server/jupyter_server/i18n/nbjs.json | 11 + server/jupyter_server/i18n/nbui.pot | 731 +++++ server/jupyter_server/i18n/notebook.pot | 442 +++ .../i18n/zh_CN/LC_MESSAGES/nbui.po | 731 +++++ .../i18n/zh_CN/LC_MESSAGES/notebook.po | 446 +++ server/jupyter_server/kernelspecs/__init__.py | 0 server/jupyter_server/kernelspecs/handlers.py | 37 + server/jupyter_server/log.py | 56 + server/jupyter_server/nbconvert/__init__.py | 0 server/jupyter_server/nbconvert/handlers.py | 197 ++ server/jupyter_server/prometheus/__init__.py | 0 .../prometheus/log_functions.py | 24 + server/jupyter_server/prometheus/metrics.py | 37 + server/jupyter_server/pytest_plugin.py | 511 +++ server/jupyter_server/serverapp.py | 2827 +++++++++++++++++ server/jupyter_server/services/__init__.py | 0 .../jupyter_server/services/api/__init__.py | 0 server/jupyter_server/services/api/api.yaml | 857 +++++ .../jupyter_server/services/api/handlers.py | 63 + .../services/config/__init__.py | 1 + .../services/config/handlers.py | 45 + .../jupyter_server/services/config/manager.py | 68 + .../services/contents/__init__.py | 0 .../services/contents/checkpoints.py | 249 ++ .../services/contents/filecheckpoints.py | 329 ++ .../services/contents/fileio.py | 431 +++ .../services/contents/filemanager.py | 908 ++++++ .../services/contents/handlers.py | 340 ++ 
.../services/contents/largefilemanager.py | 150 + .../services/contents/manager.py | 875 +++++ .../services/kernels/__init__.py | 0 .../services/kernels/handlers.py | 785 +++++ .../services/kernels/kernelmanager.py | 659 ++++ .../services/kernelspecs/__init__.py | 0 .../services/kernelspecs/handlers.py | 109 + .../services/nbconvert/__init__.py | 0 .../services/nbconvert/handlers.py | 53 + .../services/security/__init__.py | 4 + .../services/security/handlers.py | 38 + .../services/sessions/__init__.py | 0 .../services/sessions/handlers.py | 195 ++ .../services/sessions/sessionmanager.py | 314 ++ server/jupyter_server/services/shutdown.py | 28 + server/jupyter_server/static/favicon.ico | Bin 0 -> 61209 bytes .../static/favicons/favicon-busy-1.ico | Bin 0 -> 61209 bytes .../static/favicons/favicon-busy-2.ico | Bin 0 -> 61209 bytes .../static/favicons/favicon-busy-3.ico | Bin 0 -> 61209 bytes .../static/favicons/favicon-file.ico | Bin 0 -> 1150 bytes .../static/favicons/favicon-notebook.ico | Bin 0 -> 1150 bytes .../static/favicons/favicon-terminal.ico | Bin 0 -> 1150 bytes .../static/favicons/favicon.ico | Bin 0 -> 61209 bytes server/jupyter_server/static/logo/logo.png | Bin 0 -> 61209 bytes server/jupyter_server/static/style/index.css | 91 + server/jupyter_server/templates/404.html | 4 + .../templates/browser-open.html | 18 + server/jupyter_server/templates/error.html | 32 + server/jupyter_server/templates/login.html | 117 + server/jupyter_server/templates/logout.html | 34 + server/jupyter_server/templates/main.html | 7 + server/jupyter_server/templates/page.html | 93 + server/jupyter_server/templates/view.html | 35 + server/jupyter_server/terminal/__init__.py | 51 + .../jupyter_server/terminal/api_handlers.py | 47 + server/jupyter_server/terminal/handlers.py | 50 + .../terminal/terminalmanager.py | 169 + server/jupyter_server/tests/__init__.py | 0 server/jupyter_server/tests/auth/__init__.py | 0 .../tests/auth/test_authorizer.py | 277 ++ 
.../jupyter_server/tests/auth/test_login.py | 95 + .../tests/auth/test_security.py | 31 + .../jupyter_server/tests/auth/test_utils.py | 37 + .../tests/extension/__init__.py | 0 .../tests/extension/conftest.py | 51 + .../extension/mockextensions/__init__.py | 18 + .../tests/extension/mockextensions/app.py | 55 + .../tests/extension/mockextensions/mock1.py | 10 + .../tests/extension/mockextensions/mock2.py | 10 + .../tests/extension/mockextensions/mock3.py | 6 + .../extension/mockextensions/mockext_both.py | 10 + .../extension/mockextensions/mockext_py.py | 10 + .../extension/mockextensions/mockext_sys.py | 10 + .../extension/mockextensions/mockext_user.py | 10 + .../extension/mockextensions/static/mock.txt | 1 + .../tests/extension/test_app.py | 158 + .../tests/extension/test_config.py | 60 + .../tests/extension/test_entrypoint.py | 15 + .../tests/extension/test_handler.py | 85 + .../tests/extension/test_launch.py | 111 + .../tests/extension/test_manager.py | 132 + .../tests/extension/test_serverextension.py | 106 + .../tests/extension/test_utils.py | 20 + .../tests/namespace-package-test/README.md | 3 + .../tests/namespace-package-test/setup.cfg | 5 + .../test_namespace/test_package/__init__.py | 0 .../tests/nbconvert/__init__.py | 0 .../tests/nbconvert/test_handlers.py | 150 + .../jupyter_server/tests/services/__init__.py | 0 .../tests/services/api/__init__.py | 0 .../tests/services/api/test_api.py | 23 + .../tests/services/config/__init__.py | 0 .../tests/services/config/test_api.py | 50 + .../tests/services/contents/__init__.py | 0 .../tests/services/contents/test_api.py | 802 +++++ .../tests/services/contents/test_config.py | 27 + .../tests/services/contents/test_fileio.py | 139 + .../contents/test_largefilemanager.py | 109 + .../tests/services/contents/test_manager.py | 669 ++++ .../tests/services/kernels/__init__.py | 0 .../tests/services/kernels/test_api.py | 277 ++ .../tests/services/kernels/test_config.py | 23 + .../tests/services/kernels/test_cull.py | 
127 + .../tests/services/kernelspecs/__init__.py | 0 .../tests/services/kernelspecs/test_api.py | 79 + .../tests/services/nbconvert/__init__.py | 0 .../tests/services/nbconvert/test_api.py | 14 + .../tests/services/sessions/__init__.py | 0 .../tests/services/sessions/test_api.py | 600 ++++ .../tests/services/sessions/test_manager.py | 365 +++ .../tests/test_config_manager.py | 50 + server/jupyter_server/tests/test_files.py | 134 + server/jupyter_server/tests/test_gateway.py | 411 +++ server/jupyter_server/tests/test_paths.py | 68 + server/jupyter_server/tests/test_serialize.py | 24 + server/jupyter_server/tests/test_serverapp.py | 338 ++ server/jupyter_server/tests/test_terminal.py | 179 ++ .../jupyter_server/tests/test_traittypes.py | 72 + server/jupyter_server/tests/test_utils.py | 63 + server/jupyter_server/tests/test_version.py | 51 + server/jupyter_server/tests/test_view.py | 60 + .../tests/unix_sockets/__init__.py | 0 .../tests/unix_sockets/conftest.py | 34 + .../tests/unix_sockets/test_api.py | 69 + .../test_serverapp_integration.py | 165 + server/jupyter_server/tests/utils.py | 41 + server/jupyter_server/traittypes.py | 224 ++ server/jupyter_server/transutils.py | 21 + server/jupyter_server/utils.py | 387 +++ server/jupyter_server/view/__init__.py | 0 server/jupyter_server/view/handlers.py | 37 + server/package-lock.json | 320 ++ server/package.json | 13 + server/pyproject.toml | 43 + server/readthedocs.yml | 11 + server/setup.cfg | 76 + server/setup.py | 11 + server/yarn.lock | 295 ++ 291 files changed, 33525 insertions(+), 43 deletions(-) create mode 100644 server/.babelrc create mode 100644 server/.eslintignore create mode 100644 server/.eslintrc.json create mode 100644 server/.git-blame-ignore-revs create mode 100644 server/.gitconfig create mode 100644 server/.gitignore create mode 100644 server/.gitmodules create mode 100644 server/.mailmap create mode 100644 server/.pre-commit-config.yaml create mode 100644 server/.prettierignore create mode 100644 
server/CHANGELOG.md create mode 100644 server/CONTRIBUTING.rst create mode 100644 server/COPYING.md create mode 100644 server/MANIFEST.in create mode 100644 server/README.md create mode 100644 server/RELEASE.md create mode 100644 server/codecov.yml create mode 100644 server/docs/Makefile create mode 100644 server/docs/README.md create mode 100644 server/docs/autogen_config.py create mode 100644 server/docs/doc-requirements.txt create mode 100644 server/docs/environment.yml create mode 100644 server/docs/make.bat create mode 100644 server/docs/source/_static/.gitkeep create mode 100644 server/docs/source/_static/jupyter_server_logo.svg create mode 100644 server/docs/source/conf.py create mode 100644 server/docs/source/contributors/contributing.rst create mode 100644 server/docs/source/contributors/index.rst create mode 100644 server/docs/source/contributors/team-meetings.rst create mode 100644 server/docs/source/developers/contents.rst create mode 100644 server/docs/source/developers/dependency.rst create mode 100644 server/docs/source/developers/extensions.rst create mode 100644 server/docs/source/developers/index.rst create mode 100644 server/docs/source/developers/rest-api.rst create mode 100644 server/docs/source/developers/savehooks.rst create mode 100644 server/docs/source/developers/websocket-protocols.rst create mode 100644 server/docs/source/index.rst create mode 100644 server/docs/source/operators/configuring-extensions.rst create mode 100644 server/docs/source/operators/index.rst create mode 100644 server/docs/source/operators/ipython_security.asc create mode 100644 server/docs/source/operators/migrate-from-nbserver.rst create mode 100644 server/docs/source/operators/multiple-extensions.rst create mode 100644 server/docs/source/operators/public-server.rst create mode 100644 server/docs/source/operators/security.rst create mode 100644 server/docs/source/other/faq.rst create mode 100644 server/docs/source/other/full-config.rst create mode 100644 
server/docs/source/other/index.rst create mode 100644 server/docs/source/other/links.rst create mode 100644 server/docs/source/users/configuration.rst create mode 100644 server/docs/source/users/help.rst create mode 100644 server/docs/source/users/index.rst create mode 100644 server/docs/source/users/installation.rst create mode 100644 server/docs/source/users/launching.rst create mode 100644 server/examples/authorization/README.md create mode 100644 server/examples/authorization/jupyter_nbclassic_readonly_config.py create mode 100644 server/examples/authorization/jupyter_nbclassic_rw_config.py create mode 100644 server/examples/authorization/jupyter_temporary_config.py create mode 100644 server/examples/simple/README.md create mode 100644 server/examples/simple/conftest.py create mode 100644 server/examples/simple/etc/jupyter/jupyter_server_config.d/simple_ext1.json create mode 100644 server/examples/simple/etc/jupyter/jupyter_server_config.d/simple_ext11.json create mode 100644 server/examples/simple/etc/jupyter/jupyter_server_config.d/simple_ext2.json create mode 100644 server/examples/simple/jupyter_server_config.py create mode 100644 server/examples/simple/jupyter_simple_ext11_config.py create mode 100644 server/examples/simple/jupyter_simple_ext1_config.py create mode 100644 server/examples/simple/jupyter_simple_ext2_config.py create mode 100644 server/examples/simple/package.json create mode 100644 server/examples/simple/pyproject.toml create mode 100644 server/examples/simple/pytest.ini create mode 100644 server/examples/simple/setup.py create mode 100644 server/examples/simple/simple_ext1/__init__.py create mode 100644 server/examples/simple/simple_ext1/__main__.py create mode 100644 server/examples/simple/simple_ext1/application.py create mode 100644 server/examples/simple/simple_ext1/handlers.py create mode 100644 server/examples/simple/simple_ext1/static/bundle.js create mode 100644 server/examples/simple/simple_ext1/static/favicon.ico create mode 
100644 server/examples/simple/simple_ext1/static/home.html create mode 100644 server/examples/simple/simple_ext1/static/index.d.ts create mode 100644 server/examples/simple/simple_ext1/static/index.js create mode 100644 server/examples/simple/simple_ext1/static/test.html create mode 100644 server/examples/simple/simple_ext1/templates/error.html create mode 100644 server/examples/simple/simple_ext1/templates/page.html create mode 100644 server/examples/simple/simple_ext1/templates/simple1.html create mode 100644 server/examples/simple/simple_ext1/templates/typescript.html create mode 100644 server/examples/simple/simple_ext11/__init__.py create mode 100644 server/examples/simple/simple_ext11/__main__.py create mode 100644 server/examples/simple/simple_ext11/application.py create mode 100644 server/examples/simple/simple_ext2/__init__.py create mode 100644 server/examples/simple/simple_ext2/__main__.py create mode 100644 server/examples/simple/simple_ext2/application.py create mode 100644 server/examples/simple/simple_ext2/handlers.py create mode 100644 server/examples/simple/simple_ext2/static/favicon.ico create mode 100644 server/examples/simple/simple_ext2/static/test.html create mode 100644 server/examples/simple/simple_ext2/templates/error.html create mode 100644 server/examples/simple/simple_ext2/templates/index.html create mode 100644 server/examples/simple/simple_ext2/templates/page.html create mode 100644 server/examples/simple/simple_ext2/templates/simple_ext2.html create mode 100644 server/examples/simple/tests/test_handlers.py create mode 100644 server/examples/simple/webpack.config.js create mode 100644 server/jupyter_server/__init__.py create mode 100644 server/jupyter_server/__main__.py create mode 100644 server/jupyter_server/_sysinfo.py create mode 100644 server/jupyter_server/_tz.py create mode 100644 server/jupyter_server/_version.py create mode 100644 server/jupyter_server/auth/__init__.py create mode 100644 server/jupyter_server/auth/__main__.py 
create mode 100644 server/jupyter_server/auth/authorizer.py create mode 100644 server/jupyter_server/auth/decorator.py create mode 100644 server/jupyter_server/auth/login.py create mode 100644 server/jupyter_server/auth/logout.py create mode 100644 server/jupyter_server/auth/security.py create mode 100644 server/jupyter_server/auth/utils.py create mode 100644 server/jupyter_server/base/__init__.py create mode 100644 server/jupyter_server/base/handlers.py create mode 100644 server/jupyter_server/base/zmqhandlers.py create mode 100644 server/jupyter_server/config_manager.py create mode 100644 server/jupyter_server/conftest.py create mode 100644 server/jupyter_server/extension/__init__.py create mode 100644 server/jupyter_server/extension/application.py create mode 100644 server/jupyter_server/extension/config.py create mode 100644 server/jupyter_server/extension/handler.py create mode 100644 server/jupyter_server/extension/manager.py create mode 100644 server/jupyter_server/extension/serverextension.py create mode 100644 server/jupyter_server/extension/utils.py create mode 100644 server/jupyter_server/files/__init__.py create mode 100644 server/jupyter_server/files/handlers.py create mode 100644 server/jupyter_server/gateway/__init__.py create mode 100644 server/jupyter_server/gateway/gateway_client.py create mode 100644 server/jupyter_server/gateway/handlers.py create mode 100644 server/jupyter_server/gateway/managers.py create mode 100644 server/jupyter_server/i18n/README.md create mode 100644 server/jupyter_server/i18n/__init__.py create mode 100644 server/jupyter_server/i18n/babel_nbui.cfg create mode 100644 server/jupyter_server/i18n/babel_notebook.cfg create mode 100644 server/jupyter_server/i18n/nbjs.json create mode 100644 server/jupyter_server/i18n/nbui.pot create mode 100644 server/jupyter_server/i18n/notebook.pot create mode 100644 server/jupyter_server/i18n/zh_CN/LC_MESSAGES/nbui.po create mode 100644 
server/jupyter_server/i18n/zh_CN/LC_MESSAGES/notebook.po create mode 100644 server/jupyter_server/kernelspecs/__init__.py create mode 100644 server/jupyter_server/kernelspecs/handlers.py create mode 100644 server/jupyter_server/log.py create mode 100644 server/jupyter_server/nbconvert/__init__.py create mode 100644 server/jupyter_server/nbconvert/handlers.py create mode 100644 server/jupyter_server/prometheus/__init__.py create mode 100644 server/jupyter_server/prometheus/log_functions.py create mode 100644 server/jupyter_server/prometheus/metrics.py create mode 100644 server/jupyter_server/pytest_plugin.py create mode 100644 server/jupyter_server/serverapp.py create mode 100644 server/jupyter_server/services/__init__.py create mode 100644 server/jupyter_server/services/api/__init__.py create mode 100644 server/jupyter_server/services/api/api.yaml create mode 100644 server/jupyter_server/services/api/handlers.py create mode 100644 server/jupyter_server/services/config/__init__.py create mode 100644 server/jupyter_server/services/config/handlers.py create mode 100644 server/jupyter_server/services/config/manager.py create mode 100644 server/jupyter_server/services/contents/__init__.py create mode 100644 server/jupyter_server/services/contents/checkpoints.py create mode 100644 server/jupyter_server/services/contents/filecheckpoints.py create mode 100644 server/jupyter_server/services/contents/fileio.py create mode 100644 server/jupyter_server/services/contents/filemanager.py create mode 100644 server/jupyter_server/services/contents/handlers.py create mode 100644 server/jupyter_server/services/contents/largefilemanager.py create mode 100644 server/jupyter_server/services/contents/manager.py create mode 100644 server/jupyter_server/services/kernels/__init__.py create mode 100644 server/jupyter_server/services/kernels/handlers.py create mode 100644 server/jupyter_server/services/kernels/kernelmanager.py create mode 100644 
server/jupyter_server/services/kernelspecs/__init__.py create mode 100644 server/jupyter_server/services/kernelspecs/handlers.py create mode 100644 server/jupyter_server/services/nbconvert/__init__.py create mode 100644 server/jupyter_server/services/nbconvert/handlers.py create mode 100644 server/jupyter_server/services/security/__init__.py create mode 100644 server/jupyter_server/services/security/handlers.py create mode 100644 server/jupyter_server/services/sessions/__init__.py create mode 100644 server/jupyter_server/services/sessions/handlers.py create mode 100644 server/jupyter_server/services/sessions/sessionmanager.py create mode 100644 server/jupyter_server/services/shutdown.py create mode 100644 server/jupyter_server/static/favicon.ico create mode 100644 server/jupyter_server/static/favicons/favicon-busy-1.ico create mode 100644 server/jupyter_server/static/favicons/favicon-busy-2.ico create mode 100644 server/jupyter_server/static/favicons/favicon-busy-3.ico create mode 100644 server/jupyter_server/static/favicons/favicon-file.ico create mode 100644 server/jupyter_server/static/favicons/favicon-notebook.ico create mode 100644 server/jupyter_server/static/favicons/favicon-terminal.ico create mode 100644 server/jupyter_server/static/favicons/favicon.ico create mode 100644 server/jupyter_server/static/logo/logo.png create mode 100644 server/jupyter_server/static/style/index.css create mode 100644 server/jupyter_server/templates/404.html create mode 100644 server/jupyter_server/templates/browser-open.html create mode 100644 server/jupyter_server/templates/error.html create mode 100644 server/jupyter_server/templates/login.html create mode 100644 server/jupyter_server/templates/logout.html create mode 100644 server/jupyter_server/templates/main.html create mode 100644 server/jupyter_server/templates/page.html create mode 100644 server/jupyter_server/templates/view.html create mode 100644 server/jupyter_server/terminal/__init__.py create mode 100644 
server/jupyter_server/terminal/api_handlers.py create mode 100644 server/jupyter_server/terminal/handlers.py create mode 100644 server/jupyter_server/terminal/terminalmanager.py create mode 100644 server/jupyter_server/tests/__init__.py create mode 100644 server/jupyter_server/tests/auth/__init__.py create mode 100644 server/jupyter_server/tests/auth/test_authorizer.py create mode 100644 server/jupyter_server/tests/auth/test_login.py create mode 100644 server/jupyter_server/tests/auth/test_security.py create mode 100644 server/jupyter_server/tests/auth/test_utils.py create mode 100644 server/jupyter_server/tests/extension/__init__.py create mode 100644 server/jupyter_server/tests/extension/conftest.py create mode 100644 server/jupyter_server/tests/extension/mockextensions/__init__.py create mode 100644 server/jupyter_server/tests/extension/mockextensions/app.py create mode 100644 server/jupyter_server/tests/extension/mockextensions/mock1.py create mode 100644 server/jupyter_server/tests/extension/mockextensions/mock2.py create mode 100644 server/jupyter_server/tests/extension/mockextensions/mock3.py create mode 100644 server/jupyter_server/tests/extension/mockextensions/mockext_both.py create mode 100644 server/jupyter_server/tests/extension/mockextensions/mockext_py.py create mode 100644 server/jupyter_server/tests/extension/mockextensions/mockext_sys.py create mode 100644 server/jupyter_server/tests/extension/mockextensions/mockext_user.py create mode 100644 server/jupyter_server/tests/extension/mockextensions/static/mock.txt create mode 100644 server/jupyter_server/tests/extension/test_app.py create mode 100644 server/jupyter_server/tests/extension/test_config.py create mode 100644 server/jupyter_server/tests/extension/test_entrypoint.py create mode 100644 server/jupyter_server/tests/extension/test_handler.py create mode 100644 server/jupyter_server/tests/extension/test_launch.py create mode 100644 server/jupyter_server/tests/extension/test_manager.py create 
mode 100644 server/jupyter_server/tests/extension/test_serverextension.py create mode 100644 server/jupyter_server/tests/extension/test_utils.py create mode 100644 server/jupyter_server/tests/namespace-package-test/README.md create mode 100644 server/jupyter_server/tests/namespace-package-test/setup.cfg create mode 100644 server/jupyter_server/tests/namespace-package-test/test_namespace/test_package/__init__.py create mode 100644 server/jupyter_server/tests/nbconvert/__init__.py create mode 100644 server/jupyter_server/tests/nbconvert/test_handlers.py create mode 100644 server/jupyter_server/tests/services/__init__.py create mode 100644 server/jupyter_server/tests/services/api/__init__.py create mode 100644 server/jupyter_server/tests/services/api/test_api.py create mode 100644 server/jupyter_server/tests/services/config/__init__.py create mode 100644 server/jupyter_server/tests/services/config/test_api.py create mode 100644 server/jupyter_server/tests/services/contents/__init__.py create mode 100644 server/jupyter_server/tests/services/contents/test_api.py create mode 100644 server/jupyter_server/tests/services/contents/test_config.py create mode 100644 server/jupyter_server/tests/services/contents/test_fileio.py create mode 100644 server/jupyter_server/tests/services/contents/test_largefilemanager.py create mode 100644 server/jupyter_server/tests/services/contents/test_manager.py create mode 100644 server/jupyter_server/tests/services/kernels/__init__.py create mode 100644 server/jupyter_server/tests/services/kernels/test_api.py create mode 100644 server/jupyter_server/tests/services/kernels/test_config.py create mode 100644 server/jupyter_server/tests/services/kernels/test_cull.py create mode 100644 server/jupyter_server/tests/services/kernelspecs/__init__.py create mode 100644 server/jupyter_server/tests/services/kernelspecs/test_api.py create mode 100644 server/jupyter_server/tests/services/nbconvert/__init__.py create mode 100644 
server/jupyter_server/tests/services/nbconvert/test_api.py create mode 100644 server/jupyter_server/tests/services/sessions/__init__.py create mode 100644 server/jupyter_server/tests/services/sessions/test_api.py create mode 100644 server/jupyter_server/tests/services/sessions/test_manager.py create mode 100644 server/jupyter_server/tests/test_config_manager.py create mode 100644 server/jupyter_server/tests/test_files.py create mode 100644 server/jupyter_server/tests/test_gateway.py create mode 100644 server/jupyter_server/tests/test_paths.py create mode 100644 server/jupyter_server/tests/test_serialize.py create mode 100644 server/jupyter_server/tests/test_serverapp.py create mode 100644 server/jupyter_server/tests/test_terminal.py create mode 100644 server/jupyter_server/tests/test_traittypes.py create mode 100644 server/jupyter_server/tests/test_utils.py create mode 100644 server/jupyter_server/tests/test_version.py create mode 100644 server/jupyter_server/tests/test_view.py create mode 100644 server/jupyter_server/tests/unix_sockets/__init__.py create mode 100644 server/jupyter_server/tests/unix_sockets/conftest.py create mode 100644 server/jupyter_server/tests/unix_sockets/test_api.py create mode 100644 server/jupyter_server/tests/unix_sockets/test_serverapp_integration.py create mode 100644 server/jupyter_server/tests/utils.py create mode 100644 server/jupyter_server/traittypes.py create mode 100644 server/jupyter_server/transutils.py create mode 100644 server/jupyter_server/utils.py create mode 100644 server/jupyter_server/view/__init__.py create mode 100644 server/jupyter_server/view/handlers.py create mode 100644 server/package-lock.json create mode 100644 server/package.json create mode 100644 server/pyproject.toml create mode 100644 server/readthedocs.yml create mode 100644 server/setup.cfg create mode 100644 server/setup.py create mode 100644 server/yarn.lock diff --git a/.github/workflows/check-release.yml b/.github/workflows/check-release.yml index 
71b6b74..3f9b0a4 100644 --- a/.github/workflows/check-release.yml +++ b/.github/workflows/check-release.yml @@ -2,9 +2,9 @@ name: Check Release on: push: - branches: [3.2.x] + branches: [3.3.x] pull_request: - branches: [3.2.x] + branches: [3.3.x] release: types: [published] diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index e8f95c3..c678181 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -7,10 +7,10 @@ name: "CodeQL" on: push: - branches: [3.2.x] + branches: [3.3.x] pull_request: # The branches below must be a subset of the branches above - branches: [3.2.x] + branches: [3.3.x] schedule: - cron: '0 8 * * 3' diff --git a/.github/workflows/linuxjs-flaky-tests.yml b/.github/workflows/linuxjs-flaky-tests.yml index 7d26e9a..97d13b6 100644 --- a/.github/workflows/linuxjs-flaky-tests.yml +++ b/.github/workflows/linuxjs-flaky-tests.yml @@ -2,9 +2,9 @@ name: Linux JS Flaky Tests on: push: - branches: [3.2.x] + branches: [3.3.x] pull_request: - branches: [3.2.x] + branches: [3.3.x] release: types: [published] diff --git a/.github/workflows/linuxjs-tests.yml b/.github/workflows/linuxjs-tests.yml index 07c0eb2..823e3c3 100644 --- a/.github/workflows/linuxjs-tests.yml +++ b/.github/workflows/linuxjs-tests.yml @@ -2,9 +2,9 @@ name: Linux JS Tests on: push: - branches: [3.2.x] + branches: [3.3.x] pull_request: - branches: [3.2.x] + branches: [3.3.x] release: types: [published] diff --git a/.github/workflows/linuxtests.yml b/.github/workflows/linuxtests.yml index d68ea6b..c1826f9 100644 --- a/.github/workflows/linuxtests.yml +++ b/.github/workflows/linuxtests.yml @@ -2,9 +2,9 @@ name: Linux Tests on: push: - branches: [3.2.x] + branches: [3.3.x] pull_request: - branches: [3.2.x] + branches: [3.3.x] release: types: [published] diff --git a/.github/workflows/macostests.yml b/.github/workflows/macostests.yml index 4f50f5f..a71930b 100644 --- a/.github/workflows/macostests.yml +++ 
b/.github/workflows/macostests.yml @@ -2,9 +2,9 @@ name: macOS Tests on: push: - branches: [3.2.x] + branches: [3.3.x] pull_request: - branches: [3.2.x] + branches: [3.3.x] release: types: [published] diff --git a/.github/workflows/windowstests.yml b/.github/workflows/windowstests.yml index 1b036c1..1960adc 100644 --- a/.github/workflows/windowstests.yml +++ b/.github/workflows/windowstests.yml @@ -2,9 +2,9 @@ name: Windows Tests on: push: - branches: [3.2.x] + branches: [3.3.x] pull_request: - branches: [3.2.x] + branches: [3.3.x] release: types: [published] diff --git a/README.md b/README.md index a3b19aa..fe53373 100644 --- a/README.md +++ b/README.md @@ -68,8 +68,7 @@ jupyter lab build # Build the app dir assets (optional) Start up JupyterLab using: ```bash -mkdir -p /data/notebooks -jupyter lab --dev-mode --watch --allow-root --no-browser --notebook-dir=/data/notebooks --ip=0.0.0.0 +jupyter lab --dev-mode --watch --allow-root --no-browser --ip=0.0.0.0 ``` JupyterLab will open automatically in the browser. See the [documentation](http://jupyterlab.readthedocs.io/en/3.3.x/getting_started/starting.html) for additional details. 
diff --git a/packages/ui-components/style/icons/jupyter/jupyter-favicon.svg b/packages/ui-components/style/icons/jupyter/jupyter-favicon.svg index 45c2522..6f911d9 100644 --- a/packages/ui-components/style/icons/jupyter/jupyter-favicon.svg +++ b/packages/ui-components/style/icons/jupyter/jupyter-favicon.svg @@ -1,16 +1,9 @@ - - notebook - - - - - - - - - + + elixir-note-1 + + + + \ No newline at end of file diff --git a/packages/ui-components/style/icons/jupyter/jupyter.svg b/packages/ui-components/style/icons/jupyter/jupyter.svg index aee1c00..b69ac38 100644 --- a/packages/ui-components/style/icons/jupyter/jupyter.svg +++ b/packages/ui-components/style/icons/jupyter/jupyter.svg @@ -1,8 +1,8 @@ - - ElixirNote + + elixir-note-architecture - - + + \ No newline at end of file diff --git a/packages/ui-components/style/icons/jupyter/jupyterlab-wordmark.svg b/packages/ui-components/style/icons/jupyter/jupyterlab-wordmark.svg index d57f684..049f052 100644 --- a/packages/ui-components/style/icons/jupyter/jupyterlab-wordmark.svg +++ b/packages/ui-components/style/icons/jupyter/jupyterlab-wordmark.svg @@ -1,12 +1,6 @@ - - - Group - - - - - - - + + Artboard + + \ No newline at end of file diff --git a/server/.babelrc b/server/.babelrc new file mode 100644 index 0000000..c13c5f6 --- /dev/null +++ b/server/.babelrc @@ -0,0 +1,3 @@ +{ + "presets": ["es2015"] +} diff --git a/server/.eslintignore b/server/.eslintignore new file mode 100644 index 0000000..db228f0 --- /dev/null +++ b/server/.eslintignore @@ -0,0 +1,5 @@ +*.min.js +*components* +*node_modules* +*built* +*build* diff --git a/server/.eslintrc.json b/server/.eslintrc.json new file mode 100644 index 0000000..3b41a5a --- /dev/null +++ b/server/.eslintrc.json @@ -0,0 +1,13 @@ +{ + "parserOptions": { + "ecmaVersion": 6, + "sourceType": "module" + }, + "rules": { + "semi": 1, + "no-cond-assign": 2, + "no-debugger": 2, + "comma-dangle": 0, + "no-unreachable": 2 + } +} diff --git a/server/.git-blame-ignore-revs
b/server/.git-blame-ignore-revs new file mode 100644 index 0000000..ef526e8 --- /dev/null +++ b/server/.git-blame-ignore-revs @@ -0,0 +1,2 @@ +# Initial pre-commit reformat +42fe3bb4188a1fbd1810674776e7855cd529b8fc diff --git a/server/.gitconfig b/server/.gitconfig new file mode 100644 index 0000000..0559a09 --- /dev/null +++ b/server/.gitconfig @@ -0,0 +1,2 @@ +[blame] + ignoreRevsFile = .git-blame-ignore-revs diff --git a/server/.gitignore b/server/.gitignore new file mode 100644 index 0000000..8d26500 --- /dev/null +++ b/server/.gitignore @@ -0,0 +1,46 @@ +MANIFEST +build +dist +_build +docs/man/*.gz +docs/source/api/generated +docs/source/config.rst +docs/gh-pages +jupyter_server/i18n/*/LC_MESSAGES/*.mo +jupyter_server/i18n/*/LC_MESSAGES/nbjs.json +jupyter_server/static/style/*.min.css* +node_modules +*.py[co] +__pycache__ +*.egg-info +*~ +*.bak +.ipynb_checkpoints +.tox +.DS_Store +\#*# +.#* +.coverage* +.pytest_cache +src + +*.swp +*.map +Read the Docs +config.rst + +/.project +/.pydevproject + +# copied changelog file +docs/source/other/changelog.md + +# jetbrains ide stuff +*.iml +.idea/ + +# vscode ide stuff +*.code-workspace +.history +.vscode/* +!.vscode/*.template diff --git a/server/.gitmodules b/server/.gitmodules new file mode 100644 index 0000000..e69de29 diff --git a/server/.mailmap b/server/.mailmap new file mode 100644 index 0000000..bd6544e --- /dev/null +++ b/server/.mailmap @@ -0,0 +1,149 @@ +A. J. Holyoake ajholyoake +Aaron Culich Aaron Culich +Aron Ahmadia ahmadia +Benjamin Ragan-Kelley +Benjamin Ragan-Kelley Min RK +Benjamin Ragan-Kelley MinRK +Barry Wark Barry Wark +Ben Edwards Ben Edwards +Bradley M. Froehle Bradley M. Froehle +Bradley M. Froehle Bradley Froehle +Brandon Parsons Brandon Parsons +Brian E. Granger Brian Granger +Brian E. Granger Brian Granger <> +Brian E. Granger bgranger <> +Brian E. 
Granger bgranger +Christoph Gohlke cgohlke +Cyrille Rossant rossant +Damián Avila damianavila +Damián Avila damianavila +Damon Allen damontallen +Darren Dale darren.dale <> +Darren Dale Darren Dale <> +Dav Clark Dav Clark <> +Dav Clark Dav Clark +David Hirschfeld dhirschfeld +David P. Sanders David P. Sanders +David Warde-Farley David Warde-Farley <> +Doug Blank Doug Blank +Eugene Van den Bulke Eugene Van den Bulke +Evan Patterson +Evan Patterson +Evan Patterson +Evan Patterson +Evan Patterson epatters +Evan Patterson epatters +Ernie French Ernie French +Ernie French ernie french +Ernie French ernop +Fernando Perez +Fernando Perez Fernando Perez +Fernando Perez fperez <> +Fernando Perez fptest <> +Fernando Perez fptest1 <> +Fernando Perez Fernando Perez +Fernando Perez Fernando Perez <> +Fernando Perez Fernando Perez +Frank Murphy Frank Murphy +Gabriel Becker gmbecker +Gael Varoquaux gael.varoquaux <> +Gael Varoquaux gvaroquaux +Gael Varoquaux Gael Varoquaux <> +Ingolf Becker watercrossing +Jake Vanderplas Jake Vanderplas +Jakob Gager jakobgager +Jakob Gager jakobgager +Jakob Gager jakobgager +Jason Grout +Jason Grout +Jason Gors jason gors +Jason Gors jgors +Jens Hedegaard Nielsen Jens Hedegaard Nielsen +Jens Hedegaard Nielsen Jens H Nielsen +Jens Hedegaard Nielsen Jens H. 
Nielsen +Jez Ng Jez Ng +Jonathan Frederic Jonathan Frederic +Jonathan Frederic Jonathan Frederic +Jonathan Frederic Jonathan Frederic +Jonathan Frederic jon +Jonathan Frederic U-Jon-PC\Jon +Jonathan March Jonathan March +Jonathan March jdmarch +Jörgen Stenarson Jörgen Stenarson +Jörgen Stenarson Jorgen Stenarson +Jörgen Stenarson Jorgen Stenarson <> +Jörgen Stenarson jstenar +Jörgen Stenarson jstenar <> +Jörgen Stenarson Jörgen Stenarson +Juergen Hasch juhasch +Juergen Hasch juhasch +Julia Evans Julia Evans +Kester Tong KesterTong +Kyle Kelley Kyle Kelley +Kyle Kelley rgbkrk +Laurent Dufréchou +Laurent Dufréchou +Laurent Dufréchou laurent dufrechou <> +Laurent Dufréchou laurent.dufrechou <> +Laurent Dufréchou Laurent Dufrechou <> +Laurent Dufréchou laurent.dufrechou@gmail.com <> +Laurent Dufréchou ldufrechou +Lorena Pantano Lorena +Luis Pedro Coelho Luis Pedro Coelho +Marc Molla marcmolla +Martín Gaitán Martín Gaitán +Matthias Bussonnier Matthias BUSSONNIER +Matthias Bussonnier Bussonnier Matthias +Matthias Bussonnier Matthias BUSSONNIER +Matthias Bussonnier Matthias Bussonnier +Michael Droettboom Michael Droettboom +Nicholas Bollweg Nicholas Bollweg (Nick) +Nicolas Rougier +Nikolay Koldunov Nikolay Koldunov +Omar Andrés Zapata Mesa Omar Andres Zapata Mesa +Omar Andrés Zapata Mesa Omar Andres Zapata Mesa +Pankaj Pandey Pankaj Pandey +Pascal Schetelat pascal-schetelat +Paul Ivanov Paul Ivanov +Pauli Virtanen Pauli Virtanen <> +Pauli Virtanen Pauli Virtanen +Pierre Gerold Pierre Gerold +Pietro Berkes Pietro Berkes +Piti Ongmongkolkul piti118 +Prabhu Ramachandran Prabhu Ramachandran <> +Puneeth Chaganti Puneeth Chaganti +Robert Kern rkern <> +Robert Kern Robert Kern +Robert Kern Robert Kern +Robert Kern Robert Kern <> +Robert Marchman Robert Marchman +Satrajit Ghosh Satrajit Ghosh +Satrajit Ghosh Satrajit Ghosh +Scott Sanderson Scott Sanderson +smithj1 smithj1 +smithj1 smithj1 +Steven Johnson stevenJohnson +Steven Silvester blink1073 +S. 
Weber s8weber +Stefan van der Walt Stefan van der Walt +Silvia Vinyes Silvia +Silvia Vinyes silviav12 +Sylvain Corlay +Sylvain Corlay sylvain.corlay +Ted Drain TD22057 +Théophile Studer Théophile Studer +Thomas Kluyver Thomas +Thomas Spura Thomas Spura +Timo Paulssen timo +vds vds2212 +vds vds +Ville M. Vainio +Ville M. Vainio ville +Ville M. Vainio ville +Ville M. Vainio vivainio <> +Ville M. Vainio Ville M. Vainio +Ville M. Vainio Ville M. Vainio +Walter Doerwald walter.doerwald <> +Walter Doerwald Walter Doerwald <> +W. Trevor King W. Trevor King +Yoval P. y-p diff --git a/server/.pre-commit-config.yaml b/server/.pre-commit-config.yaml new file mode 100644 index 0000000..b221b5b --- /dev/null +++ b/server/.pre-commit-config.yaml @@ -0,0 +1,49 @@ +repos: + - repo: https://github.com/asottile/reorder_python_imports + rev: v2.7.1 + hooks: + - id: reorder-python-imports + + - repo: https://github.com/psf/black + rev: 22.1.0 + hooks: + - id: black + args: ["--line-length", "100"] + + - repo: https://github.com/pre-commit/mirrors-prettier + rev: v2.5.1 + hooks: + - id: prettier + + - repo: https://gitlab.com/pycqa/flake8 + rev: 4.0.1 + hooks: + - id: flake8 + + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.1.0 + hooks: + - id: end-of-file-fixer + - id: check-case-conflict + - id: check-executables-have-shebangs + - id: requirements-txt-fixer + - id: check-added-large-files + - id: check-case-conflict + - id: check-toml + - id: check-yaml + - id: debug-statements + - id: forbid-new-submodules + + - repo: https://github.com/pre-commit/mirrors-eslint + rev: v8.8.0 + hooks: + - id: eslint + + - repo: https://github.com/sirosen/check-jsonschema + rev: 0.10.2 + hooks: + - id: check-jsonschema + name: "Check GitHub Workflows" + files: ^\.github/workflows/ + types: [yaml] + args: ["--schemafile", "https://json.schemastore.org/github-workflow"] diff --git a/server/.prettierignore b/server/.prettierignore new file mode 100644 index 0000000..b0e34ed --- 
/dev/null +++ b/server/.prettierignore @@ -0,0 +1 @@ +**/templates/*.html diff --git a/server/CHANGELOG.md b/server/CHANGELOG.md new file mode 100644 index 0000000..e69de29 diff --git a/server/CONTRIBUTING.rst b/server/CONTRIBUTING.rst new file mode 100644 index 0000000..e69de29 diff --git a/server/COPYING.md b/server/COPYING.md new file mode 100644 index 0000000..e69de29 diff --git a/server/MANIFEST.in b/server/MANIFEST.in new file mode 100644 index 0000000..70ccbaa --- /dev/null +++ b/server/MANIFEST.in @@ -0,0 +1,32 @@ +include COPYING.md +include CONTRIBUTING.rst +include README.md +include RELEASE.md +include CHANGELOG.md +include setupbase.py +include package.json + +# include everything in package_data +recursive-include jupyter_server * + +# Documentation +graft docs +exclude docs/\#* + +# Examples +graft examples + +# docs subdirs we want to skip +prune docs/build +prune docs/gh-pages +prune docs/dist + +# Patterns to exclude from any directory +global-exclude *~ +global-exclude *.pyc +global-exclude *.pyo +prune .git +prune **/.ipynb_checkpoints +prune **/.pytest_cache +prune **/.coverage +prune **/.pytest_cache diff --git a/server/README.md b/server/README.md new file mode 100644 index 0000000..4d4f824 --- /dev/null +++ b/server/README.md @@ -0,0 +1,50 @@ + +# ElixirNote Server + +[![Build Status](https://github.com/jupyter/jupyter_server/workflows/CI/badge.svg?query=branch%3Amain++)](https://github.com/jupyter-server/jupyter_server/actions?query=branch%3Amain++) +[![Documentation Status](https://readthedocs.org/projects/jupyter-server/badge/?version=latest)](http://jupyter-server.readthedocs.io/en/latest/?badge=latest) + +The Jupyter Server provides the backend (i.e. the core services, APIs, and REST endpoints) for Jupyter web applications like Jupyter notebook, JupyterLab, and Voila. + +For more information, read our [documentation here](http://jupyter-server.readthedocs.io/en/latest/?badge=latest). 
+ +## Installation and Basic usage + +To install the latest release locally, make sure you have +[pip installed](https://pip.readthedocs.io/en/stable/installing/) and run: + + pip install jupyter_server + +Jupyter Server currently supports Python>=3.6 on Linux, OSX and Windows. + +### Versioning and Branches + +If Jupyter Server is a dependency of your project/application, it is important that you pin it to a version that works for your application. Currently, Jupyter Server only has minor and patch versions. Different minor versions likely include API-changes while patch versions do not change API. + +When a new minor version is released on PyPI, a branch for that version will be created in this repository, and the version of the main branch will be bumped to the next minor version number. That way, the main branch always reflects the latest un-released version. + +To see the changes between releases, checkout the [CHANGELOG](https://github.com/jupyter/jupyter_server/blob/main/CHANGELOG.md). + +## Usage - Running Jupyter Server + +### Running in a local installation + +Launch with: + + jupyter server + +### Testing + +See [CONTRIBUTING](https://github.com/jupyter-server/jupyter_server/blob/main/CONTRIBUTING.rst#running-tests). + +## Contributing + +If you are interested in contributing to the project, see [`CONTRIBUTING.rst`](CONTRIBUTING.rst). + +## Team Meetings and Roadmap + +- When: Thursdays [8:00am, Pacific time](https://www.thetimezoneconverter.com/?t=8%3A00%20am&tz=San%20Francisco&) +- Where: [Jovyan Zoom](https://zoom.us/my/jovyan?pwd=c0JZTHlNdS9Sek9vdzR3aTJ4SzFTQT09) +- What: [Meeting notes](https://github.com/jupyter-server/team-compass/issues/4) + +See our tentative [roadmap here](https://github.com/jupyter/jupyter_server/issues/127). 
diff --git a/server/RELEASE.md b/server/RELEASE.md new file mode 100644 index 0000000..a785a28 --- /dev/null +++ b/server/RELEASE.md @@ -0,0 +1,48 @@ +# Making a Jupyter Server Release + +## Using `jupyter_releaser` + +The recommended way to make a release is to use [`jupyter_releaser`](https://github.com/jupyter-server/jupyter_releaser#checklist-for-adoption). + +## Manual Release + +To create a manual release, perform the following steps: + +### Set up + +```bash +pip install tbump twine build +git pull origin $(git branch --show-current) +git clean -dffx +``` + +### Update the version and apply the tag + +```bash +echo "Enter new version" +read script_version +tbump ${script_version} +``` + +### Build the artifacts + +```bash +rm -rf dist +python -m build . +``` + +### Update the version back to dev + +```bash +echo "Enter dev version" +read dev_version +tbump ${dev_version} --no-tag +git push origin $(git branch --show-current) +``` + +### Publish the artifacts to pypi + +```bash +twine check dist/* +twine upload dist/* +``` diff --git a/server/codecov.yml b/server/codecov.yml new file mode 100644 index 0000000..eb9b9df --- /dev/null +++ b/server/codecov.yml @@ -0,0 +1,9 @@ +coverage: + status: + project: + default: + target: auto + threshold: 10 + patch: + default: + target: 0% diff --git a/server/docs/Makefile b/server/docs/Makefile new file mode 100644 index 0000000..02cb617 --- /dev/null +++ b/server/docs/Makefile @@ -0,0 +1,203 @@ +# Makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +PAPER = +BUILDDIR = build + +# User-friendly check for sphinx-build +ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) +$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. 
Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) +endif + +# Internal variables. +PAPEROPT_a4 = -D latex_paper_size=a4 +PAPEROPT_letter = -D latex_paper_size=letter +ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source +# the i18n builder cannot share the environment and doctrees with the others +I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source + +.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest coverage spelling gettext + +help: + @echo "Please use \`make ' where is one of" + @echo " html to make standalone HTML files" + @echo " dirhtml to make HTML files named index.html in directories" + @echo " singlehtml to make a single large HTML file" + @echo " pickle to make pickle files" + @echo " json to make JSON files" + @echo " htmlhelp to make HTML files and a HTML help project" + @echo " qthelp to make HTML files and a qthelp project" + @echo " applehelp to make an Apple Help Book" + @echo " devhelp to make HTML files and a Devhelp project" + @echo " epub to make an epub" + @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" + @echo " latexpdf to make LaTeX files and run them through pdflatex" + @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" + @echo " text to make text files" + @echo " man to make manual pages" + @echo " texinfo to make Texinfo files" + @echo " info to make Texinfo files and run them through makeinfo" + @echo " gettext to make PO message catalogs" + @echo " changes to make an overview of all changed/added/deprecated items" + @echo " xml to make Docutils-native XML files" + @echo " pseudoxml to make pseudoxml-XML files for display purposes" + @echo " linkcheck to check all external links for integrity" + @echo " doctest to run all doctests embedded in the documentation (if enabled)" 
+ @echo " coverage to run coverage check of the documentation (if enabled)" + @echo " spelling to spell check the documentation" + +clean: + rm -rf $(BUILDDIR)/* + rm -rf source/config.rst + +html: source/config.rst + $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." + +source/config.rst: + python3 autogen_config.py + @echo "Created docs for config options" + +dirhtml: + $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." + +singlehtml: + $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml + @echo + @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." + +pickle: + $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle + @echo + @echo "Build finished; now you can process the pickle files." + +json: + $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json + @echo + @echo "Build finished; now you can process the JSON files." + +htmlhelp: + $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp + @echo + @echo "Build finished; now you can run HTML Help Workshop with the" \ + ".hhp project file in $(BUILDDIR)/htmlhelp." + +qthelp: + $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp + @echo + @echo "Build finished; now you can run "qcollectiongenerator" with the" \ + ".qhcp project file in $(BUILDDIR)/qthelp, like this:" + @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/JupyterNotebook.qhcp" + @echo "To view the help file:" + @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/JupyterNotebook.qhc" + +applehelp: + $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp + @echo + @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." + @echo "N.B. You won't be able to view it unless you put it in" \ + "~/Library/Documentation/Help or install it in your application" \ + "bundle." 
+ +devhelp: + $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp + @echo + @echo "Build finished." + @echo "To view the help file:" + @echo "# mkdir -p $$HOME/.local/share/devhelp/JupyterNotebook" + @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/JupyterNotebook" + @echo "# devhelp" + +epub: + $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub + @echo + @echo "Build finished. The epub file is in $(BUILDDIR)/epub." + +latex: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo + @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." + @echo "Run \`make' in that directory to run these through (pdf)latex" \ + "(use \`make latexpdf' here to do that automatically)." + +latexpdf: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through pdflatex..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +latexpdfja: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through platex and dvipdfmx..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +text: + $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text + @echo + @echo "Build finished. The text files are in $(BUILDDIR)/text." + +man: + $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man + @echo + @echo "Build finished. The manual pages are in $(BUILDDIR)/man." + +texinfo: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo + @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." + @echo "Run \`make' in that directory to run these through makeinfo" \ + "(use \`make info' here to do that automatically)." + +info: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo "Running Texinfo files through makeinfo..." 
+ make -C $(BUILDDIR)/texinfo info + @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." + +gettext: + $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale + @echo + @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." + +changes: + $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes + @echo + @echo "The overview file is in $(BUILDDIR)/changes." + +linkcheck: + $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck + @echo + @echo "Link check complete; look for any errors in the above output " \ + "or in $(BUILDDIR)/linkcheck/output.txt." + +spelling: + $(SPHINXBUILD) -b spelling $(ALLSPHINXOPTS) $(BUILDDIR)/spelling + @echo "Spell check complete; look for any errors in the above output " \ + "or in $(BUILDDIR)/spelling/output.txt." + +doctest: + $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest + @echo "Testing of doctests in the sources finished, look at the " \ + "results in $(BUILDDIR)/doctest/output.txt." + +coverage: + $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage + @echo "Testing of coverage in the sources finished, look at the " \ + "results in $(BUILDDIR)/coverage/python.txt." + +xml: + $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml + @echo + @echo "Build finished. The XML files are in $(BUILDDIR)/xml." + +pseudoxml: + $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml + @echo + @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." diff --git a/server/docs/README.md b/server/docs/README.md new file mode 100644 index 0000000..e1c6ebf --- /dev/null +++ b/server/docs/README.md @@ -0,0 +1,3 @@ +# Jupyter Server Docs Sources + +Read [this page](https://jupyter-server.readthedocs.io/en/latest/contributors/contributing.html#building-the-docs) to build the docs. 
diff --git a/server/docs/autogen_config.py b/server/docs/autogen_config.py new file mode 100644 index 0000000..2892dfe --- /dev/null +++ b/server/docs/autogen_config.py @@ -0,0 +1,45 @@ +#!/usr/bin/env python +import os + +from jupyter_server.serverapp import ServerApp + +header = """\ +.. _other-full-config: + + +Config file and command line options +==================================== + +The Jupyter Server can be run with a variety of command line arguments. +A list of available options can be found below in the :ref:`options section +`. + +Defaults for these options can also be set by creating a file named +``jupyter_server_config.py`` in your Jupyter folder. The Jupyter +folder is in your home directory, ``~/.jupyter``. + +To create a ``jupyter_server_config.py`` file, with all the defaults +commented out, you can use the following command line:: + + $ jupyter server --generate-config + + +.. _options: + +Options +------- + +This list of options can be generated by running the following and hitting +enter:: + + $ jupyter server --help-all + +""" +try: + destination = os.path.join(os.path.dirname(__file__), "source/other/full-config.rst") +except: + destination = os.path.join(os.getcwd(), "full-config.rst") + +with open(destination, "w") as f: + f.write(header) + f.write(ServerApp().document_config_options()) diff --git a/server/docs/doc-requirements.txt b/server/docs/doc-requirements.txt new file mode 100644 index 0000000..4bf44e1 --- /dev/null +++ b/server/docs/doc-requirements.txt @@ -0,0 +1,12 @@ +ipykernel +jinja2 +jupyter_client +myst-parser +nbformat +prometheus_client +pydata_sphinx_theme +Send2Trash +sphinxcontrib-openapi +sphinxcontrib_github_alt +sphinxemoji +tornado diff --git a/server/docs/environment.yml b/server/docs/environment.yml new file mode 100644 index 0000000..dda5834 --- /dev/null +++ b/server/docs/environment.yml @@ -0,0 +1,7 @@ +name: jupyter_server_docs +dependencies: + - nodejs + - python + - pip + - pip: + - -r doc-requirements.txt 
diff --git a/server/docs/make.bat b/server/docs/make.bat new file mode 100644 index 0000000..19c5b04 --- /dev/null +++ b/server/docs/make.bat @@ -0,0 +1,263 @@ +@ECHO OFF + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set BUILDDIR=build +set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% source +set I18NSPHINXOPTS=%SPHINXOPTS% source +if NOT "%PAPER%" == "" ( + set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% + set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% +) + +if "%1" == "" goto help + +if "%1" == "help" ( + :help + echo.Please use `make ^` where ^ is one of + echo. html to make standalone HTML files + echo. dirhtml to make HTML files named index.html in directories + echo. singlehtml to make a single large HTML file + echo. pickle to make pickle files + echo. json to make JSON files + echo. htmlhelp to make HTML files and a HTML help project + echo. qthelp to make HTML files and a qthelp project + echo. devhelp to make HTML files and a Devhelp project + echo. epub to make an epub + echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter + echo. text to make text files + echo. man to make manual pages + echo. texinfo to make Texinfo files + echo. gettext to make PO message catalogs + echo. changes to make an overview over all changed/added/deprecated items + echo. xml to make Docutils-native XML files + echo. pseudoxml to make pseudoxml-XML files for display purposes + echo. linkcheck to check all external links for integrity + echo. doctest to run all doctests embedded in the documentation if enabled + echo. 
coverage to run coverage check of the documentation if enabled + goto end +) + +if "%1" == "clean" ( + for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i + del /q /s %BUILDDIR%\* + goto end +) + + +REM Check if sphinx-build is available and fallback to Python version if any +%SPHINXBUILD% 2> nul +if errorlevel 9009 goto sphinx_python +goto sphinx_ok + +:sphinx_python + +set SPHINXBUILD=python -m sphinx.__init__ +%SPHINXBUILD% 2> nul +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.http://sphinx-doc.org/ + exit /b 1 +) + +:sphinx_ok + + +if "%1" == "html" ( + %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/html. + goto end +) + +if "%1" == "dirhtml" ( + %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. + goto end +) + +if "%1" == "singlehtml" ( + %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. + goto end +) + +if "%1" == "pickle" ( + %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can process the pickle files. + goto end +) + +if "%1" == "json" ( + %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can process the JSON files. + goto end +) + +if "%1" == "htmlhelp" ( + %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp + if errorlevel 1 exit /b 1 + echo. 
+ echo.Build finished; now you can run HTML Help Workshop with the ^ +.hhp project file in %BUILDDIR%/htmlhelp. + goto end +) + +if "%1" == "qthelp" ( + %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can run "qcollectiongenerator" with the ^ +.qhcp project file in %BUILDDIR%/qthelp, like this: + echo.^> qcollectiongenerator %BUILDDIR%\qthelp\JupyterNotebook.qhcp + echo.To view the help file: + echo.^> assistant -collectionFile %BUILDDIR%\qthelp\JupyterNotebook.ghc + goto end +) + +if "%1" == "devhelp" ( + %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. + goto end +) + +if "%1" == "epub" ( + %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The epub file is in %BUILDDIR%/epub. + goto end +) + +if "%1" == "latex" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "latexpdf" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + cd %BUILDDIR%/latex + make all-pdf + cd %~dp0 + echo. + echo.Build finished; the PDF files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "latexpdfja" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + cd %BUILDDIR%/latex + make all-pdf-ja + cd %~dp0 + echo. + echo.Build finished; the PDF files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "text" ( + %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The text files are in %BUILDDIR%/text. + goto end +) + +if "%1" == "man" ( + %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The manual pages are in %BUILDDIR%/man. 
+ goto end +) + +if "%1" == "texinfo" ( + %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. + goto end +) + +if "%1" == "gettext" ( + %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The message catalogs are in %BUILDDIR%/locale. + goto end +) + +if "%1" == "changes" ( + %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes + if errorlevel 1 exit /b 1 + echo. + echo.The overview file is in %BUILDDIR%/changes. + goto end +) + +if "%1" == "linkcheck" ( + %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck + if errorlevel 1 exit /b 1 + echo. + echo.Link check complete; look for any errors in the above output ^ +or in %BUILDDIR%/linkcheck/output.txt. + goto end +) + +if "%1" == "doctest" ( + %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest + if errorlevel 1 exit /b 1 + echo. + echo.Testing of doctests in the sources finished, look at the ^ +results in %BUILDDIR%/doctest/output.txt. + goto end +) + +if "%1" == "coverage" ( + %SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage + if errorlevel 1 exit /b 1 + echo. + echo.Testing of coverage in the sources finished, look at the ^ +results in %BUILDDIR%/coverage/python.txt. + goto end +) + +if "%1" == "xml" ( + %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The XML files are in %BUILDDIR%/xml. + goto end +) + +if "%1" == "pseudoxml" ( + %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. 
+ goto end +) + +:end diff --git a/server/docs/source/_static/.gitkeep b/server/docs/source/_static/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/server/docs/source/_static/jupyter_server_logo.svg b/server/docs/source/_static/jupyter_server_logo.svg new file mode 100644 index 0000000..586c16b --- /dev/null +++ b/server/docs/source/_static/jupyter_server_logo.svg @@ -0,0 +1,191 @@ + + + + + + image/svg+xml + + logo.svg + + + + logo.svg + Created using Figma 0.90 + + + + + + + + + + + + + + + + + + + + + + + + + + + server + diff --git a/server/docs/source/conf.py b/server/docs/source/conf.py new file mode 100644 index 0000000..23369d4 --- /dev/null +++ b/server/docs/source/conf.py @@ -0,0 +1,387 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +# +# Jupyter Server documentation build configuration file, created by +# sphinx-quickstart on Mon Apr 13 09:51:11 2015. +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. +import os +import os.path as osp +import shutil +import sys + +from pkg_resources import parse_version + + +HERE = osp.abspath(osp.dirname(__file__)) + + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. 
+ +# DEBUG for RTD +print("DEBUG:: sys.path") +print("================") +for item in sys.path: + print(item) + +# add repo root to sys.path +# here = root/docs/source +here = os.path.abspath(os.path.dirname(__file__)) +repo_root = os.path.dirname(os.path.dirname(here)) +sys.path.insert(0, repo_root) + +print("repo_root") +print("=====================") +print(repo_root) + +# DEBUG for post insert on RTD +print("DEBUG:: Post insert to sys.path") +print("===============================") +for item in sys.path: + print(item) + +# Check if docs are being built by ReadTheDocs +# If so, generate a config.rst file and populate it with documentation about +# configuration options + +if os.environ.get("READTHEDOCS", ""): + + # Readthedocs doesn't run our Makefile, so we do this to force it to generate + # the config docs. + with open("../autogen_config.py") as f: + exec(compile(f.read(), "../autogen_config.py", "exec"), {}) + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +# needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "myst_parser", + "sphinx.ext.autodoc", + "sphinx.ext.doctest", + "sphinx.ext.intersphinx", + "sphinx.ext.autosummary", + "sphinx.ext.mathjax", + "IPython.sphinxext.ipython_console_highlighting", + "sphinxcontrib_github_alt", + "sphinxcontrib.openapi", + "sphinxemoji.sphinxemoji", +] + +myst_enable_extensions = ["html_image"] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".ipynb"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The master toctree document. 
+master_doc = "index" + +# General information about the project. +project = "Jupyter Server" +copyright = "2020, Jupyter Team, https://jupyter.org" +author = "The Jupyter Team" + +# ghissue config +github_project_url = "https://github.com/jupyter/jupyter_server" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +__version__ = "1.14.0.dev0" +# The short X.Y version. +version_parsed = parse_version(__version__) +version = f"{version_parsed.major}.{version_parsed.minor}" + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# exclude_patterns = [] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "default" +# highlight_language = 'python3' + +# A list of ignored prefixes for module index sorting. 
+# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = False + +# # Add custom note for each doc page + +# rst_prolog = "" + +# rst_prolog += """ +# .. important:: +# This documentation covers Jupyter Server, an **early developer preview**, +# and is not suitable for general usage yet. Features and implementation are +# subject to change. + +# For production use cases, please use the stable notebook server in the +# `Jupyter Notebook repo `_ +# and `Jupyter Notebook documentation `_. +# """ + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# html_theme = 'sphinx_rtd_theme' +html_theme = "pydata_sphinx_theme" +html_logo = "_static/jupyter_server_logo.svg" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +# html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. 
They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +# NOTE: Sphinx's 'make html' builder will throw a warning about an unfound +# _static directory. Do not remove or comment out html_static_path +# since it is needed to properly generate _static in the build directory +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). 
+# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "JupyterServerdoc" + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + master_doc, + "JupyterServer.tex", + "Jupyter Server Documentation", + "https://jupyter.org", + "manual", + ), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. 
+# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [(master_doc, "jupyterserver", "Jupyter Server Documentation", [author], 1)] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for link checks ---------------------------------------------- + +linkcheck_ignore = [r"http://127\.0\.0\.1/*"] + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + master_doc, + "JupyterServer", + "Jupyter Server Documentation", + author, + "JupyterServer", + "One line description of project.", + "Miscellaneous", + ), +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. 
+# texinfo_no_detailmenu = False + +intersphinx_mapping = { + "ipython": ("https://ipython.readthedocs.io/en/stable/", None), + "nbconvert": ("https://nbconvert.readthedocs.io/en/latest/", None), + "nbformat": ("https://nbformat.readthedocs.io/en/latest/", None), + "jupyter": ("https://jupyter.readthedocs.io/en/latest/", None), +} + +spelling_lang = "en_US" +spelling_word_list_filename = "spelling_wordlist.txt" + +# import before any doc is built, so _ is guaranteed to be injected +import jupyter_server.transutils # noqa: F401 + + +def setup(app): + dest = osp.join(HERE, "other", "changelog.md") + shutil.copy(osp.join(HERE, "..", "..", "CHANGELOG.md"), dest) diff --git a/server/docs/source/contributors/contributing.rst b/server/docs/source/contributors/contributing.rst new file mode 100644 index 0000000..f7bef4b --- /dev/null +++ b/server/docs/source/contributors/contributing.rst @@ -0,0 +1,3 @@ +.. highlight:: sh + +.. include:: ../../../CONTRIBUTING.rst diff --git a/server/docs/source/contributors/index.rst b/server/docs/source/contributors/index.rst new file mode 100644 index 0000000..0007656 --- /dev/null +++ b/server/docs/source/contributors/index.rst @@ -0,0 +1,12 @@ +Documentation for Contributors +------------------------------ + +These pages target people who are interested in contributing directly to the Jupyter Server Project. + +.. toctree:: + :caption: Contributors + :maxdepth: 1 + :name: contributors + + team-meetings + contributing diff --git a/server/docs/source/contributors/team-meetings.rst b/server/docs/source/contributors/team-meetings.rst new file mode 100644 index 0000000..947142f --- /dev/null +++ b/server/docs/source/contributors/team-meetings.rst @@ -0,0 +1,21 @@ +.. _contributors-team-meetings-roadmap-calendar: + +Team Meetings, Road Map and Calendar +==================================== + +Many of the lead Jupyter Server developers meet weekly over Zoom. These meetings are open to everyone. 
To see when the next meeting is happening, watch this GitHub issue:
| ++--------------------+-----------+------------------------------+ +|**path** |unicode |Full | +| | |(:ref:`API-style`) | +| | |path to the entity. | ++--------------------+-----------+------------------------------+ +|**type** |unicode |The entity type. One of | +| | |``"notebook"``, ``"file"`` or | +| | |``"directory"``. | ++--------------------+-----------+------------------------------+ +|**created** |datetime |Creation date of the entity. | ++--------------------+-----------+------------------------------+ +|**last_modified** |datetime |Last modified date of the | +| | |entity. | ++--------------------+-----------+------------------------------+ +|**content** |variable |The "content" of the entity. | +| | |(:ref:`See | +| | |Below`) | ++--------------------+-----------+------------------------------+ +|**mimetype** |unicode or |The mimetype of ``content``, | +| |``None`` |if any. (:ref:`See | +| | |Below`) | ++--------------------+-----------+------------------------------+ +|**format** |unicode or |The format of ``content``, | +| |``None`` |if any. (:ref:`See | +| | |Below`) | ++--------------------+-----------+------------------------------+ + +.. _modelcontent: + +Certain model fields vary in structure depending on the ``type`` field of the +model. There are three model types: **notebook**, **file**, and **directory**. + +- ``notebook`` models + - The ``format`` field is always ``"json"``. + - The ``mimetype`` field is always ``None``. + - The ``content`` field contains a + :class:`nbformat.notebooknode.NotebookNode` representing the .ipynb file + represented by the model. See the `NBFormat`_ documentation for a full + description. + +- ``file`` models + - The ``format`` field is either ``"text"`` or ``"base64"``. + - The ``mimetype`` field is ``text/plain`` for text-format models and + ``application/octet-stream`` for base64-format models. + - The ``content`` field is always of type ``unicode``. 
For text-format + file models, ``content`` simply contains the file's bytes after decoding + as UTF-8. Non-text (``base64``) files are read as bytes, base64 encoded, + and then decoded as UTF-8. + +- ``directory`` models + - The ``format`` field is always ``"json"``. + - The ``mimetype`` field is always ``None``. + - The ``content`` field contains a list of :ref:`content-free` + models representing the entities in the directory. + +.. note:: + + .. _contentfree: + + In certain circumstances, we don't need the full content of an entity to + complete a Contents API request. In such cases, we omit the ``mimetype``, + ``content``, and ``format`` keys from the model. This most commonly occurs + when listing a directory, in which circumstance we represent files within + the directory as content-less models to avoid having to recursively traverse + and serialize the entire filesystem. + +**Sample Models** + +.. code-block:: python + + # Notebook Model with Content + { + 'content': { + 'metadata': {}, + 'nbformat': 4, + 'nbformat_minor': 0, + 'cells': [ + { + 'cell_type': 'markdown', + 'metadata': {}, + 'source': 'Some **Markdown**', + }, + ], + }, + 'created': datetime(2015, 7, 25, 19, 50, 19, 19865), + 'format': 'json', + 'last_modified': datetime(2015, 7, 25, 19, 50, 19, 19865), + 'mimetype': None, + 'name': 'a.ipynb', + 'path': 'foo/a.ipynb', + 'type': 'notebook', + 'writable': True, + } + + # Notebook Model without Content + { + 'content': None, + 'created': datetime.datetime(2015, 7, 25, 20, 17, 33, 271931), + 'format': None, + 'last_modified': datetime.datetime(2015, 7, 25, 20, 17, 33, 271931), + 'mimetype': None, + 'name': 'a.ipynb', + 'path': 'foo/a.ipynb', + 'type': 'notebook', + 'writable': True + } + + +API Paths +~~~~~~~~~ +.. _apipaths: + +ContentsManager methods represent the locations of filesystem resources as +**API-style paths**. Such paths are interpreted as relative to the root +directory of the notebook server. 
For compatibility across systems, the +following guarantees are made: + +* Paths are always ``unicode``, not ``bytes``. +* Paths are not URL-escaped. +* Paths are always forward-slash (/) delimited, even on Windows. +* Leading and trailing slashes are stripped. For example, ``/foo/bar/buzz/`` + becomes ``foo/bar/buzz``. +* The empty string (``""``) represents the root directory. + + +Writing a Custom ContentsManager +-------------------------------- + +The default ContentsManager is designed for users running the notebook as an +application on a personal computer. It stores notebooks as .ipynb files on the +local filesystem, and it maps files and directories in the Notebook UI to files +and directories on disk. It is possible to override how notebooks are stored +by implementing your own custom subclass of ``ContentsManager``. For example, +if you deploy the notebook in a context where you don't trust or don't have +access to the filesystem of the notebook server, it's possible to write your +own ContentsManager that stores notebooks and files in a database. + + +Required Methods +~~~~~~~~~~~~~~~~ + +A minimal complete implementation of a custom +:class:`~manager.ContentsManager` must implement the following +methods: + +.. autosummary:: + ContentsManager.get + ContentsManager.save + ContentsManager.delete_file + ContentsManager.rename_file + ContentsManager.file_exists + ContentsManager.dir_exists + ContentsManager.is_hidden + +You may be required to specify a Checkpoints object, as the default one, +``FileCheckpoints``, could be incompatible with your custom +ContentsManager. + +Customizing Checkpoints +----------------------- +.. currentmodule:: jupyter_server.services.contents.checkpoints + +Customized Checkpoint definitions allows behavior to be +altered and extended. 
+ +The ``Checkpoints`` and ``GenericCheckpointsMixin`` classes +(from :mod:`jupyter_server.services.contents.checkpoints`) +have reusable code and are intended to be used together, +but require the following methods to be implemented. + +.. autosummary:: + Checkpoints.rename_checkpoint + Checkpoints.list_checkpoints + Checkpoints.delete_checkpoint + GenericCheckpointsMixin.create_file_checkpoint + GenericCheckpointsMixin.create_notebook_checkpoint + GenericCheckpointsMixin.get_file_checkpoint + GenericCheckpointsMixin.get_notebook_checkpoint + +No-op example +~~~~~~~~~~~~~ + +Here is an example of a no-op checkpoints object - note the mixin +comes first. The docstrings indicate what each method should do or +return for a more complete implementation. + +.. code-block:: python + + class NoOpCheckpoints(GenericCheckpointsMixin, Checkpoints): + """requires the following methods:""" + def create_file_checkpoint(self, content, format, path): + """ -> checkpoint model""" + def create_notebook_checkpoint(self, nb, path): + """ -> checkpoint model""" + def get_file_checkpoint(self, checkpoint_id, path): + """ -> {'type': 'file', 'content': , 'format': {'text', 'base64'}}""" + def get_notebook_checkpoint(self, checkpoint_id, path): + """ -> {'type': 'notebook', 'content': }""" + def delete_checkpoint(self, checkpoint_id, path): + """deletes a checkpoint for a file""" + def list_checkpoints(self, path): + """returns a list of checkpoint models for a given file, + default just does one per file + """ + return [] + def rename_checkpoint(self, checkpoint_id, old_path, new_path): + """renames checkpoint from old path to new path""" + +See ``GenericFileCheckpoints`` in :mod:`notebook.services.contents.filecheckpoints` +for a more complete example. + +Testing +------- +.. currentmodule:: jupyter_server.services.contents.tests + +:mod:`jupyter_server.services.contents.tests` includes several test suites written +against the abstract Contents API. 
This means that an excellent way to test a +new ContentsManager subclass is to subclass our tests to make them use your +ContentsManager. + +.. note:: + + PGContents_ is an example of a complete implementation of a custom + ``ContentsManager``. It stores notebooks and files in PostgreSQL_ and encodes + directories as SQL relations. PGContents also provides an example of how to + re-use the notebook's tests. + +.. _NBFormat: https://nbformat.readthedocs.io/en/latest/index.html +.. _PGContents: https://github.com/quantopian/pgcontents +.. _PostgreSQL: https://www.postgresql.org/ + +Asynchronous Support +-------------------- + +An asynchronous version of the Contents API is available to run slow IO processes concurrently. + +- :class:`~manager.AsyncContentsManager` +- :class:`~filemanager.AsyncFileContentsManager` +- :class:`~largefilemanager.AsyncLargeFileManager` +- :class:`~checkpoints.AsyncCheckpoints` +- :class:`~checkpoints.AsyncGenericCheckpointsMixin` + +.. note:: + + .. _asynccontents: + + In most cases, the non-asynchronous Contents API is performant for local filesystems. + However, if the Jupyter Notebook web application is interacting with a high-latent virtual filesystem, you may see performance gains by using the asynchronous version. + For example, if you're experiencing terminal lag in the web application due to the slow and blocking file operations, the asynchronous version can reduce the lag. + Before opting in, comparing both non-async and async options' performances is recommended. diff --git a/server/docs/source/developers/dependency.rst b/server/docs/source/developers/dependency.rst new file mode 100644 index 0000000..feaf90b --- /dev/null +++ b/server/docs/source/developers/dependency.rst @@ -0,0 +1,19 @@ +Depending on Jupyter Server +=========================== + +If your project depends directly on Jupyter Server, be sure to watch Jupyter Server's ChangeLog and pin your project to a version that works for your application. 
Major releases represent possible backwards-compatibility breaking API changes or features. + +When a new major version in released on PyPI, a branch for that version will be created in this repository, and the version of the master branch will be bumped to the next major version number. That way, the master branch always reflects the latest un-released version. + +To install the latest patch of a given version: + +.. code-block:: console + + > pip install jupyter_server --upgrade + + +To pin your jupyter_server install to a specific version: + +.. code-block:: console + + > pip install jupyter_server==1.0.0 diff --git a/server/docs/source/developers/extensions.rst b/server/docs/source/developers/extensions.rst new file mode 100644 index 0000000..378f135 --- /dev/null +++ b/server/docs/source/developers/extensions.rst @@ -0,0 +1,553 @@ +================= +Server Extensions +================= + +A Jupyter Server extension is typically a module or package that extends to Server’s REST API/endpoints—i.e. adds extra request handlers to Server’s Tornado Web Application. + +You can check some simple examples on the `examples folder +`_ in the GitHub jupyter_server repository. + +Authoring a basic server extension +================================== + +The simplest way to write a Jupyter Server extension is to write an extension module with a ``_load_jupyter_server_extension`` function. This function should take a single argument, an instance of the ``ServerApp``. + + +.. code-block:: python + + def _load_jupyter_server_extension(serverapp: jupyter_server.serverapp.ServerApp): + """ + This function is called when the extension is loaded. + """ + pass + + +Adding extension endpoints +-------------------------- + +The easiest way to add endpoints and handle incoming requests is to subclass the ``JupyterHandler`` (which itself is a subclass of Tornado's ``RequestHandler``). + +.. 
code-block:: python + + from jupyter_server.base.handlers import JupyterHandler + import tornado + + class MyExtensionHandler(JupyterHandler): + + @tornado.web.authenticated + def get(self): + ... + + @tornado.web.authenticated + def post(self): + ... + +.. note:: + It is best practice to wrap each handler method with the ``authenticated`` decorator to ensure that each request is authenticated by the server. + +Then add this handler to Jupyter Server's Web Application through the ``_load_jupyter_server_extension`` function. + +.. code-block:: python + + def _load_jupyter_server_extension(serverapp: jupyter_server.serverapp.ServerApp): + """ + This function is called when the extension is loaded. + """ + handlers = [ + ('/myextension/hello', MyExtensionHandler) + ] + serverapp.web_app.add_handlers('.*$', handlers) + + +Making an extension discoverable +-------------------------------- + +To make this extension discoverable to Jupyter Server, first define a ``_jupyter_server_extension_points()`` function at the root of the module/package. This function returns metadata describing how to load the extension. Usually, this requires a ``module`` key with the import path to the extension's ``_load_jupyter_server_extension`` function. + +.. code-block:: python + + def _jupyter_server_extension_points(): + """ + Returns a list of dictionaries with metadata describing + where to find the `_load_jupyter_server_extension` function. + """ + return [ + { + "module": "my_extension" + } + ] + +Second, add the extension to the ServerApp's ``jpserver_extensions`` trait. This can be manually added by users in their ``jupyter_server_config.py`` file, + +.. code-block:: python + + c.ServerApp.jpserver_extensions = { + "my_extension": True + } + +or loaded from a JSON file in the ``jupyter_server_config.d`` directory under one of `Jupyter's paths`_. (See the `Distributing a server extension`_ section for details on how to automatically enabled your extension when users install it.) 
+.. code-block:: json
+ # Perform any required shut down steps + + +The ``ExtensionApp`` uses the following methods and properties to connect your extension to the Jupyter server. You do not need to define a ``_load_jupyter_server_extension`` function for these apps. Instead, overwrite the pieces below to add your custom settings, handlers and templates: + +Methods + +* ``initialize_settings()``: adds custom settings to the Tornado Web Application. +* ``initialize_handlers()``: appends handlers to the Tornado Web Application. +* ``initialize_templates()``: initialize the templating engine (e.g. jinja2) for your frontend. +* ``stop_extension()``: called on server shut down. + +Properties + +* ``name``: the name of the extension +* ``default_url``: the default URL for this extension—i.e. the landing page for this extension when launched from the CLI. +* ``load_other_extensions``: a boolean enabling/disabling other extensions when launching this extension directly. +* ``file_url_prefix``: the prefix URL added when opening a document directly from the command line. For example, classic Notebook uses ``/notebooks`` to open a document at http://localhost:8888/notebooks/path/to/notebook.ipynb. + +``ExtensionApp`` request handlers +--------------------------------- + +``ExtensionApp`` Request Handlers have a few extra properties. + +* ``config``: the ExtensionApp's config object. +* ``server_config``: the ServerApp's config object. +* ``name``: the name of the extension to which this handler is linked. +* ``static_url()``: a method that returns the url to static files (prefixed with ``/static/``). + +Jupyter Server provides a convenient mixin class for adding these properties to any ``JupyterHandler``. For example, the basic server extension handler in the section above becomes: + +.. 
code-block:: python + + from jupyter_server.base.handlers import JupyterHandler + from jupyter_server.extension.handler import ExtensionHandlerMixin + import tornado + + + class MyExtensionHandler(ExtensionHandlerMixin, JupyterHandler): + + @tornado.web.authenticated + def get(self): + ... + + @tornado.web.authenticated + def post(self): + ... + + +Jinja templating from frontend extensions +----------------------------------------- + +Many Jupyter frontend applications use Jinja for basic HTML templating. Since this is common enough, Jupyter Server provides some extra mixin that integrate Jinja with Jupyter server extensions. + +Use ``ExtensionAppJinjaMixin`` to automatically add a Jinja templating environment to an ``ExtensionApp``. This adds a ``_jinja2_env`` setting to Tornado Web Server's settings that will be used by request handlers. + +.. code-block:: python + + + from jupyter_server.extension.application import ExtensionApp, ExtensionAppJinjaMixin + + + class MyExtensionApp(ExtensionAppJinjaMixin, ExtensionApp): + ... + + +Pair the example above with ``ExtensionHandlers`` that also inherit the ``ExtensionHandlerJinjaMixin`` mixin. This will automatically load HTML templates from the Jinja templating environment created by the ``ExtensionApp``. + + +.. code-block:: python + + + from jupyter_server.base.handlers import JupyterHandler + from jupyter_server.extension.handler import ( + ExtensionHandlerMixin, + ExtensionHandlerJinjaMixin + ) + import tornado + + class MyExtensionHandler( + ExtensionHandlerMixin, + ExtensionHandlerJinjaMixin, + JupyterHandler + ): + + @tornado.web.authenticated + def get(self): + ... + + @tornado.web.authenticated + def post(self): + ... + + +.. note:: The mixin classes in this example must come before the base classes, ``ExtensionApp`` and ``ExtensionHandler``. 
+ + +Making an ``ExtensionApp`` discoverable +--------------------------------------- + +To make an ``ExtensionApp`` discoverable by Jupyter Server, add the ``app`` key+value pair to the ``_jupyter_server_extension_points()`` function example above: + +.. code-block:: python + + from myextension import MyExtensionApp + + + def _jupyter_server_extension_points(): + """ + Returns a list of dictionaries with metadata describing + where to find the `_load_jupyter_server_extension` function. + """ + return [ + { + "module": "myextension", + "app": MyExtensionApp + } + ] + + +Launching an ``ExtensionApp`` +----------------------------- + +To launch the application, simply call the ``ExtensionApp``'s ``launch_instance`` method. + +.. code-block:: python + + launch_instance = MyFrontend.launch_instance + launch_instance() + + +To make your extension executable from anywhere on your system, point an entry-point at the ``launch_instance`` method in the extension's ``setup.py``: + +.. code-block:: python + + from setuptools import setup + + + setup( + name='myfrontend', + ... + entry_points={ + 'console_scripts': [ + 'jupyter-myextension = myextension:launch_instance' + ] + } + ) + +``ExtensionApp`` as a classic Notebook server extension +------------------------------------------------------- + +An extension that extends ``ExtensionApp`` should still work with the old Tornado server from the classic Jupyter Notebook. The ``ExtensionApp`` class +provides a method, ``load_classic_server_extension``, that handles the extension initialization. Simply define a ``load_jupyter_server_extension`` reference +pointing at the ``load_classic_server_extension`` method: + +.. code-block:: python + + # This is typically defined in the root `__init__.py` + # file of the extension package. + load_jupyter_server_extension = MyExtensionApp.load_classic_server_extension + + +If the extension is enabled, the extension will be loaded when the server starts. 
+ + +Distributing a server extension +=============================== + +Putting it all together, authors can distribute their extension following this steps: + +1. Add a ``_jupyter_server_extension_points()`` function at the extension's root. + This function should likely live in the ``__init__.py`` found at the root of the extension package. It will look something like this: + + .. code-block:: python + + # Found in the __init__.py of package + + def _jupyter_server_extension_points(): + return [ + { + "module": "myextension.app", + "app": MyExtensionApp + } + ] + +2. Create an extension by writing a ``_load_jupyter_server_extension()`` function or subclassing ``ExtensionApp``. + This is where the extension logic will live (i.e. custom extension handlers, config, etc). See the sections above for more information on how to create an extension. + +3. Add the following JSON config file to the extension package. + The file should be named after the extension (e.g. ``myextension.json``) and saved in a subdirectory of the package with the prefix: ``jupyter-config/jupyter_server_config.d/``. The extension package will have a similar structure to this example: + + .. code-block:: + + myextension + ├── myextension/ + │ ├── __init__.py + │ └── app.py + ├── jupyter-config/ + │ └── jupyter_server_config.d/ + │ └── myextension.json + └── setup.py + + The contents of the JSON file will tell Jupyter Server to load the extension when a user installs the package: + + .. code-block:: json + + { + "ServerApp": { + "jpserver_extensions": { + "myextension": true + } + } + } + + When the extension is installed, this JSON file will be copied to the ``jupyter_server_config.d`` directory found in one of `Jupyter's paths`_. + + Users can toggle the enabling/disableing of extension using the command: + + .. code-block:: console + + jupyter server disable myextension + + which will change the boolean value in the JSON file above. + +4. 
Create a ``setup.py`` that automatically enables the extension. + Add a few extra lines the extension package's ``setup`` function + + .. code-block:: python + + from setuptools import setup + + setup( + name="myextension", + ... + include_package_data=True, + data_files=[ + ( + "etc/jupyter/jupyter_server_config.d", + ["jupyter-config/jupyter_server_config.d/myextension.json"] + ), + ] + + ) + + + + +.. links + +.. _`Jupyter's paths`: https://jupyter.readthedocs.io/en/latest/use/jupyter-directories.html + + +Migrating an extension to use Jupyter Server +============================================ + +If you're a developer of a `classic Notebook Server`_ extension, your extension should be able to work with *both* the classic notebook server and ``jupyter_server``. + +There are a few key steps to make this happen: + +1. Point Jupyter Server to the ``load_jupyter_server_extension`` function with a new reference name. + The ``load_jupyter_server_extension`` function was the key to loading a server extension in the classic Notebook Server. Jupyter Server expects the name of this function to be prefixed with an underscore—i.e. ``_load_jupyter_server_extension``. You can easily achieve this by adding a reference to the old function name with the new name in the same module. + + .. code-block:: python + + def load_jupyter_server_extension(nb_server_app): + ... + + # Reference the old function name with the new function name. + + _load_jupyter_server_extension = load_jupyter_server_extension + +2. Add new data files to your extension package that enable it with Jupyter Server. + This new file can go next to your classic notebook server data files. Create a new sub-directory, ``jupyter_server_config.d``, and add a new ``.json`` file there: + + .. raw:: html + +
+        myextension
+        ├── myextension/
+        │   ├── __init__.py
+        │   └── app.py
+        ├── jupyter-config/
+        │   └── jupyter_notebook_config.d/
+        │       └── myextension.json
+        │   └── jupyter_server_config.d/
+        │       └── myextension.json
+        └── setup.py
+        
+ + The new ``.json`` file should look something like this (you'll notice the changes in the configured class and trait names): + + .. code-block:: json + + { + "ServerApp": { + "jpserver_extensions": { + "myextension": true + } + } + } + + Update your extension package's ``setup.py`` so that the data-files are moved into the jupyter configuration directories when users download the package. + + .. code-block:: python + + from setuptools import setup + + setup( + name="myextension", + ... + include_package_data=True, + data_files=[ + ( + "etc/jupyter/jupyter_server_config.d", + ["jupyter-config/jupyter_server_config.d/myextension.json"] + ), + ( + "etc/jupyter/jupyter_notebook_config.d", + ["jupyter-config/jupyter_notebook_config.d/myextension.json"] + ), + ] + + ) + +3. (Optional) Point extension at the new favicon location. + The favicons in the Jupyter Notebook have been moved to a new location in Jupyter Server. If your extension is using one of these icons, you'll want to add a set of redirect handlers this. (In ``ExtensionApp``, this is handled automatically). + + This usually means adding a chunk to your ``load_jupyter_server_extension`` function similar to this: + + .. code-block:: python + + def load_jupyter_server_extension(nb_server_app): + + web_app = nb_server_app.web_app + host_pattern = '.*$' + base_url = web_app.settings['base_url'] + + # Add custom extensions handler. + custom_handlers = [ + ... + ] + + # Favicon redirects. 
+ favicon_redirects = [ + ( + url_path_join(base_url, "/static/favicons/favicon.ico"), + RedirectHandler, + {"url": url_path_join(serverapp.base_url, "static/base/images/favicon.ico") + ), + ( + url_path_join(base_url, "/static/favicons/favicon-busy-1.ico"), + RedirectHandler, + {"url": url_path_join(serverapp.base_url, "static/base/images/favicon-busy-1.ico")} + ), + ( + url_path_join(base_url, "/static/favicons/favicon-busy-2.ico"), + RedirectHandler, + {"url": url_path_join(serverapp.base_url, "static/base/images/favicon-busy-2.ico")} + ), + ( + url_path_join(base_url, "/static/favicons/favicon-busy-3.ico"), + RedirectHandler, + {"url": url_path_join(serverapp.base_url, "static/base/images/favicon-busy-3.ico")} + ), + ( + url_path_join(base_url, "/static/favicons/favicon-file.ico"), + RedirectHandler, + {"url": url_path_join(serverapp.base_url, "static/base/images/favicon-file.ico")} + ), + ( + url_path_join(base_url, "/static/favicons/favicon-notebook.ico"), + RedirectHandler, + {"url": url_path_join(serverapp.base_url, "static/base/images/favicon-notebook.ico")} + ), + ( + url_path_join(base_url, "/static/favicons/favicon-terminal.ico"), + RedirectHandler, + {"url": url_path_join(serverapp.base_url, "static/base/images/favicon-terminal.ico")} + ), + ( + url_path_join(base_url, "/static/logo/logo.png"), + RedirectHandler, + {"url": url_path_join(serverapp.base_url, "static/base/images/logo.png")} + ), + ] + + web_app.add_handlers( + host_pattern, + custom_handlers + favicon_redirects + ) + + +.. 
_`classic Notebook Server`: https://jupyter-notebook.readthedocs.io/en/stable/extending/handlers.html diff --git a/server/docs/source/developers/index.rst b/server/docs/source/developers/index.rst new file mode 100644 index 0000000..b8f5140 --- /dev/null +++ b/server/docs/source/developers/index.rst @@ -0,0 +1,16 @@ +Documentation for Developers +---------------------------- + +These pages target people writing Jupyter Web applications and server extensions, or people who need to dive deeper in Jupyter Server's REST API and configuration system. + +.. toctree:: + :caption: Developers + :maxdepth: 1 + :name: developers + + dependency + rest-api + extensions + savehooks + contents + websocket-protocols diff --git a/server/docs/source/developers/rest-api.rst b/server/docs/source/developers/rest-api.rst new file mode 100644 index 0000000..ab5b627 --- /dev/null +++ b/server/docs/source/developers/rest-api.rst @@ -0,0 +1,7 @@ +The REST API +============ + +An interactive version is available +`here `_. + +.. openapi:: ../../../jupyter_server/services/api/api.yaml diff --git a/server/docs/source/developers/savehooks.rst b/server/docs/source/developers/savehooks.rst new file mode 100644 index 0000000..900649d --- /dev/null +++ b/server/docs/source/developers/savehooks.rst @@ -0,0 +1,84 @@ +File save hooks +=============== + +You can configure functions that are run whenever a file is saved. There are +two hooks available: + +* ``ContentsManager.pre_save_hook`` runs on the API path and model with + content. This can be used for things like stripping output that people don't + like adding to VCS noise. +* ``FileContentsManager.post_save_hook`` runs on the filesystem path and model + without content. This could be used to commit changes after every save, for + instance. 
+ +They are both called with keyword arguments:: + + pre_save_hook(model=model, path=path, contents_manager=cm) + post_save_hook(model=model, os_path=os_path, contents_manager=cm) + +Examples +-------- + +These can both be added to :file:`jupyter_server_config.py`. + +A pre-save hook for stripping output:: + + def scrub_output_pre_save(model, **kwargs): + """scrub output before saving notebooks""" + # only run on notebooks + if model['type'] != 'notebook': + return + # only run on nbformat v4 + if model['content']['nbformat'] != 4: + return + + for cell in model['content']['cells']: + if cell['cell_type'] != 'code': + continue + cell['outputs'] = [] + cell['execution_count'] = None + + c.FileContentsManager.pre_save_hook = scrub_output_pre_save + +A post-save hook to make a script equivalent whenever the notebook is saved +(replacing the ``--script`` option in older versions of the notebook): + +.. code-block:: python + + import io + import os + from jupyter_server.utils import to_api_path + + _script_exporter = None + + def script_post_save(model, os_path, contents_manager, **kwargs): + """convert notebooks to Python script after save with nbconvert + + replaces `ipython notebook --script` + """ + from nbconvert.exporters.script import ScriptExporter + + if model['type'] != 'notebook': + return + + global _script_exporter + + if _script_exporter is None: + _script_exporter = ScriptExporter(parent=contents_manager) + + log = contents_manager.log + + base, ext = os.path.splitext(os_path) + py_fname = base + '.py' + script, resources = _script_exporter.from_filename(os_path) + script_fname = base + resources.get('output_extension', '.txt') + log.info("Saving script /%s", to_api_path(script_fname, contents_manager.root_dir)) + + with io.open(script_fname, 'w', encoding='utf-8') as f: + f.write(script) + + c.FileContentsManager.post_save_hook = script_post_save + + +This could be a simple call to ``jupyter nbconvert --to script``, but spawning +the subprocess every 
time is quite slow. diff --git a/server/docs/source/developers/websocket-protocols.rst b/server/docs/source/developers/websocket-protocols.rst new file mode 100644 index 0000000..e4cf093 --- /dev/null +++ b/server/docs/source/developers/websocket-protocols.rst @@ -0,0 +1,155 @@ +.. _websocket_protocols: + +WebSocket kernel wire protocols +=============================== + +The Jupyter Server needs to pass messages between kernels and the Jupyter web application. Kernels use ZeroMQ sockets, and the web application uses a WebSocket. + +ZeroMQ wire protocol +-------------------- + +The kernel wire protocol over ZeroMQ takes advantage of multipart messages, allowing to decompose a message into parts and to send and receive them unmerged. The following table shows the message format (the beginning has been omitted for clarity): + +.. list-table:: Format of a kernel message over ZeroMQ socket (indices refer to parts, not bytes) + :header-rows: 1 + + * - ... + - 0 + - 1 + - 2 + - 3 + - 4 + - 5 + - ... + * - ... + - header + - parent_header + - metadata + - content + - buffer_0 + - buffer_1 + - ... + +See also the `Jupyter Client documentation `_. + +Note that a set of ZeroMQ sockets, one for each channel (shell, iopub, etc.), are multiplexed into one WebSocket. Thus, the channel name must be encoded in WebSocket messages. + +WebSocket protocol negotiation +------------------------------ + +When opening a WebSocket, the Jupyter web application can optionally provide a list of subprotocols it supports (see e.g. the `MDN documentation `_). If nothing is provided (empty list), then the Jupyter Server assumes the default protocol will be used. Otherwise, the Jupyter Server must select one of the provided subprotocols, or none of them. If none of them is selected, the Jupyter Server must reply with an empty string, which means that the default protocol will be used. 
+ +Default WebSocket protocol +-------------------------- + +The Jupyter Server must support the default protocol, in which a kernel message is serialized over WebSocket as follows: + +.. list-table:: Format of a kernel message over WebSocket (indices refer to bytes) + :header-rows: 1 + + * - 0 + - 4 + - 8 + - ... + - offset_0 + - offset_1 + - offset_2 + - ... + * - offset_0 + - offset_1 + - offset_2 + - ... + - msg + - buffer_0 + - buffer_1 + - ... + +Where: + +* ``offset_0`` is the position of the kernel message (``msg``) from the beginning of this message, in bytes. +* ``offset_1`` is the position of the first binary buffer (``buffer_0``) from the beginning of this message, in bytes (optional). +* ``offset_2`` is the position of the second binary buffer (``buffer_1``) from the beginning of this message, in bytes (optional). +* ``msg`` is the kernel message, excluding binary buffers and including the channel name, as a UTF8-encoded stringified JSON. +* ``buffer_0`` is the first binary buffer (optional). +* ``buffer_1`` is the second binary buffer (optional). + +The message can be deserialized by parsing ``msg`` as a JSON object (after decoding it to a string): + +.. code-block:: python + + msg = { + 'channel': channel, + 'header': header, + 'parent_header': parent_header, + 'metadata': metadata, + 'content': content + } + +Then retrieving the channel name, and updating with the buffers, if any: + +.. code-block:: python + + buffers = { + [ + buffer_0, + buffer_1 + # ... + ] + } + +``v1.kernel.websocket.jupyter.org`` protocol +-------------------------------------------- + +The Jupyter Server can optionally support the ``v1.kernel.websocket.jupyter.org`` protocol, in which a kernel message is serialized over WebSocket as follows: + +.. list-table:: Format of a kernel message over WebSocket (indices refer to bytes) + :header-rows: 1 + + * - 0 + - 8 + - 16 + - ... 
+ - 8*offset_number + - offset_0 + - offset_1 + - offset_2 + - offset_3 + - offset_4 + - offset_5 + - offset_6 + - ... + * - offset_number + - offset_0 + - offset_1 + - ... + - offset_n + - channel + - header + - parent_header + - metadata + - content + - buffer_0 + - buffer_1 + - ... + +Where: + +* ``offset_number`` is a 64-bit (little endian) unsigned integer. +* ``offset_0`` to ``offset_n`` are 64-bit (little endian) unsigned integers (with ``n=offset_number-1``). +* ``channel`` is a UTF-8 encoded string containing the channel for the message (shell, iopub, etc.). +* ``header``, ``parent_header``, ``metadata``, and ``content`` are UTF-8 encoded JSON text representing the given part of a message in the Jupyter message protocol. +* ``offset_n`` is the number of bytes in the message. +* The message can be deserialized from the ``bin_msg`` serialized message as follows (Python code): + +.. code-block:: python + + import json + channel = bin_msg[offset_0:offset_1].decode('utf-8') + header = json.loads(bin_msg[offset_1:offset_2]) + parent_header = json.loads(bin_msg[offset_2:offset_3]) + metadata = json.loads(bin_msg[offset_3:offset_4]) + content = json.loads(bin_msg[offset_4:offset_5]) + buffer_0 = bin_msg[offset_5:offset_6] + buffer_1 = bin_msg[offset_6:offset_7] + # ... + last_buffer = bin_msg[offset_n_minus_1:offset_n] diff --git a/server/docs/source/index.rst b/server/docs/source/index.rst new file mode 100644 index 0000000..11922b9 --- /dev/null +++ b/server/docs/source/index.rst @@ -0,0 +1,46 @@ +Welcome! +======== + +You've landed on the documentation pages for the **Jupyter Server** Project. Some other pages you may have been looking for: + +* `Jupyter Server Github Repo `_, the source code we describe in this code. +* `Jupyter Notebook Github Repo `_ , the source code for the classic Notebook. +* `JupyterLab Github Repo `_, the JupyterLab server wich runs on the Jupyter Server. 
+ + +Introduction +------------ + +Jupyter Server is the backend—the core services, APIs, and `REST endpoints`_—to Jupyter web applications. + +.. note:: + + Jupyter Server is a replacement for the Tornado Web Server in `Jupyter Notebook`_. Jupyter web applications should move to using Jupyter Server. For help, see the :ref:`migrate_from_notebook` page. + +.. _Tornado: https://www.tornadoweb.org/en/stable/ +.. _Jupyter Notebook: https://github.com/jupyter/notebook +.. _REST endpoints: https://petstore.swagger.io/?url=https://raw.githubusercontent.com/jupyter/jupyter_server/main/jupyter_server/services/api/api.yaml + +Who's this for? +--------------- + +The Jupyter Server is a highly technical piece of the Jupyter Stack, so we've separated documentation to help specific personas: + +1. :ref:`Users `: people using Jupyter web applications. +2. :ref:`Operators `: people deploying or serving Jupyter web applications to others. +3. :ref:`Developers `: people writing Jupyter Server extensions and web applications. +4. :ref:`Contributors `: people contributing directly to the Jupyter Server library. + +If you finds gaps in our documentation, please open an issue (or better, a pull request) on the Jupyter Server `Github repo `_. + +Table of Contents +----------------- + +.. toctree:: + :maxdepth: 2 + + Users + Operators + Developers + Contributors + Other diff --git a/server/docs/source/operators/configuring-extensions.rst b/server/docs/source/operators/configuring-extensions.rst new file mode 100644 index 0000000..839ae72 --- /dev/null +++ b/server/docs/source/operators/configuring-extensions.rst @@ -0,0 +1,59 @@ +.. _configure-multiple-extensions: + +Configuring Extensions +====================== + +Some Jupyter Server extensions are also configurable applications. There are two ways to configure such extensions: i) pass arguments to the extension's entry point or ii) list configurable options in a Jupyter config file. 
+ +Jupyter Server looks for an extension's config file in a set of specific paths. Use the ``jupyter`` entry point to list these paths: + +.. code-block:: console + + > jupyter --paths + + config: + /Users/username/.jupyter + /usr/local/etc/jupyter + /etc/jupyter + data: + /Users/username/Library/Jupyter + /usr/local/share/jupyter + /usr/share/jupyter + runtime: + /Users/username/Library/Jupyter/runtime + + +Extension config from file +-------------------------- + +Jupyter Server expects the file to be named after the extension's name like so: ``jupyter_{name}_config``. For example, the Jupyter Notebook's config file is ``jupyter_notebook_config``. + +Configuration files can be Python or JSON files. + +In Python config files, each trait will be prefixed with ``c.`` that links the trait to the config loader. For example, Jupyter Notebook config might look like: + +.. code-block:: python + + # jupyter_notebook_config.py + + c.NotebookApp.mathjax_enabled = False + + +A Jupyter Server will automatically load config for each enabled extension. You can configure each extension by creating their corresponding Jupyter config file. + + +Extension config on the command line +------------------------------------ + +Server extension applications can also be configured from the command line, and multiple extension can be configured at the same time. Simply pass the traits (with their appropriate prefix) to the ``jupyter server`` entrypoint, e.g.: + +.. code-block:: console + + > jupyter server --ServerApp.port=9999 --MyExtension1.trait=False --MyExtension2.trait=True + + +This will also work with any extension entrypoints that allow other extensions to run side-by-side, e.g.: + +.. 
code-block:: console + + > jupyter myextension --ServerApp.port=9999 --MyExtension1.trait=False --MyExtension2.trait=True diff --git a/server/docs/source/operators/index.rst b/server/docs/source/operators/index.rst new file mode 100644 index 0000000..41354bc --- /dev/null +++ b/server/docs/source/operators/index.rst @@ -0,0 +1,15 @@ +Documentation for Operators +=========================== + +These pages are targeted at people using, configuring, and/or deploying multiple Jupyter Web Application with Jupyter Server. + +.. toctree:: + :caption: Operators + :maxdepth: 1 + :name: operators + + multiple-extensions + configuring-extensions + migrate-from-nbserver + public-server + security diff --git a/server/docs/source/operators/ipython_security.asc b/server/docs/source/operators/ipython_security.asc new file mode 100644 index 0000000..9543681 --- /dev/null +++ b/server/docs/source/operators/ipython_security.asc @@ -0,0 +1,52 @@ +-----BEGIN PGP PUBLIC KEY BLOCK----- +Version: GnuPG v2.0.22 (GNU/Linux) + +mQINBFMx2LoBEAC9xU8JiKI1VlCJ4PT9zqhU5nChQZ06/bj1BBftiMJG07fdGVO0 +ibOn4TrCoRYaeRlet0UpHzxT4zDa5h3/usJaJNTSRwtWePw2o7Lik8J+F3LionRf +8Jz81WpJ+81Klg4UWKErXjBHsu/50aoQm6ZNYG4S2nwOmMVEC4nc44IAA0bb+6kW +saFKKzEDsASGyuvyutdyUHiCfvvh5GOC2h9mXYvl4FaMW7K+d2UgCYERcXDNy7C1 +Bw+uepQ9ELKdG4ZpvonO6BNr1BWLln3wk93AQfD5qhfsYRJIyj0hJlaRLtBU3i6c +xs+gQNF4mPmybpPSGuOyUr4FYC7NfoG7IUMLj+DYa6d8LcMJO+9px4IbdhQvzGtC +qz5av1TX7/+gnS4L8C9i1g8xgI+MtvogngPmPY4repOlK6y3l/WtxUPkGkyYkn3s +RzYyE/GJgTwuxFXzMQs91s+/iELFQq/QwmEJf+g/QYfSAuM+lVGajEDNBYVAQkxf +gau4s8Gm0GzTZmINilk+7TxpXtKbFc/Yr4A/fMIHmaQ7KmJB84zKwONsQdVv7Jjj +0dpwu8EIQdHxX3k7/Q+KKubEivgoSkVwuoQTG15X9xrOsDZNwfOVQh+JKazPvJtd +SNfep96r9t/8gnXv9JI95CGCQ8lNhXBUSBM3BDPTbudc4b6lFUyMXN0mKQARAQAB +tCxJUHl0aG9uIFNlY3VyaXR5IFRlYW0gPHNlY3VyaXR5QGlweXRob24ub3JnPokC +OAQTAQIAIgUCUzHYugIbAwYLCQgHAwIGFQgCCQoLBBYCAwECHgECF4AACgkQEwJc +LcmZYkjuXg//R/t6nMNQmf9W1h52IVfUbRAVmvZ5d063hQHKV2dssxtnA2dRm/x5 
+JZu8Wz7ZrEZpyqwRJO14sxN1/lC3v+zs9XzYXr2lBTZuKCPIBypYVGIynCuWJBQJ +rWnfG4+u1RHahnjqlTWTY1C/le6v7SjAvCb6GbdA6k4ZL2EJjQlRaHDmzw3rV/+l +LLx6/tYzIsotuflm/bFumyOMmpQQpJjnCkWIVjnRICZvuAn97jLgtTI0+0Rzf4Zb +k2BwmHwDRqWCTTcRI9QvTl8AzjW+dNImN22TpGOBPfYj8BCZ9twrpKUbf+jNqJ1K +THQzFtpdJ6SzqiFVm74xW4TKqCLkbCQ/HtVjTGMGGz/y7KTtaLpGutQ6XE8SSy6P +EffSb5u+kKlQOWaH7Mc3B0yAojz6T3j5RSI8ts6pFi6pZhDg9hBfPK2dT0v/7Mkv +E1Z7q2IdjZnhhtGWjDAMtDDn2NbY2wuGoa5jAWAR0WvIbEZ3kOxuLE5/ZOG1FyYm +noJRliBz7038nT92EoD5g1pdzuxgXtGCpYyyjRZwaLmmi4CvA+oThKmnqWNY5lyY +ricdNHDiyEXK0YafJL1oZgM86MSb0jKJMp5U11nUkUGzkroFfpGDmzBwAzEPgeiF +40+qgsKB9lqwb3G7PxvfSi3XwxfXgpm1cTyEaPSzsVzve3d1xeqb7Yq5Ag0EUzHY +ugEQALQ5FtLdNoxTxMsgvrRr1ejLiUeRNUfXtN1TYttOfvAhfBVnszjtkpIW8DCB +JF/bA7ETiH8OYYn/Fm6MPI5H64IHEncpzxjf57jgpXd9CA9U2OMk/P1nve5zYchP +QmP2fJxeAWr0aRH0Mse5JS5nCkh8Xv4nAjsBYeLTJEVOb1gPQFXOiFcVp3gaKAzX +GWOZ/mtG/uaNsabH/3TkcQQEgJefd11DWgMB7575GU+eME7c6hn3FPITA5TC5HUX +azvjv/PsWGTTVAJluJ3fUDvhpbGwYOh1uV0rB68lPpqVIro18IIJhNDnccM/xqko +4fpJdokdg4L1wih+B04OEXnwgjWG8OIphR/oL/+M37VV2U7Om/GE6LGefaYccC9c +tIaacRQJmZpG/8RsimFIY2wJ07z8xYBITmhMmOt0bLBv0mU0ym5KH9Dnru1m9QDO +AHwcKrDgL85f9MCn+YYw0d1lYxjOXjf+moaeW3izXCJ5brM+MqVtixY6aos3YO29 +J7SzQ4aEDv3h/oKdDfZny21jcVPQxGDui8sqaZCi8usCcyqWsKvFHcr6vkwaufcm +3Knr2HKVotOUF5CDZybopIz1sJvY/5Dx9yfRmtivJtglrxoDKsLi1rQTlEQcFhCS +ACjf7txLtv03vWHxmp4YKQFkkOlbyhIcvfPVLTvqGerdT2FHABEBAAGJAh8EGAEC +AAkFAlMx2LoCGwwACgkQEwJcLcmZYkgK0BAAny0YUugpZldiHzYNf8I6p2OpiDWv +ZHaguTTPg2LJSKaTd+5UHZwRFIWjcSiFu+qTGLNtZAdcr0D5f991CPvyDSLYgOwb +Jm2p3GM2KxfECWzFbB/n/PjbZ5iky3+5sPlOdBR4TkfG4fcu5GwUgCkVe5u3USAk +C6W5lpeaspDz39HAPRSIOFEX70+xV+6FZ17B7nixFGN+giTpGYOEdGFxtUNmHmf+ +waJoPECyImDwJvmlMTeP9jfahlB6Pzaxt6TBZYHetI/JR9FU69EmA+XfCSGt5S+0 +Eoc330gpsSzo2VlxwRCVNrcuKmG7PsFFANok05ssFq1/Djv5rJ++3lYb88b8HSP2 +3pQJPrM7cQNU8iPku9yLXkY5qsoZOH+3yAia554Dgc8WBhp6fWh58R0dIONQxbbo +apNdwvlI8hKFB7TiUL6PNShE1yL+XD201iNkGAJXbLMIC1ImGLirUfU267A3Cop5 +hoGs179HGBcyj/sKA3uUIFdNtP+NndaP3v4iYhCitdVCvBJMm6K3tW88qkyRGzOk 
+4PW422oyWKwbAPeMk5PubvEFuFAIoBAFn1zecrcOg85RzRnEeXaiemmmH8GOe1Xu +Kh+7h8XXyG6RPFy8tCcLOTk+miTqX+4VWy+kVqoS2cQ5IV8WsJ3S7aeIy0H89Z8n +5vmLc+Ibz+eT+rM= +=XVDe +-----END PGP PUBLIC KEY BLOCK----- diff --git a/server/docs/source/operators/migrate-from-nbserver.rst b/server/docs/source/operators/migrate-from-nbserver.rst new file mode 100644 index 0000000..d635d3b --- /dev/null +++ b/server/docs/source/operators/migrate-from-nbserver.rst @@ -0,0 +1,36 @@ +.. _migrate_from_notebook: + +Migrating from Notebook Server +============================== + +To migrate from notebook server to plain jupyter server, follow these steps: + +- Rename your ``jupyter_notebook_config.py`` file to ``jupyter_server_config.py``. +- Rename all ``c.NotebookApp`` traits to ``c.ServerApp``. + +For example if you have the following ``jupyter_notebook_config.py``. + +.. code-block:: python + + c.NotebookApp.allow_credentials = False + c.NotebookApp.port = 8889 + c.NotebookApp.password_required = True + + +You will have to create the following ``jupyter_server_config.py`` file. + +.. code-block:: python + + c.ServerApp.allow_credentials = False + c.ServerApp.port = 8889 + c.ServerApp.password_required = True + + +Running Jupyter Notebook on Jupyter Server +========================================== + +If you want to switch to Jupyter Server, but you still want to serve `Jupyter Notebook `_ to users, you can try `NBClassic `_. + +NBClassic is a Jupyter Server extension that serves the Notebook frontend (i.e. all static assets) on top of Jupyter Server. It even loads Jupyter Notebook's config files. + +.. warning:: NBClassic will only work for a limited time. Jupyter Server is likely to evolve beyond a point where Jupyter Notebook frontend will no longer work with the underlying server. Consider switching to `JupyterLab `_ or `nteract `_ where there is active development happening. 
diff --git a/server/docs/source/operators/multiple-extensions.rst b/server/docs/source/operators/multiple-extensions.rst new file mode 100644 index 0000000..0579e8e --- /dev/null +++ b/server/docs/source/operators/multiple-extensions.rst @@ -0,0 +1,89 @@ + +.. _managing-multiple-extensions: + +Managing multiple extensions +---------------------------- + +One of the major benefits of Jupyter Server is that you can run serve multiple Jupyter frontend applications above the same Tornado web server. That's because every Jupyter frontend application is now a server extension. When you run a Jupyter Server will multiple extensions enabled, each extension appends its own set of handlers and static assets to the server. + +Listing extensions +~~~~~~~~~~~~~~~~~~ + +When you install a Jupyter Server extension, it *should* automatically add itself to your list of enabled extensions. You can see a list of installed extensions by calling: + +.. code-block:: console + + > jupyter server extension list + + config dir: /Users/username/etc/jupyter + myextension enabled + - Validating myextension... + myextension OK + +Enabling/disabling extensions +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +You enable/disable an extension using the following commands: + +.. code-block:: console + + > jupyter server extension enable myextension + + Enabling: myextension + - Validating myextension... + myextension OK + - Extension successfully enabled. + + + > jupyter server extension disable myextension + + Disabling: jupyter_home + - Validating jupyter_home... + jupyter_home OK + - Extension successfully disabled. + + +Running an extensions from its entrypoint +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Extensions that are also Jupyter applications (i.e. Notebook, JupyterLab, Voila, etc.) can be launched +from a CLI entrypoint. For example, launch Jupyter Notebook using: + +.. 
code-block:: console + + > jupyter notebook + + +Jupyter Server will automatically start a server and the browser will be routed to Jupyter Notebook's default URL (typically, ``/tree``). + +Other enabled extension will still be available to the user. The entrypoint simply offers a more direct (backwards compatible) launching mechanism. + +Launching a server with multiple extensions +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +If multiple extensions are enabled, a Jupyter Server can be launched directly: + +.. code-block:: console + + > jupyter server + + [I 2020-03-23 15:44:53.290 ServerApp] Serving notebooks from local directory: /Users/username/path + [I 2020-03-23 15:44:53.290 ServerApp] Jupyter Server 0.3.0.dev is running at: + [I 2020-03-23 15:44:53.290 ServerApp] http://localhost:8888/?token=<...> + [I 2020-03-23 15:44:53.290 ServerApp] or http://127.0.0.1:8888/?token=<...> + [I 2020-03-23 15:44:53.290 ServerApp] Use Control-C to stop this server and shut down all kernels (twice to skip confirmation). + [I 2020-03-23 15:44:53.290 ServerApp] Welcome to Project Jupyter! Explore the various tools available and their corresponding documentation. If you are interested in contributing to the platform, please visit the communityresources section at https://jupyter.org/community.html. + [C 2020-03-23 15:44:53.296 ServerApp] + + To access the server, open this file in a browser: + file:///Users/username/pathjpserver-####-open.html + Or copy and paste one of these URLs: + http://localhost:8888/?token=<...> + or http://127.0.0.1:8888/?token=<...> + + +Extensions can also be enabled manually from the Jupyter Server entrypoint using the ``jpserver_extensions`` trait: + +.. 
code-block:: console + + > jupyter server --ServerApp.jpserver_extensions='{"myextension":{"enabled": True}}' diff --git a/server/docs/source/operators/public-server.rst b/server/docs/source/operators/public-server.rst new file mode 100644 index 0000000..cfe75fd --- /dev/null +++ b/server/docs/source/operators/public-server.rst @@ -0,0 +1,443 @@ +.. _working_remotely: + +Running a public Jupyter Server +=============================== + + +The Jupyter Server uses a :ref:`two-process kernel +architecture ` based on ZeroMQ_, as well as Tornado_ for +serving HTTP requests. + +.. note:: + By default, Jupyter Server runs locally at 127.0.0.1:8888 + and is accessible only from `localhost`. You may access the + server from the browser using `http://127.0.0.1:8888`. + +This document describes how you can +:ref:`secure a Jupyter server ` and how to +:ref:`run it on a public interface `. + +.. important:: + + **This is not the multi-user server you are looking for**. This document + describes how you can run a public server with a single user. This should + only be done by someone who wants remote access to their personal machine. + Even so, doing this requires a thorough understanding of the set-ups + limitations and security implications. If you allow multiple users to + access a Jupyter server as it is described in this document, their + commands may collide, clobber and overwrite each other. + + If you want a multi-user server, the official solution is JupyterHub_. + To use JupyterHub, you need a Unix server (typically Linux) running + somewhere that is accessible to your users on a network. This may run over + the public internet, but doing so introduces additional + `security concerns `_. + + + +.. _ZeroMQ: https://zeromq.org/ + +.. _Tornado: with Found to http://www.tornadoweb.org/en/stable/ + +.. _JupyterHub: https://jupyterhub.readthedocs.io/en/latest/ + +.. 
_Jupyter_server_security: + +Securing a Jupyter server +------------------------- + +You can protect your Jupyter server with a simple single password. As of +notebook 5.0 this can be done automatically. To set up a password manually you +can configure the :attr:`ServerApp.password` setting in +:file:`jupyter_server_config.py`. + + +Prerequisite: A Jupyter server configuration file +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Check to see if you have a Jupyter server configuration file, +:file:`jupyter_server_config.py`. The default location for this file +is your Jupyter folder located in your home directory: + + - Windows: :file:`C:\\Users\\USERNAME\\.jupyter\\jupyter_server_config.py` + - OS X: :file:`/Users/USERNAME/.jupyter/jupyter_server_config.py` + - Linux: :file:`/home/USERNAME/.jupyter/jupyter_server_config.py` + +If you don't already have a Jupyter folder, or if your Jupyter folder doesn't contain +a Jupyter server configuration file, run the following command:: + + $ jupyter server --generate-config + +This command will create the Jupyter folder if necessary, and create a Jupyter +server configuration file, :file:`jupyter_server_config.py`, in this folder. + + +Automatic Password setup +~~~~~~~~~~~~~~~~~~~~~~~~ + +As of notebook 5.3, the first time you log-in using a token, the server should +give you the opportunity to setup a password from the user interface. + +You will be presented with a form asking for the current _token_, as well as +your _new_ _password_ ; enter both and click on ``Login and setup new password``. + +Next time you need to log in you'll be able to use the new password instead of +the login token, otherwise follow the procedure to set a password from the +command line. 
The ability to change the password at first login time may be disabled by
integrations by setting the ``--ServerApp.allow_password_change=False`` option.
The default location for this file +:file:`jupyter_server_config.py` is in your Jupyter folder in your home +directory, ``~/.jupyter``, e.g.:: + + c.ServerApp.password = u'sha1:67c9e60bb8b6:9ffede0825894254b2e042ea597d771089e11aed' + +Automatic password setup will store the hash in ``jupyter_server_config.json`` +while this method stores the hash in ``jupyter_server_config.py``. The ``.json`` +configuration options take precedence over the ``.py`` one, thus the manual +password may not take effect if the Json file has a password set. + + +Using SSL for encrypted communication +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +When using a password, it is a good idea to also use SSL with a web +certificate, so that your hashed password is not sent unencrypted by your +browser. + +.. important:: + Web security is rapidly changing and evolving. We provide this document + as a convenience to the user, and recommend that the user keep current on + changes that may impact security, such as new releases of OpenSSL. + The Open Web Application Security Project (`OWASP`_) website is a good resource + on general security issues and web practices. + +You can start the notebook to communicate via a secure protocol mode by setting +the ``certfile`` option to your self-signed certificate, i.e. ``mycert.pem``, +with the command:: + + $ jupyter server --certfile=mycert.pem --keyfile mykey.key + +.. tip:: + + A self-signed certificate can be generated with ``openssl``. For example, + the following command will create a certificate valid for 365 days with + both the key and certificate data written to the same file:: + + $ openssl req -x509 -nodes -days 365 -newkey rsa:2048 -keyout mykey.key -out mycert.pem + +When starting the notebook server, your browser may warn that your self-signed +certificate is insecure or unrecognized. 
If you wish to have a fully +compliant self-signed certificate that will not raise warnings, it is possible +(but rather involved) to create one, as explained in detail in this +`tutorial`_. Alternatively, you may use `Let's Encrypt`_ to acquire a free SSL +certificate and follow the steps in :ref:`using-lets-encrypt` to set up a +public server. + +.. _OWASP: https://www.owasp.org/index.php/Main_Page +.. _tutorial: https://arstechnica.com/information-technology/2009/12/how-to-get-set-with-a-secure-sertificate-for-free/ + +.. _jupyter_public_server: + +Running a public notebook server +-------------------------------- + +If you want to access your notebook server remotely via a web browser, +you can do so by running a public notebook server. For optimal security +when running a public notebook server, you should first secure the +server with a password and SSL/HTTPS as described in +:ref:`jupyter_server_security`. + +Start by creating a certificate file and a hashed password, as explained in +:ref:`jupyter_server_security`. + +If you don't already have one, create a +config file for the notebook using the following command line:: + + $ jupyter server --generate-config + +In the ``~/.jupyter`` directory, edit the notebook config file, +``jupyter_server_config.py``. By default, the notebook config file has +all fields commented out. The minimum set of configuration options that +you should uncomment and edit in :file:`jupyter_server_config.py` is the +following:: + + # Set options for certfile, ip, password, and toggle off + # browser auto-opening + c.ServerApp.certfile = u'/absolute/path/to/your/certificate/mycert.pem' + c.ServerApp.keyfile = u'/absolute/path/to/your/certificate/mykey.key' + # Set ip to '*' to bind on all interfaces (ips) for the public server + c.ServerApp.ip = '*' + c.ServerApp.password = u'sha1:bcd259ccf...' 
+ c.ServerApp.open_browser = False + + # It is a good idea to set a known, fixed port for server access + c.ServerApp.port = 9999 + +You can then start the notebook using the ``jupyter server`` command. + +.. _using-lets-encrypt: + +Using Let's Encrypt +~~~~~~~~~~~~~~~~~~~ +`Let's Encrypt`_ provides free SSL/TLS certificates. You can also set up a +public server using a `Let's Encrypt`_ certificate. + +:ref:`jupyter_public_server` will be similar when using a Let's Encrypt +certificate with a few configuration changes. Here are the steps: + +1. Create a `Let's Encrypt certificate `_. +2. Use :ref:`hashed-pw` to create one. +3. If you don't already have config file for the notebook, create one + using the following command: + + .. code-block:: bash + + $ jupyter server --generate-config + +4. In the ``~/.jupyter`` directory, edit the notebook config file, +``jupyter_server_config.py``. By default, the notebook config file has +all fields commented out. The minimum set of configuration options that +you should to uncomment and edit in :file:`jupyter_server_config.py` is the +following:: + + # Set options for certfile, ip, password, and toggle off + # browser auto-opening + c.ServerApp.certfile = u'/absolute/path/to/your/certificate/fullchain.pem' + c.ServerApp.keyfile = u'/absolute/path/to/your/certificate/privkey.pem' + # Set ip to '*' to bind on all interfaces (ips) for the public server + c.ServerApp.ip = '*' + c.ServerApp.password = u'sha1:bcd259ccf...' + c.ServerApp.open_browser = False + + # It is a good idea to set a known, fixed port for server access + c.ServerApp.port = 9999 + +You can then start the notebook using the ``jupyter server`` command. + +.. important:: + + **Use 'https'.** + Keep in mind that when you enable SSL support, you must access the + notebook server over ``https://``, not over plain ``http://``. 
The startup + message from the server prints a reminder in the console, but *it is easy + to overlook this detail and think the server is for some reason + non-responsive*. + + **When using SSL, always access the notebook server with 'https://'.** + +You may now access the public server by pointing your browser to +``https://your.host.com:9999`` where ``your.host.com`` is your public server's +domain. + +.. _`Let's Encrypt`: https://letsencrypt.org + + +Firewall Setup +~~~~~~~~~~~~~~ + +To function correctly, the firewall on the computer running the jupyter +notebook server must be configured to allow connections from client +machines on the access port ``c.ServerApp.port`` set in +:file:`jupyter_server_config.py` to allow connections to the +web interface. The firewall must also allow connections from +127.0.0.1 (localhost) on ports from 49152 to 65535. +These ports are used by the server to communicate with the notebook kernels. +The kernel communication ports are chosen randomly by ZeroMQ, and may require +multiple connections per kernel, so a large range of ports must be accessible. + +Running the notebook with a customized URL prefix +------------------------------------------------- + +The notebook dashboard, which is the landing page with an overview +of the notebooks in your working directory, is typically found and accessed +at the default URL ``http://localhost:8888/``. + +If you prefer to customize the URL prefix for the notebook dashboard, you can +do so through modifying ``jupyter_server_config.py``. For example, if you +prefer that the notebook dashboard be located with a sub-directory that +contains other ipython files, e.g. ``http://localhost:8888/ipython/``, +you can do so with configuration options like the following (see above for +instructions about modifying ``jupyter_server_config.py``): + +.. 
code-block:: python + + c.ServerApp.base_url = '/ipython/' + +Embedding the notebook in another website +----------------------------------------- + +Sometimes you may want to embed the notebook somewhere on your website, +e.g. in an IFrame. To do this, you may need to override the +Content-Security-Policy to allow embedding. Assuming your website is at +`https://mywebsite.example.com`, you can embed the notebook on your website +with the following configuration setting in +:file:`jupyter_server_config.py`: + +.. code-block:: python + + c.ServerApp.tornado_settings = { + 'headers': { + 'Content-Security-Policy': "frame-ancestors https://mywebsite.example.com 'self' " + } + } + +When embedding the notebook in a website using an iframe, +consider putting the notebook in single-tab mode. +Since the notebook opens some links in new tabs by default, +single-tab mode keeps the notebook from opening additional tabs. +Adding the following to :file:`~/.jupyter/custom/custom.js` will enable +single-tab mode: + +.. code-block:: javascript + + define(['base/js/namespace'], function(Jupyter){ + Jupyter._target = '_self'; + }); + + +Using a gateway server for kernel management +-------------------------------------------- + +You are now able to redirect the management of your kernels to a Gateway Server +(i.e., `Jupyter Kernel Gateway `_ or +`Jupyter Enterprise Gateway `_) +simply by specifying a Gateway url via the following command-line option: + + .. code-block:: bash + + $ jupyter notebook --gateway-url=http://my-gateway-server:8888 + +the environment: + + .. code-block:: bash + + JUPYTER_GATEWAY_URL=http://my-gateway-server:8888 + +or in :file:`jupyter_notebook_config.py`: + + .. code-block:: python + + c.GatewayClient.url = http://my-gateway-server:8888 + +When provided, all kernel specifications will be retrieved from the specified Gateway server and all +kernels will be managed by that server. 
This option enables the ability to target kernel processes +against managed clusters while allowing for the notebook's management to remain local to the Notebook +server. + +Known issues +------------ + +Proxies +~~~~~~~ + +When behind a proxy, especially if your system or browser is set to autodetect +the proxy, the notebook web application might fail to connect to the server's +websockets, and present you with a warning at startup. In this case, you need +to configure your system not to use the proxy for the server's address. + +For example, in Firefox, go to the Preferences panel, Advanced section, +Network tab, click 'Settings...', and add the address of the Jupyter server +to the 'No proxy for' field. + +Content-Security-Policy (CSP) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Certain `security guidelines +`_ +recommend that servers use a Content-Security-Policy (CSP) header to prevent +cross-site scripting vulnerabilities, specifically limiting to ``default-src: +https:`` when possible. This directive causes two problems with Jupyter. +First, it disables execution of inline javascript code, which is used +extensively by Jupyter. Second, it limits communication to the https scheme, +and prevents WebSockets from working because they communicate via the wss +scheme (or ws for insecure communication). Jupyter uses WebSockets for +interacting with kernels, so when you visit a server with such a CSP, your +browser will block attempts to use wss, which will cause you to see +"Connection failed" messages from jupyter notebooks, or simply no response +from jupyter terminals. By looking in your browser's javascript console, you +can see any error messages that will explain what is failing. + +To avoid these problem, you need to add ``'unsafe-inline'`` and ``connect-src +https: wss:`` to your CSP header, at least for pages served by jupyter. (That +is, you can leave your CSP unchanged for other parts of your website.) 
Note +that multiple CSP headers are allowed, but successive CSP headers can only +restrict the policy; they cannot loosen it. For example, if your server sends +both of these headers + + Content-Security-Policy "default-src https: 'unsafe-inline'" + Content-Security-Policy "connect-src https: wss:" + +the first policy will already eliminate wss connections, so the second has no +effect. Therefore, you can't simply add the second header; you have to +actually modify your CSP header to look more like this: + + Content-Security-Policy "default-src https: 'unsafe-inline'; connect-src https: wss:" + + + +Docker CMD +~~~~~~~~~~ + +Using ``jupyter server`` as a +`Docker CMD `_ results in +kernels repeatedly crashing, likely due to a lack of `PID reaping +`_. +To avoid this, use the `tini `_ ``init`` as your +Dockerfile `ENTRYPOINT`:: + + # Add Tini. Tini operates as a process subreaper for jupyter. This prevents + # kernel crashes. + ENV TINI_VERSION v0.6.0 + ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /usr/bin/tini + RUN chmod +x /usr/bin/tini + ENTRYPOINT ["/usr/bin/tini", "--"] + + EXPOSE 8888 + CMD ["jupyter", "server", "--port=8888", "--no-browser", "--ip=0.0.0.0"] diff --git a/server/docs/source/operators/security.rst b/server/docs/source/operators/security.rst new file mode 100644 index 0000000..87148cb --- /dev/null +++ b/server/docs/source/operators/security.rst @@ -0,0 +1,376 @@ +.. _server_security: + +Security in the Jupyter Server +============================== + +Since access to the Jupyter Server means access to running arbitrary code, +it is important to restrict access to the server. +For this reason, Jupyter Server uses a token-based authentication that is **on by default**. + +.. note:: + + If you enable a password for your server, + token authentication is not enabled by default. + +When token authentication is enabled, the server uses a token to authenticate requests. 
This token can be provided to log in to the server in three ways:
+ +Alternatives to token authentication +------------------------------------ + +If a generated token doesn't work well for you, +you can set a password for your server. +:command:`jupyter server password` will prompt you for a password, +and store the hashed password in your :file:`jupyter_server_config.json`. + +.. versionadded:: 5.0 + + :command:`jupyter server password` command is added. + + +It is possible disable authentication altogether by setting the token and password to empty strings, +but this is **NOT RECOMMENDED**, unless authentication or access restrictions are handled at a different layer in your web application: + +.. sourcecode:: python + + c.ServerApp.token = '' + c.ServerApp.password = '' + +Authorization +------------- + +.. versionadded:: 2.0 + +Authorization in Jupyter Server serves to provide finer grained control of access to its +API resources. With authentication, requests are accepted if the current user is known by +the server. Thus it can restrain access to specific users, but there is no way to give allowed +users more or less permissions. Jupyter Server provides a thin and extensible authorization layer +which checks if the current user is authorized to make a specific request. + +This is done by calling a ``is_authorized(handler, user, action, resource)`` method before each +request handler. Each request is labeled as either a "read", "write", or "execute" ``action``: + +- "read" wraps all ``GET`` and ``HEAD`` requests. + In general, read permissions grants access to read but not modify anything about the given resource. +- "write" wraps all ``POST``, ``PUT``, ``PATCH``, and ``DELETE`` requests. + In general, write permissions grants access to modify the given resource. +- "execute" wraps all requests to ZMQ/Websocket channels (terminals and kernels). + Execute is a special permission that usually corresponds to arbitrary execution, + such as via a kernel or terminal. 
+ These permissions should generally be considered sufficient to perform actions equivalent + to ~all other permissions via other means. + +The ``resource`` being accessed refers to the resource name in the Jupyter Server's API endpoints. +In most cases, this is matches the field after `/api/`. +For instance, values for ``resource`` in the endpoints provided by the base jupyter server package, +and the corresponding permissions: + +.. list-table:: + :header-rows: 1 + + * - resource + - read + - write + - execute + - endpoints + + * - *resource name* + - *what can you do with read permissions?* + - *what can you do with write permissions?* + - *what can you do with execute permissions, if anything?* + - ``/api/...`` *what endpoints are governed by this resource?* + + * - api + - read server status (last activity, number of kernels, etc.), OpenAPI specification + - + - + - ``/api/status``, ``/api/spec.yaml`` + * - csp + - + - report content-security-policy violations + - + - ``/api/security/csp-report`` + * - config + - read frontend configuration, such as for notebook extensions + - modify frontend configuration + - + - ``/api/config`` + * - contents + - read files + - modify files (create, modify, delete) + - + - ``/api/contents``, ``/view``, ``/files`` + * - kernels + - list kernels, get status of kernels + - start, stop, and restart kernels + - Connect to kernel websockets, send/recv kernel messages. + **This generally means arbitrary code execution, + and should usually be considered equivalent to having all other permissions.** + - ``/api/kernels`` + * - kernelspecs + - read, list information about available kernels + - + - + - ``/api/kernelspecs`` + * - nbconvert + - render notebooks to other formats via nbconvert. 
+ **Note: depending on server-side configuration, + this *could* involve execution.** + - + - + - ``/api/nbconvert`` + * - server + - + - Shutdown the server + - + - ``/api/shutdown`` + * - sessions + - list current sessions (association of documents to kernels) + - create, modify, and delete existing sessions, + which includes starting, stopping, and deleting kernels. + - + - ``/api/sessions`` + * - terminals + - list running terminals and their last activity + - start new terminals, stop running terminals + - Connect to terminal websockets, execute code in a shell. + **This generally means arbitrary code execution, + and should usually be considered equivalent to having all other permissions.** + - ``/api/terminals`` + + +Extensions may define their own resources. +Extension resources should start with ``extension_name:`` to avoid namespace conflicts. + +If ``is_authorized(...)`` returns ``True``, the request is made; otherwise, a +``HTTPError(403)`` (403 means "Forbidden") error is raised, and the request is blocked. + +By default, authorization is turned off—i.e. ``is_authorized()`` always returns ``True`` and +all authenticated users are allowed to make all types of requests. To turn-on authorization, pass +a class that inherits from ``Authorizer`` to the ``ServerApp.authorizer_class`` +parameter, implementing a ``is_authorized()`` method with your desired authorization logic, as +follows: + +.. sourcecode:: python + + from jupyter_server.auth import Authorizer + + class MyAuthorizationManager(Authorizer): + """Class for authorizing access to resources in the Jupyter Server. + + All authorizers used in Jupyter Server should inherit from + AuthorizationManager and, at the very minimum, override and implement + an `is_authorized` method with the following signature. + + The `is_authorized` method is called by the `@authorized` decorator in + JupyterHandler. 
If it returns True, the incoming request to the server + is accepted; if it returns False, the server returns a 403 (Forbidden) error code. + """ + + def is_authorized(self, handler: JupyterHandler, user: Any, action: str, resource: str) -> bool: + """A method to determine if `user` is authorized to perform `action` + (read, write, or execute) on the `resource` type. + + Parameters + ------------ + user : usually a dict or string + A truthy model representing the authenticated user. + A username string by default, + but usually a dict when integrating with an auth provider. + + action : str + the category of action for the current request: read, write, or execute. + + resource : str + the type of resource (i.e. contents, kernels, files, etc.) the user is requesting. + + Returns True if user authorized to make request; otherwise, returns False. + """ + return True # implement your authorization logic here + +The ``is_authorized()`` method will automatically be called whenever a handler is decorated with +``@authorized`` (from ``jupyter_server.auth``), similarly to the +``@authenticated`` decorator for authorization (from ``tornado.web``). + +Security in notebook documents +============================== + +As Jupyter Server become more popular for sharing and collaboration, +the potential for malicious people to attempt to exploit the notebook +for their nefarious purposes increases. IPython 2.0 introduced a +security model to prevent execution of untrusted code without explicit +user input. + +The problem +----------- + +The whole point of Jupyter is arbitrary code execution. We have no +desire to limit what can be done with a notebook, which would negatively +impact its utility. + +Unlike other programs, a Jupyter notebook document includes output. +Unlike other documents, that output exists in a context that can execute +code (via Javascript). 
+ +The security problem we need to solve is that no code should execute +just because a user has **opened** a notebook that **they did not +write**. Like any other program, once a user decides to execute code in +a notebook, it is considered trusted, and should be allowed to do +anything. + +Our security model +------------------ + +- Untrusted HTML is always sanitized +- Untrusted Javascript is never executed +- HTML and Javascript in Markdown cells are never trusted +- **Outputs** generated by the user are trusted +- Any other HTML or Javascript (in Markdown cells, output generated by + others) is never trusted +- The central question of trust is "Did the current user do this?" + +The details of trust +-------------------- + +When a notebook is executed and saved, a signature is computed from a +digest of the notebook's contents plus a secret key. This is stored in a +database, writable only by the current user. By default, this is located at:: + + ~/.local/share/jupyter/nbsignatures.db # Linux + ~/Library/Jupyter/nbsignatures.db # OS X + %APPDATA%/jupyter/nbsignatures.db # Windows + +Each signature represents a series of outputs which were produced by code the +current user executed, and are therefore trusted. + +When you open a notebook, the server computes its signature, and checks if it's +in the database. If a match is found, HTML and Javascript +output in the notebook will be trusted at load, otherwise it will be +untrusted. + +Any output generated during an interactive session is trusted. + +Updating trust +************** + +A notebook's trust is updated when the notebook is saved. If there are +any untrusted outputs still in the notebook, the notebook will not be +trusted, and no signature will be stored. If all untrusted outputs have +been removed (either via ``Clear Output`` or re-execution), then the +notebook will become trusted. + +While trust is updated per output, this is only for the duration of a +single session. 
A newly loaded notebook file is either trusted or not in its +entirety. + +Explicit trust +************** + +Sometimes re-executing a notebook to generate trusted output is not an +option, either because dependencies are unavailable, or it would take a +long time. Users can explicitly trust a notebook in two ways: + +- At the command-line, with:: + + jupyter trust /path/to/notebook.ipynb + +- After loading the untrusted notebook, with ``File / Trust Notebook`` + +These two methods simply load the notebook, compute a new signature, and add +that signature to the user's database. + +Reporting security issues +------------------------- + +If you find a security vulnerability in Jupyter, either a failure of the +code to properly implement the model described here, or a failure of the +model itself, please report it to security@ipython.org. + +If you prefer to encrypt your security reports, +you can use :download:`this PGP public key `. + +Affected use cases +------------------ + +Some use cases that work in Jupyter 1.0 became less convenient in +2.0 as a result of the security changes. We do our best to minimize +these annoyances, but security is always at odds with convenience. + +Javascript and CSS in Markdown cells +************************************ + +While never officially supported, it had become common practice to put +hidden Javascript or CSS styling in Markdown cells, so that they would +not be visible on the page. Since Markdown cells are now sanitized (by +`Google Caja `__), all Javascript +(including click event handlers, etc.) and CSS will be stripped. + +We plan to provide a mechanism for notebook themes, but in the meantime +styling the notebook can only be done via either ``custom.css`` or CSS +in HTML output. The latter only have an effect if the notebook is +trusted, because otherwise the output will be sanitized just like +Markdown. 
+ +Collaboration +************* + +When collaborating on a notebook, people probably want to see the +outputs produced by their colleagues' most recent executions. Since each +collaborator's key will differ, this will result in each share starting +in an untrusted state. There are three basic approaches to this: + +- re-run notebooks when you get them (not always viable) +- explicitly trust notebooks via ``jupyter trust`` or the notebook menu + (annoying, but easy) +- share a notebook signatures database, and use configuration dedicated to the + collaboration while working on the project. + +To share a signatures database among users, you can configure: + +.. code-block:: python + + c.NotebookNotary.data_dir = "/path/to/signature_dir" + +to specify a non-default path to the SQLite database (of notebook hashes, +essentially). diff --git a/server/docs/source/other/faq.rst b/server/docs/source/other/faq.rst new file mode 100644 index 0000000..db9e263 --- /dev/null +++ b/server/docs/source/other/faq.rst @@ -0,0 +1,13 @@ +.. _faq: + + +Frequently asked questions +========================== + +Here is a list of questions we think you might have. This list will always be growing, so please feel free to add your question+anwer to this page! |:rocket:| + + +Can I configure multiple extensions at once? +-------------------------------------------- + +Checkout our "Operator" docs on how to :ref:`configure extensions `. |:closed_book:| diff --git a/server/docs/source/other/full-config.rst b/server/docs/source/other/full-config.rst new file mode 100644 index 0000000..55d708a --- /dev/null +++ b/server/docs/source/other/full-config.rst @@ -0,0 +1,1390 @@ +.. _other-full-config: + + +Config file and command line options +==================================== + +The Jupyter Server can be run with a variety of command line arguments. +A list of available options can be found below in the :ref:`options section +`. 
+ +Defaults for these options can also be set by creating a file named +``jupyter_server_config.py`` in your Jupyter folder. The Jupyter +folder is in your home directory, ``~/.jupyter``. + +To create a ``jupyter_server_config.py`` file, with all the defaults +commented out, you can use the following command line:: + + $ jupyter server --generate-config + + +.. _options: + +Options +------- + +This list of options can be generated by running the following and hitting +enter:: + + $ jupyter server --help-all + + + + +Application.log_datefmt : Unicode + Default: ``'%Y-%m-%d %H:%M:%S'`` + + The date format used by logging formatters for %(asctime)s + +Application.log_format : Unicode + Default: ``'[%(name)s]%(highlevel)s %(message)s'`` + + The Logging format template + +Application.log_level : any of ``0``|``10``|``20``|``30``|``40``|``50``|``'DEBUG'``|``'INFO'``|``'WARN'``|``'ERROR'``|``'CRITICAL'`` + Default: ``30`` + + Set the log level by value or name. + +Application.show_config : Bool + Default: ``False`` + + Instead of starting the Application, dump configuration to stdout + +Application.show_config_json : Bool + Default: ``False`` + + Instead of starting the Application, dump configuration to stdout (as JSON) + +JupyterApp.answer_yes : Bool + Default: ``False`` + + Answer yes to any prompts. + +JupyterApp.config_file : Unicode + Default: ``''`` + + Full path of a config file. + +JupyterApp.config_file_name : Unicode + Default: ``''`` + + Specify a config file to load. + +JupyterApp.generate_config : Bool + Default: ``False`` + + Generate default config file. 
+ +JupyterApp.log_datefmt : Unicode + Default: ``'%Y-%m-%d %H:%M:%S'`` + + The date format used by logging formatters for %(asctime)s + +JupyterApp.log_format : Unicode + Default: ``'[%(name)s]%(highlevel)s %(message)s'`` + + The Logging format template + +JupyterApp.log_level : any of ``0``|``10``|``20``|``30``|``40``|``50``|``'DEBUG'``|``'INFO'``|``'WARN'``|``'ERROR'``|``'CRITICAL'`` + Default: ``30`` + + Set the log level by value or name. + +JupyterApp.show_config : Bool + Default: ``False`` + + Instead of starting the Application, dump configuration to stdout + +JupyterApp.show_config_json : Bool + Default: ``False`` + + Instead of starting the Application, dump configuration to stdout (as JSON) + +ServerApp.allow_credentials : Bool + Default: ``False`` + + Set the Access-Control-Allow-Credentials: true header + +ServerApp.allow_origin : Unicode + Default: ``''`` + + Set the Access-Control-Allow-Origin header + + Use '*' to allow any origin to access your server. + + Takes precedence over allow_origin_pat. + + +ServerApp.allow_origin_pat : Unicode + Default: ``''`` + + Use a regular expression for the Access-Control-Allow-Origin header + + Requests from an origin matching the expression will get replies with: + + Access-Control-Allow-Origin: origin + + where `origin` is the origin of the request. + + Ignored if allow_origin is set. + + +ServerApp.allow_password_change : Bool + Default: ``True`` + + Allow password to be changed at login for the Jupyter server. + + While logging in with a token, the Jupyter server UI will give the opportunity to + the user to enter a new password at the same time that will replace + the token login mechanism. + + This can be set to false to prevent changing password from the UI/API. 
+ + +ServerApp.allow_remote_access : Bool + Default: ``False`` + + Allow requests where the Host header doesn't point to a local server + + By default, requests get a 403 forbidden response if the 'Host' header + shows that the browser thinks it's on a non-local domain. + Setting this option to True disables this check. + + This protects against 'DNS rebinding' attacks, where a remote web server + serves you a page and then changes its DNS to send later requests to a + local IP, bypassing same-origin checks. + + Local IP addresses (such as 127.0.0.1 and ::1) are allowed as local, + along with hostnames configured in local_hostnames. + + +ServerApp.allow_root : Bool + Default: ``False`` + + Whether to allow the user to run the server as root. + +ServerApp.answer_yes : Bool + Default: ``False`` + + Answer yes to any prompts. + +ServerApp.base_url : Unicode + Default: ``'/'`` + + The base URL for the Jupyter server. + + Leading and trailing slashes can be omitted, + and will automatically be added. + + +ServerApp.browser : Unicode + Default: ``''`` + + Specify what command to use to invoke a web + browser when starting the server. If not specified, the + default browser will be determined by the `webbrowser` + standard library module, which allows setting of the + BROWSER environment variable to override it. + + +ServerApp.certfile : Unicode + Default: ``''`` + + The full path to an SSL/TLS certificate file. + +ServerApp.client_ca : Unicode + Default: ``''`` + + The full path to a certificate authority certificate for SSL/TLS client authentication. + +ServerApp.config_file : Unicode + Default: ``''`` + + Full path of a config file. + +ServerApp.config_file_name : Unicode + Default: ``''`` + + Specify a config file to load. 
+ +ServerApp.config_manager_class : Type + Default: ``'jupyter_server.services.config.manager.ConfigManager'`` + + The config manager class to use + +ServerApp.contents_manager_class : Type + Default: ``'jupyter_server.services.contents.largefilemanager.LargeFileM...`` + + The content manager class to use. + +ServerApp.cookie_options : Dict + Default: ``{}`` + + Extra keyword arguments to pass to `set_secure_cookie`. See tornado's set_secure_cookie docs for details. + +ServerApp.cookie_secret : Bytes + Default: ``b''`` + + The random bytes used to secure cookies. + By default this is a new random number every time you start the server. + Set it to a value in a config file to enable logins to persist across server sessions. + + Note: Cookie secrets should be kept private, do not share config files with + cookie_secret stored in plaintext (you can read the value from a file). + + +ServerApp.cookie_secret_file : Unicode + Default: ``''`` + + The file where the cookie secret is stored. + +ServerApp.custom_display_url : Unicode + Default: ``''`` + + Override URL shown to users. + + Replace actual URL, including protocol, address, port and base URL, + with the given value when displaying URL to the users. Do not change + the actual connection URL. If authentication token is enabled, the + token is added to the custom URL automatically. + + This option is intended to be used when the URL to display to the user + cannot be determined reliably by the Jupyter server (proxified + or containerized setups for example). 
+ +ServerApp.default_url : Unicode + Default: ``'/'`` + + The default URL to redirect to from `/` + +ServerApp.disable_check_xsrf : Bool + Default: ``False`` + + Disable cross-site-request-forgery protection + + Jupyter notebook 4.3.1 introduces protection from cross-site request forgeries, + requiring API requests to either: + + - originate from pages served by this server (validated with XSRF cookie and token), or + - authenticate with a token + + Some anonymous compute resources still desire the ability to run code, + completely without authentication. + These services can disable all authentication and security checks, + with the full knowledge of what that implies. + + +ServerApp.extra_services : List + Default: ``[]`` + + handlers that should be loaded at higher priority than the default services + +ServerApp.extra_static_paths : List + Default: ``[]`` + + Extra paths to search for serving static files. + + This allows adding javascript/css to be available from the Jupyter server machine, + or overriding individual files in the IPython + +ServerApp.extra_template_paths : List + Default: ``[]`` + + Extra paths to search for serving jinja templates. + + Can be used to override templates from jupyter_server.templates. + +ServerApp.file_to_run : Unicode + Default: ``''`` + + No description + +ServerApp.generate_config : Bool + Default: ``False`` + + Generate default config file. + +ServerApp.get_secure_cookie_kwargs : Dict + Default: ``{}`` + + Extra keyword arguments to pass to `get_secure_cookie`. See tornado's get_secure_cookie docs for details. + +ServerApp.iopub_data_rate_limit : Float + Default: ``1000000`` + + (bytes/sec) + Maximum rate at which stream output can be sent on iopub before they are + limited. + +ServerApp.iopub_msg_rate_limit : Float + Default: ``1000`` + + (msgs/sec) + Maximum rate at which messages can be sent on iopub before they are + limited. 
+
+ServerApp.ip : Unicode
+    Default: ``'localhost'``
+
+    The IP address the Jupyter server will listen on.
+
+ServerApp.jinja_environment_options : Dict
+    Default: ``{}``
+
+    Supply extra arguments that will be passed to Jinja environment.
+
+ServerApp.jinja_template_vars : Dict
+    Default: ``{}``
+
+    Extra variables to supply to jinja templates when rendering.
+
+ServerApp.jpserver_extensions : Dict
+    Default: ``{}``
+
+    Dict of Python modules to load as notebook server extensions. Entry values can be used to enable and disable the loading of the extensions. The extensions will be loaded in alphabetical order.
+
+ServerApp.kernel_manager_class : Type
+    Default: ``'jupyter_server.services.kernels.kernelmanager.MappingKernelM...``
+
+    The kernel manager class to use.
+
+ServerApp.kernel_spec_manager_class : Type
+    Default: ``'jupyter_client.kernelspec.KernelSpecManager'``
+
+
+    The kernel spec manager class to use. Should be a subclass
+    of `jupyter_client.kernelspec.KernelSpecManager`.
+
+    The Api of KernelSpecManager is provisional and might change
+    without warning between this version of Jupyter and the next stable one.
+
+
+ServerApp.keyfile : Unicode
+    Default: ``''``
+
+    The full path to a private key file for usage with SSL/TLS.
+
+ServerApp.local_hostnames : List
+    Default: ``['localhost']``
+
+    Hostnames to allow as local when allow_remote_access is False.
+
+    Local IP addresses (such as 127.0.0.1 and ::1) are automatically accepted
+    as local as well.
+
+
+ServerApp.log_datefmt : Unicode
+    Default: ``'%Y-%m-%d %H:%M:%S'``
+
+    The date format used by logging formatters for %(asctime)s
+
+ServerApp.log_format : Unicode
+    Default: ``'[%(name)s]%(highlevel)s %(message)s'``
+
+    The Logging format template
+
+ServerApp.log_level : any of ``0``|``10``|``20``|``30``|``40``|``50``|``'DEBUG'``|``'INFO'``|``'WARN'``|``'ERROR'``|``'CRITICAL'``
+    Default: ``30``
+
+    Set the log level by value or name.
+ +ServerApp.login_handler_class : Type + Default: ``'jupyter_server.auth.login.LoginHandler'`` + + The login handler class to use. + +ServerApp.logout_handler_class : Type + Default: ``'jupyter_server.auth.logout.LogoutHandler'`` + + The logout handler class to use. + +ServerApp.max_body_size : Int + Default: ``536870912`` + + + Sets the maximum allowed size of the client request body, specified in + the Content-Length request header field. If the size in a request + exceeds the configured value, a malformed HTTP message is returned to + the client. + + Note: max_body_size is applied even in streaming mode. + + +ServerApp.max_buffer_size : Int + Default: ``536870912`` + + + Gets or sets the maximum amount of memory, in bytes, that is allocated + for use by the buffer manager. + + +ServerApp.notebook_dir : Unicode + Default: ``''`` + + DEPRECATED, use root_dir. + +ServerApp.open_browser : Bool + Default: ``False`` + + Whether to open in a browser after starting. + The specific browser used is platform dependent and + determined by the python standard library `webbrowser` + module, unless it is overridden using the --browser + (ServerApp.browser) configuration option. + + +ServerApp.password : Unicode + Default: ``''`` + + Hashed password to use for web authentication. + + To generate, type in a python/IPython shell: + + from jupyter_server.auth import passwd; passwd() + + The string should be of the form type:salt:hashed-password. + + +ServerApp.password_required : Bool + Default: ``False`` + + Forces users to use a password for the Jupyter server. + This is useful in a multi user environment, for instance when + everybody in the LAN can access each other's machine through ssh. + + In such a case, serving on localhost is not secure since + any user can connect to the Jupyter server via ssh. + + + +ServerApp.port : Int + Default: ``8888`` + + The port the Jupyter server will listen on. 
+ +ServerApp.port_retries : Int + Default: ``50`` + + The number of additional ports to try if the specified port is not available. + +ServerApp.pylab : Unicode + Default: ``'disabled'`` + + + DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib. + + +ServerApp.quit_button : Bool + Default: ``True`` + + If True, display controls to shut down the Jupyter server, such as menu items or buttons. + +ServerApp.rate_limit_window : Float + Default: ``3`` + + (sec) Time window used to + check the message and data rate limits. + +ServerApp.reraise_server_extension_failures : Bool + Default: ``False`` + + Reraise exceptions encountered loading server extensions? + +ServerApp.root_dir : Unicode + Default: ``''`` + + The directory to use for notebooks and kernels. + +ServerApp.session_manager_class : Type + Default: ``'jupyter_server.services.sessions.sessionmanager.SessionManager'`` + + The session manager class to use. + +ServerApp.show_config : Bool + Default: ``False`` + + Instead of starting the Application, dump configuration to stdout + +ServerApp.show_config_json : Bool + Default: ``False`` + + Instead of starting the Application, dump configuration to stdout (as JSON) + +ServerApp.shutdown_no_activity_timeout : Int + Default: ``0`` + + Shut down the server after N seconds with no kernels or terminals running and no activity. This can be used together with culling idle kernels (MappingKernelManager.cull_idle_timeout) to shutdown the Jupyter server when it's not in use. This is not precisely timed: it may shut down up to a minute later. 0 (the default) disables this automatic shutdown. + +ServerApp.ssl_options : Dict + Default: ``{}`` + + Supply SSL options for the tornado HTTPServer. + See the tornado docs for details. + +ServerApp.terminado_settings : Dict + Default: ``{}`` + + Supply overrides for terminado. Currently only supports "shell_command". + +ServerApp.terminals_enabled : Bool + Default: ``True`` + + Set to False to disable terminals. 
+
+    This does *not* make the server more secure by itself.
+    Anything the user can do in a terminal, they can also do in a notebook.
+
+    Terminals may also be automatically disabled if the terminado package
+    is not available.
+
+
+ServerApp.token : Unicode
+    Default: ``''``
+
+    Token used for authenticating first-time connections to the server.
+
+    When no password is enabled,
+    the default is to generate a new, random token.
+
+    Setting to an empty string disables authentication altogether, which is NOT RECOMMENDED.
+
+
+ServerApp.tornado_settings : Dict
+    Default: ``{}``
+
+    Supply overrides for the tornado.web.Application that the Jupyter server uses.
+
+ServerApp.trust_xheaders : Bool
+    Default: ``False``
+
+    Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-For headers sent by the upstream reverse proxy. Necessary if the proxy handles SSL
+
+ServerApp.webbrowser_open_new : Int
+    Default: ``2``
+
+    Specify where to open the server on startup. This is the
+    `new` argument passed to the standard library method `webbrowser.open`.
+    The behaviour is not guaranteed, but depends on browser support. Valid
+    values are:
+
+    - 2 opens a new tab,
+    - 1 opens a new window,
+    - 0 opens in an existing window.
+
+    See the `webbrowser.open` documentation for details.
+
+
+ServerApp.websocket_compression_options : Any
+    Default: ``None``
+
+
+    Set the tornado compression options for websocket connections.
+
+    This value will be returned from :meth:`WebSocketHandler.get_compression_options`.
+    None (default) will disable compression.
+    A dict (even an empty one) will enable compression.
+
+    See the tornado docs for WebSocketHandler.get_compression_options for details.
+
+
+ServerApp.websocket_url : Unicode
+    Default: ``''``
+
+    The base URL for websockets,
+    if it differs from the HTTP server (hint: it almost certainly doesn't).
+ + Should be in the form of an HTTP origin: ws[s]://hostname[:port] + + +ConnectionFileMixin.connection_file : Unicode + Default: ``''`` + + JSON file in which to store connection info [default: kernel-.json] + + This file will contain the IP, ports, and authentication key needed to connect + clients to this kernel. By default, this file will be created in the security dir + of the current profile, but can be specified by absolute path. + + +ConnectionFileMixin.control_port : Int + Default: ``0`` + + set the control (ROUTER) port [default: random] + +ConnectionFileMixin.hb_port : Int + Default: ``0`` + + set the heartbeat port [default: random] + +ConnectionFileMixin.iopub_port : Int + Default: ``0`` + + set the iopub (PUB) port [default: random] + +ConnectionFileMixin.ip : Unicode + Default: ``''`` + + Set the kernel's IP address [default localhost]. + If the IP address is something other than localhost, then + Consoles on other machines will be able to connect + to the Kernel, so be careful! + +ConnectionFileMixin.shell_port : Int + Default: ``0`` + + set the shell (ROUTER) port [default: random] + +ConnectionFileMixin.stdin_port : Int + Default: ``0`` + + set the stdin (ROUTER) port [default: random] + +ConnectionFileMixin.transport : any of ``'tcp'``|``'ipc'`` (case-insensitive) + Default: ``'tcp'`` + + No description + +KernelManager.autorestart : Bool + Default: ``True`` + + Should we autorestart the kernel if it dies. + +KernelManager.connection_file : Unicode + Default: ``''`` + + JSON file in which to store connection info [default: kernel-.json] + + This file will contain the IP, ports, and authentication key needed to connect + clients to this kernel. By default, this file will be created in the security dir + of the current profile, but can be specified by absolute path. 
+ + +KernelManager.control_port : Int + Default: ``0`` + + set the control (ROUTER) port [default: random] + +KernelManager.hb_port : Int + Default: ``0`` + + set the heartbeat port [default: random] + +KernelManager.iopub_port : Int + Default: ``0`` + + set the iopub (PUB) port [default: random] + +KernelManager.ip : Unicode + Default: ``''`` + + Set the kernel's IP address [default localhost]. + If the IP address is something other than localhost, then + Consoles on other machines will be able to connect + to the Kernel, so be careful! + +KernelManager.kernel_cmd : List + Default: ``[]`` + + DEPRECATED: Use kernel_name instead. + + The Popen Command to launch the kernel. + Override this if you have a custom kernel. + If kernel_cmd is specified in a configuration file, + Jupyter does not pass any arguments to the kernel, + because it cannot make any assumptions about the + arguments that the kernel understands. In particular, + this means that the kernel does not receive the + option --debug if it given on the Jupyter command line. + + +KernelManager.shell_port : Int + Default: ``0`` + + set the shell (ROUTER) port [default: random] + +KernelManager.shutdown_wait_time : Float + Default: ``5.0`` + + Time to wait for a kernel to terminate before killing it, in seconds. + +KernelManager.stdin_port : Int + Default: ``0`` + + set the stdin (ROUTER) port [default: random] + +KernelManager.transport : any of ``'tcp'``|``'ipc'`` (case-insensitive) + Default: ``'tcp'`` + + No description + +Session.buffer_threshold : Int + Default: ``1024`` + + Threshold (in bytes) beyond which an object's buffer should be extracted to avoid pickling. + +Session.check_pid : Bool + Default: ``True`` + + Whether to check PID to protect against calls after fork. + + This check can be disabled if fork-safety is handled elsewhere. + + +Session.copy_threshold : Int + Default: ``65536`` + + Threshold (in bytes) beyond which a buffer should be sent without copying. 
+ +Session.debug : Bool + Default: ``False`` + + Debug output in the Session + +Session.digest_history_size : Int + Default: ``65536`` + + The maximum number of digests to remember. + + The digest history will be culled when it exceeds this value. + + +Session.item_threshold : Int + Default: ``64`` + + The maximum number of items for a container to be introspected for custom serialization. + Containers larger than this are pickled outright. + + +Session.key : CBytes + Default: ``b''`` + + execution key, for signing messages. + +Session.keyfile : Unicode + Default: ``''`` + + path to file containing execution key. + +Session.metadata : Dict + Default: ``{}`` + + Metadata dictionary, which serves as the default top-level metadata dict for each message. + +Session.packer : DottedObjectName + Default: ``'json'`` + + The name of the packer for serializing messages. + Should be one of 'json', 'pickle', or an import name + for a custom callable serializer. + +Session.session : CUnicode + Default: ``''`` + + The UUID identifying this session. + +Session.signature_scheme : Unicode + Default: ``'hmac-sha256'`` + + The digest scheme used to construct the message signatures. + Must have the form 'hmac-HASH'. + +Session.unpacker : DottedObjectName + Default: ``'json'`` + + The name of the unpacker for unserializing messages. + Only used with custom functions for `packer`. + +Session.username : Unicode + Default: ``'username'`` + + Username for the Session. Default is your system username. + +MultiKernelManager.default_kernel_name : Unicode + Default: ``'python3'`` + + The name of the default kernel to start + +MultiKernelManager.kernel_manager_class : DottedObjectName + Default: ``'jupyter_client.ioloop.IOLoopKernelManager'`` + + The kernel manager class. This is configurable to allow + subclassing of the KernelManager for customized behavior. 
+ + +MultiKernelManager.shared_context : Bool + Default: ``True`` + + Share a single zmq.Context to talk to all my kernels + +MappingKernelManager.allow_tracebacks : Bool + Default: ``True`` + + Whether to send tracebacks to clients on exceptions. + +MappingKernelManager.allowed_message_types : List + Default: ``[]`` + + White list of allowed kernel message types. + When the list is empty, all message types are allowed. + + +MappingKernelManager.buffer_offline_messages : Bool + Default: ``True`` + + Whether messages from kernels whose frontends have disconnected should be buffered in-memory. + + When True (default), messages are buffered and replayed on reconnect, + avoiding lost messages due to interrupted connectivity. + + Disable if long-running kernels will produce too much output while + no frontends are connected. + + +MappingKernelManager.cull_busy : Bool + Default: ``False`` + + Whether to consider culling kernels which are busy. + Only effective if cull_idle_timeout > 0. + +MappingKernelManager.cull_connected : Bool + Default: ``False`` + + Whether to consider culling kernels which have one or more connections. + Only effective if cull_idle_timeout > 0. + +MappingKernelManager.cull_idle_timeout : Int + Default: ``0`` + + Timeout (in seconds) after which a kernel is considered idle and ready to be culled. + Values of 0 or lower disable culling. Very short timeouts may result in kernels being culled + for users with poor network connections. + +MappingKernelManager.cull_interval : Int + Default: ``300`` + + The interval (in seconds) on which to check for idle kernels exceeding the cull timeout value. + +MappingKernelManager.default_kernel_name : Unicode + Default: ``'python3'`` + + The name of the default kernel to start + +MappingKernelManager.kernel_info_timeout : Float + Default: ``60`` + + Timeout for giving up on a kernel (in seconds). 
+ + On starting and restarting kernels, we check whether the + kernel is running and responsive by sending kernel_info_requests. + This sets the timeout in seconds for how long the kernel can take + before being presumed dead. + This affects the MappingKernelManager (which handles kernel restarts) + and the ZMQChannelsHandler (which handles the startup). + + +MappingKernelManager.kernel_manager_class : DottedObjectName + Default: ``'jupyter_client.ioloop.IOLoopKernelManager'`` + + The kernel manager class. This is configurable to allow + subclassing of the KernelManager for customized behavior. + + +MappingKernelManager.root_dir : Unicode + Default: ``''`` + + No description + +MappingKernelManager.shared_context : Bool + Default: ``True`` + + Share a single zmq.Context to talk to all my kernels + +MappingKernelManager.traceback_replacement_message : Unicode + Default: ``'An exception occurred at runtime, which is not shown due to ...`` + + Message to print when allow_tracebacks is False, and an exception occurs + +KernelSpecManager.ensure_native_kernel : Bool + Default: ``True`` + + If there is no Python kernelspec registered and the IPython + kernel is available, ensure it is added to the spec list. + + +KernelSpecManager.kernel_spec_class : Type + Default: ``'jupyter_client.kernelspec.KernelSpec'`` + + The kernel spec class. This is configurable to allow + subclassing of the KernelSpecManager for customized behavior. + + +KernelSpecManager.whitelist : Set + Default: ``set()`` + + Whitelist of allowed kernel names. + + By default, all installed kernels are allowed. 
+ + +ContentsManager.allow_hidden : Bool + Default: ``False`` + + Allow access to hidden files + +ContentsManager.checkpoints : Instance + Default: ``None`` + + No description + +ContentsManager.checkpoints_class : Type + Default: ``'jupyter_server.services.contents.checkpoints.Checkpoints'`` + + No description + +ContentsManager.checkpoints_kwargs : Dict + Default: ``{}`` + + No description + +ContentsManager.files_handler_class : Type + Default: ``'jupyter_server.files.handlers.FilesHandler'`` + + handler class to use when serving raw file requests. + + Default is a fallback that talks to the ContentsManager API, + which may be inefficient, especially for large files. + + Local files-based ContentsManagers can use a StaticFileHandler subclass, + which will be much more efficient. + + Access to these files should be Authenticated. + + +ContentsManager.files_handler_params : Dict + Default: ``{}`` + + Extra parameters to pass to files_handler_class. + + For example, StaticFileHandlers generally expect a `path` argument + specifying the root directory from which to serve files. + + +ContentsManager.hide_globs : List + Default: ``['__pycache__', '*.pyc', '*.pyo', '.DS_Store', '*.so', '*.dyl...`` + + + Glob patterns to hide in file and directory listings. + + +ContentsManager.pre_save_hook : Any + Default: ``None`` + + Python callable or importstring thereof + + To be called on a contents model prior to save. + + This can be used to process the structure, + such as removing notebook outputs or other side effects that + should not be saved. + + It will be called as (all arguments passed by keyword):: + + hook(path=path, model=model, contents_manager=self) + + - model: the model to be saved. Includes file contents. + Modifying this dict will affect the file that is stored. 
+    - path: the API path of the save destination
+    - contents_manager: this ContentsManager instance
+
+
+ContentsManager.root_dir : Unicode
+    Default: ``'/'``
+
+    No description
+
+ContentsManager.untitled_directory : Unicode
+    Default: ``'Untitled Folder'``
+
+    The base name used when creating untitled directories.
+
+ContentsManager.untitled_file : Unicode
+    Default: ``'untitled'``
+
+    The base name used when creating untitled files.
+
+ContentsManager.untitled_notebook : Unicode
+    Default: ``'Untitled'``
+
+    The base name used when creating untitled notebooks.
+
+FileManagerMixin.use_atomic_writing : Bool
+    Default: ``True``
+
+    By default notebooks are saved on disk on a temporary file and then if successfully written, it replaces the old ones.
+    This procedure, namely 'atomic_writing', causes some bugs on file system without operation order enforcement (like some networked fs).
+    If set to False, the new notebook is written directly on the old one which could fail (eg: full filesystem or quota )
+
+FileContentsManager.allow_hidden : Bool
+    Default: ``False``
+
+    Allow access to hidden files
+
+FileContentsManager.checkpoints : Instance
+    Default: ``None``
+
+    No description
+
+FileContentsManager.checkpoints_class : Type
+    Default: ``'jupyter_server.services.contents.checkpoints.Checkpoints'``
+
+    No description
+
+FileContentsManager.checkpoints_kwargs : Dict
+    Default: ``{}``
+
+    No description
+
+FileContentsManager.delete_to_trash : Bool
+    Default: ``True``
+
+    If True (default), deleting files will send them to the
+    platform's trash/recycle bin, where they can be recovered. If False,
+    deleting files really deletes them.
+
+FileContentsManager.files_handler_class : Type
+    Default: ``'jupyter_server.files.handlers.FilesHandler'``
+
+    handler class to use when serving raw file requests.
+
+    Default is a fallback that talks to the ContentsManager API,
+    which may be inefficient, especially for large files.
+ + Local files-based ContentsManagers can use a StaticFileHandler subclass, + which will be much more efficient. + + Access to these files should be Authenticated. + + +FileContentsManager.files_handler_params : Dict + Default: ``{}`` + + Extra parameters to pass to files_handler_class. + + For example, StaticFileHandlers generally expect a `path` argument + specifying the root directory from which to serve files. + + +FileContentsManager.hide_globs : List + Default: ``['__pycache__', '*.pyc', '*.pyo', '.DS_Store', '*.so', '*.dyl...`` + + + Glob patterns to hide in file and directory listings. + + +FileContentsManager.post_save_hook : Any + Default: ``None`` + + Python callable or importstring thereof + + to be called on the path of a file just saved. + + This can be used to process the file on disk, + such as converting the notebook to a script or HTML via nbconvert. + + It will be called as (all arguments passed by keyword):: + + hook(os_path=os_path, model=model, contents_manager=instance) + + - path: the filesystem path to the file just written + - model: the model representing the file + - contents_manager: this ContentsManager instance + + +FileContentsManager.pre_save_hook : Any + Default: ``None`` + + Python callable or importstring thereof + + To be called on a contents model prior to save. + + This can be used to process the structure, + such as removing notebook outputs or other side effects that + should not be saved. + + It will be called as (all arguments passed by keyword):: + + hook(path=path, model=model, contents_manager=self) + + - model: the model to be saved. Includes file contents. + Modifying this dict will affect the file that is stored. 
+    - path: the API path of the save destination
+    - contents_manager: this ContentsManager instance
+
+
+FileContentsManager.root_dir : Unicode
+    Default: ``''``
+
+    No description
+
+FileContentsManager.untitled_directory : Unicode
+    Default: ``'Untitled Folder'``
+
+    The base name used when creating untitled directories.
+
+FileContentsManager.untitled_file : Unicode
+    Default: ``'untitled'``
+
+    The base name used when creating untitled files.
+
+FileContentsManager.untitled_notebook : Unicode
+    Default: ``'Untitled'``
+
+    The base name used when creating untitled notebooks.
+
+FileContentsManager.use_atomic_writing : Bool
+    Default: ``True``
+
+    By default notebooks are saved on disk on a temporary file and then if successfully written, it replaces the old ones.
+    This procedure, namely 'atomic_writing', causes some bugs on file system without operation order enforcement (like some networked fs).
+    If set to False, the new notebook is written directly on the old one which could fail (eg: full filesystem or quota )
+
+NotebookNotary.algorithm : any of ``'blake2s'``|``'sha512'``|``'md5'``|``'sha3_512'``|``'sha3_224'``|``'blake2b'``|``'sha384'``|``'sha1'``|``'sha3_256'``|``'sha256'``|``'sha224'``|``'sha3_384'``
+    Default: ``'sha256'``
+
+    The hashing algorithm used to sign notebooks.
+
+NotebookNotary.db_file : Unicode
+    Default: ``''``
+
+    The sqlite file in which to store notebook signatures.
+    By default, this will be in your Jupyter data directory.
+    You can set it to ':memory:' to disable sqlite writing to the filesystem.
+
+
+NotebookNotary.secret : Bytes
+    Default: ``b''``
+
+    The secret key with which notebooks are signed.
+
+NotebookNotary.secret_file : Unicode
+    Default: ``''``
+
+    The file where the secret key is stored.
+
+NotebookNotary.store_factory : Callable
+    Default: ``traitlets.Undefined``
+
+    A callable returning the storage backend for notebook signatures.
+    The default uses an SQLite database.
+ +GatewayMappingKernelManager.allow_tracebacks : Bool + Default: ``True`` + + Whether to send tracebacks to clients on exceptions. + +GatewayMappingKernelManager.allowed_message_types : List + Default: ``[]`` + + White list of allowed kernel message types. + When the list is empty, all message types are allowed. + + +GatewayMappingKernelManager.buffer_offline_messages : Bool + Default: ``True`` + + Whether messages from kernels whose frontends have disconnected should be buffered in-memory. + + When True (default), messages are buffered and replayed on reconnect, + avoiding lost messages due to interrupted connectivity. + + Disable if long-running kernels will produce too much output while + no frontends are connected. + + +GatewayMappingKernelManager.cull_busy : Bool + Default: ``False`` + + Whether to consider culling kernels which are busy. + Only effective if cull_idle_timeout > 0. + +GatewayMappingKernelManager.cull_connected : Bool + Default: ``False`` + + Whether to consider culling kernels which have one or more connections. + Only effective if cull_idle_timeout > 0. + +GatewayMappingKernelManager.cull_idle_timeout : Int + Default: ``0`` + + Timeout (in seconds) after which a kernel is considered idle and ready to be culled. + Values of 0 or lower disable culling. Very short timeouts may result in kernels being culled + for users with poor network connections. + +GatewayMappingKernelManager.cull_interval : Int + Default: ``300`` + + The interval (in seconds) on which to check for idle kernels exceeding the cull timeout value. + +GatewayMappingKernelManager.default_kernel_name : Unicode + Default: ``'python3'`` + + The name of the default kernel to start + +GatewayMappingKernelManager.kernel_info_timeout : Float + Default: ``60`` + + Timeout for giving up on a kernel (in seconds). + + On starting and restarting kernels, we check whether the + kernel is running and responsive by sending kernel_info_requests. 
+ This sets the timeout in seconds for how long the kernel can take + before being presumed dead. + This affects the MappingKernelManager (which handles kernel restarts) + and the ZMQChannelsHandler (which handles the startup). + + +GatewayMappingKernelManager.kernel_manager_class : DottedObjectName + Default: ``'jupyter_client.ioloop.IOLoopKernelManager'`` + + The kernel manager class. This is configurable to allow + subclassing of the KernelManager for customized behavior. + + +GatewayMappingKernelManager.root_dir : Unicode + Default: ``''`` + + No description + +GatewayMappingKernelManager.shared_context : Bool + Default: ``True`` + + Share a single zmq.Context to talk to all my kernels + +GatewayMappingKernelManager.traceback_replacement_message : Unicode + Default: ``'An exception occurred at runtime, which is not shown due to ...`` + + Message to print when allow_tracebacks is False, and an exception occurs + +GatewayKernelSpecManager.ensure_native_kernel : Bool + Default: ``True`` + + If there is no Python kernelspec registered and the IPython + kernel is available, ensure it is added to the spec list. + + +GatewayKernelSpecManager.kernel_spec_class : Type + Default: ``'jupyter_client.kernelspec.KernelSpec'`` + + The kernel spec class. This is configurable to allow + subclassing of the KernelSpecManager for customized behavior. + + +GatewayKernelSpecManager.whitelist : Set + Default: ``set()`` + + Whitelist of allowed kernel names. + + By default, all installed kernels are allowed. + + + + +GatewayClient.auth_token : Unicode + Default: ``None`` + + The authorization token used in the HTTP headers. (JUPYTER_GATEWAY_AUTH_TOKEN env var) + + +GatewayClient.ca_certs : Unicode + Default: ``None`` + + The filename of CA certificates or None to use defaults. (JUPYTER_GATEWAY_CA_CERTS env var) + + +GatewayClient.client_cert : Unicode + Default: ``None`` + + The filename for client SSL certificate, if any. 
(JUPYTER_GATEWAY_CLIENT_CERT env var) + + +GatewayClient.client_key : Unicode + Default: ``None`` + + The filename for client SSL key, if any. (JUPYTER_GATEWAY_CLIENT_KEY env var) + + +GatewayClient.connect_timeout : Float + Default: ``60.0`` + + The time allowed for HTTP connection establishment with the Gateway server. + (JUPYTER_GATEWAY_CONNECT_TIMEOUT env var) + +GatewayClient.env_whitelist : Unicode + Default: ``''`` + + A comma-separated list of environment variable names that will be included, along with + their values, in the kernel startup request. The corresponding `env_whitelist` configuration + value must also be set on the Gateway server - since that configuration value indicates which + environmental values to make available to the kernel. (JUPYTER_GATEWAY_ENV_WHITELIST env var) + +GatewayClient.headers : Unicode + Default: ``'{}'`` + + Additional HTTP headers to pass on the request. This value will be converted to a dict. + (JUPYTER_GATEWAY_HEADERS env var) + + +GatewayClient.http_pwd : Unicode + Default: ``None`` + + The password for HTTP authentication. (JUPYTER_GATEWAY_HTTP_PWD env var) + + +GatewayClient.http_user : Unicode + Default: ``None`` + + The username for HTTP authentication. (JUPYTER_GATEWAY_HTTP_USER env var) + + +GatewayClient.kernels_endpoint : Unicode + Default: ``'/api/kernels'`` + + The gateway API endpoint for accessing kernel resources (JUPYTER_GATEWAY_KERNELS_ENDPOINT env var) + +GatewayClient.kernelspecs_endpoint : Unicode + Default: ``'/api/kernelspecs'`` + + The gateway API endpoint for accessing kernelspecs (JUPYTER_GATEWAY_KERNELSPECS_ENDPOINT env var) + +GatewayClient.kernelspecs_resource_endpoint : Unicode + Default: ``'/kernelspecs'`` + + The gateway endpoint for accessing kernelspecs resources + (JUPYTER_GATEWAY_KERNELSPECS_RESOURCE_ENDPOINT env var) + +GatewayClient.request_timeout : Float + Default: ``60.0`` + + The time allowed for HTTP request completion. 
(JUPYTER_GATEWAY_REQUEST_TIMEOUT env var) + +GatewayClient.url : Unicode + Default: ``None`` + + The url of the Kernel or Enterprise Gateway server where + kernel specifications are defined and kernel management takes place. + If defined, this Notebook server acts as a proxy for all kernel + management and kernel specification retrieval. (JUPYTER_GATEWAY_URL env var) + + +GatewayClient.validate_cert : Bool + Default: ``True`` + + For HTTPS requests, determines if server's certificate should be validated or not. + (JUPYTER_GATEWAY_VALIDATE_CERT env var) + +GatewayClient.ws_url : Unicode + Default: ``None`` + + The websocket url of the Kernel or Enterprise Gateway server. If not provided, this value + will correspond to the value of the Gateway url with 'ws' in place of 'http'. (JUPYTER_GATEWAY_WS_URL env var) diff --git a/server/docs/source/other/index.rst b/server/docs/source/other/index.rst new file mode 100644 index 0000000..e3ddd8d --- /dev/null +++ b/server/docs/source/other/index.rst @@ -0,0 +1,10 @@ +Other helpful documentation +--------------------------- + +.. toctree:: + :maxdepth: 1 + + links + faq + full-config + changelog diff --git a/server/docs/source/other/links.rst b/server/docs/source/other/links.rst new file mode 100644 index 0000000..935ddc5 --- /dev/null +++ b/server/docs/source/other/links.rst @@ -0,0 +1,9 @@ +List of helpful links +===================== + +* :ref:`Frequently Asked Questions ` +* `Jupyter Server Github Repo `_ +* `JupyterLab Github Repo `_ +* `Jupyter Notebook Github Repo `_ +* `Jupyterhub Github Repo `_ +* `Jupyter Zoom Channel `_ diff --git a/server/docs/source/users/configuration.rst b/server/docs/source/users/configuration.rst new file mode 100644 index 0000000..b6ede5b --- /dev/null +++ b/server/docs/source/users/configuration.rst @@ -0,0 +1,68 @@ +.. 
_user-configuring-a-jupyter-server:
+
+Configuring a Jupyter Server
+============================
+
+Using a Jupyter config file
+---------------------------
+
+By default, Jupyter Server looks for server-specific configuration in a ``jupyter_server_config`` file located on a Jupyter path. To list the paths where Jupyter Server will look, run:
+
+.. code-block:: console
+
+ $ jupyter --paths
+
+ config:
+ /Users/username/.jupyter
+ /usr/local/etc/jupyter
+ /etc/jupyter
+ data:
+ /Users/username/Library/Jupyter
+ /usr/local/share/jupyter
+ /usr/share/jupyter
+ runtime:
+ /Users/username/Library/Jupyter/runtime
+
+
+The paths under ``config`` are listed in order of precedence. If the same trait is listed in multiple places, it will be set to the value from the file with the highest precedence.
+
+
+Jupyter Server uses IPython's traitlets system for configuration. Traits can be listed in a Python or JSON config file. To quickly create a ``jupyter_server_config.py`` file in the ``.jupyter`` directory, with all the defaults commented out, use the following command:
+
+.. code-block:: console
+
+ $ jupyter server --generate-config
+
+In Python files, these traits will have the prefix ``c.ServerApp``. For example, your configuration file could look like:
+
+.. code-block:: python
+
+ # inside a jupyter_server_config.py file.
+
+ c.ServerApp.port = 9999
+
+The same configuration in JSON looks like:
+
+.. code-block:: json
+
+ {
+ "ServerApp": {
+ "port": 9999
+ }
+ }
+
+
+Using the CLI
+-------------
+
+Alternatively, you can configure Jupyter Server when launching from the command line using CLI args. Prefix each argument with ``--ServerApp`` like so:
+
+.. code-block:: console
+
+ $ jupyter server --ServerApp.port=9999
+
+
+Full configuration list
+-----------------------
+
+See the full list of configuration options for the server :ref:`here `.
diff --git a/server/docs/source/users/help.rst b/server/docs/source/users/help.rst
new file mode 100644
index 0000000..b290e1b
--- /dev/null
+++ b/server/docs/source/users/help.rst
@@ -0,0 +1,8 @@
+.. _user-getting-help:
+
+Getting Help
+============
+
+If you run into any issues or bugs, please open an `issue on Github `_.
+
+We'd also love to have you come by our :ref:`Team Meetings `.
diff --git a/server/docs/source/users/index.rst b/server/docs/source/users/index.rst
new file mode 100644
index 0000000..3f0bab7
--- /dev/null
+++ b/server/docs/source/users/index.rst
@@ -0,0 +1,15 @@
+Documentation for Users
+=======================
+
+The Jupyter Server is a highly technical piece of the Jupyter Stack, so users probably won't import or install this library directly. These pages are meant to help you in case you run into issues or bugs.
+
+
+.. toctree::
+ :caption: Users
+ :maxdepth: 1
+ :name: users
+
+ installation
+ configuration
+ launching
+ help
diff --git a/server/docs/source/users/installation.rst b/server/docs/source/users/installation.rst
new file mode 100644
index 0000000..3bc0e37
--- /dev/null
+++ b/server/docs/source/users/installation.rst
@@ -0,0 +1,19 @@
+.. _user-installation:
+
+Installation
+============
+
+Most Jupyter users will **never need to install Jupyter Server manually**. Jupyter Web applications will include the (correct version) of Jupyter Server as a dependency. It's best to let those applications handle installation, because they may require a specific version of Jupyter Server.
+
+If you decide to install manually, run:
+
+.. code-block:: bash
+
+ pip install jupyter_server
+
+
+You can upgrade or downgrade to a specific version of Jupyter Server by adding an operator to the command above:
+
+..
code-block:: bash
+
+ pip install jupyter_server==1.0
diff --git a/server/docs/source/users/launching.rst b/server/docs/source/users/launching.rst
new file mode 100644
index 0000000..1338b74
--- /dev/null
+++ b/server/docs/source/users/launching.rst
@@ -0,0 +1,26 @@
+.. _user-launching-a-bare-jupyter-server:
+
+Launching a bare Jupyter Server
+===============================
+
+Most of the time, you won't need to start the Jupyter Server directly. Jupyter Web Applications (like Jupyter Notebook, Jupyterlab, Voila, etc.) come with their own entry points that start a server automatically.
+
+Sometimes, though, it can be useful to start Jupyter Server directly when you want to run multiple Jupyter Web applications at the same time. For more details, see the :ref:`Managing multiple extensions ` page. If these extensions are enabled, you can simply run the following:
+
+.. code-block:: bash
+
+ > jupyter server
+
+ [I 2020-03-20 15:48:20.903 ServerApp] Serving notebooks from local directory: /Users/username/home
+ [I 2020-03-20 15:48:20.903 ServerApp] Jupyter Server 1.0.0 is running at:
+ [I 2020-03-20 15:48:20.903 ServerApp] http://localhost:8888/?token=<...>
+ [I 2020-03-20 15:48:20.903 ServerApp] or http://127.0.0.1:8888/?token=<...>
+ [I 2020-03-20 15:48:20.903 ServerApp] Use Control-C to stop this server and shut down all kernels (twice to skip confirmation).
+ [I 2020-03-20 15:48:20.903 ServerApp] Welcome to Project Jupyter! Explore the various tools available and their corresponding documentation. If you are interested in contributing to the platform, please visit the communityresources section at https://jupyter.org/community.html.
+ [C 2020-03-20 15:48:20.907 ServerApp] + + To access the server, open this file in a browser: + file:///Users/username/jpserver-###-open.html + Or copy and paste one of these URLs: + http://localhost:8888/?token=<...> + or http://127.0.0.1:8888/?token=<...> diff --git a/server/examples/authorization/README.md b/server/examples/authorization/README.md new file mode 100644 index 0000000..28fe0df --- /dev/null +++ b/server/examples/authorization/README.md @@ -0,0 +1,84 @@ +# Authorization in a simple Jupyter Notebook Server + +This folder contains the following examples: + +1. a "read-only" Jupyter Notebook Server +2. a read/write Server without the ability to execute code on kernels. +3. a "temporary notebook server", i.e. read and execute notebooks but cannot save/write files. + +## How does it work? + +To add a custom authorization system to the Jupyter Server, you will need to write your own `Authorizer` subclass and pass it to Jupyter's configuration system (i.e. by file or CLI). + +The examples below demonstrate some basic implementations of an `Authorizer`. + +```python +from jupyter_server.auth import Authorizer + + +class MyCustomAuthorizer(Authorizer): + """Custom authorization manager.""" + + # Define my own method here for handling authorization. + # The argument signature must have `self`, `handler`, `user`, `action`, and `resource`. + def is_authorized(self, handler, user, action, resource): + """My override for handling authorization in Jupyter services.""" + + # Add logic here to check if user is allowed. + # For example, here is an example of a read-only server + if action != "read": + return False + + return True + +# Pass this custom class to Jupyter Server +c.ServerApp.authorizer_class = MyCustomAuthorizer +``` + +In the `jupyter_nbclassic_readonly_config.py` + +## Try it out! + +### Read-only example + +1. Install nbclassic using `pip`. + + pip install nbclassic + +2. Navigate to the jupyter_authorized_server `examples/` folder. + +3. 
Launch nbclassic and load `jupyter_nbclassic_readonly_config.py`: + + jupyter nbclassic --config=jupyter_nbclassic_readonly_config.py + +4. Try creating a notebook, running a notebook in a cell, etc. You should see a `403: Forbidden` error. + +### Read+Write example + +1. Install nbclassic using `pip`. + + pip install nbclassic + +2. Navigate to the jupyter_authorized_server `examples/` folder. + +3. Launch nbclassic and load `jupyter_nbclassic_rw_config.py`: + + jupyter nbclassic --config=jupyter_nbclassic_rw_config.py + +4. Try running a cell in a notebook. You should see a `403: Forbidden` error. + +### Temporary notebook server example + +This configuration allows everything except saving files. + +1. Install nbclassic using `pip`. + + pip install nbclassic + +2. Navigate to the jupyter_authorized_server `examples/` folder. + +3. Launch nbclassic and load `jupyter_temporary_config.py`: + + jupyter nbclassic --config=jupyter_temporary_config.py + +4. Edit a notebook, run a cell, etc. Everything works fine. Then try to save your changes... you should see a `403: Forbidden` error. 
diff --git a/server/examples/authorization/jupyter_nbclassic_readonly_config.py b/server/examples/authorization/jupyter_nbclassic_readonly_config.py new file mode 100644 index 0000000..292644c --- /dev/null +++ b/server/examples/authorization/jupyter_nbclassic_readonly_config.py @@ -0,0 +1,14 @@ +from jupyter_server.auth import Authorizer + + +class ReadOnly(Authorizer): + """Authorizer that makes Jupyter Server a read-only server.""" + + def is_authorized(self, handler, user, action, resource): + """Only allows `read` operations.""" + if action != "read": + return False + return True + + +c.ServerApp.authorizer_class = ReadOnly diff --git a/server/examples/authorization/jupyter_nbclassic_rw_config.py b/server/examples/authorization/jupyter_nbclassic_rw_config.py new file mode 100644 index 0000000..261efcf --- /dev/null +++ b/server/examples/authorization/jupyter_nbclassic_rw_config.py @@ -0,0 +1,14 @@ +from jupyter_server.auth import Authorizer + + +class ReadWriteOnly(Authorizer): + """Authorizer class that makes Jupyter Server a read/write-only server.""" + + def is_authorized(self, handler, user, action, resource): + """Only allows `read` and `write` operations.""" + if action not in {"read", "write"}: + return False + return True + + +c.ServerApp.authorizer_class = ReadWriteOnly diff --git a/server/examples/authorization/jupyter_temporary_config.py b/server/examples/authorization/jupyter_temporary_config.py new file mode 100644 index 0000000..e1bd2fb --- /dev/null +++ b/server/examples/authorization/jupyter_temporary_config.py @@ -0,0 +1,14 @@ +from jupyter_server.auth import Authorizer + + +class TemporaryServerPersonality(Authorizer): + """Authorizer that prevents modifying files via the contents service""" + + def is_authorized(self, handler, user, action, resource): + """Allow everything but write on contents""" + if action == "write" and resource == "contents": + return False + return True + + +c.ServerApp.authorizer_class = TemporaryServerPersonality 
diff --git a/server/examples/simple/README.md b/server/examples/simple/README.md new file mode 100644 index 0000000..f41eb5b --- /dev/null +++ b/server/examples/simple/README.md @@ -0,0 +1,204 @@ +# Jupyter Server Simple Extension Example + +This folder contains example of simple extensions on top of Jupyter Server and review configuration aspects. + +## Install + +You need `python3` to build and run the server extensions. + +```bash +# Clone, create a conda env and install from source. +git clone https://github.com/jupyter/jupyter_server && \ + cd examples/simple && \ + conda create -y -n jupyter-server-example python=3.7 && \ + conda activate jupyter-server-example && \ + pip install -e .[test] +``` + +**OPTIONAL** If you want to build the Typescript code, you need [npm](https://www.npmjs.com) on your local environement. Compiled javascript is provided as artifact in this repository, so this Typescript build step is optional. The Typescript source and configuration have been taken from https://github.com/markellekelly/jupyter-server-example. + +```bash +npm install && \ + npm run build +``` + +## No Extension + +Ensure Jupyter Server is starting without any extension enabled. + +```bash +# Run this command from a shell. +jupyter server +``` + +Browse the default home page, it should show a white page in your browser with the following content: `A Jupyter Server is running.` + +```bash +# Jupyter Server default Home Page. +open http://localhost:8888 +``` + +## Extension 1 + +```bash +# Start the jupyter server activating simple_ext1 extension. +jupyter server --ServerApp.jpserver_extensions="{'simple_ext1': True}" +``` + +Now you can render `Extension 1` Server content in your browser. + +```bash +# Home page as defined by default_url = '/default'. +open http://localhost:8888/simple_ext1/default +# HTML static page. +open http://localhost:8888/static/simple_ext1/home.html +open http://localhost:8888/static/simple_ext1/test.html +# Content from Handlers. 
+open http://localhost:8888/simple_ext1/params/test?var1=foo +# Content from Template. +open http://localhost:8888/simple_ext1/template1/test +# Content from Template with Typescript. +open http://localhost:8888/simple_ext1/typescript +# Error content. +open http://localhost:8888/simple_ext1/nope +# Redirect. +open http://localhost:8888/simple_ext1/redirect +# Favicon static content. +open http://localhost:8888/static/simple_ext1/favicon.ico +``` + +You can also start the server extension with python modules. + +```bash +python -m simple_ext1 +``` + +To live reload the server as you change the extension, you can also enable [the `debug` mode for Tornado](https://www.tornadoweb.org/en/stable/guide/running.html#debug-mode-and-automatic-reloading): + +```bash +jupyter server --ServerApp.jpserver_extensions="{'simple_ext1': True}" --ServerApp.tornado_settings="{'debug': True}" +``` + +## Extension 1 and Extension 2 + +The following command starts both the `simple_ext1` and `simple_ext2` extensions. + +```bash +# Start the jupyter server, it will load both simple_ext1 and simple_ext2 based on the provided trait. +jupyter server --ServerApp.jpserver_extensions="{'simple_ext1': True, 'simple_ext2': True}" +``` + +Check that the previous `Extension 1` content is still available ant that you can also render `Extension 2` Server content in your browser. + +```bash +# HTML static page. +open http://localhost:8888/static/simple_ext2/test.html +# Content from Handlers. +open http://localhost:8888/simple_ext2/params/test?var1=foo +``` + +## Work with Entrypoints + +Optionally, you can copy `simple_ext1.json` and `simple_ext2.json` configuration to your env `etc` folder and start only Extension 1, which will also start Extension 2. + +```bash +pip uninstall -y jupyter_server_example && \ + python setup.py install && \ + cp -r ./etc $(dirname $(which jupyter))/.. 
+```
+
+```bash
+# Start the jupyter server extension simple_ext1, it will also load simple_ext2 because of load_other_extensions = True.
+# When you invoke with the entrypoint, the default url will be opened in your browser.
+jupyter simple-ext1
+```
+
+## Configuration
+
+Stop any running server (with `CTRL+C`) and start with additional configuration on the command line.
+
+The settings provided via the CLI will override the configuration that resides in the files (`jupyter_server_example1_config.py`...)
+
+```bash
+jupyter simple-ext1 --SimpleApp1.configA="ConfigA from command line"
+```
+
+Check the log; on startup it should print the Config object.
+
+The content of the Config is based on the trait you have defined via the `CLI` and in the `jupyter_server_example1_config.py`.
+
+```
+[SimpleApp1] Config {'SimpleApp1': {'configA': 'ConfigA from file', 'configB': 'ConfigB from file', 'configC': 'ConfigC from file'}}
+[SimpleApp1] Config {'SimpleApp1': {'configA': 'ConfigA from file', 'configB': 'ConfigB from file', 'configC': 'ConfigC from file'}}
+[SimpleApp2] WARNING | Config option `configD` not recognized by `SimpleApp2`. Did you mean one of: `configA, configB, configC`?
+[SimpleApp2] Config {'SimpleApp2': {'configD': 'ConfigD from file'}}
+[SimpleApp1] Config {'SimpleApp1': {'configA': 'ConfigA from command line', 'configB': 'ConfigB from file', 'configC': 'ConfigC from file'}}
+```
+
+## Only Extension 2
+
+Now stop the server again and start with only `Extension 2`.
+
+```bash
+# Start the jupyter server extension simple_ext2, it will NOT load simple_ext1 because of load_other_extensions = False.
+jupyter simple-ext2
+```
+
+Try with the above links to check that only Extension 2 is responding (Extension 1 URLs should give you a 404 error).
+
+## Extension 11 extends Extension 1
+
+`Extension 11` extends `Extension 1` and brings a few more configs.
+ +```bash +# TODO `--generate-config` returns an exception `"The ExtensionApp has not ServerApp "` +jupyter simple-ext11 --generate-config && vi ~/.jupyter/jupyter_config.py`. +``` + +The generated configuration should contains the following. + +```bash +# TODO +``` + +The `hello`, `ignore_js` and `simple11_dir` are traits defined on the SimpleApp11 class. + +It also implements additional flags and aliases for these traits. + +- The `--hello` flag will log on startup `Hello Simple11 - You have provided the --hello flag or defined a c.SimpleApp1.hello == True` +- The `ignore_js` flag +- The `--simple11-dir` alias will set `SimpleExt11.simple11_dir` settings + +Stop any running server and then start the simple-ext11. + +```bash +jupyter simple-ext11 --hello --simple11-dir any_folder +# You can also launch with a module +python -m simple_ext11 --hello +# TODO FIX the following command, simple11 does not work launching with jpserver_extensions parameter. +jupyter server --ServerApp.jpserver_extensions="{'simple_ext11': True}" --hello --simple11-dir any_folder +``` + +Ensure the following URLs respond correctly. + +```bash +# Jupyter Server Home Page. +open http://localhost:8888/ +# TODO Fix Default URL, it does not show on startup. +# Home page as defined by default_url = '/default'. +open http://localhost:8888/simple_ext11/default +# HTML static page. +open http://localhost:8888/static/simple_ext11/test.html +# Content from Handlers. +open http://localhost:8888/simple_ext11/params/test?var1=foo +# Content from Template. +open http://localhost:8888/simple_ext11/template1/test +# Content from Template with Typescript. +open http://localhost:8888/simple_ext11/typescript +# Error content. +open http://localhost:8888/simple_ext11/nope +# Redirect. +open http://localhost:8888/simple_ext11/redirect +# Favicon static content. 
+open http://localhost:8888/static/simple_ext11/favicon.ico +``` diff --git a/server/examples/simple/conftest.py b/server/examples/simple/conftest.py new file mode 100644 index 0000000..366de33 --- /dev/null +++ b/server/examples/simple/conftest.py @@ -0,0 +1 @@ +from jupyter_server.conftest import * # noqa diff --git a/server/examples/simple/etc/jupyter/jupyter_server_config.d/simple_ext1.json b/server/examples/simple/etc/jupyter/jupyter_server_config.d/simple_ext1.json new file mode 100644 index 0000000..fd4b771 --- /dev/null +++ b/server/examples/simple/etc/jupyter/jupyter_server_config.d/simple_ext1.json @@ -0,0 +1,7 @@ +{ + "ServerApp": { + "jpserver_extensions": { + "simple_ext1": true + } + } +} diff --git a/server/examples/simple/etc/jupyter/jupyter_server_config.d/simple_ext11.json b/server/examples/simple/etc/jupyter/jupyter_server_config.d/simple_ext11.json new file mode 100644 index 0000000..6857ee2 --- /dev/null +++ b/server/examples/simple/etc/jupyter/jupyter_server_config.d/simple_ext11.json @@ -0,0 +1,7 @@ +{ + "ServerApp": { + "jpserver_extensions": { + "simple_ext11": true + } + } +} diff --git a/server/examples/simple/etc/jupyter/jupyter_server_config.d/simple_ext2.json b/server/examples/simple/etc/jupyter/jupyter_server_config.d/simple_ext2.json new file mode 100644 index 0000000..287a167 --- /dev/null +++ b/server/examples/simple/etc/jupyter/jupyter_server_config.d/simple_ext2.json @@ -0,0 +1,7 @@ +{ + "ServerApp": { + "jpserver_extensions": { + "simple_ext2": true + } + } +} diff --git a/server/examples/simple/jupyter_server_config.py b/server/examples/simple/jupyter_server_config.py new file mode 100644 index 0000000..b994a71 --- /dev/null +++ b/server/examples/simple/jupyter_server_config.py @@ -0,0 +1,6 @@ +# Configuration file for jupyter-server extensions. 
+# ------------------------------------------------------------------------------ +# Application(SingletonConfigurable) configuration +# ------------------------------------------------------------------------------ +## The date format used by logging formatters for %(asctime)s +c.Application.log_datefmt = "%Y-%m-%d %H:%M:%S Simple_Extensions_Example" diff --git a/server/examples/simple/jupyter_simple_ext11_config.py b/server/examples/simple/jupyter_simple_ext11_config.py new file mode 100644 index 0000000..d2baa13 --- /dev/null +++ b/server/examples/simple/jupyter_simple_ext11_config.py @@ -0,0 +1 @@ +c.SimpleApp11.ignore_js = True diff --git a/server/examples/simple/jupyter_simple_ext1_config.py b/server/examples/simple/jupyter_simple_ext1_config.py new file mode 100644 index 0000000..f40b66a --- /dev/null +++ b/server/examples/simple/jupyter_simple_ext1_config.py @@ -0,0 +1,4 @@ +c.SimpleApp1.configA = "ConfigA from file" +c.SimpleApp1.configB = "ConfigB from file" +c.SimpleApp1.configC = "ConfigC from file" +c.SimpleApp1.configD = "ConfigD from file" diff --git a/server/examples/simple/jupyter_simple_ext2_config.py b/server/examples/simple/jupyter_simple_ext2_config.py new file mode 100644 index 0000000..f145cbb --- /dev/null +++ b/server/examples/simple/jupyter_simple_ext2_config.py @@ -0,0 +1 @@ +c.SimpleApp2.configD = "ConfigD from file" diff --git a/server/examples/simple/package.json b/server/examples/simple/package.json new file mode 100644 index 0000000..37f76ba --- /dev/null +++ b/server/examples/simple/package.json @@ -0,0 +1,18 @@ +{ + "name": "jupyter-server-example", + "version": "0.0.1", + "private": true, + "scripts": { + "build": "tsc -p src && webpack", + "clean": "rimraf build", + "prepublishOnly": "npm run build" + }, + "dependencies": {}, + "devDependencies": { + "rifraf": "2.0.3", + "webpack": "~4.29.6", + "webpack-cli": "^3.3.0", + "whatwg-fetch": "~2.0.3", + "typescript": "3.6.4" + } +} diff --git a/server/examples/simple/pyproject.toml 
b/server/examples/simple/pyproject.toml new file mode 100644 index 0000000..d4ff206 --- /dev/null +++ b/server/examples/simple/pyproject.toml @@ -0,0 +1,3 @@ +[build-system] +requires = ["jupyter_packaging~=0.5.0", "setuptools>=40.8.0", "wheel"] +build-backend = "setuptools.build_meta" diff --git a/server/examples/simple/pytest.ini b/server/examples/simple/pytest.ini new file mode 100644 index 0000000..31e2592 --- /dev/null +++ b/server/examples/simple/pytest.ini @@ -0,0 +1,3 @@ +[pytest] +# Disable any upper exclusion. +norecursedirs = diff --git a/server/examples/simple/setup.py b/server/examples/simple/setup.py new file mode 100644 index 0000000..bf9ee4a --- /dev/null +++ b/server/examples/simple/setup.py @@ -0,0 +1,58 @@ +import os + +from jupyter_packaging import create_cmdclass +from setuptools import setup + + +VERSION = "0.0.1" + + +def get_data_files(): + """Get the data files for the package.""" + data_files = [ + ("etc/jupyter/jupyter_server_config.d", "etc/jupyter/jupyter_server_config.d/", "*.json"), + ] + + def add_data_files(path): + for (dirpath, dirnames, filenames) in os.walk(path): + if filenames: + paths = [(dirpath, dirpath, filename) for filename in filenames] + data_files.extend(paths) + + # Add all static and templates folders. 
+ add_data_files("simple_ext1/static") + add_data_files("simple_ext1/templates") + add_data_files("simple_ext2/static") + add_data_files("simple_ext2/templates") + return data_files + + +cmdclass = create_cmdclass(data_files_spec=get_data_files()) + +setup_args = dict( + name="jupyter_server_example", + version=VERSION, + description="Jupyter Server Example", + long_description=open("README.md").read(), + python_requires=">=3.7", + install_requires=[ + "jupyter_server", + "jinja2", + ], + extras_require={ + "test": ["pytest"], + }, + include_package_data=True, + cmdclass=cmdclass, + entry_points={ + "console_scripts": [ + "jupyter-simple-ext1 = simple_ext1.application:main", + "jupyter-simple-ext11 = simple_ext11.application:main", + "jupyter-simple-ext2 = simple_ext2.application:main", + ] + }, +) + + +if __name__ == "__main__": + setup(**setup_args) diff --git a/server/examples/simple/simple_ext1/__init__.py b/server/examples/simple/simple_ext1/__init__.py new file mode 100644 index 0000000..7b0c65c --- /dev/null +++ b/server/examples/simple/simple_ext1/__init__.py @@ -0,0 +1,5 @@ +from .application import SimpleApp1 + + +def _jupyter_server_extension_paths(): + return [{"module": "simple_ext1.application", "app": SimpleApp1}] diff --git a/server/examples/simple/simple_ext1/__main__.py b/server/examples/simple/simple_ext1/__main__.py new file mode 100644 index 0000000..317a0bd --- /dev/null +++ b/server/examples/simple/simple_ext1/__main__.py @@ -0,0 +1,4 @@ +from .application import main + +if __name__ == "__main__": + main() diff --git a/server/examples/simple/simple_ext1/application.py b/server/examples/simple/simple_ext1/application.py new file mode 100644 index 0000000..91e734f --- /dev/null +++ b/server/examples/simple/simple_ext1/application.py @@ -0,0 +1,61 @@ +import os + +from traitlets import Unicode + +from .handlers import DefaultHandler +from .handlers import ErrorHandler +from .handlers import ParameterHandler +from .handlers import RedirectHandler 
+from .handlers import TemplateHandler +from .handlers import TypescriptHandler +from jupyter_server.extension.application import ExtensionApp +from jupyter_server.extension.application import ExtensionAppJinjaMixin + +DEFAULT_STATIC_FILES_PATH = os.path.join(os.path.dirname(__file__), "static") +DEFAULT_TEMPLATE_FILES_PATH = os.path.join(os.path.dirname(__file__), "templates") + + +class SimpleApp1(ExtensionAppJinjaMixin, ExtensionApp): + + # The name of the extension. + name = "simple_ext1" + + # The url that your extension will serve its homepage. + extension_url = "/simple_ext1/default" + + # Should your extension expose other server extensions when launched directly? + load_other_extensions = True + + # Local path to static files directory. + static_paths = [DEFAULT_STATIC_FILES_PATH] + + # Local path to templates directory. + template_paths = [DEFAULT_TEMPLATE_FILES_PATH] + + configA = Unicode("", config=True, help="Config A example.") + + configB = Unicode("", config=True, help="Config B example.") + + configC = Unicode("", config=True, help="Config C example.") + + def initialize_handlers(self): + self.handlers.extend( + [ + (r"/{}/default".format(self.name), DefaultHandler), + (r"/{}/params/(.+)$".format(self.name), ParameterHandler), + (r"/{}/template1/(.*)$".format(self.name), TemplateHandler), + (r"/{}/redirect".format(self.name), RedirectHandler), + (r"/{}/typescript/?".format(self.name), TypescriptHandler), + (r"/{}/(.*)", ErrorHandler), + ] + ) + + def initialize_settings(self): + self.log.info("Config {}".format(self.config)) + + +# ----------------------------------------------------------------------------- +# Main entry point +# ----------------------------------------------------------------------------- + +main = launch_new_instance = SimpleApp1.launch_instance diff --git a/server/examples/simple/simple_ext1/handlers.py b/server/examples/simple/simple_ext1/handlers.py new file mode 100644 index 0000000..1f412e8 --- /dev/null +++ 
b/server/examples/simple/simple_ext1/handlers.py @@ -0,0 +1,51 @@ +from jupyter_server.base.handlers import JupyterHandler +from jupyter_server.extension.handler import ExtensionHandlerJinjaMixin +from jupyter_server.extension.handler import ExtensionHandlerMixin +from jupyter_server.utils import url_escape + + +class DefaultHandler(ExtensionHandlerMixin, JupyterHandler): + def get(self): + # The name of the extension to which this handler is linked. + self.log.info("Extension Name in {} Default Handler: {}".format(self.name, self.name)) + # A method for getting the url to static files (prefixed with /static/). + self.log.info( + "Static URL for / in simple_ext1 Default Handler: {}".format(self.static_url(path="/")) + ) + self.write("

Hello Simple 1 - I am the default...

") + self.write("Config in {} Default Handler: {}".format(self.name, self.config)) + + +class RedirectHandler(ExtensionHandlerMixin, JupyterHandler): + def get(self): + self.redirect("/static/{}/favicon.ico".format(self.name)) + + +class ParameterHandler(ExtensionHandlerMixin, JupyterHandler): + def get(self, matched_part=None, *args, **kwargs): + var1 = self.get_argument("var1", default=None) + components = [x for x in self.request.path.split("/") if x] + self.write("

Hello Simple App 1 from Handler.

") + self.write("

matched_part: {}

".format(url_escape(matched_part))) + self.write("

var1: {}

".format(url_escape(var1))) + self.write("

components: {}

".format(components)) + + +class BaseTemplateHandler(ExtensionHandlerJinjaMixin, ExtensionHandlerMixin, JupyterHandler): + pass + + +class TypescriptHandler(BaseTemplateHandler): + def get(self): + self.write(self.render_template("typescript.html")) + + +class TemplateHandler(BaseTemplateHandler): + def get(self, path): + """Optionaly, you can print(self.get_template('simple1.html'))""" + self.write(self.render_template("simple1.html", path=path)) + + +class ErrorHandler(BaseTemplateHandler): + def get(self, path): + self.write(self.render_template("error.html", path=path)) diff --git a/server/examples/simple/simple_ext1/static/bundle.js b/server/examples/simple/simple_ext1/static/bundle.js new file mode 100644 index 0000000..9017d38 --- /dev/null +++ b/server/examples/simple/simple_ext1/static/bundle.js @@ -0,0 +1,144 @@ +/******/ (function (modules) { + // webpackBootstrap + /******/ // The module cache + /******/ var installedModules = {}; // The require function + /******/ + /******/ /******/ function __webpack_require__(moduleId) { + /******/ + /******/ // Check if module is in cache + /******/ if (installedModules[moduleId]) { + /******/ return installedModules[moduleId].exports; + /******/ + } // Create a new module (and put it into the cache) + /******/ /******/ var module = (installedModules[moduleId] = { + /******/ i: moduleId, + /******/ l: false, + /******/ exports: {}, + /******/ + }); // Execute the module function + /******/ + /******/ /******/ modules[moduleId].call( + module.exports, + module, + module.exports, + __webpack_require__ + ); // Flag the module as loaded + /******/ + /******/ /******/ module.l = true; // Return the exports of the module + /******/ + /******/ /******/ return module.exports; + /******/ + } // expose the modules object (__webpack_modules__) + /******/ + /******/ + /******/ /******/ __webpack_require__.m = modules; // expose the module cache + /******/ + /******/ /******/ __webpack_require__.c = installedModules; // define 
getter function for harmony exports + /******/ + /******/ /******/ __webpack_require__.d = function (exports, name, getter) { + /******/ if (!__webpack_require__.o(exports, name)) { + /******/ Object.defineProperty(exports, name, { + enumerable: true, + get: getter, + }); + /******/ + } + /******/ + }; // define __esModule on exports + /******/ + /******/ /******/ __webpack_require__.r = function (exports) { + /******/ if (typeof Symbol !== "undefined" && Symbol.toStringTag) { + /******/ Object.defineProperty(exports, Symbol.toStringTag, { + value: "Module", + }); + /******/ + } + /******/ Object.defineProperty(exports, "__esModule", { value: true }); + /******/ + }; // create a fake namespace object // mode & 1: value is a module id, require it // mode & 2: merge all properties of value into the ns // mode & 4: return value when already ns object // mode & 8|1: behave like require + /******/ + /******/ /******/ /******/ /******/ /******/ /******/ __webpack_require__.t = + function (value, mode) { + /******/ if (mode & 1) value = __webpack_require__(value); + /******/ if (mode & 8) return value; + /******/ if ( + mode & 4 && + typeof value === "object" && + value && + value.__esModule + ) + return value; + /******/ var ns = Object.create(null); + /******/ __webpack_require__.r(ns); + /******/ Object.defineProperty(ns, "default", { + enumerable: true, + value: value, + }); + /******/ if (mode & 2 && typeof value != "string") + for (var key in value) + __webpack_require__.d( + ns, + key, + function (key) { + return value[key]; + }.bind(null, key) + ); + /******/ return ns; + /******/ + }; // getDefaultExport function for compatibility with non-harmony modules + /******/ + /******/ /******/ __webpack_require__.n = function (module) { + /******/ var getter = + module && module.__esModule + ? 
/******/ function getDefault() { + return module["default"]; + } + : /******/ function getModuleExports() { + return module; + }; + /******/ __webpack_require__.d(getter, "a", getter); + /******/ return getter; + /******/ + }; // Object.prototype.hasOwnProperty.call + /******/ + /******/ /******/ __webpack_require__.o = function (object, property) { + return Object.prototype.hasOwnProperty.call(object, property); + }; // __webpack_public_path__ + /******/ + /******/ /******/ __webpack_require__.p = ""; // Load entry module and return exports + /******/ + /******/ + /******/ /******/ return __webpack_require__((__webpack_require__.s = 0)); + /******/ +})( + /************************************************************************/ + /******/ { + /***/ "./simple_ext1/static/index.js": + /*!*************************************!*\ + !*** ./simple_ext1/static/index.js ***! + \*************************************/ + /*! no static exports found */ + /***/ function (module, exports) { + eval( + 'function main() {\n let div = document.getElementById("mydiv");\n div.innerText = "Hello from Typescript";\n}\nwindow.addEventListener(\'load\', main);\n\n\n//# sourceURL=webpack:///./simple_ext1/static/index.js?' + ); + + /***/ + }, + + /***/ 0: + /*!*******************************************!*\ + !*** multi ./simple_ext1/static/index.js ***! + \*******************************************/ + /*! no static exports found */ + /***/ function (module, exports, __webpack_require__) { + eval( + 'module.exports = __webpack_require__(/*! ./simple_ext1/static/index.js */"./simple_ext1/static/index.js");\n\n\n//# sourceURL=webpack:///multi_./simple_ext1/static/index.js?' 
+ ); + + /***/ + }, + + /******/ + } +); diff --git a/server/examples/simple/simple_ext1/static/favicon.ico b/server/examples/simple/simple_ext1/static/favicon.ico new file mode 100644 index 0000000000000000000000000000000000000000..2d1bcff7ca0ed6c77fdf1b016986f9137a43d31a GIT binary patch literal 32038 zcmeI4U5FG{7ROsAA|fG?5JY4_k?9XRYJ%itpLE5Qd5}CTSqV$nU6$YuB0hvnSGNw% z5*9HcG7rljAx|Of2kb(SAVHR88S)UqAR>zoOCCg)ARz>&t7k?;dw>7xx;=NQtGcVF zKWDlc&eW}2_nxnN>fUqDJvFJ+medQWmtRit+nf6R3#ru4RoYv-{|oj1pzih8{rmA$ z>X)ykQb&*0?z2BjrJnyZmCAA}6nlX!-e20#FZiogt6PGQF1WkXh4N{j-~(lMcd%XX zp0?w_-+`wNviJo^c=^??1z);#+$d~?5U`?YYa`~5LEYVy==z5f1&y%TDiN>!_!faaOK zz)`@=Gc(@YMTPaV`5WePw!nPgxXgFuS>S8nAt2ijsI0nKwNcw{$&x9S_k(v1u;)56 z=S5xBc20fQ%SXQWMDyQeKsFskr&YcBUKXx)_{ccree}EA0o@>9cZD+!56sA4(YRvcV_c;7|FUkpY1j9noOLR1dbFTv@I&_u>sHrgHu!^A=)D5GXX^9L zO2c@VXT7=(f@ZH|N$@uA6%w5!I*DrogP03!nwkqWC&uNs({`Hcp?AjMG3$@k22We+K#J_PrGf7)ViZ3WZF61JvUb#7=|1Zp*z<@#RZPFS3Rl1{Bx& zE%V~J@q$|vaDPs9dZspNI}CF2K5U1t6YzSC>pY-+?sveCfZ{h=_XH}dPS4b4KJAp% z=Y(+dQvQYSH6& zyudZ0JU&&EPkcv}(^KH8;k+Um-IjqhXFEE2CtX^A5LTJ9&JynXT3Z)210ufdf^#%=BL9M%57^x`)52zg|A zS2TQDPi?#McxoX7d!23Y_5p3po<#b8m?@Osr~YuJu=G+C58bNIK?B_!gzkCdu-BKe zcoUCNw9-@V1pTx|J_Qz7BhR9jp7R{ZPsx3eF`0yyZ#M+T)x0O_Vxqw z)(zhKd|10d)WKH{8<^v&_hUXiQ*Brq;P({IIB#+%50tvUPw!OjytA|u`#B>X#jA}k zH@&=*_jWE_baQt7FqWA=a_IO^;@R*=d}hDie{(iB!(_YKozlc|}f7BjLd$nQXKj$b13N3ho?lW72VNEc;`Pz-6VHZ-4KYT+_?2Qv=h<@} z%oQs=j|C}4hJ7n;{60WDq!w$X?8mQ`BI8${3{J(1AKEcU`f5BB^ZXe3-bI$&f$4Hn zJi-+h6pQ4&%PTes4aMs3`!>M2NIeGoX}VB}6$8yWWy!E$%V3pdgT$E^ZxOGXhUUp^ zF^?r2CLKbbVZT=9LvQ zMXuKIbK0n_>F48az^8iAZaaQ184i84dVXQ)-lruUSu(XMw|egBJQSaIesS<>`{x(+ zafm!W3e{J*PW>}7Pv>#UT{u57{Oh`Ue#hBb!8>E~&KS(N=o(e7@f@n#hA|}G|AhAb z`Ca#OXLBWUe!BQAHvh~!H}&6*sIteuC*SIL1>17}jCSBGUl9#kcik@6&#!BF0p9sS zOa1#Y8d(BYaBvphhx6;8E||ADGjLnx4k3vA(kJc(~tW# z`5RNU{0&d9-Z;7EC=Xr!MnCk@ibgFTNZ&;1mt6a;2jyuU1!e$~rTnr8g+GKrn&>a`&h=QT)?OPD|7olnKSR;BkN3d*@Ig7j-GjKXsbTzyAgHjXGc09z{zu2mOBJO67NWe|e94FLO8z_n~;`w=J-5$`k*Y59etBx_}q))DJym 
zi%Y*C_SQFBkE>(xB;I&iRiR>fntb>PkIPWkh=V=?G<=-~5itr16V zRQ3NcZmnM?d>n@upyS?zuQ`02t7)fliP+)?d}}o+P+4`*xvCgF*PGZGaYOk>>zhx+ zvv}{+7QmzY+PF6VBl6usCgP{%jK=MfdyCA)l}uRQe%5hb zIxbyccXm$h8he#2?eIh{?>_J;HWk3*E-=o&Iy%$&d({pSNB%5S3_4ZZ%63_of9>j!QznRL|!36W;!!X}4TdcfCF2#w> zByX53l2Nh-pJ^Qp)@|1NQId7Dj$tBKW2`;VNura)1`-=cY#_0L#0C-@NNganfy4$9 z8%S&*v4O+}n%lrA#e~9S9rgI97yl+UtQhfTeODCQSByU_{wIy>6vw(wfX-G$t@1_N&h7y__Q}eEax-{u^-b>Dyp1;VA3Pm2 z$$vqY572S(wTmnJ2W<<=!NIPlb*>qd+uXOC|6*iP?vTL|d3D@7%D+)glIhp$t8%Th zMlsfRBl+@eKr(c~4>V%uztwUr+%LR*By!f2yVeP9^;HgCEuRH>$*n`)zU9A(epm7z z$z?>3H~rkk==?`=BbjICSg&r@=C8r$KS~Gkm*8t8^IvWIMF%69|7`1El=I(>9!B~5 z2crXBNA>#`TL**q{)x5W@i2VWelcome to Simple App 1 Home Page. diff --git a/server/examples/simple/simple_ext1/static/index.d.ts b/server/examples/simple/simple_ext1/static/index.d.ts new file mode 100644 index 0000000..1be3002 --- /dev/null +++ b/server/examples/simple/simple_ext1/static/index.d.ts @@ -0,0 +1 @@ +declare function main(): void; diff --git a/server/examples/simple/simple_ext1/static/index.js b/server/examples/simple/simple_ext1/static/index.js new file mode 100644 index 0000000..4cc84b9 --- /dev/null +++ b/server/examples/simple/simple_ext1/static/index.js @@ -0,0 +1,5 @@ +function main() { + let div = document.getElementById("mydiv"); + div.innerText = "Hello from Typescript"; +} +window.addEventListener("load", main); diff --git a/server/examples/simple/simple_ext1/static/test.html b/server/examples/simple/simple_ext1/static/test.html new file mode 100644 index 0000000..8c0bb51 --- /dev/null +++ b/server/examples/simple/simple_ext1/static/test.html @@ -0,0 +1 @@ +

Hello Simple App 1 from test HTML page.

diff --git a/server/examples/simple/simple_ext1/templates/error.html b/server/examples/simple/simple_ext1/templates/error.html new file mode 100644 index 0000000..1379574 --- /dev/null +++ b/server/examples/simple/simple_ext1/templates/error.html @@ -0,0 +1,21 @@ +{% extends "page.html" %} + +{% block site %} + +
+ {% block h1_error %} +

Error Page

+

{{status_code}} : {{status_message}}

+ {% endblock h1_error %} + + {% block error_detail %} + {% if message %} +

{% trans %}The error was:{% endtrans %}

+
+
{{message}}
+
+ {% endif %} + {% endblock error_detail %} +
+ +{% endblock %} diff --git a/server/examples/simple/simple_ext1/templates/page.html b/server/examples/simple/simple_ext1/templates/page.html new file mode 100644 index 0000000..cf01c9a --- /dev/null +++ b/server/examples/simple/simple_ext1/templates/page.html @@ -0,0 +1,20 @@ + + + + + {% block title %}Jupyter Server 1{% endblock %} + {% block favicon %}{% endblock %} + + + {% block meta %} + {% endblock %} + + +
+ {% block site %} + {% endblock site %} +
+ {% block after_site %} + {% endblock after_site %} + + diff --git a/server/examples/simple/simple_ext1/templates/simple1.html b/server/examples/simple/simple_ext1/templates/simple1.html new file mode 100644 index 0000000..95d8403 --- /dev/null +++ b/server/examples/simple/simple_ext1/templates/simple1.html @@ -0,0 +1,19 @@ + + + + + + + +

Hello Simple App 1 from Template.

+

Path: {{path}}

+ diff --git a/server/examples/simple/simple_ext1/templates/typescript.html b/server/examples/simple/simple_ext1/templates/typescript.html new file mode 100644 index 0000000..e825cf3 --- /dev/null +++ b/server/examples/simple/simple_ext1/templates/typescript.html @@ -0,0 +1,21 @@ + + + + + + + + +
+

Hello world!

+
+ diff --git a/server/examples/simple/simple_ext11/__init__.py b/server/examples/simple/simple_ext11/__init__.py new file mode 100644 index 0000000..abe0f73 --- /dev/null +++ b/server/examples/simple/simple_ext11/__init__.py @@ -0,0 +1,5 @@ +from .application import SimpleApp11 + + +def _jupyter_server_extension_paths(): + return [{"module": "simple_ext11.application", "app": SimpleApp11}] diff --git a/server/examples/simple/simple_ext11/__main__.py b/server/examples/simple/simple_ext11/__main__.py new file mode 100644 index 0000000..317a0bd --- /dev/null +++ b/server/examples/simple/simple_ext11/__main__.py @@ -0,0 +1,4 @@ +from .application import main + +if __name__ == "__main__": + main() diff --git a/server/examples/simple/simple_ext11/application.py b/server/examples/simple/simple_ext11/application.py new file mode 100644 index 0000000..82411dd --- /dev/null +++ b/server/examples/simple/simple_ext11/application.py @@ -0,0 +1,75 @@ +import os + +from simple_ext1.application import SimpleApp1 +from traitlets import Bool +from traitlets import observe +from traitlets import Unicode + +from jupyter_server.serverapp import aliases +from jupyter_server.serverapp import flags + +DEFAULT_STATIC_FILES_PATH = os.path.join(os.path.dirname(__file__), "./../simple_ext1/static") +DEFAULT_TEMPLATE_FILES_PATH = os.path.join(os.path.dirname(__file__), "./../simple_ext1/templates") + + +class SimpleApp11(SimpleApp1): + flags["hello"] = ({"SimpleApp11": {"hello": True}}, "Say hello on startup.") + aliases.update( + { + "simple11-dir": "SimpleApp11.simple11_dir", + } + ) + + # The name of the extension. + name = "simple_ext11" + + # Te url that your extension will serve its homepage. + extension_url = "/simple_ext11/default" + + # Local path to static files directory. + static_paths = [DEFAULT_STATIC_FILES_PATH] + + # Local path to templates directory. 
+ template_paths = [DEFAULT_TEMPLATE_FILES_PATH] + + simple11_dir = Unicode("", config=True, help="Simple directory") + + hello = Bool( + False, + config=True, + help="Say hello", + ) + + ignore_js = Bool( + False, + config=True, + help="Ignore Javascript", + ) + + @observe("ignore_js") + def _update_ignore_js(self, change): + """TODO Does the observe work?""" + self.log.info("ignore_js has just changed: {}".format(change)) + + @property + def simple11_dir_formatted(self): + return "/" + self.simple11_dir + + def initialize_settings(self): + self.log.info("hello: {}".format(self.hello)) + if self.hello == True: + self.log.info( + "Hello Simple11: You have launched with --hello flag or defined 'c.SimpleApp1.hello == True' in your config file" + ) + self.log.info("ignore_js: {}".format(self.ignore_js)) + super().initialize_settings() + + def initialize_handlers(self): + super().initialize_handlers() + + +# ----------------------------------------------------------------------------- +# Main entry point +# ----------------------------------------------------------------------------- + +main = launch_new_instance = SimpleApp11.launch_instance diff --git a/server/examples/simple/simple_ext2/__init__.py b/server/examples/simple/simple_ext2/__init__.py new file mode 100644 index 0000000..ffe7bc4 --- /dev/null +++ b/server/examples/simple/simple_ext2/__init__.py @@ -0,0 +1,7 @@ +from .application import SimpleApp2 + + +def _jupyter_server_extension_paths(): + return [ + {"module": "simple_ext2.application", "app": SimpleApp2}, + ] diff --git a/server/examples/simple/simple_ext2/__main__.py b/server/examples/simple/simple_ext2/__main__.py new file mode 100644 index 0000000..317a0bd --- /dev/null +++ b/server/examples/simple/simple_ext2/__main__.py @@ -0,0 +1,4 @@ +from .application import main + +if __name__ == "__main__": + main() diff --git a/server/examples/simple/simple_ext2/application.py b/server/examples/simple/simple_ext2/application.py new file mode 100644 index 
0000000..fcda51d --- /dev/null +++ b/server/examples/simple/simple_ext2/application.py @@ -0,0 +1,53 @@ +import os + +from traitlets import Unicode + +from .handlers import ErrorHandler +from .handlers import IndexHandler +from .handlers import ParameterHandler +from .handlers import TemplateHandler +from jupyter_server.extension.application import ExtensionApp +from jupyter_server.extension.application import ExtensionAppJinjaMixin + +DEFAULT_STATIC_FILES_PATH = os.path.join(os.path.dirname(__file__), "static") +DEFAULT_TEMPLATE_FILES_PATH = os.path.join(os.path.dirname(__file__), "templates") + + +class SimpleApp2(ExtensionAppJinjaMixin, ExtensionApp): + + # The name of the extension. + name = "simple_ext2" + + # Te url that your extension will serve its homepage. + extension_url = "/simple_ext2" + + # Should your extension expose other server extensions when launched directly? + load_other_extensions = True + + # Local path to static files directory. + static_paths = [DEFAULT_STATIC_FILES_PATH] + + # Local path to templates directory. 
+ template_paths = [DEFAULT_TEMPLATE_FILES_PATH] + + configD = Unicode("", config=True, help="Config D example.") + + def initialize_handlers(self): + self.handlers.extend( + [ + (r"/simple_ext2/params/(.+)$", ParameterHandler), + (r"/simple_ext2/template", TemplateHandler), + (r"/simple_ext2/?", IndexHandler), + (r"/simple_ext2/(.*)", ErrorHandler), + ] + ) + + def initialize_settings(self): + self.log.info("Config {}".format(self.config)) + + +# ----------------------------------------------------------------------------- +# Main entry point +# ----------------------------------------------------------------------------- + +main = launch_new_instance = SimpleApp2.launch_instance diff --git a/server/examples/simple/simple_ext2/handlers.py b/server/examples/simple/simple_ext2/handlers.py new file mode 100644 index 0000000..2e37fe8 --- /dev/null +++ b/server/examples/simple/simple_ext2/handlers.py @@ -0,0 +1,34 @@ +from jupyter_server.base.handlers import JupyterHandler +from jupyter_server.extension.handler import ExtensionHandlerJinjaMixin +from jupyter_server.extension.handler import ExtensionHandlerMixin +from jupyter_server.utils import url_escape + + +class ParameterHandler(ExtensionHandlerMixin, JupyterHandler): + def get(self, matched_part=None, *args, **kwargs): + var1 = self.get_argument("var1", default=None) + components = [x for x in self.request.path.split("/") if x] + self.write("

Hello Simple App 2 from Handler.

") + self.write("

matched_part: {}

".format(url_escape(matched_part))) + self.write("

var1: {}

".format(url_escape(var1))) + self.write("

components: {}

".format(components)) + + +class BaseTemplateHandler(ExtensionHandlerJinjaMixin, ExtensionHandlerMixin, JupyterHandler): + pass + + +class IndexHandler(BaseTemplateHandler): + def get(self): + self.write(self.render_template("index.html")) + + +class TemplateHandler(BaseTemplateHandler): + def get(self, path): + print(self.get_template("simple_ext2.html")) + self.write(self.render_template("simple_ext2.html", path=path)) + + +class ErrorHandler(BaseTemplateHandler): + def get(self, path): + self.write(self.render_template("error.html")) diff --git a/server/examples/simple/simple_ext2/static/favicon.ico b/server/examples/simple/simple_ext2/static/favicon.ico new file mode 100644 index 0000000000000000000000000000000000000000..2d1bcff7ca0ed6c77fdf1b016986f9137a43d31a GIT binary patch literal 32038 zcmeI4U5FG{7ROsAA|fG?5JY4_k?9XRYJ%itpLE5Qd5}CTSqV$nU6$YuB0hvnSGNw% z5*9HcG7rljAx|Of2kb(SAVHR88S)UqAR>zoOCCg)ARz>&t7k?;dw>7xx;=NQtGcVF zKWDlc&eW}2_nxnN>fUqDJvFJ+medQWmtRit+nf6R3#ru4RoYv-{|oj1pzih8{rmA$ z>X)ykQb&*0?z2BjrJnyZmCAA}6nlX!-e20#FZiogt6PGQF1WkXh4N{j-~(lMcd%XX zp0?w_-+`wNviJo^c=^??1z);#+$d~?5U`?YYa`~5LEYVy==z5f1&y%TDiN>!_!faaOK zz)`@=Gc(@YMTPaV`5WePw!nPgxXgFuS>S8nAt2ijsI0nKwNcw{$&x9S_k(v1u;)56 z=S5xBc20fQ%SXQWMDyQeKsFskr&YcBUKXx)_{ccree}EA0o@>9cZD+!56sA4(YRvcV_c;7|FUkpY1j9noOLR1dbFTv@I&_u>sHrgHu!^A=)D5GXX^9L zO2c@VXT7=(f@ZH|N$@uA6%w5!I*DrogP03!nwkqWC&uNs({`Hcp?AjMG3$@k22We+K#J_PrGf7)ViZ3WZF61JvUb#7=|1Zp*z<@#RZPFS3Rl1{Bx& zE%V~J@q$|vaDPs9dZspNI}CF2K5U1t6YzSC>pY-+?sveCfZ{h=_XH}dPS4b4KJAp% z=Y(+dQvQYSH6& zyudZ0JU&&EPkcv}(^KH8;k+Um-IjqhXFEE2CtX^A5LTJ9&JynXT3Z)210ufdf^#%=BL9M%57^x`)52zg|A zS2TQDPi?#McxoX7d!23Y_5p3po<#b8m?@Osr~YuJu=G+C58bNIK?B_!gzkCdu-BKe zcoUCNw9-@V1pTx|J_Qz7BhR9jp7R{ZPsx3eF`0yyZ#M+T)x0O_Vxqw z)(zhKd|10d)WKH{8<^v&_hUXiQ*Brq;P({IIB#+%50tvUPw!OjytA|u`#B>X#jA}k zH@&=*_jWE_baQt7FqWA=a_IO^;@R*=d}hDie{(iB!(_YKozlc|}f7BjLd$nQXKj$b13N3ho?lW72VNEc;`Pz-6VHZ-4KYT+_?2Qv=h<@} z%oQs=j|C}4hJ7n;{60WDq!w$X?8mQ`BI8${3{J(1AKEcU`f5BB^ZXe3-bI$&f$4Hn 
zJi-+h6pQ4&%PTes4aMs3`!>M2NIeGoX}VB}6$8yWWy!E$%V3pdgT$E^ZxOGXhUUp^ zF^?r2CLKbbVZT=9LvQ zMXuKIbK0n_>F48az^8iAZaaQ184i84dVXQ)-lruUSu(XMw|egBJQSaIesS<>`{x(+ zafm!W3e{J*PW>}7Pv>#UT{u57{Oh`Ue#hBb!8>E~&KS(N=o(e7@f@n#hA|}G|AhAb z`Ca#OXLBWUe!BQAHvh~!H}&6*sIteuC*SIL1>17}jCSBGUl9#kcik@6&#!BF0p9sS zOa1#Y8d(BYaBvphhx6;8E||ADGjLnx4k3vA(kJc(~tW# z`5RNU{0&d9-Z;7EC=Xr!MnCk@ibgFTNZ&;1mt6a;2jyuU1!e$~rTnr8g+GKrn&>a`&h=QT)?OPD|7olnKSR;BkN3d*@Ig7j-GjKXsbTzyAgHjXGc09z{zu2mOBJO67NWe|e94FLO8z_n~;`w=J-5$`k*Y59etBx_}q))DJym zi%Y*C_SQFBkE>(xB;I&iRiR>fntb>PkIPWkh=V=?G<=-~5itr16V zRQ3NcZmnM?d>n@upyS?zuQ`02t7)fliP+)?d}}o+P+4`*xvCgF*PGZGaYOk>>zhx+ zvv}{+7QmzY+PF6VBl6usCgP{%jK=MfdyCA)l}uRQe%5hb zIxbyccXm$h8he#2?eIh{?>_J;HWk3*E-=o&Iy%$&d({pSNB%5S3_4ZZ%63_of9>j!QznRL|!36W;!!X}4TdcfCF2#w> zByX53l2Nh-pJ^Qp)@|1NQId7Dj$tBKW2`;VNura)1`-=cY#_0L#0C-@NNganfy4$9 z8%S&*v4O+}n%lrA#e~9S9rgI97yl+UtQhfTeODCQSByU_{wIy>6vw(wfX-G$t@1_N&h7y__Q}eEax-{u^-b>Dyp1;VA3Pm2 z$$vqY572S(wTmnJ2W<<=!NIPlb*>qd+uXOC|6*iP?vTL|d3D@7%D+)glIhp$t8%Th zMlsfRBl+@eKr(c~4>V%uztwUr+%LR*By!f2yVeP9^;HgCEuRH>$*n`)zU9A(epm7z z$z?>3H~rkk==?`=BbjICSg&r@=C8r$KS~Gkm*8t8^IvWIMF%69|7`1El=I(>9!B~5 z2crXBNA>#`TL**q{)x5W@i2VHello Simple App 2 from test HTML page. diff --git a/server/examples/simple/simple_ext2/templates/error.html b/server/examples/simple/simple_ext2/templates/error.html new file mode 100644 index 0000000..7426beb --- /dev/null +++ b/server/examples/simple/simple_ext2/templates/error.html @@ -0,0 +1,20 @@ +{% extends "page.html" %} + +{% block site %} + +
+ {% block h1_error %} +

{{status_code}} : {{status_message}}

+ {% endblock h1_error %} + + {% block error_detail %} + {% if message %} +

{% trans %}The error was:{% endtrans %}

+
+
{{message}}
+
+ {% endif %} + {% endblock error_detail %} +
+ +{% endblock %} diff --git a/server/examples/simple/simple_ext2/templates/index.html b/server/examples/simple/simple_ext2/templates/index.html new file mode 100644 index 0000000..a42a0ab --- /dev/null +++ b/server/examples/simple/simple_ext2/templates/index.html @@ -0,0 +1 @@ +

Hello Extension 2 from HTML Index Static Page

diff --git a/server/examples/simple/simple_ext2/templates/page.html b/server/examples/simple/simple_ext2/templates/page.html new file mode 100644 index 0000000..cf01c9a --- /dev/null +++ b/server/examples/simple/simple_ext2/templates/page.html @@ -0,0 +1,20 @@ + + + + + {% block title %}Jupyter Server 1{% endblock %} + {% block favicon %}{% endblock %} + + + {% block meta %} + {% endblock %} + + +
+ {% block site %} + {% endblock site %} +
+ {% block after_site %} + {% endblock after_site %} + + diff --git a/server/examples/simple/simple_ext2/templates/simple_ext2.html b/server/examples/simple/simple_ext2/templates/simple_ext2.html new file mode 100644 index 0000000..0c7df48 --- /dev/null +++ b/server/examples/simple/simple_ext2/templates/simple_ext2.html @@ -0,0 +1 @@ +

Hello Extension 2 from Simple HTML Static Page

diff --git a/server/examples/simple/tests/test_handlers.py b/server/examples/simple/tests/test_handlers.py new file mode 100644 index 0000000..7c4cb69 --- /dev/null +++ b/server/examples/simple/tests/test_handlers.py @@ -0,0 +1,20 @@ +import pytest + + +@pytest.fixture +def jp_server_config(jp_template_dir): + return { + "ServerApp": {"jpserver_extensions": {"simple_ext1": True}}, + } + + +async def test_handler_default(jp_fetch): + r = await jp_fetch("simple_ext1/default", method="GET") + assert r.code == 200 + print(r.body.decode()) + assert r.body.decode().index("Hello Simple 1 - I am the default...") > -1 + + +async def test_handler_template(jp_fetch): + r = await jp_fetch("simple_ext1/template1/test", method="GET") + assert r.code == 200 diff --git a/server/examples/simple/webpack.config.js b/server/examples/simple/webpack.config.js new file mode 100644 index 0000000..b242d09 --- /dev/null +++ b/server/examples/simple/webpack.config.js @@ -0,0 +1,8 @@ +module.exports = { + entry: ['./simple_ext1/static/index.js'], + output: { + path: require('path').join(__dirname, 'simple_ext1', 'static'), + filename: 'bundle.js' + }, + mode: 'development' +}; diff --git a/server/jupyter_server/__init__.py b/server/jupyter_server/__init__.py new file mode 100644 index 0000000..3a6586a --- /dev/null +++ b/server/jupyter_server/__init__.py @@ -0,0 +1,29 @@ +"""The Jupyter Server""" +import os +import subprocess +import sys + +DEFAULT_STATIC_FILES_PATH = os.path.join(os.path.dirname(__file__), "static") +DEFAULT_TEMPLATE_PATH_LIST = [ + os.path.dirname(__file__), + os.path.join(os.path.dirname(__file__), "templates"), +] + +DEFAULT_JUPYTER_SERVER_PORT = 8888 + +del os + +from ._version import version_info, __version__ # noqa + + +def _cleanup(): + pass + + +# patch subprocess on Windows for python<3.7 +# see https://bugs.python.org/issue37380 +# the fix for python3.7: https://github.com/python/cpython/pull/15706/files +if sys.platform == "win32": + if sys.version_info < (3, 7): 
+ subprocess._cleanup = _cleanup + subprocess._active = None diff --git a/server/jupyter_server/__main__.py b/server/jupyter_server/__main__.py new file mode 100644 index 0000000..6ada4be --- /dev/null +++ b/server/jupyter_server/__main__.py @@ -0,0 +1,4 @@ +if __name__ == "__main__": + from jupyter_server import serverapp as app + + app.launch_new_instance() diff --git a/server/jupyter_server/_sysinfo.py b/server/jupyter_server/_sysinfo.py new file mode 100644 index 0000000..6dbc9f7 --- /dev/null +++ b/server/jupyter_server/_sysinfo.py @@ -0,0 +1,97 @@ +# encoding: utf-8 +""" +Utilities for getting information about Jupyter and the system it's running in. +""" +# Copyright (c) Jupyter Development Team. +# Distributed under the terms of the Modified BSD License. +import os +import platform +import subprocess +import sys + +from ipython_genutils import encoding + +import jupyter_server + + +def pkg_commit_hash(pkg_path): + """Get short form of commit hash given directory `pkg_path` + + We get the commit hash from git if it's a repo. 
+ + If this fail, we return a not-found placeholder tuple + + Parameters + ---------- + pkg_path : str + directory containing package + only used for getting commit from active repo + + Returns + ------- + hash_from : str + Where we got the hash from - description + hash_str : str + short form of hash + """ + + # maybe we are in a repository, check for a .git folder + p = os.path + cur_path = None + par_path = pkg_path + while cur_path != par_path: + cur_path = par_path + if p.exists(p.join(cur_path, ".git")): + try: + proc = subprocess.Popen( + ["git", "rev-parse", "--short", "HEAD"], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + cwd=pkg_path, + ) + repo_commit, _ = proc.communicate() + except OSError: + repo_commit = None + + if repo_commit: + return "repository", repo_commit.strip().decode("ascii") + else: + return "", "" + par_path = p.dirname(par_path) + + return "", "" + + +def pkg_info(pkg_path): + """Return dict describing the context of this package + + Parameters + ---------- + pkg_path : str + path containing __init__.py for package + + Returns + ------- + context : dict + with named parameters of interest + """ + src, hsh = pkg_commit_hash(pkg_path) + return dict( + jupyter_server_version=jupyter_server.__version__, + jupyter_server_path=pkg_path, + commit_source=src, + commit_hash=hsh, + sys_version=sys.version, + sys_executable=sys.executable, + sys_platform=sys.platform, + platform=platform.platform(), + os_name=os.name, + default_encoding=encoding.DEFAULT_ENCODING, + ) + + +def get_sys_info(): + """Return useful information about the system as a dict.""" + p = os.path + path = p.realpath(p.dirname(p.abspath(p.join(jupyter_server.__file__)))) + return pkg_info(path) diff --git a/server/jupyter_server/_tz.py b/server/jupyter_server/_tz.py new file mode 100644 index 0000000..4ea8cfc --- /dev/null +++ b/server/jupyter_server/_tz.py @@ -0,0 +1,49 @@ +# encoding: utf-8 +""" +Timezone utilities + +Just UTC-awareness right now +""" +# Copyright (c) 
Jupyter Development Team. +# Distributed under the terms of the Modified BSD License. +from datetime import datetime +from datetime import timedelta +from datetime import tzinfo + +# constant for zero offset +ZERO = timedelta(0) + + +class tzUTC(tzinfo): + """tzinfo object for UTC (zero offset)""" + + def utcoffset(self, d): + return ZERO + + def dst(self, d): + return ZERO + + +UTC = tzUTC() + + +def utc_aware(unaware): + """decorator for adding UTC tzinfo to datetime's utcfoo methods""" + + def utc_method(*args, **kwargs): + dt = unaware(*args, **kwargs) + return dt.replace(tzinfo=UTC) + + return utc_method + + +utcfromtimestamp = utc_aware(datetime.utcfromtimestamp) +utcnow = utc_aware(datetime.utcnow) + + +def isoformat(dt): + """Return iso-formatted timestamp + + Like .isoformat(), but uses Z for UTC instead of +00:00 + """ + return dt.isoformat().replace("+00:00", "Z") diff --git a/server/jupyter_server/_version.py b/server/jupyter_server/_version.py new file mode 100644 index 0000000..b45a4eb --- /dev/null +++ b/server/jupyter_server/_version.py @@ -0,0 +1,6 @@ +""" +store the current version info of the server. 
+ +""" +version_info = (1, 14, 0, ".dev", "0") +__version__ = ".".join(map(str, version_info[:3])) + "".join(version_info[3:]) diff --git a/server/jupyter_server/auth/__init__.py b/server/jupyter_server/auth/__init__.py new file mode 100644 index 0000000..54477ff --- /dev/null +++ b/server/jupyter_server/auth/__init__.py @@ -0,0 +1,3 @@ +from .authorizer import * # noqa +from .decorator import authorized # noqa +from .security import passwd # noqa diff --git a/server/jupyter_server/auth/__main__.py b/server/jupyter_server/auth/__main__.py new file mode 100644 index 0000000..b34a318 --- /dev/null +++ b/server/jupyter_server/auth/__main__.py @@ -0,0 +1,55 @@ +import argparse +import sys +from getpass import getpass + +from jupyter_core.paths import jupyter_config_dir + +from jupyter_server.auth import passwd +from jupyter_server.config_manager import BaseJSONConfigManager + + +def set_password(args): + password = args.password + while not password: + password1 = getpass("" if args.quiet else "Provide password: ") + password_repeat = getpass("" if args.quiet else "Repeat password: ") + if password1 != password_repeat: + print("Passwords do not match, try again") + elif len(password1) < 4: + print("Please provide at least 4 characters") + else: + password = password1 + + password_hash = passwd(password) + cfg = BaseJSONConfigManager(config_dir=jupyter_config_dir()) + cfg.update( + "jupyter_server_config", + { + "ServerApp": { + "password": password_hash, + } + }, + ) + if not args.quiet: + print("password stored in config dir: %s" % jupyter_config_dir()) + + +def main(argv): + parser = argparse.ArgumentParser(argv[0]) + subparsers = parser.add_subparsers() + parser_password = subparsers.add_parser( + "password", help="sets a password for your jupyter server" + ) + parser_password.add_argument( + "password", + help="password to set, if not given, a password will be queried for (NOTE: this may not be safe)", + nargs="?", + ) + parser_password.add_argument("--quiet", 
help="suppress messages", action="store_true") + parser_password.set_defaults(function=set_password) + args = parser.parse_args(argv[1:]) + args.function(args) + + +if __name__ == "__main__": + main(sys.argv) diff --git a/server/jupyter_server/auth/authorizer.py b/server/jupyter_server/auth/authorizer.py new file mode 100644 index 0000000..952cb02 --- /dev/null +++ b/server/jupyter_server/auth/authorizer.py @@ -0,0 +1,69 @@ +"""An Authorizer for use in the Jupyter server. + +The default authorizer (AllowAllAuthorizer) +allows all authenticated requests + +.. versionadded:: 2.0 +""" +# Copyright (c) Jupyter Development Team. +# Distributed under the terms of the Modified BSD License. +from traitlets.config import LoggingConfigurable + +from jupyter_server.base.handlers import JupyterHandler + + +class Authorizer(LoggingConfigurable): + """Base class for authorizing access to resources + in the Jupyter Server. + + All authorizers used in Jupyter Server + should inherit from this base class and, at the very minimum, + implement an `is_authorized` method with the + same signature as in this base class. + + The `is_authorized` method is called by the `@authorized` decorator + in JupyterHandler. If it returns True, the incoming request + to the server is accepted; if it returns False, the server + returns a 403 (Forbidden) error code. + + The authorization check will only be applied to requests + that have already been authenticated. + + .. versionadded:: 2.0 + """ + + def is_authorized(self, handler: JupyterHandler, user: str, action: str, resource: str) -> bool: + """A method to determine if `user` is authorized to perform `action` + (read, write, or execute) on the `resource` type. + + Parameters + ---------- + user : usually a dict or string + A truthy model representing the authenticated user. + A username string by default, + but usually a dict when integrating with an auth provider. 
def authorized(
    action: Optional[Union[str, Callable]] = None,
    resource: Optional[str] = None,
    message: Optional[str] = None,
) -> Callable:
    """A decorator for tornado.web.RequestHandler methods
    that verifies whether the current user is authorized
    to make the following request.

    Helpful for adding an 'authorization' layer to
    a REST API.

    .. versionadded:: 2.0

    Parameters
    ----------
    action : str
        the type of permission or action to check.

    resource: str or None
        the name of the resource the action is being authorized
        to access.

    message : str or none
        a message for the unauthorized action.
    """

    def wrapper(method):
        @wraps(method)
        def inner(self, *args, **kwargs):
            # BUG FIX: compute per-request defaults in *local* variables.
            # The original used ``nonlocal`` and assigned back into the
            # closure cells, so the first request's HTTP method, resource
            # and message were permanently baked into the decorator and
            # reused (wrongly) for every subsequent request.
            if action is None:
                http_method = self.request.method.upper()
                request_action = HTTP_METHOD_TO_AUTH_ACTION[http_method]
            else:
                request_action = action
            request_resource = resource if resource is not None else self.auth_resource
            if message is not None:
                request_message = message
            else:
                request_message = (
                    f"User is not authorized to {request_action} on resource: {request_resource}."
                )

            user = self.current_user
            if not user:
                app_log.warning("Attempting to authorize request without authentication!")
                raise HTTPError(status_code=403, log_message=request_message)
            # If the user is allowed to do this action, call the method.
            if self.authorizer.is_authorized(self, user, request_action, request_resource):
                return method(self, *args, **kwargs)
            # else raise an exception.
            raise HTTPError(status_code=403, log_message=request_message)

        return inner

    if callable(action):
        method = action
        action = None
        # no-arguments `@authorized` decorator called
        return wrapper(method)

    return wrapper
+ """ + + def _render(self, message=None): + self.write( + self.render_template( + "login.html", + next=url_escape(self.get_argument("next", default=self.base_url)), + message=message, + ) + ) + + def _redirect_safe(self, url, default=None): + """Redirect if url is on our PATH + + Full-domain redirects are allowed if they pass our CORS origin checks. + + Otherwise use default (self.base_url if unspecified). + """ + if default is None: + default = self.base_url + # protect chrome users from mishandling unescaped backslashes. + # \ is not valid in urls, but some browsers treat it as / + # instead of %5C, causing `\\` to behave as `//` + url = url.replace("\\", "%5C") + parsed = urlparse(url) + if parsed.netloc or not (parsed.path + "/").startswith(self.base_url): + # require that next_url be absolute path within our path + allow = False + # OR pass our cross-origin check + if parsed.netloc: + # if full URL, run our cross-origin check: + origin = "%s://%s" % (parsed.scheme, parsed.netloc) + origin = origin.lower() + if self.allow_origin: + allow = self.allow_origin == origin + elif self.allow_origin_pat: + allow = bool(re.match(self.allow_origin_pat, origin)) + if not allow: + # not allowed, use default + self.log.warning("Not allowing login redirect to %r" % url) + url = default + self.redirect(url) + + def get(self): + if self.current_user: + next_url = self.get_argument("next", default=self.base_url) + self._redirect_safe(next_url) + else: + self._render() + + @property + def hashed_password(self): + return self.password_from_settings(self.settings) + + def passwd_check(self, a, b): + return passwd_check(a, b) + + def post(self): + typed_password = self.get_argument("password", default="") + new_password = self.get_argument("new_password", default="") + + if self.get_login_available(self.settings): + if self.passwd_check(self.hashed_password, typed_password) and not new_password: + self.set_login_cookie(self, uuid.uuid4().hex) + elif self.token and self.token == 
typed_password: + self.set_login_cookie(self, uuid.uuid4().hex) + if new_password and self.settings.get("allow_password_change"): + config_dir = self.settings.get("config_dir") + config_file = os.path.join(config_dir, "jupyter_server_config.json") + set_password(new_password, config_file=config_file) + self.log.info("Wrote hashed password to %s" % config_file) + else: + self.set_status(401) + self._render(message={"error": "Invalid credentials"}) + return + + next_url = self.get_argument("next", default=self.base_url) + self._redirect_safe(next_url) + + @classmethod + def set_login_cookie(cls, handler, user_id=None): + """Call this on handlers to set the login cookie for success""" + cookie_options = handler.settings.get("cookie_options", {}) + cookie_options.setdefault("httponly", True) + # tornado <4.2 has a bug that considers secure==True as soon as + # 'secure' kwarg is passed to set_secure_cookie + if handler.settings.get("secure_cookie", handler.request.protocol == "https"): + cookie_options.setdefault("secure", True) + cookie_options.setdefault("path", handler.base_url) + handler.set_secure_cookie(handler.cookie_name, user_id, **cookie_options) + return user_id + + auth_header_pat = re.compile(r"token\s+(.+)", re.IGNORECASE) + + @classmethod + def get_token(cls, handler): + """Get the user token from a request + + Default: + + - in URL parameters: ?token= + - in header: Authorization: token + """ + + user_token = handler.get_argument("token", "") + if not user_token: + # get it from Authorization header + m = cls.auth_header_pat.match(handler.request.headers.get("Authorization", "")) + if m: + user_token = m.group(1) + return user_token + + @classmethod + def should_check_origin(cls, handler): + """Should the Handler check for CORS origin validation? + + Origin check should be skipped for token-authenticated requests. + + Returns: + - True, if Handler must check for valid CORS origin. 
+ - False, if Handler should skip origin check since requests are token-authenticated. + """ + return not cls.is_token_authenticated(handler) + + @classmethod + def is_token_authenticated(cls, handler): + """Returns True if handler has been token authenticated. Otherwise, False. + + Login with a token is used to signal certain things, such as: + + - permit access to REST API + - xsrf protection + - skip origin-checks for scripts + """ + if getattr(handler, "_user_id", None) is None: + # ensure get_user has been called, so we know if we're token-authenticated + handler.get_current_user() + return getattr(handler, "_token_authenticated", False) + + @classmethod + def get_user(cls, handler): + """Called by handlers.get_current_user for identifying the current user. + + See tornado.web.RequestHandler.get_current_user for details. + """ + # Can't call this get_current_user because it will collide when + # called on LoginHandler itself. + if getattr(handler, "_user_id", None): + return handler._user_id + user_id = cls.get_user_token(handler) + if user_id is None: + get_secure_cookie_kwargs = handler.settings.get("get_secure_cookie_kwargs", {}) + user_id = handler.get_secure_cookie(handler.cookie_name, **get_secure_cookie_kwargs) + if user_id: + user_id = user_id.decode() + else: + cls.set_login_cookie(handler, user_id) + # Record that the current request has been authenticated with a token. + # Used in is_token_authenticated above. + handler._token_authenticated = True + if user_id is None: + # If an invalid cookie was sent, clear it to prevent unnecessary + # extra warnings. But don't do this on a request with *no* cookie, + # because that can erroneously log you out (see gh-3365) + if handler.get_cookie(handler.cookie_name) is not None: + handler.log.warning("Clearing invalid/expired login cookie %s", handler.cookie_name) + handler.clear_login_cookie() + if not handler.login_available: + # Completely insecure! No authentication at all. 
+ # No need to warn here, though; validate_security will have already done that. + user_id = "anonymous" + + # cache value for future retrievals on the same request + handler._user_id = user_id + return user_id + + @classmethod + def get_user_token(cls, handler): + """Identify the user based on a token in the URL or Authorization header + + Returns: + - uuid if authenticated + - None if not + """ + token = handler.token + if not token: + return + # check login token from URL argument or Authorization header + user_token = cls.get_token(handler) + authenticated = False + if user_token == token: + # token-authenticated, set the login cookie + handler.log.debug( + "Accepting token-authenticated connection from %s", + handler.request.remote_ip, + ) + authenticated = True + + if authenticated: + return uuid.uuid4().hex + else: + return None + + @classmethod + def validate_security(cls, app, ssl_options=None): + """Check the application's security. + + Show messages, or abort if necessary, based on the security configuration. + """ + if not app.ip: + warning = "WARNING: The Jupyter server is listening on all IP addresses" + if ssl_options is None: + app.log.warning(warning + " and not using encryption. This " "is not recommended.") + if not app.password and not app.token: + app.log.warning( + warning + " and not using authentication. " + "This is highly insecure and not recommended." + ) + else: + if not app.password and not app.token: + app.log.warning( + "All authentication is disabled." + " Anyone who can connect to this server will be able to run code." + ) + + @classmethod + def password_from_settings(cls, settings): + """Return the hashed password from the tornado settings. + + If there is no configured password, an empty string will be returned. 
+ """ + return settings.get("password", "") + + @classmethod + def get_login_available(cls, settings): + """Whether this LoginHandler is needed - and therefore whether the login page should be displayed.""" + return bool(cls.password_from_settings(settings) or settings.get("token")) diff --git a/server/jupyter_server/auth/logout.py b/server/jupyter_server/auth/logout.py new file mode 100644 index 0000000..49b98bd --- /dev/null +++ b/server/jupyter_server/auth/logout.py @@ -0,0 +1,18 @@ +"""Tornado handlers for logging out of the Jupyter Server. +""" +# Copyright (c) Jupyter Development Team. +# Distributed under the terms of the Modified BSD License. +from ..base.handlers import JupyterHandler + + +class LogoutHandler(JupyterHandler): + def get(self): + self.clear_login_cookie() + if self.login_available: + message = {"info": "Successfully logged out."} + else: + message = {"warning": "Cannot log out. Kennen Server authentication " "is disabled."} + self.write(self.render_template("logout.html", message=message)) + + +default_handlers = [(r"/logout", LogoutHandler)] diff --git a/server/jupyter_server/auth/security.py b/server/jupyter_server/auth/security.py new file mode 100644 index 0000000..3c6bbad --- /dev/null +++ b/server/jupyter_server/auth/security.py @@ -0,0 +1,178 @@ +""" +Password generation for the Jupyter Server. +""" +import getpass +import hashlib +import io +import json +import os +import random +import traceback +import warnings +from contextlib import contextmanager + +from ipython_genutils.py3compat import cast_bytes +from ipython_genutils.py3compat import cast_unicode +from ipython_genutils.py3compat import str_to_bytes +from jupyter_core.paths import jupyter_config_dir +from traitlets.config import Config +from traitlets.config import ConfigFileNotFound +from traitlets.config import JSONFileConfigLoader + +# Length of the salt in nr of hex chars, which implies salt_len * 4 +# bits of randomness. 
# Length of the salt in nr of hex chars, which implies salt_len * 4
# bits of randomness.
salt_len = 12


def passwd(passphrase=None, algorithm="argon2"):
    """Generate hashed password and salt for use in server configuration.

    In the server configuration, set `c.ServerApp.password` to
    the generated string.

    Parameters
    ----------
    passphrase : str
        Password to hash. If unspecified, the user is asked to input
        and verify a password.
    algorithm : str
        Hashing algorithm to use (e.g, 'sha1' or any argument supported
        by :func:`hashlib.new`, or 'argon2').

    Returns
    -------
    hashed_passphrase : str
        Hashed password, in the format 'hash_algorithm:salt:passphrase_hash'.

    Examples
    --------
    >>> passwd('mypassword') # doctest: +ELLIPSIS
    'argon2:...'

    """
    if passphrase is None:
        for i in range(3):
            p0 = getpass.getpass("Enter password: ")
            p1 = getpass.getpass("Verify password: ")
            if p0 == p1:
                passphrase = p0
                break
            else:
                print("Passwords do not match.")
        else:
            raise ValueError("No matching passwords found. Giving up.")

    if algorithm == "argon2":
        import argon2

        ph = argon2.PasswordHasher(
            memory_cost=10240,
            time_cost=10,
            parallelism=8,
        )
        # argon2 embeds its own salt/parameters in the hash string.
        return ":".join((algorithm, ph.hash(passphrase)))

    h = hashlib.new(algorithm)
    # SECURITY FIX: salts are security-sensitive randomness — use the
    # cryptographic ``secrets`` module, not the Mersenne-Twister ``random``.
    import secrets

    salt = ("%0" + str(salt_len) + "x") % secrets.randbits(4 * salt_len)
    # stdlib str.encode replaces the deprecated ipython_genutils casts
    h.update(passphrase.encode("utf-8") + salt.encode("ascii"))

    return ":".join((algorithm, salt, h.hexdigest()))


def passwd_check(hashed_passphrase, passphrase):
    """Verify that a given passphrase matches its hashed version.

    Parameters
    ----------
    hashed_passphrase : str
        Hashed password, in the format returned by `passwd`.
    passphrase : str
        Passphrase to validate.

    Returns
    -------
    valid : bool
        True if the passphrase matches the hash.

    Examples
    --------
    >>> myhash = passwd('mypassword')
    >>> passwd_check(myhash, 'mypassword')
    True

    >>> passwd_check(myhash, 'otherpassword')
    False

    >>> passwd_check('sha1:0e112c3ddfce:a68df677475c2b47b6e86d0467eec97ac5f4b85a',
    ...              'mypassword')
    True
    """
    if hashed_passphrase.startswith("argon2:"):
        import argon2
        import argon2.exceptions

        ph = argon2.PasswordHasher()

        try:
            return ph.verify(hashed_passphrase[7:], passphrase)
        except argon2.exceptions.VerificationError:
            return False

    try:
        algorithm, salt, pw_digest = hashed_passphrase.split(":", 2)
    except (ValueError, TypeError):
        # malformed hash string
        return False

    try:
        h = hashlib.new(algorithm)
    except ValueError:
        # unknown hash algorithm
        return False

    if len(pw_digest) == 0:
        return False

    h.update(passphrase.encode("utf-8") + salt.encode("ascii"))

    # SECURITY FIX: constant-time comparison avoids a timing side channel
    # on the digest check (== short-circuits on the first differing byte).
    import hmac

    return hmac.compare_digest(h.hexdigest(), pw_digest)


@contextmanager
def persist_config(config_file=None, mode=0o600):
    """Context manager that can be used to modify a config object

    On exit of the context manager, the config will be written back to disk,
    by default with user-only (600) permissions.
    """
    if config_file is None:
        config_file = os.path.join(jupyter_config_dir(), "jupyter_server_config.json")

    os.makedirs(os.path.dirname(config_file), exist_ok=True)

    loader = JSONFileConfigLoader(os.path.basename(config_file), os.path.dirname(config_file))
    try:
        config = loader.load_config()
    except ConfigFileNotFound:
        config = Config()

    yield config

    with io.open(config_file, "w", encoding="utf8") as f:
        # json.dumps already returns str; no unicode cast needed
        f.write(json.dumps(config, indent=2))

    try:
        os.chmod(config_file, mode)
    except Exception:
        # best-effort: some filesystems (e.g. on Windows) reject chmod
        tb = traceback.format_exc()
        warnings.warn("Failed to set permissions on %s:\n%s" % (config_file, tb), RuntimeWarning)


def set_password(password=None, config_file=None):
    """Ask user for password, store it in JSON configuration file"""
    hashed_password = passwd(password)

    with persist_config(config_file) as config:
        config.ServerApp.password = hashed_password
+ { "/api/contents/": "contents", ...} + """ + from jupyter_server.serverapp import JUPYTER_SERVICE_HANDLERS + + modules = [] + for mod in JUPYTER_SERVICE_HANDLERS.values(): + if mod: + modules.extend(mod) + resource_map = {} + for handler_module in modules: + mod = importlib.import_module(handler_module) + name = mod.AUTH_RESOURCE + for handler in mod.default_handlers: + url_regex = handler[0] + resource_map[url_regex] = name + # terminal plugin doesn't have importable url patterns + # get these from terminal/__init__.py + for url_regex in [ + r"/terminals/websocket/(\w+)", + "/api/terminals", + r"/api/terminals/(\w+)", + ]: + resource_map[url_regex] = "terminals" + return resource_map + + +def match_url_to_resource(url, regex_mapping=None): + """Finds the JupyterHandler regex pattern that would + match the given URL and returns the resource name (str) + of that handler. + + e.g. + /api/contents/... returns "contents" + """ + if not regex_mapping: + regex_mapping = get_regex_to_resource_map() + for regex, auth_resource in regex_mapping.items(): + pattern = re.compile(regex) + if pattern.fullmatch(url): + return auth_resource diff --git a/server/jupyter_server/base/__init__.py b/server/jupyter_server/base/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/server/jupyter_server/base/handlers.py b/server/jupyter_server/base/handlers.py new file mode 100644 index 0000000..0361b12 --- /dev/null +++ b/server/jupyter_server/base/handlers.py @@ -0,0 +1,987 @@ +"""Base Tornado handlers for the Jupyter server.""" +# Copyright (c) Jupyter Development Team. +# Distributed under the terms of the Modified BSD License. 
non_alphanum = re.compile(r"[^A-Za-z0-9]")

# Lazily-computed JSON serialization of the system info (stable per process).
_sys_info_cache = None


def json_sys_info():
    """Return the system-info dict serialized to JSON, computed once and cached."""
    global _sys_info_cache
    if _sys_info_cache is None:
        _sys_info_cache = json.dumps(get_sys_info())
    return _sys_info_cache


def log():
    """Prefer the running Application's logger, falling back to tornado's app_log."""
    return Application.instance().log if Application.initialized() else app_log
self.settings["headers"]["Content-Security-Policy"] + + return "; ".join( + [ + "frame-ancestors 'self'", + # Make sure the report-uri is relative to the base_url + "report-uri " + + self.settings.get("csp_report_uri", url_path_join(self.base_url, csp_report_uri)), + ] + ) + + def set_default_headers(self): + headers = {} + headers["X-Content-Type-Options"] = "nosniff" + headers.update(self.settings.get("headers", {})) + + headers["Content-Security-Policy"] = self.content_security_policy + + # Allow for overriding headers + for header_name, value in headers.items(): + try: + self.set_header(header_name, value) + except Exception as e: + # tornado raise Exception (not a subclass) + # if method is unsupported (websocket and Access-Control-Allow-Origin + # for example, so just ignore) + self.log.debug(e) + + def force_clear_cookie(self, name, path="/", domain=None): + """Deletes the cookie with the given name. + + Tornado's cookie handling currently (Jan 2018) stores cookies in a dict + keyed by name, so it can only modify one cookie with a given name per + response. The browser can store multiple cookies with the same name + but different domains and/or paths. This method lets us clear multiple + cookies with the same name. + + Due to limitations of the cookie protocol, you must pass the same + path and domain to clear a cookie as were used when that cookie + was set (but there is no way to find out on the server side + which values were used for a given cookie). 
+ """ + name = escape.native_str(name) + expires = datetime.datetime.utcnow() - datetime.timedelta(days=365) + + morsel = Morsel() + morsel.set(name, "", '""') + morsel["expires"] = httputil.format_timestamp(expires) + morsel["path"] = path + if domain: + morsel["domain"] = domain + self.add_header("Set-Cookie", morsel.OutputString()) + + def clear_login_cookie(self): + cookie_options = self.settings.get("cookie_options", {}) + path = cookie_options.setdefault("path", self.base_url) + self.clear_cookie(self.cookie_name, path=path) + if path and path != "/": + # also clear cookie on / to ensure old cookies are cleared + # after the change in path behavior. + # N.B. This bypasses the normal cookie handling, which can't update + # two cookies with the same name. See the method above. + self.force_clear_cookie(self.cookie_name) + + def get_current_user(self): + if self.login_handler is None: + return "anonymous" + return self.login_handler.get_user(self) + + def skip_check_origin(self): + """Ask my login_handler if I should skip the origin_check + + For example: in the default LoginHandler, if a request is token-authenticated, + origin checking should be skipped. + """ + if self.request.method == "OPTIONS": + # no origin-check on options requests, which are used to check origins! 
+ return True + if self.login_handler is None or not hasattr(self.login_handler, "should_check_origin"): + return False + return not self.login_handler.should_check_origin(self) + + @property + def token_authenticated(self): + """Have I been authenticated with a token?""" + if self.login_handler is None or not hasattr(self.login_handler, "is_token_authenticated"): + return False + return self.login_handler.is_token_authenticated(self) + + @property + def cookie_name(self): + default_cookie_name = non_alphanum.sub("-", "username-{}".format(self.request.host)) + return self.settings.get("cookie_name", default_cookie_name) + + @property + def logged_in(self): + """Is a user currently logged in?""" + user = self.get_current_user() + return user and not user == "anonymous" + + @property + def login_handler(self): + """Return the login handler for this application, if any.""" + return self.settings.get("login_handler_class", None) + + @property + def token(self): + """Return the login token for this application, if any.""" + return self.settings.get("token", None) + + @property + def login_available(self): + """May a user proceed to log in? + + This returns True if login capability is available, irrespective of + whether the user is already logged in or not. + + """ + if self.login_handler is None: + return False + return bool(self.login_handler.get_login_available(self.settings)) + + @property + def authorizer(self): + return self.settings["authorizer"] + + +class JupyterHandler(AuthenticatedHandler): + """Jupyter-specific extensions to authenticated handling + + Mostly property shortcuts to Jupyter-specific settings. 
+ """ + + @property + def config(self): + return self.settings.get("config", None) + + @property + def log(self): + """use the Jupyter log by default, falling back on tornado's logger""" + return log() + + @property + def jinja_template_vars(self): + """User-supplied values to supply to jinja templates.""" + return self.settings.get("jinja_template_vars", {}) + + @property + def serverapp(self): + return self.settings["serverapp"] + + # --------------------------------------------------------------- + # URLs + # --------------------------------------------------------------- + + @property + def version_hash(self): + """The version hash to use for cache hints for static files""" + return self.settings.get("version_hash", "") + + @property + def mathjax_url(self): + url = self.settings.get("mathjax_url", "") + if not url or url_is_absolute(url): + return url + return url_path_join(self.base_url, url) + + @property + def mathjax_config(self): + return self.settings.get("mathjax_config", "TeX-AMS-MML_HTMLorMML-full,Safe") + + @property + def base_url(self): + return self.settings.get("base_url", "/") + + @property + def default_url(self): + return self.settings.get("default_url", "") + + @property + def ws_url(self): + return self.settings.get("websocket_url", "") + + @property + def contents_js_source(self): + self.log.debug( + "Using contents: %s", + self.settings.get("contents_js_source", "services/contents"), + ) + return self.settings.get("contents_js_source", "services/contents") + + # --------------------------------------------------------------- + # Manager objects + # --------------------------------------------------------------- + + @property + def kernel_manager(self): + return self.settings["kernel_manager"] + + @property + def contents_manager(self): + return self.settings["contents_manager"] + + @property + def session_manager(self): + return self.settings["session_manager"] + + @property + def terminal_manager(self): + return 
self.settings["terminal_manager"] + + @property + def kernel_spec_manager(self): + return self.settings["kernel_spec_manager"] + + @property + def config_manager(self): + return self.settings["config_manager"] + + # --------------------------------------------------------------- + # CORS + # --------------------------------------------------------------- + + @property + def allow_origin(self): + """Normal Access-Control-Allow-Origin""" + return self.settings.get("allow_origin", "") + + @property + def allow_origin_pat(self): + """Regular expression version of allow_origin""" + return self.settings.get("allow_origin_pat", None) + + @property + def allow_credentials(self): + """Whether to set Access-Control-Allow-Credentials""" + return self.settings.get("allow_credentials", False) + + def set_default_headers(self): + """Add CORS headers, if defined""" + super(JupyterHandler, self).set_default_headers() + if self.allow_origin: + self.set_header("Access-Control-Allow-Origin", self.allow_origin) + elif self.allow_origin_pat: + origin = self.get_origin() + if origin and re.match(self.allow_origin_pat, origin): + self.set_header("Access-Control-Allow-Origin", origin) + elif self.token_authenticated and "Access-Control-Allow-Origin" not in self.settings.get( + "headers", {} + ): + # allow token-authenticated requests cross-origin by default. + # only apply this exception if allow-origin has not been specified. 
+ self.set_header("Access-Control-Allow-Origin", self.request.headers.get("Origin", "")) + + if self.allow_credentials: + self.set_header("Access-Control-Allow-Credentials", "true") + + def set_attachment_header(self, filename): + """Set Content-Disposition: attachment header + + As a method to ensure handling of filename encoding + """ + escaped_filename = url_escape(filename) + self.set_header( + "Content-Disposition", + "attachment;" + " filename*=utf-8''{utf8}".format( + utf8=escaped_filename, + ), + ) + + def get_origin(self): + # Handle WebSocket Origin naming convention differences + # The difference between version 8 and 13 is that in 8 the + # client sends a "Sec-Websocket-Origin" header and in 13 it's + # simply "Origin". + if "Origin" in self.request.headers: + origin = self.request.headers.get("Origin") + else: + origin = self.request.headers.get("Sec-Websocket-Origin", None) + return origin + + # origin_to_satisfy_tornado is present because tornado requires + # check_origin to take an origin argument, but we don't use it + def check_origin(self, origin_to_satisfy_tornado=""): + """Check Origin for cross-site API requests, including websockets + + Copied from WebSocket with changes: + + - allow unspecified host/origin (e.g. scripts) + - allow token-authenticated requests + """ + if self.allow_origin == "*" or self.skip_check_origin(): + return True + + host = self.request.headers.get("Host") + origin = self.request.headers.get("Origin") + + # If no header is provided, let the request through. 
+ # Origin can be None for: + # - same-origin (IE, Firefox) + # - Cross-site POST form (IE, Firefox) + # - Scripts + # The cross-site POST (XSRF) case is handled by tornado's xsrf_token + if origin is None or host is None: + return True + + origin = origin.lower() + origin_host = urlparse(origin).netloc + + # OK if origin matches host + if origin_host == host: + return True + + # Check CORS headers + if self.allow_origin: + allow = self.allow_origin == origin + elif self.allow_origin_pat: + allow = bool(re.match(self.allow_origin_pat, origin)) + else: + # No CORS headers deny the request + allow = False + if not allow: + self.log.warning( + "Blocking Cross Origin API request for %s. Origin: %s, Host: %s", + self.request.path, + origin, + host, + ) + return allow + + def check_referer(self): + """Check Referer for cross-site requests. + Disables requests to certain endpoints with + external or missing Referer. + If set, allow_origin settings are applied to the Referer + to whitelist specific cross-origin sites. + Used on GET for api endpoints and /files/ + to block cross-site inclusion (XSSI). + """ + if self.allow_origin == "*" or self.skip_check_origin(): + return True + + host = self.request.headers.get("Host") + referer = self.request.headers.get("Referer") + + if not host: + self.log.warning("Blocking request with no host") + return False + if not referer: + self.log.warning("Blocking request with no referer") + return False + + referer_url = urlparse(referer) + referer_host = referer_url.netloc + if referer_host == host: + return True + + # apply cross-origin checks to Referer: + origin = "{}://{}".format(referer_url.scheme, referer_url.netloc) + if self.allow_origin: + allow = self.allow_origin == origin + elif self.allow_origin_pat: + allow = bool(re.match(self.allow_origin_pat, origin)) + else: + # No CORS settings, deny the request + allow = False + + if not allow: + self.log.warning( + "Blocking Cross Origin request for %s. 
Referer: %s, Host: %s", + self.request.path, + origin, + host, + ) + return allow + + def check_xsrf_cookie(self): + """Bypass xsrf cookie checks when token-authenticated""" + if self.token_authenticated or self.settings.get("disable_check_xsrf", False): + # Token-authenticated requests do not need additional XSRF-check + # Servers without authentication are vulnerable to XSRF + return + try: + return super(JupyterHandler, self).check_xsrf_cookie() + except web.HTTPError as e: + if self.request.method in {"GET", "HEAD"}: + # Consider Referer a sufficient cross-origin check for GET requests + if not self.check_referer(): + referer = self.request.headers.get("Referer") + if referer: + msg = "Blocking Cross Origin request from {}.".format(referer) + else: + msg = "Blocking request from unknown origin" + raise web.HTTPError(403, msg) + else: + raise + + def check_host(self): + """Check the host header if remote access disallowed. + + Returns True if the request should continue, False otherwise. + """ + if self.settings.get("allow_remote_access", False): + return True + + # Remove port (e.g. ':8888') from host + host = re.match(r"^(.*?)(:\d+)?$", self.request.host).group(1) + + # Browsers format IPv6 addresses like [::1]; we need to remove the [] + if host.startswith("[") and host.endswith("]"): + host = host[1:-1] + + # UNIX socket handling + check_host = urldecode_unix_socket_path(host) + if check_host.startswith("/") and os.path.exists(check_host): + allow = True + else: + try: + addr = ipaddress.ip_address(host) + except ValueError: + # Not an IP address: check against hostnames + allow = host in self.settings.get("local_hostnames", ["localhost"]) + else: + allow = addr.is_loopback + + if not allow: + self.log.warning( + ( + "Blocking request with non-local 'Host' %s (%s). " + "If the server should be accessible at that name, " + "set ServerApp.allow_remote_access to disable the check." 
+ ), + host, + self.request.host, + ) + return allow + + def prepare(self): + if not self.check_host(): + raise web.HTTPError(403) + return super(JupyterHandler, self).prepare() + + # --------------------------------------------------------------- + # template rendering + # --------------------------------------------------------------- + + def get_template(self, name): + """Return the jinja template object for a given name""" + return self.settings["jinja2_env"].get_template(name) + + def render_template(self, name, **ns): + ns.update(self.template_namespace) + template = self.get_template(name) + return template.render(**ns) + + @property + def template_namespace(self): + return dict( + base_url=self.base_url, + default_url=self.default_url, + ws_url=self.ws_url, + logged_in=self.logged_in, + allow_password_change=self.settings.get("allow_password_change"), + login_available=self.login_available, + token_available=bool(self.token), + static_url=self.static_url, + sys_info=json_sys_info(), + contents_js_source=self.contents_js_source, + version_hash=self.version_hash, + xsrf_form_html=self.xsrf_form_html, + token=self.token, + xsrf_token=self.xsrf_token.decode("utf8"), + nbjs_translations=json.dumps( + combine_translations(self.request.headers.get("Accept-Language", "")) + ), + **self.jinja_template_vars + ) + + def get_json_body(self): + """Return the body of the request as JSON data.""" + if not self.request.body: + return None + # Do we need to call body.decode('utf-8') here? 
+ body = self.request.body.strip().decode("utf-8") + try: + model = json.loads(body) + except Exception as e: + self.log.debug("Bad JSON: %r", body) + self.log.error("Couldn't parse JSON", exc_info=True) + raise web.HTTPError(400, "Invalid JSON in body of request") from e + return model + + def write_error(self, status_code, **kwargs): + """render custom error pages""" + exc_info = kwargs.get("exc_info") + message = "" + status_message = responses.get(status_code, "Unknown HTTP Error") + exception = "(unknown)" + if exc_info: + exception = exc_info[1] + # get the custom message, if defined + try: + message = exception.log_message % exception.args + except Exception: + pass + + # construct the custom reason, if defined + reason = getattr(exception, "reason", "") + if reason: + status_message = reason + + # build template namespace + ns = dict( + status_code=status_code, + status_message=status_message, + message=message, + exception=exception, + ) + + self.set_header("Content-Type", "text/html") + # render the template + try: + html = self.render_template("%s.html" % status_code, **ns) + except TemplateNotFound: + html = self.render_template("error.html", **ns) + + self.write(html) + + +class APIHandler(JupyterHandler): + """Base class for API handlers""" + + def prepare(self): + if not self.check_origin(): + raise web.HTTPError(404) + return super(APIHandler, self).prepare() + + def write_error(self, status_code, **kwargs): + """APIHandler errors are JSON, not human pages""" + self.set_header("Content-Type", "application/json") + message = responses.get(status_code, "Unknown HTTP Error") + reply = { + "message": message, + } + exc_info = kwargs.get("exc_info") + if exc_info: + e = exc_info[1] + if isinstance(e, HTTPError): + reply["message"] = e.log_message or message + reply["reason"] = e.reason + else: + reply["message"] = "Unhandled error" + reply["reason"] = None + reply["traceback"] = "".join(traceback.format_exception(*exc_info)) + 
self.log.warning(reply["message"]) + self.finish(json.dumps(reply)) + + def get_current_user(self): + """Raise 403 on API handlers instead of redirecting to human login page""" + # preserve _user_cache so we don't raise more than once + if hasattr(self, "_user_cache"): + return self._user_cache + self._user_cache = user = super(APIHandler, self).get_current_user() + return user + + def get_login_url(self): + # if get_login_url is invoked in an API handler, + # that means @web.authenticated is trying to trigger a redirect. + # instead of redirecting, raise 403 instead. + if not self.current_user: + raise web.HTTPError(403) + return super(APIHandler, self).get_login_url() + + @property + def content_security_policy(self): + csp = "; ".join( + [ + super(APIHandler, self).content_security_policy, + "default-src 'none'", + ] + ) + return csp + + # set _track_activity = False on API handlers that shouldn't track activity + _track_activity = True + + def update_api_activity(self): + """Update last_activity of API requests""" + # record activity of authenticated requests + if ( + self._track_activity + and getattr(self, "_user_cache", None) + and self.get_argument("no_track_activity", None) is None + ): + self.settings["api_last_activity"] = utcnow() + + def finish(self, *args, **kwargs): + self.update_api_activity() + self.set_header("Content-Type", "application/json") + return super(APIHandler, self).finish(*args, **kwargs) + + def options(self, *args, **kwargs): + if "Access-Control-Allow-Headers" in self.settings.get("headers", {}): + self.set_header( + "Access-Control-Allow-Headers", + self.settings["headers"]["Access-Control-Allow-Headers"], + ) + else: + self.set_header( + "Access-Control-Allow-Headers", + "accept, content-type, authorization, x-xsrftoken", + ) + self.set_header("Access-Control-Allow-Methods", "GET, PUT, POST, PATCH, DELETE, OPTIONS") + + # if authorization header is requested, + # that means the request is token-authenticated. 
+ # avoid browser-side rejection of the preflight request. + # only allow this exception if allow_origin has not been specified + # and Jupyter server authentication is enabled. + # If the token is not valid, the 'real' request will still be rejected. + requested_headers = self.request.headers.get("Access-Control-Request-Headers", "").split( + "," + ) + if ( + requested_headers + and any(h.strip().lower() == "authorization" for h in requested_headers) + and ( + # FIXME: it would be even better to check specifically for token-auth, + # but there is currently no API for this. + self.login_available + ) + and ( + self.allow_origin + or self.allow_origin_pat + or "Access-Control-Allow-Origin" in self.settings.get("headers", {}) + ) + ): + self.set_header("Access-Control-Allow-Origin", self.request.headers.get("Origin", "")) + + +class Template404(JupyterHandler): + """Render our 404 template""" + + def prepare(self): + raise web.HTTPError(404) + + +class AuthenticatedFileHandler(JupyterHandler, web.StaticFileHandler): + """static files should only be accessible when logged in""" + + @property + def content_security_policy(self): + # In case we're serving HTML/SVG, confine any Javascript to a unique + # origin so it can't interact with the Jupyter server. 
+ return ( + super(AuthenticatedFileHandler, self).content_security_policy + + "; sandbox allow-scripts" + ) + + @web.authenticated + def head(self, path): + self.check_xsrf_cookie() + return super(AuthenticatedFileHandler, self).head(path) + + @web.authenticated + def get(self, path): + if os.path.splitext(path)[1] == ".ipynb" or self.get_argument("download", False): + name = path.rsplit("/", 1)[-1] + self.set_attachment_header(name) + + return web.StaticFileHandler.get(self, path) + + def get_content_type(self): + path = self.absolute_path.strip("/") + if "/" in path: + _, name = path.rsplit("/", 1) + else: + name = path + if name.endswith(".ipynb"): + return "application/x-ipynb+json" + else: + cur_mime = mimetypes.guess_type(name)[0] + if cur_mime == "text/plain": + return "text/plain; charset=UTF-8" + else: + return super(AuthenticatedFileHandler, self).get_content_type() + + def set_headers(self): + super(AuthenticatedFileHandler, self).set_headers() + # disable browser caching, rely on 304 replies for savings + if "v" not in self.request.arguments: + self.add_header("Cache-Control", "no-cache") + + def compute_etag(self): + return None + + def validate_absolute_path(self, root, absolute_path): + """Validate and return the absolute path. + + Requires tornado 3.1 + + Adding to tornado's own handling, forbids the serving of hidden files. + """ + abs_path = super(AuthenticatedFileHandler, self).validate_absolute_path(root, absolute_path) + abs_root = os.path.abspath(root) + if is_hidden(abs_path, abs_root) and not self.contents_manager.allow_hidden: + self.log.info( + "Refusing to serve hidden file, via 404 Error, use flag 'ContentsManager.allow_hidden' to enable" + ) + raise web.HTTPError(404) + return abs_path + + +def json_errors(method): + """Decorate methods with this to return GitHub style JSON errors. + + This should be used on any JSON API on any handler method that can raise HTTPErrors. 
+ + This will grab the latest HTTPError exception using sys.exc_info + and then: + + 1. Set the HTTP status code based on the HTTPError + 2. Create and return a JSON body with a message field describing + the error in a human readable form. + """ + warnings.warn( + "@json_errors is deprecated in notebook 5.2.0. Subclass APIHandler instead.", + DeprecationWarning, + stacklevel=2, + ) + + @functools.wraps(method) + def wrapper(self, *args, **kwargs): + self.write_error = types.MethodType(APIHandler.write_error, self) + return method(self, *args, **kwargs) + + return wrapper + + +# ----------------------------------------------------------------------------- +# File handler +# ----------------------------------------------------------------------------- + +# to minimize subclass changes: +HTTPError = web.HTTPError + + +class FileFindHandler(JupyterHandler, web.StaticFileHandler): + """subclass of StaticFileHandler for serving files from a search path""" + + # cache search results, don't search for files more than once + _static_paths = {} + + def set_headers(self): + super(FileFindHandler, self).set_headers() + # disable browser caching, rely on 304 replies for savings + if "v" not in self.request.arguments or any( + self.request.path.startswith(path) for path in self.no_cache_paths + ): + self.set_header("Cache-Control", "no-cache") + + def initialize(self, path, default_filename=None, no_cache_paths=None): + self.no_cache_paths = no_cache_paths or [] + + if isinstance(path, str): + path = [path] + + self.root = tuple(os.path.abspath(os.path.expanduser(p)) + os.sep for p in path) + self.default_filename = default_filename + + def compute_etag(self): + return None + + @classmethod + def get_absolute_path(cls, roots, path): + """locate a file to serve on our static file search path""" + with cls._lock: + if path in cls._static_paths: + return cls._static_paths[path] + try: + abspath = os.path.abspath(filefind(path, roots)) + except IOError: + # IOError means not found 
+ return "" + + cls._static_paths[path] = abspath + + log().debug("Path %s served from %s" % (path, abspath)) + return abspath + + def validate_absolute_path(self, root, absolute_path): + """check if the file should be served (raises 404, 403, etc.)""" + if absolute_path == "": + raise web.HTTPError(404) + + for root in self.root: + if (absolute_path + os.sep).startswith(root): + break + + return super(FileFindHandler, self).validate_absolute_path(root, absolute_path) + + +class APIVersionHandler(APIHandler): + def get(self): + # not authenticated, so give as few info as possible + self.finish(json.dumps({"version": jupyter_server.__version__})) + + +class TrailingSlashHandler(web.RequestHandler): + """Simple redirect handler that strips trailing slashes + + This should be the first, highest priority handler. + """ + + def get(self): + path, *rest = self.request.uri.partition("?") + # trim trailing *and* leading / + # to avoid misinterpreting repeated '//' + path = "/" + path.strip("/") + new_uri = "".join([path, *rest]) + self.redirect(new_uri) + + post = put = get + + +class MainHandler(JupyterHandler): + """Simple handler for base_url.""" + + def get(self): + html = self.render_template("main.html") + self.write(html) + + post = put = get + + +class FilesRedirectHandler(JupyterHandler): + """Handler for redirecting relative URLs to the /files/ handler""" + + @staticmethod + async def redirect_to_files(self, path): + """make redirect logic a reusable static method + + so it can be called from other handlers. 
+ """ + cm = self.contents_manager + if await ensure_async(cm.dir_exists(path)): + # it's a *directory*, redirect to /tree + url = url_path_join(self.base_url, "tree", url_escape(path)) + else: + orig_path = path + # otherwise, redirect to /files + parts = path.split("/") + + if not await ensure_async(cm.file_exists(path=path)) and "files" in parts: + # redirect without files/ iff it would 404 + # this preserves pre-2.0-style 'files/' links + self.log.warning("Deprecated files/ URL: %s", orig_path) + parts.remove("files") + path = "/".join(parts) + + if not await ensure_async(cm.file_exists(path=path)): + raise web.HTTPError(404) + + url = url_path_join(self.base_url, "files", url_escape(path)) + self.log.debug("Redirecting %s to %s", self.request.path, url) + self.redirect(url) + + def get(self, path=""): + return self.redirect_to_files(self, path) + + +class RedirectWithParams(web.RequestHandler): + """Sam as web.RedirectHandler, but preserves URL parameters""" + + def initialize(self, url, permanent=True): + self._url = url + self._permanent = permanent + + def get(self): + sep = "&" if "?" in self._url else "?" 
+ url = sep.join([self._url, self.request.query]) + self.redirect(url, permanent=self._permanent) + + +class PrometheusMetricsHandler(JupyterHandler): + """ + Return prometheus metrics for this notebook server + """ + + def get(self): + if self.settings["authenticate_prometheus"] and not self.logged_in: + raise web.HTTPError(403) + + self.set_header("Content-Type", prometheus_client.CONTENT_TYPE_LATEST) + self.write(prometheus_client.generate_latest(prometheus_client.REGISTRY)) + + +# ----------------------------------------------------------------------------- +# URL pattern fragments for re-use +# ----------------------------------------------------------------------------- + +# path matches any number of `/foo[/bar...]` or just `/` or '' +path_regex = r"(?P(?:(?:/[^/]+)+|/?))" + +# ----------------------------------------------------------------------------- +# URL to handler mappings +# ----------------------------------------------------------------------------- + + +default_handlers = [ + (r".*/", TrailingSlashHandler), + (r"api", APIVersionHandler), + (r"/(robots\.txt|favicon\.ico)", web.StaticFileHandler), + (r"/metrics", PrometheusMetricsHandler), +] diff --git a/server/jupyter_server/base/zmqhandlers.py b/server/jupyter_server/base/zmqhandlers.py new file mode 100644 index 0000000..ff8a5dd --- /dev/null +++ b/server/jupyter_server/base/zmqhandlers.py @@ -0,0 +1,346 @@ +# coding: utf-8 +"""Tornado handlers for WebSocket <-> ZMQ sockets.""" +# Copyright (c) Jupyter Development Team. +# Distributed under the terms of the Modified BSD License. 
+import json
+import re
+import struct
+import sys
+from urllib.parse import urlparse
+
+import tornado
+from ipython_genutils.py3compat import cast_unicode
+
+try:
+    from jupyter_client.jsonutil import json_default
+except ImportError:
+    from jupyter_client.jsonutil import date_default as json_default
+from jupyter_client.jsonutil import extract_dates
+from jupyter_client.session import Session
+from tornado import ioloop
+from tornado import web
+from tornado.websocket import WebSocketHandler
+
+from .handlers import JupyterHandler
+
+
+def serialize_binary_message(msg):
+    """serialize a message as a binary blob
+
+    Header:
+
+    4 bytes: number of msg parts (nbufs) as 32b int
+    4 * nbufs bytes: offset for each buffer as integer as 32b int
+
+    Offsets are from the start of the buffer, including the header.
+
+    Returns
+    -------
+    The message serialized to bytes.
+
+    """
+    # don't modify msg or buffer list in-place
+    msg = msg.copy()
+    buffers = list(msg.pop("buffers"))
+    if sys.version_info < (3, 4):
+        buffers = [x.tobytes() for x in buffers]
+    bmsg = json.dumps(msg, default=json_default).encode("utf8")
+    buffers.insert(0, bmsg)
+    nbufs = len(buffers)
+    offsets = [4 * (nbufs + 1)]
+    for buf in buffers[:-1]:
+        offsets.append(offsets[-1] + len(buf))
+    offsets_buf = struct.pack("!" + "I" * (nbufs + 1), nbufs, *offsets)
+    buffers.insert(0, offsets_buf)
+    return b"".join(buffers)
+
+
+def deserialize_binary_message(bmsg):
+    """deserialize a message from a binary blob
+
+    Header:
+
+    4 bytes: number of msg parts (nbufs) as 32b int
+    4 * nbufs bytes: offset for each buffer as integer as 32b int
+
+    Offsets are from the start of the buffer, including the header.
+
+    Returns
+    -------
+    message dictionary
+    """
+    nbufs = struct.unpack("!i", bmsg[:4])[0]
+    offsets = list(struct.unpack("!"
+ "I" * nbufs, bmsg[4 : 4 * (nbufs + 1)])) + offsets.append(None) + bufs = [] + for start, stop in zip(offsets[:-1], offsets[1:]): + bufs.append(bmsg[start:stop]) + msg = json.loads(bufs[0].decode("utf8")) + msg["header"] = extract_dates(msg["header"]) + msg["parent_header"] = extract_dates(msg["parent_header"]) + msg["buffers"] = bufs[1:] + return msg + + +def serialize_msg_to_ws_v1(msg_or_list, channel, pack=None): + if pack: + msg_list = [ + pack(msg_or_list["header"]), + pack(msg_or_list["parent_header"]), + pack(msg_or_list["metadata"]), + pack(msg_or_list["content"]), + ] + else: + msg_list = msg_or_list + channel = channel.encode("utf-8") + offsets = [] + offsets.append(8 * (1 + 1 + len(msg_list) + 1)) + offsets.append(len(channel) + offsets[-1]) + for msg in msg_list: + offsets.append(len(msg) + offsets[-1]) + offset_number = len(offsets).to_bytes(8, byteorder="little") + offsets = [offset.to_bytes(8, byteorder="little") for offset in offsets] + bin_msg = b"".join([offset_number] + offsets + [channel] + msg_list) + return bin_msg + + +def deserialize_msg_from_ws_v1(ws_msg): + offset_number = int.from_bytes(ws_msg[:8], "little") + offsets = [ + int.from_bytes(ws_msg[8 * (i + 1) : 8 * (i + 2)], "little") for i in range(offset_number) + ] + channel = ws_msg[offsets[0] : offsets[1]].decode("utf-8") + msg_list = [ws_msg[offsets[i] : offsets[i + 1]] for i in range(1, offset_number - 1)] + return channel, msg_list + + +# ping interval for keeping websockets alive (30 seconds) +WS_PING_INTERVAL = 30000 + + +class WebSocketMixin(object): + """Mixin for common websocket options""" + + ping_callback = None + last_ping = 0 + last_pong = 0 + stream = None + + @property + def ping_interval(self): + """The interval for websocket keep-alive pings. + + Set ws_ping_interval = 0 to disable pings. 
+ """ + return self.settings.get("ws_ping_interval", WS_PING_INTERVAL) + + @property + def ping_timeout(self): + """If no ping is received in this many milliseconds, + close the websocket connection (VPNs, etc. can fail to cleanly close ws connections). + Default is max of 3 pings or 30 seconds. + """ + return self.settings.get("ws_ping_timeout", max(3 * self.ping_interval, WS_PING_INTERVAL)) + + def check_origin(self, origin=None): + """Check Origin == Host or Access-Control-Allow-Origin. + + Tornado >= 4 calls this method automatically, raising 403 if it returns False. + """ + + if self.allow_origin == "*" or ( + hasattr(self, "skip_check_origin") and self.skip_check_origin() + ): + return True + + host = self.request.headers.get("Host") + if origin is None: + origin = self.get_origin() + + # If no origin or host header is provided, assume from script + if origin is None or host is None: + return True + + origin = origin.lower() + origin_host = urlparse(origin).netloc + + # OK if origin matches host + if origin_host == host: + return True + + # Check CORS headers + if self.allow_origin: + allow = self.allow_origin == origin + elif self.allow_origin_pat: + allow = bool(re.match(self.allow_origin_pat, origin)) + else: + # No CORS headers deny the request + allow = False + if not allow: + self.log.warning( + "Blocking Cross Origin WebSocket Attempt. 
Origin: %s, Host: %s", + origin, + host, + ) + return allow + + def clear_cookie(self, *args, **kwargs): + """meaningless for websockets""" + pass + + def open(self, *args, **kwargs): + self.log.debug("Opening websocket %s", self.request.path) + + # start the pinging + if self.ping_interval > 0: + loop = ioloop.IOLoop.current() + self.last_ping = loop.time() # Remember time of last ping + self.last_pong = self.last_ping + self.ping_callback = ioloop.PeriodicCallback( + self.send_ping, + self.ping_interval, + ) + self.ping_callback.start() + return super(WebSocketMixin, self).open(*args, **kwargs) + + def send_ping(self): + """send a ping to keep the websocket alive""" + if self.ws_connection is None and self.ping_callback is not None: + self.ping_callback.stop() + return + + if self.ws_connection.client_terminated: + self.close() + return + + # check for timeout on pong. Make sure that we really have sent a recent ping in + # case the machine with both server and client has been suspended since the last ping. + now = ioloop.IOLoop.current().time() + since_last_pong = 1e3 * (now - self.last_pong) + since_last_ping = 1e3 * (now - self.last_ping) + if since_last_ping < 2 * self.ping_interval and since_last_pong > self.ping_timeout: + self.log.warning("WebSocket ping timeout after %i ms.", since_last_pong) + self.close() + return + + self.ping(b"") + self.last_ping = now + + def on_pong(self, data): + self.last_pong = ioloop.IOLoop.current().time() + + +class ZMQStreamHandler(WebSocketMixin, WebSocketHandler): + + if tornado.version_info < (4, 1): + """Backport send_error from tornado 4.1 to 4.0""" + + def send_error(self, *args, **kwargs): + if self.stream is None: + super(WebSocketHandler, self).send_error(*args, **kwargs) + else: + # If we get an uncaught exception during the handshake, + # we have no choice but to abruptly close the connection. + # TODO: for uncaught exceptions after the handshake, + # we can close the connection more gracefully. 
+ self.stream.close() + + def _reserialize_reply(self, msg_or_list, channel=None): + """Reserialize a reply message using JSON. + + msg_or_list can be an already-deserialized msg dict or the zmq buffer list. + If it is the zmq list, it will be deserialized with self.session. + + This takes the msg list from the ZMQ socket and serializes the result for the websocket. + This method should be used by self._on_zmq_reply to build messages that can + be sent back to the browser. + + """ + if isinstance(msg_or_list, dict): + # already unpacked + msg = msg_or_list + else: + idents, msg_list = self.session.feed_identities(msg_or_list) + msg = self.session.deserialize(msg_list) + if channel: + msg["channel"] = channel + if msg["buffers"]: + buf = serialize_binary_message(msg) + return buf + else: + smsg = json.dumps(msg, default=json_default) + return cast_unicode(smsg) + + def select_subprotocol(self, subprotocols): + preferred_protocol = self.settings.get("kernel_ws_protocol") + if preferred_protocol is None: + preferred_protocol = "v1.kernel.websocket.jupyter.org" + elif preferred_protocol == "": + preferred_protocol = None + selected_subprotocol = preferred_protocol if preferred_protocol in subprotocols else None + # None is the default, "legacy" protocol + return selected_subprotocol + + def _on_zmq_reply(self, stream, msg_list): + # Sometimes this gets triggered when the on_close method is scheduled in the + # eventloop but hasn't been called. 
+ if self.ws_connection is None or stream.closed(): + self.log.warning("zmq message arrived on closed channel") + self.close() + return + channel = getattr(stream, "channel", None) + if self.selected_subprotocol == "v1.kernel.websocket.jupyter.org": + bin_msg = serialize_msg_to_ws_v1(msg_list, channel) + self.write_message(bin_msg, binary=True) + else: + try: + msg = self._reserialize_reply(msg_list, channel=channel) + except Exception: + self.log.critical("Malformed message: %r" % msg_list, exc_info=True) + else: + self.write_message(msg, binary=isinstance(msg, bytes)) + + +class AuthenticatedZMQStreamHandler(ZMQStreamHandler, JupyterHandler): + def set_default_headers(self): + """Undo the set_default_headers in JupyterHandler + + which doesn't make sense for websockets + """ + pass + + def pre_get(self): + """Run before finishing the GET request + + Extend this method to add logic that should fire before + the websocket finishes completing. + """ + # authenticate the request before opening the websocket + user = self.get_current_user() + if user is None: + self.log.warning("Couldn't authenticate WebSocket connection") + raise web.HTTPError(403) + + # authorize the user. 
+ if not self.authorizer.is_authorized(self, user, "execute", "kernels"): + raise web.HTTPError(403) + + if self.get_argument("session_id", False): + self.session.session = cast_unicode(self.get_argument("session_id")) + else: + self.log.warning("No session ID specified") + + async def get(self, *args, **kwargs): + # pre_get can be a coroutine in subclasses + # assign and yield in two step to avoid tornado 3 issues + res = self.pre_get() + await res + res = super(AuthenticatedZMQStreamHandler, self).get(*args, **kwargs) + await res + + def initialize(self): + self.log.debug("Initializing websocket connection %s", self.request.path) + self.session = Session(config=self.config) + + def get_compression_options(self): + return self.settings.get("websocket_compression_options", None) diff --git a/server/jupyter_server/config_manager.py b/server/jupyter_server/config_manager.py new file mode 100644 index 0000000..933529a --- /dev/null +++ b/server/jupyter_server/config_manager.py @@ -0,0 +1,138 @@ +# coding: utf-8 +"""Manager to read and modify config data in JSON files.""" +# Copyright (c) Jupyter Development Team. +# Distributed under the terms of the Modified BSD License. +import copy +import errno +import glob +import io +import json +import os + +from six import PY3 +from traitlets.config import LoggingConfigurable +from traitlets.traitlets import Bool +from traitlets.traitlets import Unicode + + +def recursive_update(target, new): + """Recursively update one dictionary using another. + + None values will delete their keys. 
+ """ + for k, v in new.items(): + if isinstance(v, dict): + if k not in target: + target[k] = {} + recursive_update(target[k], v) + if not target[k]: + # Prune empty subdicts + del target[k] + + elif v is None: + target.pop(k, None) + + else: + target[k] = v + + +def remove_defaults(data, defaults): + """Recursively remove items from dict that are already in defaults""" + # copy the iterator, since data will be modified + for key, value in list(data.items()): + if key in defaults: + if isinstance(value, dict): + remove_defaults(data[key], defaults[key]) + if not data[key]: # prune empty subdicts + del data[key] + else: + if value == defaults[key]: + del data[key] + + +class BaseJSONConfigManager(LoggingConfigurable): + """General JSON config manager + + Deals with persisting/storing config in a json file with optionally + default values in a {section_name}.d directory. + """ + + config_dir = Unicode(".") + read_directory = Bool(True) + + def ensure_config_dir_exists(self): + """Will try to create the config_dir directory.""" + try: + os.makedirs(self.config_dir, 0o755) + except OSError as e: + if e.errno != errno.EEXIST: + raise + + def file_name(self, section_name): + """Returns the json filename for the section_name: {config_dir}/{section_name}.json""" + return os.path.join(self.config_dir, section_name + ".json") + + def directory(self, section_name): + """Returns the directory name for the section name: {config_dir}/{section_name}.d""" + return os.path.join(self.config_dir, section_name + ".d") + + def get(self, section_name, include_root=True): + """Retrieve the config data for the specified section. + + Returns the data as a dictionary, or an empty dictionary if the file + doesn't exist. + + When include_root is False, it will not read the root .json file, + effectively returning the default values. 
+ """ + paths = [self.file_name(section_name)] if include_root else [] + if self.read_directory: + pattern = os.path.join(self.directory(section_name), "*.json") + # These json files should be processed first so that the + # {section_name}.json take precedence. + # The idea behind this is that installing a Python package may + # put a json file somewhere in the a .d directory, while the + # .json file is probably a user configuration. + paths = sorted(glob.glob(pattern)) + paths + self.log.debug( + "Paths used for configuration of %s: \n\t%s", + section_name, + "\n\t".join(paths), + ) + data = {} + for path in paths: + if os.path.isfile(path): + with io.open(path, encoding="utf-8") as f: + recursive_update(data, json.load(f)) + return data + + def set(self, section_name, data): + """Store the given config data.""" + filename = self.file_name(section_name) + self.ensure_config_dir_exists() + + if self.read_directory: + # we will modify data in place, so make a copy + data = copy.deepcopy(data) + defaults = self.get(section_name, include_root=False) + remove_defaults(data, defaults) + + # Generate the JSON up front, since it could raise an exception, + # in order to avoid writing half-finished corrupted data to disk. + json_content = json.dumps(data, indent=2) + if PY3: + f = io.open(filename, "w", encoding="utf-8") + else: + f = open(filename, "wb") + with f: + f.write(json_content) + + def update(self, section_name, new_data): + """Modify the config section by recursively updating it with new_data. + + Returns the modified config data as a dictionary. 
+ """ + data = self.get(section_name) + recursive_update(data, new_data) + self.set(section_name, data) + return data diff --git a/server/jupyter_server/conftest.py b/server/jupyter_server/conftest.py new file mode 100644 index 0000000..e1a9fe0 --- /dev/null +++ b/server/jupyter_server/conftest.py @@ -0,0 +1,31 @@ +import pytest + + +pytest_plugins = ["jupyter_server.pytest_plugin"] + + +def pytest_addoption(parser): + parser.addoption( + "--integration_tests", + default=False, + type=bool, + help="only run tests with the 'integration_test' pytest mark.", + ) + + +def pytest_configure(config): + # register an additional marker + config.addinivalue_line("markers", "integration_test") + + +def pytest_runtest_setup(item): + is_integration_test = any(mark for mark in item.iter_markers(name="integration_test")) + + if item.config.getoption("--integration_tests") is True: + if not is_integration_test: + pytest.skip("Only running tests marked as 'integration_test'.") + else: + if is_integration_test: + pytest.skip( + "Skipping this test because it's marked 'integration_test'. Run integration tests using the `--integration_tests` flag." 
+ ) diff --git a/server/jupyter_server/extension/__init__.py b/server/jupyter_server/extension/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/server/jupyter_server/extension/application.py b/server/jupyter_server/extension/application.py new file mode 100644 index 0000000..ea18d68 --- /dev/null +++ b/server/jupyter_server/extension/application.py @@ -0,0 +1,583 @@ +import logging +import re +import sys + +from jinja2 import Environment +from jinja2 import FileSystemLoader +from jupyter_core.application import JupyterApp +from jupyter_core.application import NoStart +from tornado.log import LogFormatter +from tornado.web import RedirectHandler +from traitlets import Bool +from traitlets import default +from traitlets import Dict +from traitlets import HasTraits +from traitlets import List +from traitlets import Unicode +from traitlets.config import Config + +from .handler import ExtensionHandlerMixin +from jupyter_server.serverapp import ServerApp +from jupyter_server.transutils import _i18n +from jupyter_server.utils import is_namespace_package +from jupyter_server.utils import url_path_join + +# ----------------------------------------------------------------------------- +# Util functions and classes. +# ----------------------------------------------------------------------------- + + +def _preparse_for_subcommand(Application, argv): + """Preparse command line to look for subcommands.""" + # Read in arguments from command line. + if len(argv) == 0: + return + + # Find any subcommands. 
+ if Application.subcommands and len(argv) > 0: + # we have subcommands, and one may have been specified + subc, subargv = argv[0], argv[1:] + if re.match(r"^\w(\-?\w)*$", subc) and subc in Application.subcommands: + # it's a subcommand, and *not* a flag or class parameter + app = Application() + app.initialize_subcommand(subc, subargv) + return app.subapp + + +def _preparse_for_stopping_flags(Application, argv): + """Looks for 'help', 'version', and 'generate-config; commands + in command line. If found, raises the help and version of + current Application. + + This is useful for traitlets applications that have to parse + the command line multiple times, but want to control when + when 'help' and 'version' is raised. + """ + # Arguments after a '--' argument are for the script IPython may be + # about to run, not IPython iteslf. For arguments parsed here (help and + # version), we want to only search the arguments up to the first + # occurrence of '--', which we're calling interpreted_argv. + try: + interpreted_argv = argv[: argv.index("--")] + except ValueError: + interpreted_argv = argv + + # Catch any help calls. + if any(x in interpreted_argv for x in ("-h", "--help-all", "--help")): + app = Application() + app.print_help("--help-all" in interpreted_argv) + app.exit(0) + + # Catch version commands + if "--version" in interpreted_argv or "-V" in interpreted_argv: + app = Application() + app.print_version() + app.exit(0) + + # Catch generate-config commands. + if "--generate-config" in interpreted_argv: + app = Application() + app.write_default_config() + app.exit(0) + + +class ExtensionAppJinjaMixin(HasTraits): + """Use Jinja templates for HTML templates on top of an ExtensionApp.""" + + jinja2_options = Dict( + help=_i18n( + """Options to pass to the jinja2 environment for this + """ + ) + ).tag(config=True) + + def _prepare_templates(self): + # Get templates defined in a subclass. 
+ self.initialize_templates() + # Add templates to web app settings if extension has templates. + if len(self.template_paths) > 0: + self.settings.update({"{}_template_paths".format(self.name): self.template_paths}) + + # Create a jinja environment for logging html templates. + self.jinja2_env = Environment( + loader=FileSystemLoader(self.template_paths), + extensions=["jinja2.ext.i18n"], + autoescape=True, + **self.jinja2_options + ) + + # Add the jinja2 environment for this extension to the tornado settings. + self.settings.update({"{}_jinja2_env".format(self.name): self.jinja2_env}) + + +# ----------------------------------------------------------------------------- +# ExtensionApp +# ----------------------------------------------------------------------------- + + +class JupyterServerExtensionException(Exception): + """Exception class for raising for Server extensions errors.""" + + +# ----------------------------------------------------------------------------- +# ExtensionApp +# ----------------------------------------------------------------------------- + + +class ExtensionApp(JupyterApp): + """Base class for configurable Jupyter Server Extension Applications. + + ExtensionApp subclasses can be initialized two ways: + 1. Extension is listed as a jpserver_extension, and ServerApp calls + its load_jupyter_server_extension classmethod. This is the + classic way of loading a server extension. + 2. Extension is launched directly by calling its `launch_instance` + class method. This method can be set as a entry_point in + the extensions setup.py + """ + + # Subclasses should override this trait. Tells the server if + # this extension allows other other extensions to be loaded + # side-by-side when launched directly. + load_other_extensions = True + + # A useful class property that subclasses can override to + # configure the underlying Jupyter Server when this extension + # is launched directly (using its `launch_instance` method). 
+ serverapp_config = {} + + # Some subclasses will likely override this trait to flip + # the default value to False if they don't offer a browser + # based frontend. + open_browser = Bool( + help="""Whether to open in a browser after starting. + The specific browser used is platform dependent and + determined by the python standard library `webbrowser` + module, unless it is overridden using the --browser + (ServerApp.browser) configuration option. + """ + ).tag(config=True) + + @default("open_browser") + def _default_open_browser(self): + return self.serverapp.config["ServerApp"].get("open_browser", True) + + # The extension name used to name the jupyter config + # file, jupyter_{name}_config. + # This should also match the jupyter subcommand used to launch + # this extension from the CLI, e.g. `jupyter {name}`. + name = None + + @classmethod + def get_extension_package(cls): + parts = cls.__module__.split(".") + if is_namespace_package(parts[0]): + # in this case the package name is `.`. + return ".".join(parts[0:2]) + return parts[0] + + @classmethod + def get_extension_point(cls): + return cls.__module__ + + # Extension URL sets the default landing page for this extension. + extension_url = "/" + + default_url = Unicode().tag(config=True) + + @default("default_url") + def _default_url(self): + return self.extension_url + + file_url_prefix = Unicode("notebooks") + + # Is this linked to a serverapp yet? + _linked = Bool(False) + + # Extension can configure the ServerApp from the command-line + classes = [ + ServerApp, + ] + + # A ServerApp is not defined yet, but will be initialized below. 
+ serverapp = None + + _log_formatter_cls = LogFormatter + + @default("log_level") + def _default_log_level(self): + return logging.INFO + + @default("log_format") + def _default_log_format(self): + """override default log format to include date & time""" + return ( + "%(color)s[%(levelname)1.1s %(asctime)s.%(msecs).03d %(name)s]%(end_color)s %(message)s" + ) + + static_url_prefix = Unicode( + help="""Url where the static assets for the extension are served.""" + ).tag(config=True) + + @default("static_url_prefix") + def _default_static_url_prefix(self): + static_url = "static/{name}/".format(name=self.name) + return url_path_join(self.serverapp.base_url, static_url) + + static_paths = List( + Unicode(), + help="""paths to search for serving static files. + + This allows adding javascript/css to be available from the notebook server machine, + or overriding individual files in the IPython + """, + ).tag(config=True) + + template_paths = List( + Unicode(), + help=_i18n( + """Paths to search for serving jinja templates. + + Can be used to override templates from notebook.templates.""" + ), + ).tag(config=True) + + settings = Dict(help=_i18n("""Settings that will passed to the server.""")).tag(config=True) + + handlers = List(help=_i18n("""Handlers appended to the server.""")).tag(config=True) + + def _config_file_name_default(self): + """The default config file name.""" + if not self.name: + return "" + return "jupyter_{}_config".format(self.name.replace("-", "_")) + + def initialize_settings(self): + """Override this method to add handling of settings.""" + pass + + def initialize_handlers(self): + """Override this method to append handlers to a Jupyter Server.""" + pass + + def initialize_templates(self): + """Override this method to add handling of template files.""" + pass + + def _prepare_config(self): + """Builds a Config object from the extension's traits and passes + the object to the webapp's settings as `_config`. 
+ """ + traits = self.class_own_traits().keys() + self.extension_config = Config({t: getattr(self, t) for t in traits}) + self.settings["{}_config".format(self.name)] = self.extension_config + + def _prepare_settings(self): + # Make webapp settings accessible to initialize_settings method + webapp = self.serverapp.web_app + self.settings.update(**webapp.settings) + + # Add static and template paths to settings. + self.settings.update( + { + "{}_static_paths".format(self.name): self.static_paths, + "{}".format(self.name): self, + } + ) + + # Get setting defined by subclass using initialize_settings method. + self.initialize_settings() + + # Update server settings with extension settings. + webapp.settings.update(**self.settings) + + def _prepare_handlers(self): + webapp = self.serverapp.web_app + + # Get handlers defined by extension subclass. + self.initialize_handlers() + + # prepend base_url onto the patterns that we match + new_handlers = [] + for handler_items in self.handlers: + # Build url pattern including base_url + pattern = url_path_join(webapp.settings["base_url"], handler_items[0]) + handler = handler_items[1] + + # Get handler kwargs, if given + kwargs = {} + if issubclass(handler, ExtensionHandlerMixin): + kwargs["name"] = self.name + + try: + kwargs.update(handler_items[2]) + except IndexError: + pass + + new_handler = (pattern, handler, kwargs) + new_handlers.append(new_handler) + + # Add static endpoint for this extension, if static paths are given. + if len(self.static_paths) > 0: + # Append the extension's static directory to server handlers. + static_url = url_path_join(self.static_url_prefix, "(.*)") + + # Construct handler. + handler = ( + static_url, + webapp.settings["static_handler_class"], + {"path": self.static_paths}, + ) + new_handlers.append(handler) + + webapp.add_handlers(".*$", new_handlers) + + def _prepare_templates(self): + # Add templates to web app settings if extension has templates. 
+ if len(self.template_paths) > 0: + self.settings.update({"{}_template_paths".format(self.name): self.template_paths}) + self.initialize_templates() + + def _jupyter_server_config(self): + base_config = { + "ServerApp": { + "default_url": self.default_url, + "open_browser": self.open_browser, + "file_url_prefix": self.file_url_prefix, + } + } + base_config["ServerApp"].update(self.serverapp_config) + return base_config + + def _link_jupyter_server_extension(self, serverapp): + """Link the ExtensionApp to an initialized ServerApp. + + The ServerApp is stored as an attribute and config + is exchanged between ServerApp and `self` in case + the command line contains traits for the ExtensionApp + or the ExtensionApp's config files have server + settings. + + Note, the ServerApp has not initialized the Tornado + Web Application yet, so do not try to affect the + `web_app` attribute. + """ + self.serverapp = serverapp + # Load config from an ExtensionApp's config files. + self.load_config_file() + # ServerApp's config might have picked up + # config for the ExtensionApp. We call + # update_config to update ExtensionApp's + # traits with these values found in ServerApp's + # config. + # ServerApp config ---> ExtensionApp traits + self.update_config(self.serverapp.config) + # Use ExtensionApp's CLI parser to find any extra + # args that passed through ServerApp and + # now belong to ExtensionApp. + self.parse_command_line(self.serverapp.extra_args) + # If any config should be passed upstream to the + # ServerApp, do it here. + # i.e. ServerApp traits <--- ExtensionApp config + self.serverapp.update_config(self.config) + # Acknowledge that this extension has been linked. + self._linked = True + + def initialize(self): + """Initialize the extension app. The + corresponding server app and webapp should already + be initialized by this step. 
+ + 1) Appends Handlers to the ServerApp, + 2) Passes config and settings from ExtensionApp + to the Tornado web application + 3) Points Tornado Webapp to templates and + static assets. + """ + if not self.serverapp: + msg = ( + "This extension has no attribute `serverapp`. " + "Try calling `.link_to_serverapp()` before calling " + "`.initialize()`." + ) + raise JupyterServerExtensionException(msg) + + self._prepare_config() + self._prepare_templates() + self._prepare_settings() + self._prepare_handlers() + + def start(self): + """Start the underlying Jupyter server. + + Server should be started after extension is initialized. + """ + super(ExtensionApp, self).start() + # Start the server. + self.serverapp.start() + + async def stop_extension(self): + """Cleanup any resources managed by this extension.""" + + def stop(self): + """Stop the underlying Jupyter server.""" + self.serverapp.stop() + self.serverapp.clear_instance() + + @classmethod + def _load_jupyter_server_extension(cls, serverapp): + """Initialize and configure this extension, then add the extension's + settings and handlers to the server's web application. + """ + extension_manager = serverapp.extension_manager + try: + # Get loaded extension from serverapp. 
+ point = extension_manager.extension_points[cls.name] + extension = point.app + except KeyError: + extension = cls() + extension._link_jupyter_server_extension(serverapp) + extension.initialize() + return extension + + @classmethod + def load_classic_server_extension(cls, serverapp): + """Enables extension to be loaded as classic Notebook (jupyter/notebook) extension.""" + extension = cls() + extension.serverapp = serverapp + extension.load_config_file() + extension.update_config(serverapp.config) + extension.parse_command_line(serverapp.extra_args) + # Add redirects to get favicons from old locations in the classic notebook server + extension.handlers.extend( + [ + ( + r"/static/favicons/favicon.ico", + RedirectHandler, + {"url": url_path_join(serverapp.base_url, "static/base/images/favicon.ico")}, + ), + ( + r"/static/favicons/favicon-busy-1.ico", + RedirectHandler, + { + "url": url_path_join( + serverapp.base_url, "static/base/images/favicon-busy-1.ico" + ) + }, + ), + ( + r"/static/favicons/favicon-busy-2.ico", + RedirectHandler, + { + "url": url_path_join( + serverapp.base_url, "static/base/images/favicon-busy-2.ico" + ) + }, + ), + ( + r"/static/favicons/favicon-busy-3.ico", + RedirectHandler, + { + "url": url_path_join( + serverapp.base_url, "static/base/images/favicon-busy-3.ico" + ) + }, + ), + ( + r"/static/favicons/favicon-file.ico", + RedirectHandler, + { + "url": url_path_join( + serverapp.base_url, "static/base/images/favicon-file.ico" + ) + }, + ), + ( + r"/static/favicons/favicon-notebook.ico", + RedirectHandler, + { + "url": url_path_join( + serverapp.base_url, + "static/base/images/favicon-notebook.ico", + ) + }, + ), + ( + r"/static/favicons/favicon-terminal.ico", + RedirectHandler, + { + "url": url_path_join( + serverapp.base_url, + "static/base/images/favicon-terminal.ico", + ) + }, + ), + ( + r"/static/logo/logo.png", + RedirectHandler, + {"url": url_path_join(serverapp.base_url, "static/base/images/logo.png")}, + ), + ] + ) + 
extension.initialize() + + @classmethod + def initialize_server(cls, argv=[], load_other_extensions=True, **kwargs): + """Creates an instance of ServerApp and explicitly sets + this extension to enabled=True (i.e. superceding disabling + found in other config from files). + + The `launch_instance` method uses this method to initialize + and start a server. + """ + jpserver_extensions = {cls.get_extension_package(): True} + find_extensions = cls.load_other_extensions + if "jpserver_extensions" in cls.serverapp_config: + jpserver_extensions.update(cls.serverapp_config["jpserver_extensions"]) + cls.serverapp_config["jpserver_extensions"] = jpserver_extensions + find_extensions = False + serverapp = ServerApp.instance(jpserver_extensions=jpserver_extensions, **kwargs) + serverapp.aliases.update(cls.aliases) + serverapp.initialize( + argv=argv, + starter_extension=cls.name, + find_extensions=find_extensions, + ) + return serverapp + + @classmethod + def launch_instance(cls, argv=None, **kwargs): + """Launch the extension like an application. Initializes+configs a stock server + and appends the extension to the server. Then starts the server and routes to + extension's landing page. + """ + # Handle arguments. + if argv is None: + args = sys.argv[1:] # slice out extension config. + else: + args = argv + + # Handle all "stops" that could happen before + # continuing to launch a server+extension. + subapp = _preparse_for_subcommand(cls, args) + if subapp: + subapp.start() + return + + # Check for help, version, and generate-config arguments + # before initializing server to make sure these + # arguments trigger actions from the extension not the server. + _preparse_for_stopping_flags(cls, args) + serverapp = cls.initialize_server(argv=args) + + # Log if extension is blocking other extensions from loading. 
+ if not cls.load_other_extensions: + serverapp.log.info( + "{ext_name} is running without loading " + "other extensions.".format(ext_name=cls.name) + ) + # Start the server. + try: + serverapp.start() + except NoStart: + pass diff --git a/server/jupyter_server/extension/config.py b/server/jupyter_server/extension/config.py new file mode 100644 index 0000000..8fb6cec --- /dev/null +++ b/server/jupyter_server/extension/config.py @@ -0,0 +1,33 @@ +from jupyter_server.services.config.manager import ConfigManager + + +DEFAULT_SECTION_NAME = "jupyter_server_config" + + +class ExtensionConfigManager(ConfigManager): + """A manager class to interface with Jupyter Server Extension config + found in a `config.d` folder. It is assumed that all configuration + files in this directory are JSON files. + """ + + def get_jpserver_extensions(self, section_name=DEFAULT_SECTION_NAME): + """Return the jpserver_extensions field from all + config files found.""" + data = self.get(section_name) + return data.get("ServerApp", {}).get("jpserver_extensions", {}) + + def enabled(self, name, section_name=DEFAULT_SECTION_NAME, include_root=True): + """Is the extension enabled?""" + extensions = self.get_jpserver_extensions(section_name) + try: + return extensions[name] + except KeyError: + return False + + def enable(self, name): + data = {"ServerApp": {"jpserver_extensions": {name: True}}} + self.update(name, data) + + def disable(self, name): + data = {"ServerApp": {"jpserver_extensions": {name: False}}} + self.update(name, data) diff --git a/server/jupyter_server/extension/handler.py b/server/jupyter_server/extension/handler.py new file mode 100644 index 0000000..be257a4 --- /dev/null +++ b/server/jupyter_server/extension/handler.py @@ -0,0 +1,119 @@ +from jinja2.exceptions import TemplateNotFound + +from jupyter_server.base.handlers import FileFindHandler + + +class ExtensionHandlerJinjaMixin: + """Mixin class for ExtensionApp handlers that use jinja templating for + template rendering. 
+ """ + + def get_template(self, name): + """Return the jinja template object for a given name""" + try: + env = "{}_jinja2_env".format(self.name) + return self.settings[env].get_template(name) + except TemplateNotFound: + return super().get_template(name) + + +class ExtensionHandlerMixin: + """Base class for Jupyter server extension handlers. + + Subclasses can serve static files behind a namespaced + endpoint: "/static//" + + This allows multiple extensions to serve static files under + their own namespace and avoid intercepting requests for + other extensions. + """ + + def initialize(self, name): + self.name = name + + @property + def extensionapp(self): + return self.settings[self.name] + + @property + def serverapp(self): + key = "serverapp" + return self.settings[key] + + @property + def log(self): + if not hasattr(self, "name"): + return super().log + # Attempt to pull the ExtensionApp's log, otherwise fall back to ServerApp. + try: + return self.extensionapp.log + except AttributeError: + return self.serverapp.log + + @property + def config(self): + return self.settings["{}_config".format(self.name)] + + @property + def server_config(self): + return self.settings["config"] + + @property + def base_url(self): + return self.settings.get("base_url", "/") + + @property + def static_url_prefix(self): + return self.extensionapp.static_url_prefix + + @property + def static_path(self): + return self.settings["{}_static_paths".format(self.name)] + + def static_url(self, path, include_host=None, **kwargs): + """Returns a static URL for the given relative static file path. + This method requires you set the ``{name}_static_path`` + setting in your extension (which specifies the root directory + of your static files). + This method returns a versioned url (by default appending + ``?v=``), which allows the static files to be + cached indefinitely. 
This can be disabled by passing + ``include_version=False`` (in the default implementation; + other static file implementations are not required to support + this, but they may support other options). + By default this method returns URLs relative to the current + host, but if ``include_host`` is true the URL returned will be + absolute. If this handler has an ``include_host`` attribute, + that value will be used as the default for all `static_url` + calls that do not pass ``include_host`` as a keyword argument. + """ + key = "{}_static_paths".format(self.name) + try: + self.require_setting(key, "static_url") + except Exception as e: + if key in self.settings: + raise Exception( + "This extension doesn't have any static paths listed. Check that the " + "extension's `static_paths` trait is set." + ) from e + else: + raise e + + get_url = self.settings.get("static_handler_class", FileFindHandler).make_static_url + + if include_host is None: + include_host = getattr(self, "include_host", False) + + if include_host: + base = self.request.protocol + "://" + self.request.host + else: + base = "" + + # Hijack settings dict to send extension templates to extension + # static directory. 
+ settings = { + "static_path": self.static_path, + "static_url_prefix": self.static_url_prefix, + } + + return base + get_url(settings, path, **kwargs) diff --git a/server/jupyter_server/extension/manager.py b/server/jupyter_server/extension/manager.py new file mode 100644 index 0000000..2f92e78 --- /dev/null +++ b/server/jupyter_server/extension/manager.py @@ -0,0 +1,401 @@ +import importlib +import sys +import traceback + +from tornado.gen import multi +from traitlets import Any +from traitlets import Bool +from traitlets import default +from traitlets import Dict +from traitlets import HasTraits +from traitlets import Instance +from traitlets import observe +from traitlets import Unicode +from traitlets import validate as validate_trait +from traitlets.config import LoggingConfigurable + +from .config import ExtensionConfigManager +from .utils import ExtensionMetadataError +from .utils import ExtensionModuleNotFound +from .utils import get_loader +from .utils import get_metadata + + +class ExtensionPoint(HasTraits): + """A simple API for connecting to a Jupyter Server extension + point defined by metadata and importable from a Python package. + """ + + _linked = Bool(False) + _app = Any(None, allow_none=True) + + metadata = Dict() + + @validate_trait("metadata") + def _valid_metadata(self, proposed): + metadata = proposed["value"] + # Verify that the metadata has a "name" key. + try: + self._module_name = metadata["module"] + except KeyError: + raise ExtensionMetadataError( + "There is no 'module' key in the extension's " "metadata packet." + ) + + try: + self._module = importlib.import_module(self._module_name) + except ImportError: + raise ExtensionModuleNotFound( + "The submodule '{}' could not be found. Are you " + "sure the extension is installed?".format(self._module_name) + ) + # If the metadata includes an ExtensionApp, create an instance. 
+ if "app" in metadata: + self._app = metadata["app"]() + return metadata + + @property + def linked(self): + """Has this extension point been linked to the server. + + Will pull from ExtensionApp's trait, if this point + is an instance of ExtensionApp. + """ + if self.app: + return self.app._linked + return self._linked + + @property + def app(self): + """If the metadata includes an `app` field""" + return self._app + + @property + def config(self): + """Return any configuration provided by this extension point.""" + if self.app: + return self.app._jupyter_server_config() + # At some point, we might want to add logic to load config from + # disk when extensions don't use ExtensionApp. + else: + return {} + + @property + def module_name(self): + """Name of the Python package module where the extension's + _load_jupyter_server_extension can be found. + """ + return self._module_name + + @property + def name(self): + """Name of the extension. + + If it's not provided in the metadata, `name` is set + to the extensions' module name. + """ + if self.app: + return self.app.name + return self.metadata.get("name", self.module_name) + + @property + def module(self): + """The imported module (using importlib.import_module)""" + return self._module + + def _get_linker(self): + if self.app: + linker = self.app._link_jupyter_server_extension + else: + linker = getattr( + self.module, + # Search for a _link_jupyter_extension + "_link_jupyter_server_extension", + # Otherwise return a dummy function. + lambda serverapp: None, + ) + return linker + + def _get_loader(self): + loc = self.app + if not loc: + loc = self.module + loader = get_loader(loc) + return loader + + def validate(self): + """Check that both a linker and loader exists.""" + try: + self._get_linker() + self._get_loader() + except Exception: + return False + else: + return True + + def link(self, serverapp): + """Link the extension to a Jupyter ServerApp object. 
+ + This looks for a `_link_jupyter_server_extension` function + in the extension's module or ExtensionApp class. + """ + if not self.linked: + linker = self._get_linker() + linker(serverapp) + # Store this extension as already linked. + self._linked = True + + def load(self, serverapp): + """Load the extension in a Jupyter ServerApp object. + + This looks for a `_load_jupyter_server_extension` function + in the extension's module or ExtensionApp class. + """ + loader = self._get_loader() + return loader(serverapp) + + +class ExtensionPackage(HasTraits): + """An API for interfacing with a Jupyter Server extension package. + + Usage: + + ext_name = "my_extensions" + extpkg = ExtensionPackage(name=ext_name) + """ + + name = Unicode(help="Name of the an importable Python package.") + enabled = Bool(False).tag(config=True) + + def __init__(self, *args, **kwargs): + # Store extension points that have been linked. + self._linked_points = {} + super().__init__(*args, **kwargs) + + _linked_points = {} + + @validate_trait("name") + def _validate_name(self, proposed): + name = proposed["value"] + self._extension_points = {} + try: + self._module, self._metadata = get_metadata(name) + except ImportError: + raise ExtensionModuleNotFound( + "The module '{name}' could not be found. Are you " + "sure the extension is installed?".format(name=name) + ) + # Create extension point interfaces for each extension path. + for m in self._metadata: + point = ExtensionPoint(metadata=m) + self._extension_points[point.name] = point + return name + + @property + def module(self): + """Extension metadata loaded from the extension package.""" + return self._module + + @property + def version(self): + """Get the version of this package, if it's given. 
Otherwise, return an empty string""" + return getattr(self._module, "__version__", "") + + @property + def metadata(self): + """Extension metadata loaded from the extension package.""" + return self._metadata + + @property + def extension_points(self): + """A dictionary of extension points.""" + return self._extension_points + + def validate(self): + """Validate all extension points in this package.""" + for extension in self.extension_points.values(): + if not extension.validate(): + return False + return True + + def link_point(self, point_name, serverapp): + linked = self._linked_points.get(point_name, False) + if not linked: + point = self.extension_points[point_name] + point.link(serverapp) + + def load_point(self, point_name, serverapp): + point = self.extension_points[point_name] + return point.load(serverapp) + + def link_all_points(self, serverapp): + for point_name in self.extension_points: + self.link_point(point_name, serverapp) + + def load_all_points(self, serverapp): + return [self.load_point(point_name, serverapp) for point_name in self.extension_points] + + +class ExtensionManager(LoggingConfigurable): + """High level interface for findind, validating, + linking, loading, and managing Jupyter Server extensions. + + Usage: + m = ExtensionManager(config_manager=...) + """ + + config_manager = Instance(ExtensionConfigManager, allow_none=True) + + serverapp = Any() # Use Any to avoid circular import of Instance(ServerApp) + + @default("config_manager") + def _load_default_config_manager(self): + config_manager = ExtensionConfigManager() + self._load_config_manager(config_manager) + return config_manager + + @observe("config_manager") + def _config_manager_changed(self, change): + if change.new: + self._load_config_manager(change.new) + + # The `extensions` attribute provides a dictionary + # with extension (package) names mapped to their ExtensionPackage interface + # (see above). 
This manager simplifies the interaction between the + # ServerApp and the extensions being appended. + extensions = Dict( + help=""" + Dictionary with extension package names as keys + and ExtensionPackage objects as values. + """ + ) + + @property + def sorted_extensions(self): + """Returns an extensions dictionary, sorted alphabetically.""" + return dict(sorted(self.extensions.items())) + + # The `_linked_extensions` attribute tracks when each extension + # has been successfully linked to a ServerApp. This helps prevent + # extensions from being re-linked recursively unintentionally if another + # extension attempts to link extensions again. + linked_extensions = Dict( + help=""" + Dictionary with extension names as keys + + values are True if the extension is linked, False if not. + """ + ) + + @property + def extension_apps(self): + """Return mapping of extension names and sets of ExtensionApp objects.""" + return { + name: {point.app for point in extension.extension_points.values() if point.app} + for name, extension in self.extensions.items() + } + + @property + def extension_points(self): + """Return mapping of extension point names and ExtensionPoint objects.""" + return { + name: point + for value in self.extensions.values() + for name, point in value.extension_points.items() + } + + def from_config_manager(self, config_manager): + """Add extensions found by an ExtensionConfigManager""" + # load triggered via config_manager trait observer + self.config_manager = config_manager + + def _load_config_manager(self, config_manager): + """Actually load our config manager""" + jpserver_extensions = config_manager.get_jpserver_extensions() + self.from_jpserver_extensions(jpserver_extensions) + + def from_jpserver_extensions(self, jpserver_extensions): + """Add extensions from 'jpserver_extensions'-like dictionary.""" + for name, enabled in jpserver_extensions.items(): + self.add_extension(name, enabled=enabled) + + def add_extension(self, extension_name, 
enabled=False): + """Try to add extension to manager, return True if successful. + Otherwise, return False. + """ + try: + extpkg = ExtensionPackage(name=extension_name, enabled=enabled) + self.extensions[extension_name] = extpkg + return True + # Raise a warning if the extension cannot be loaded. + except Exception as e: + if self.serverapp.reraise_server_extension_failures: + raise + self.log.warning(e) + return False + + def link_extension(self, name): + linked = self.linked_extensions.get(name, False) + extension = self.extensions[name] + if not linked and extension.enabled: + try: + # Link extension and store links + extension.link_all_points(self.serverapp) + self.linked_extensions[name] = True + self.log.info("{name} | extension was successfully linked.".format(name=name)) + except Exception as e: + if self.serverapp.reraise_server_extension_failures: + raise + self.log.warning(e) + + def load_extension(self, name): + extension = self.extensions.get(name) + + if extension.enabled: + try: + extension.load_all_points(self.serverapp) + except Exception as e: + if self.serverapp.reraise_server_extension_failures: + raise + self.log.debug("".join(traceback.format_exception(*sys.exc_info()))) + self.log.warning( + "{name} | extension failed loading with message: {error}".format( + name=name, error=str(e) + ) + ) + else: + self.log.info("{name} | extension was successfully loaded.".format(name=name)) + + async def stop_extension(self, name, apps): + """Call the shutdown hooks in the specified apps.""" + for app in apps: + self.log.debug('{} | extension app "{}" stopping'.format(name, app.name)) + await app.stop_extension() + self.log.debug('{} | extension app "{}" stopped'.format(name, app.name)) + + def link_all_extensions(self): + """Link all enabled extensions + to an instance of ServerApp + """ + # Sort the extension names to enforce deterministic linking + # order. 
+ for name in self.sorted_extensions.keys(): + self.link_extension(name) + + def load_all_extensions(self): + """Load all enabled extensions and append them to + the parent ServerApp. + """ + # Sort the extension names to enforce deterministic loading + # order. + for name in self.sorted_extensions.keys(): + self.load_extension(name) + + async def stop_all_extensions(self): + """Call the shutdown hooks in all extensions.""" + await multi( + [ + self.stop_extension(name, apps) + for name, apps in sorted(dict(self.extension_apps).items()) + ] + ) diff --git a/server/jupyter_server/extension/serverextension.py b/server/jupyter_server/extension/serverextension.py new file mode 100644 index 0000000..31a8c56 --- /dev/null +++ b/server/jupyter_server/extension/serverextension.py @@ -0,0 +1,384 @@ +# coding: utf-8 +"""Utilities for installing extensions""" +# Copyright (c) Jupyter Development Team. +# Distributed under the terms of the Modified BSD License. +import os +import sys + +from jupyter_core.application import JupyterApp +from jupyter_core.paths import ENV_CONFIG_PATH +from jupyter_core.paths import jupyter_config_dir +from jupyter_core.paths import SYSTEM_CONFIG_PATH +from tornado.log import LogFormatter +from traitlets import Bool + +from jupyter_server._version import __version__ +from jupyter_server.extension.config import ExtensionConfigManager +from jupyter_server.extension.manager import ExtensionManager +from jupyter_server.extension.manager import ExtensionPackage + + +def _get_config_dir(user=False, sys_prefix=False): + """Get the location of config files for the current context + + Returns the string to the environment + + Parameters + ---------- + user : bool [default: False] + Get the user's .jupyter config directory + sys_prefix : bool [default: False] + Get sys.prefix, i.e. 
~/.envs/my-env/etc/jupyter + """ + if user and sys_prefix: + sys_prefix = False + if user: + extdir = jupyter_config_dir() + elif sys_prefix: + extdir = ENV_CONFIG_PATH[0] + else: + extdir = SYSTEM_CONFIG_PATH[0] + return extdir + + +def _get_extmanager_for_context(write_dir="jupyter_server_config.d", user=False, sys_prefix=False): + """Get an extension manager pointing at the current context + + Returns the path to the current context and an ExtensionManager object. + + Parameters + ---------- + write_dir : str [default: 'jupyter_server_config.d'] + Name of config directory to write extension config. + user : bool [default: False] + Get the user's .jupyter config directory + sys_prefix : bool [default: False] + Get sys.prefix, i.e. ~/.envs/my-env/etc/jupyter + """ + config_dir = _get_config_dir(user=user, sys_prefix=sys_prefix) + config_manager = ExtensionConfigManager( + read_config_path=[config_dir], + write_config_dir=os.path.join(config_dir, write_dir), + ) + extension_manager = ExtensionManager( + config_manager=config_manager, + ) + return config_dir, extension_manager + + +class ArgumentConflict(ValueError): + pass + + +_base_flags = {} +_base_flags.update(JupyterApp.flags) +_base_flags.pop("y", None) +_base_flags.pop("generate-config", None) +_base_flags.update( + { + "user": ( + { + "BaseExtensionApp": { + "user": True, + } + }, + "Apply the operation only for the given user", + ), + "system": ( + { + "BaseExtensionApp": { + "user": False, + "sys_prefix": False, + } + }, + "Apply the operation system-wide", + ), + "sys-prefix": ( + { + "BaseExtensionApp": { + "sys_prefix": True, + } + }, + "Use sys.prefix as the prefix for installing extensions (for environments, packaging)", + ), + "py": ( + { + "BaseExtensionApp": { + "python": True, + } + }, + "Install from a Python package", + ), + } +) +_base_flags["python"] = _base_flags["py"] + +_base_aliases = {} +_base_aliases.update(JupyterApp.aliases) + + +class BaseExtensionApp(JupyterApp): + """Base extension 
installer app""" + + _log_formatter_cls = LogFormatter + flags = _base_flags + aliases = _base_aliases + version = __version__ + + user = Bool(False, config=True, help="Whether to do a user install") + sys_prefix = Bool(True, config=True, help="Use the sys.prefix as the prefix") + python = Bool(False, config=True, help="Install from a Python package") + + def _log_format_default(self): + """A default format for messages""" + return "%(message)s" + + @property + def config_dir(self): + return _get_config_dir(user=self.user, sys_prefix=self.sys_prefix) + + +# Constants for pretty print extension listing function. +# Window doesn't support coloring in the commandline +GREEN_ENABLED = "\033[32menabled\033[0m" if os.name != "nt" else "enabled" +RED_DISABLED = "\033[31mdisabled\033[0m" if os.name != "nt" else "disabled" +GREEN_OK = "\033[32mOK\033[0m" if os.name != "nt" else "ok" +RED_X = "\033[31m X\033[0m" if os.name != "nt" else " X" + +# ------------------------------------------------------------------------------ +# Public API +# ------------------------------------------------------------------------------ + + +def toggle_server_extension_python( + import_name, enabled=None, parent=None, user=False, sys_prefix=True +): + """Toggle the boolean setting for a given server extension + in a Jupyter config file. 
+ """ + sys_prefix = False if user else sys_prefix + config_dir = _get_config_dir(user=user, sys_prefix=sys_prefix) + manager = ExtensionConfigManager( + read_config_path=[config_dir], + write_config_dir=os.path.join(config_dir, "jupyter_server_config.d"), + ) + if enabled: + manager.enable(import_name) + else: + manager.disable(import_name) + + +# ---------------------------------------------------------------------- +# Applications +# ---------------------------------------------------------------------- + +flags = {} +flags.update(BaseExtensionApp.flags) +flags.pop("y", None) +flags.pop("generate-config", None) +flags.update( + { + "user": ( + { + "ToggleServerExtensionApp": { + "user": True, + } + }, + "Perform the operation for the current user", + ), + "system": ( + { + "ToggleServerExtensionApp": { + "user": False, + "sys_prefix": False, + } + }, + "Perform the operation system-wide", + ), + "sys-prefix": ( + { + "ToggleServerExtensionApp": { + "sys_prefix": True, + } + }, + "Use sys.prefix as the prefix for installing server extensions", + ), + "py": ( + { + "ToggleServerExtensionApp": { + "python": True, + } + }, + "Install from a Python package", + ), + } +) +flags["python"] = flags["py"] + + +class ToggleServerExtensionApp(BaseExtensionApp): + """A base class for enabling/disabling extensions""" + + name = "Kennen server extension enable/disable" + description = "Enable/disable a server extension using frontend configuration files." + + flags = flags + + _toggle_value = Bool() + _toggle_pre_message = "" + _toggle_post_message = "" + + def toggle_server_extension(self, import_name): + """Change the status of a named server extension. + + Uses the value of `self._toggle_value`. + + Parameters + --------- + + import_name : str + Importable Python module (dotted-notation) exposing the magic-named + `load_jupyter_server_extension` function + """ + # Create an extension manager for this instance. 
+ config_dir, extension_manager = _get_extmanager_for_context( + user=self.user, sys_prefix=self.sys_prefix + ) + try: + self.log.info("{}: {}".format(self._toggle_pre_message.capitalize(), import_name)) + self.log.info("- Writing config: {}".format(config_dir)) + # Validate the server extension. + self.log.info(" - Validating {}...".format(import_name)) + # Interface with the Extension Package and validate. + extpkg = ExtensionPackage(name=import_name) + extpkg.validate() + version = extpkg.version + self.log.info(" {} {} {}".format(import_name, version, GREEN_OK)) + + # Toggle extension config. + config = extension_manager.config_manager + if self._toggle_value is True: + config.enable(import_name) + else: + config.disable(import_name) + + # If successful, let's log. + self.log.info(" - Extension successfully {}.".format(self._toggle_post_message)) + except Exception as err: + self.log.info(" {} Validation failed: {}".format(RED_X, err)) + + def start(self): + """Perform the App's actions as configured""" + if not self.extra_args: + sys.exit("Please specify a server extension/package to enable or disable") + for arg in self.extra_args: + self.toggle_server_extension(arg) + + +class EnableServerExtensionApp(ToggleServerExtensionApp): + """An App that enables (and validates) Server Extensions""" + + name = "jupyter server extension enable" + description = """ + Enable a server extension in configuration. + + Usage + jupyter server extension enable [--system|--sys-prefix] + """ + _toggle_value = True + _toggle_pre_message = "enabling" + _toggle_post_message = "enabled" + + +class DisableServerExtensionApp(ToggleServerExtensionApp): + """An App that disables Server Extensions""" + + name = "jupyter server extension disable" + description = """ + Disable a server extension in configuration. 
+ + Usage + jupyter server extension disable [--system|--sys-prefix] + """ + _toggle_value = False + _toggle_pre_message = "disabling" + _toggle_post_message = "disabled" + + +class ListServerExtensionsApp(BaseExtensionApp): + """An App that lists (and validates) Server Extensions""" + + name = "jupyter server extension list" + version = __version__ + description = "List all server extensions known by the configuration system" + + def list_server_extensions(self): + """List all enabled and disabled server extensions, by config path + + Enabled extensions are validated, potentially generating warnings. + """ + configurations = ( + {"user": True, "sys_prefix": False}, + {"user": False, "sys_prefix": True}, + {"user": False, "sys_prefix": False}, + ) + + for option in configurations: + config_dir, ext_manager = _get_extmanager_for_context(**option) + self.log.info("Config dir: {}".format(config_dir)) + for name, extension in ext_manager.extensions.items(): + enabled = extension.enabled + # Attempt to get extension metadata + self.log.info(" {} {}".format(name, GREEN_ENABLED if enabled else RED_DISABLED)) + try: + self.log.info(" - Validating {}...".format(name)) + if not extension.validate(): + raise ValueError("validation failed") + version = extension.version + self.log.info(" {} {} {}".format(name, version, GREEN_OK)) + except Exception as err: + self.log.warning(" {} {}".format(RED_X, err)) + # Add a blank line between paths.
+ self.log.info("") + + def start(self): + """Perform the App's actions as configured""" + self.list_server_extensions() + + +_examples = """ +jupyter server extension list # list all configured server extensions +jupyter server extension enable --py # enable all server extensions in a Python package +jupyter server extension disable --py # disable all server extensions in a Python package +""" + + +class ServerExtensionApp(BaseExtensionApp): + """Root level server extension app""" + + name = "jupyter server extension" + version = __version__ + description = "Work with Jupyter server extensions" + examples = _examples + + subcommands = dict( + enable=(EnableServerExtensionApp, "Enable a server extension"), + disable=(DisableServerExtensionApp, "Disable a server extension"), + list=(ListServerExtensionsApp, "List server extensions"), + ) + + def start(self): + """Perform the App's actions as configured""" + super(ServerExtensionApp, self).start() + + # The above should have called a subcommand and raised NoStart; if we + # get here, it didn't, so we should self.log.info a message. + subcmds = ", ".join(sorted(self.subcommands)) + sys.exit("Please supply at least one subcommand: %s" % subcmds) + + +main = ServerExtensionApp.launch_instance + + +if __name__ == "__main__": + main() diff --git a/server/jupyter_server/extension/utils.py b/server/jupyter_server/extension/utils.py new file mode 100644 index 0000000..afca322 --- /dev/null +++ b/server/jupyter_server/extension/utils.py @@ -0,0 +1,103 @@ +import importlib +import warnings + + +class ExtensionLoadingError(Exception): + pass + + +class ExtensionMetadataError(Exception): + pass + + +class ExtensionModuleNotFound(Exception): + pass + + +class NotAnExtensionApp(Exception): + pass + + +def get_loader(obj, logger=None): + """Looks for _load_jupyter_server_extension as an attribute + of the object or module. + + Adds backwards compatibility for old function name missing the + underscore prefix. 
+ """ + try: + func = getattr(obj, "_load_jupyter_server_extension") + except AttributeError: + func = getattr(obj, "load_jupyter_server_extension") + warnings.warn( + "A `_load_jupyter_server_extension` function was not " + "found in {name!s}. Instead, a `load_jupyter_server_extension` " + "function was found and will be used for now. This function " + "name will be deprecated in future releases " + "of Jupyter Server.".format(name=obj), + DeprecationWarning, + ) + except Exception: + raise ExtensionLoadingError("_load_jupyter_server_extension function was not found.") + return func + + +def get_metadata(package_name, logger=None): + """Find the extension metadata from an extension package. + + This looks for a `_jupyter_server_extension_points` function + that returns metadata about all extension points within a Jupyter + Server Extension pacakge. + + If it doesn't exist, return a basic metadata packet given + the module name. + """ + module = importlib.import_module(package_name) + + try: + return module, module._jupyter_server_extension_points() + except AttributeError: + pass + + # For backwards compatibility, we temporarily allow + # _jupyter_server_extension_paths. We will remove in + # a later release of Jupyter Server. + try: + extension_points = module._jupyter_server_extension_paths() + if logger: + logger.warning( + "A `_jupyter_server_extension_points` function was not " + "found in {name}. Instead, a `_jupyter_server_extension_paths` " + "function was found and will be used for now. This function " + "name will be deprecated in future releases " + "of Jupyter Server.".format(name=package_name) + ) + return module, extension_points + except AttributeError: + pass + + # Dynamically create metadata if the package doesn't + # provide it. 
+ if logger: + logger.debug( + "A `_jupyter_server_extension_points` function was " + "not found in {name}, so Jupyter Server will look " + "for extension points in the extension pacakge's " + "root.".format(name=package_name) + ) + return module, [{"module": package_name, "name": package_name}] + + +def validate_extension(name): + """Raises an exception is the extension is missing a needed + hook or metadata field. + An extension is valid if: + 1) name is an importable Python package. + 1) the package has a _jupyter_server_extension_paths function + 2) each extension path has a _load_jupyter_server_extension function + + If this works, nothing should happen. + """ + from .manager import ExtensionPackage + + return ExtensionPackage(name=name) diff --git a/server/jupyter_server/files/__init__.py b/server/jupyter_server/files/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/server/jupyter_server/files/handlers.py b/server/jupyter_server/files/handlers.py new file mode 100644 index 0000000..2b53145 --- /dev/null +++ b/server/jupyter_server/files/handlers.py @@ -0,0 +1,94 @@ +"""Serve files directly from the ContentsManager.""" +# Copyright (c) Jupyter Development Team. +# Distributed under the terms of the Modified BSD License. +import json +import mimetypes +from base64 import decodebytes + +from tornado import web + +from jupyter_server.auth import authorized +from jupyter_server.base.handlers import JupyterHandler +from jupyter_server.utils import ensure_async + +AUTH_RESOURCE = "contents" + + +class FilesHandler(JupyterHandler): + """serve files via ContentsManager + + Normally used when ContentsManager is not a FileContentsManager. + + FileContentsManager subclasses use AuthenticatedFilesHandler by default, + a subclass of StaticFileHandler. 
+ """ + + auth_resource = AUTH_RESOURCE + + @property + def content_security_policy(self): + # In case we're serving HTML/SVG, confine any Javascript to a unique + # origin so it can't interact with the notebook server. + return super(FilesHandler, self).content_security_policy + "; sandbox allow-scripts" + + @web.authenticated + @authorized + def head(self, path): + self.get(path, include_body=False) + self.check_xsrf_cookie() + return self.get(path, include_body=False) + + @web.authenticated + @authorized + async def get(self, path, include_body=True): + # /files/ requests must originate from the same site + self.check_xsrf_cookie() + cm = self.contents_manager + + if await ensure_async(cm.is_hidden(path)) and not cm.allow_hidden: + self.log.info("Refusing to serve hidden file, via 404 Error") + raise web.HTTPError(404) + + path = path.strip("/") + if "/" in path: + _, name = path.rsplit("/", 1) + else: + name = path + + model = await ensure_async(cm.get(path, type="file", content=include_body)) + + if self.get_argument("download", False): + self.set_attachment_header(name) + + # get mimetype from filename + if name.lower().endswith(".ipynb"): + self.set_header("Content-Type", "application/x-ipynb+json") + else: + cur_mime, encoding = mimetypes.guess_type(name) + if cur_mime == "text/plain": + self.set_header("Content-Type", "text/plain; charset=UTF-8") + # RFC 6713 + if encoding == "gzip": + self.set_header("Content-Type", "application/gzip") + elif encoding is not None: + self.set_header("Content-Type", "application/octet-stream") + elif cur_mime is not None: + self.set_header("Content-Type", cur_mime) + else: + if model["format"] == "base64": + self.set_header("Content-Type", "application/octet-stream") + else: + self.set_header("Content-Type", "text/plain; charset=UTF-8") + + if include_body: + if model["format"] == "base64": + b64_bytes = model["content"].encode("ascii") + self.write(decodebytes(b64_bytes)) + elif model["format"] == "json": + 
self.write(json.dumps(model["content"])) + else: + self.write(model["content"]) + self.flush() + + +default_handlers = [] diff --git a/server/jupyter_server/gateway/__init__.py b/server/jupyter_server/gateway/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/server/jupyter_server/gateway/gateway_client.py b/server/jupyter_server/gateway/gateway_client.py new file mode 100644 index 0000000..4efbd2e --- /dev/null +++ b/server/jupyter_server/gateway/gateway_client.py @@ -0,0 +1,438 @@ +# Copyright (c) Jupyter Development Team. +# Distributed under the terms of the Modified BSD License. +import json +import os +from socket import gaierror + +from tornado import web +from tornado.httpclient import AsyncHTTPClient +from tornado.httpclient import HTTPError +from traitlets import Bool +from traitlets import default +from traitlets import Float +from traitlets import Int +from traitlets import TraitError +from traitlets import Unicode +from traitlets import validate +from traitlets.config import SingletonConfigurable + + +class GatewayClient(SingletonConfigurable): + """This class manages the configuration. It's its own singleton class so that we + can share these values across all objects. It also contains some helper methods + to build request arguments out of the various config options. + + """ + + url = Unicode( + default_value=None, + allow_none=True, + config=True, + help="""The url of the Kernel or Enterprise Gateway server where + kernel specifications are defined and kernel management takes place. + If defined, this Notebook server acts as a proxy for all kernel + management and kernel specification retrieval. 
(JUPYTER_GATEWAY_URL env var) + """, + ) + + url_env = "JUPYTER_GATEWAY_URL" + + @default("url") + def _url_default(self): + return os.environ.get(self.url_env) + + @validate("url") + def _url_validate(self, proposal): + value = proposal["value"] + # Ensure value, if present, starts with 'http' + if value is not None and len(value) > 0: + if not str(value).lower().startswith("http"): + raise TraitError("GatewayClient url must start with 'http': '%r'" % value) + return value + + ws_url = Unicode( + default_value=None, + allow_none=True, + config=True, + help="""The websocket url of the Kernel or Enterprise Gateway server. If not provided, this value + will correspond to the value of the Gateway url with 'ws' in place of 'http'. (JUPYTER_GATEWAY_WS_URL env var) + """, + ) + + ws_url_env = "JUPYTER_GATEWAY_WS_URL" + + @default("ws_url") + def _ws_url_default(self): + default_value = os.environ.get(self.ws_url_env) + if default_value is None: + if self.gateway_enabled: + default_value = self.url.lower().replace("http", "ws") + return default_value + + @validate("ws_url") + def _ws_url_validate(self, proposal): + value = proposal["value"] + # Ensure value, if present, starts with 'ws' + if value is not None and len(value) > 0: + if not str(value).lower().startswith("ws"): + raise TraitError("GatewayClient ws_url must start with 'ws': '%r'" % value) + return value + + kernels_endpoint_default_value = "/api/kernels" + kernels_endpoint_env = "JUPYTER_GATEWAY_KERNELS_ENDPOINT" + kernels_endpoint = Unicode( + default_value=kernels_endpoint_default_value, + config=True, + help="""The gateway API endpoint for accessing kernel resources (JUPYTER_GATEWAY_KERNELS_ENDPOINT env var)""", + ) + + @default("kernels_endpoint") + def _kernels_endpoint_default(self): + return os.environ.get(self.kernels_endpoint_env, self.kernels_endpoint_default_value) + + kernelspecs_endpoint_default_value = "/api/kernelspecs" + kernelspecs_endpoint_env = "JUPYTER_GATEWAY_KERNELSPECS_ENDPOINT" + 
kernelspecs_endpoint = Unicode( + default_value=kernelspecs_endpoint_default_value, + config=True, + help="""The gateway API endpoint for accessing kernelspecs (JUPYTER_GATEWAY_KERNELSPECS_ENDPOINT env var)""", + ) + + @default("kernelspecs_endpoint") + def _kernelspecs_endpoint_default(self): + return os.environ.get( + self.kernelspecs_endpoint_env, self.kernelspecs_endpoint_default_value + ) + + kernelspecs_resource_endpoint_default_value = "/kernelspecs" + kernelspecs_resource_endpoint_env = "JUPYTER_GATEWAY_KERNELSPECS_RESOURCE_ENDPOINT" + kernelspecs_resource_endpoint = Unicode( + default_value=kernelspecs_resource_endpoint_default_value, + config=True, + help="""The gateway endpoint for accessing kernelspecs resources + (JUPYTER_GATEWAY_KERNELSPECS_RESOURCE_ENDPOINT env var)""", + ) + + @default("kernelspecs_resource_endpoint") + def _kernelspecs_resource_endpoint_default(self): + return os.environ.get( + self.kernelspecs_resource_endpoint_env, + self.kernelspecs_resource_endpoint_default_value, + ) + + connect_timeout_default_value = 40.0 + connect_timeout_env = "JUPYTER_GATEWAY_CONNECT_TIMEOUT" + connect_timeout = Float( + default_value=connect_timeout_default_value, + config=True, + help="""The time allowed for HTTP connection establishment with the Gateway server. + (JUPYTER_GATEWAY_CONNECT_TIMEOUT env var)""", + ) + + @default("connect_timeout") + def connect_timeout_default(self): + return float( + os.environ.get("JUPYTER_GATEWAY_CONNECT_TIMEOUT", self.connect_timeout_default_value) + ) + + request_timeout_default_value = 40.0 + request_timeout_env = "JUPYTER_GATEWAY_REQUEST_TIMEOUT" + request_timeout = Float( + default_value=request_timeout_default_value, + config=True, + help="""The time allowed for HTTP request completion. 
(JUPYTER_GATEWAY_REQUEST_TIMEOUT env var)""", + ) + + @default("request_timeout") + def request_timeout_default(self): + return float( + os.environ.get("JUPYTER_GATEWAY_REQUEST_TIMEOUT", self.request_timeout_default_value) + ) + + client_key = Unicode( + default_value=None, + allow_none=True, + config=True, + help="""The filename for client SSL key, if any. (JUPYTER_GATEWAY_CLIENT_KEY env var) + """, + ) + client_key_env = "JUPYTER_GATEWAY_CLIENT_KEY" + + @default("client_key") + def _client_key_default(self): + return os.environ.get(self.client_key_env) + + client_cert = Unicode( + default_value=None, + allow_none=True, + config=True, + help="""The filename for client SSL certificate, if any. (JUPYTER_GATEWAY_CLIENT_CERT env var) + """, + ) + client_cert_env = "JUPYTER_GATEWAY_CLIENT_CERT" + + @default("client_cert") + def _client_cert_default(self): + return os.environ.get(self.client_cert_env) + + ca_certs = Unicode( + default_value=None, + allow_none=True, + config=True, + help="""The filename of CA certificates or None to use defaults. (JUPYTER_GATEWAY_CA_CERTS env var) + """, + ) + ca_certs_env = "JUPYTER_GATEWAY_CA_CERTS" + + @default("ca_certs") + def _ca_certs_default(self): + return os.environ.get(self.ca_certs_env) + + http_user = Unicode( + default_value=None, + allow_none=True, + config=True, + help="""The username for HTTP authentication. (JUPYTER_GATEWAY_HTTP_USER env var) + """, + ) + http_user_env = "JUPYTER_GATEWAY_HTTP_USER" + + @default("http_user") + def _http_user_default(self): + return os.environ.get(self.http_user_env) + + http_pwd = Unicode( + default_value=None, + allow_none=True, + config=True, + help="""The password for HTTP authentication. 
(JUPYTER_GATEWAY_HTTP_PWD env var) + """, + ) + http_pwd_env = "JUPYTER_GATEWAY_HTTP_PWD" + + @default("http_pwd") + def _http_pwd_default(self): + return os.environ.get(self.http_pwd_env) + + headers_default_value = "{}" + headers_env = "JUPYTER_GATEWAY_HEADERS" + headers = Unicode( + default_value=headers_default_value, + allow_none=True, + config=True, + help="""Additional HTTP headers to pass on the request. This value will be converted to a dict. + (JUPYTER_GATEWAY_HEADERS env var) + """, + ) + + @default("headers") + def _headers_default(self): + return os.environ.get(self.headers_env, self.headers_default_value) + + auth_token = Unicode( + default_value=None, + allow_none=True, + config=True, + help="""The authorization token used in the HTTP headers. The header will be formatted as:: + + { + 'Authorization': '{auth_scheme} {auth_token}' + } + + (JUPYTER_GATEWAY_AUTH_TOKEN env var)""", + ) + auth_token_env = "JUPYTER_GATEWAY_AUTH_TOKEN" + + @default("auth_token") + def _auth_token_default(self): + return os.environ.get(self.auth_token_env, "") + + auth_scheme = Unicode( + default_value=None, + allow_none=True, + config=True, + help="""The auth scheme, added as a prefix to the authorization token used in the HTTP headers. + (JUPYTER_GATEWAY_AUTH_SCHEME env var)""", + ) + auth_scheme_env = "JUPYTER_GATEWAY_AUTH_SCHEME" + + @default("auth_scheme") + def _auth_scheme_default(self): + return os.environ.get(self.auth_scheme_env, "token") + + validate_cert_default_value = True + validate_cert_env = "JUPYTER_GATEWAY_VALIDATE_CERT" + validate_cert = Bool( + default_value=validate_cert_default_value, + config=True, + help="""For HTTPS requests, determines if server's certificate should be validated or not. 
+ (JUPYTER_GATEWAY_VALIDATE_CERT env var)""", + ) + + @default("validate_cert") + def validate_cert_default(self): + return bool( + os.environ.get(self.validate_cert_env, str(self.validate_cert_default_value)) + not in ["no", "false"] + ) + + def __init__(self, **kwargs): + super().__init__(**kwargs) + self._static_args = {} # initialized on first use + + env_whitelist_default_value = "" + env_whitelist_env = "JUPYTER_GATEWAY_ENV_WHITELIST" + env_whitelist = Unicode( + default_value=env_whitelist_default_value, + config=True, + help="""A comma-separated list of environment variable names that will be included, along with + their values, in the kernel startup request. The corresponding `env_whitelist` configuration + value must also be set on the Gateway server - since that configuration value indicates which + environmental values to make available to the kernel. (JUPYTER_GATEWAY_ENV_WHITELIST env var)""", + ) + + @default("env_whitelist") + def _env_whitelist_default(self): + return os.environ.get(self.env_whitelist_env, self.env_whitelist_default_value) + + gateway_retry_interval_default_value = 1.0 + gateway_retry_interval_env = "JUPYTER_GATEWAY_RETRY_INTERVAL" + gateway_retry_interval = Float( + default_value=gateway_retry_interval_default_value, + config=True, + help="""The time allowed for HTTP reconnection with the Gateway server for the first time. + Next will be JUPYTER_GATEWAY_RETRY_INTERVAL multiplied by two in factor of numbers of retries + but less than JUPYTER_GATEWAY_RETRY_INTERVAL_MAX. 
+ (JUPYTER_GATEWAY_RETRY_INTERVAL env var)""", + ) + + @default("gateway_retry_interval") + def gateway_retry_interval_default(self): + return float( + os.environ.get( + "JUPYTER_GATEWAY_RETRY_INTERVAL", + self.gateway_retry_interval_default_value, + ) + ) + + gateway_retry_interval_max_default_value = 30.0 + gateway_retry_interval_max_env = "JUPYTER_GATEWAY_RETRY_INTERVAL_MAX" + gateway_retry_interval_max = Float( + default_value=gateway_retry_interval_max_default_value, + config=True, + help="""The maximum time allowed for HTTP reconnection retry with the Gateway server. + (JUPYTER_GATEWAY_RETRY_INTERVAL_MAX env var)""", + ) + + @default("gateway_retry_interval_max") + def gateway_retry_interval_max_default(self): + return float( + os.environ.get( + "JUPYTER_GATEWAY_RETRY_INTERVAL_MAX", + self.gateway_retry_interval_max_default_value, + ) + ) + + gateway_retry_max_default_value = 5 + gateway_retry_max_env = "JUPYTER_GATEWAY_RETRY_MAX" + gateway_retry_max = Int( + default_value=gateway_retry_max_default_value, + config=True, + help="""The maximum retries allowed for HTTP reconnection with the Gateway server. + (JUPYTER_GATEWAY_RETRY_MAX env var)""", + ) + + @default("gateway_retry_max") + def gateway_retry_max_default(self): + return int( + os.environ.get("JUPYTER_GATEWAY_RETRY_MAX", self.gateway_retry_max_default_value) + ) + + @property + def gateway_enabled(self): + return bool(self.url is not None and len(self.url) > 0) + + # Ensure KERNEL_LAUNCH_TIMEOUT has a default value. + KERNEL_LAUNCH_TIMEOUT = int(os.environ.get("KERNEL_LAUNCH_TIMEOUT", 40)) + + def init_static_args(self): + """Initialize arguments used on every request. Since these are static values, we'll + perform this operation once. + + """ + # Ensure that request timeout and KERNEL_LAUNCH_TIMEOUT are the same, taking the + # greater value of the two. 
        # Ensure that request timeout and KERNEL_LAUNCH_TIMEOUT stay in sync,
        # taking the greater value of the two.
        if self.request_timeout < float(GatewayClient.KERNEL_LAUNCH_TIMEOUT):
            self.request_timeout = float(GatewayClient.KERNEL_LAUNCH_TIMEOUT)
        elif self.request_timeout > float(GatewayClient.KERNEL_LAUNCH_TIMEOUT):
            GatewayClient.KERNEL_LAUNCH_TIMEOUT = int(self.request_timeout)
        # Ensure any adjustments are reflected in env.
        os.environ["KERNEL_LAUNCH_TIMEOUT"] = str(GatewayClient.KERNEL_LAUNCH_TIMEOUT)

        self._static_args["headers"] = json.loads(self.headers)
        if "Authorization" not in self._static_args["headers"].keys():
            # Only inject an Authorization header when the configured headers
            # do not already carry one, so explicit configuration wins.
            self._static_args["headers"].update(
                {"Authorization": "{} {}".format(self.auth_scheme, self.auth_token)}
            )
        self._static_args["connect_timeout"] = self.connect_timeout
        self._static_args["request_timeout"] = self.request_timeout
        self._static_args["validate_cert"] = self.validate_cert
        if self.client_cert:
            self._static_args["client_cert"] = self.client_cert
            self._static_args["client_key"] = self.client_key
            if self.ca_certs:
                self._static_args["ca_certs"] = self.ca_certs
        if self.http_user:
            self._static_args["auth_username"] = self.http_user
        if self.http_pwd:
            self._static_args["auth_password"] = self.http_pwd

    def load_connection_args(self, **kwargs):
        """Merges the static args relative to the connection, with the given keyword arguments.  If statics
        have yet to be initialized, we'll do that here.

        Parameters
        ----------
        **kwargs : optional
            Keyword arguments to merge the static connection arguments into.
            Note that the static values overwrite caller-supplied keys of the
            same name.

        Returns
        -------
        dict
            The merged keyword arguments, suitable for ``AsyncHTTPClient.fetch``.
        """
        if len(self._static_args) == 0:
            self.init_static_args()

        kwargs.update(self._static_args)
        return kwargs


async def gateway_request(endpoint, **kwargs):
    """Make an async request to kernel gateway endpoint, returns a response

    Parameters
    ----------
    endpoint : str
        The full URL of the Gateway resource being requested.
    **kwargs : optional
        Additional arguments passed to ``AsyncHTTPClient.fetch`` after being
        merged with the singleton ``GatewayClient``'s static connection args
        (auth headers, timeouts, SSL settings).

    Raises
    ------
    tornado.web.HTTPError
        503 when the connection is refused, the upstream status code when the
        gateway responds with an HTTP error, or 404 when host-name resolution
        fails (``socket.gaierror``).
    """
    client = AsyncHTTPClient()
    kwargs = GatewayClient.instance().load_connection_args(**kwargs)
    try:
        response = await client.fetch(endpoint, **kwargs)
    # Trap a set of common exceptions so that we can inform the user that their Gateway url is incorrect
    # or the server is not running.
    # NOTE: We do this here since this handler is called during the Notebook's startup and subsequent refreshes
    # of the tree view.
    except ConnectionRefusedError as e:
        # Nothing is listening at the configured host:port.
        raise web.HTTPError(
            503,
            "Connection refused from Gateway server url '{}'. "
            "Check to be sure the Gateway instance is running.".format(
                GatewayClient.instance().url
            ),
        ) from e
    except HTTPError as e:
        # This can occur if the host is valid (e.g., foo.com) but there's nothing there.
        raise web.HTTPError(
            e.code,
            "Error attempting to connect to Gateway server url '{}'. "
            "Ensure gateway url is valid and the Gateway instance is running.".format(
                GatewayClient.instance().url
            ),
        ) from e
    except gaierror as e:
        # DNS resolution failed - the configured gateway host doesn't exist.
        raise web.HTTPError(
            404,
            "The Gateway server specified in the gateway_url '{}' doesn't appear to be valid. "
            "Ensure gateway url is valid and the Gateway instance is running.".format(
                GatewayClient.instance().url
            ),
        ) from e

    return response
diff --git a/server/jupyter_server/gateway/handlers.py b/server/jupyter_server/gateway/handlers.py
new file mode 100644
index 0000000..3e86cbe
--- /dev/null
+++ b/server/jupyter_server/gateway/handlers.py
@@ -0,0 +1,292 @@
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
+import asyncio +import logging +import mimetypes +import os +import random + +from ipython_genutils.py3compat import cast_unicode +from jupyter_client.session import Session +from tornado import web +from tornado.concurrent import Future +from tornado.escape import json_decode +from tornado.escape import url_escape +from tornado.escape import utf8 +from tornado.httpclient import HTTPRequest +from tornado.ioloop import IOLoop +from tornado.ioloop import PeriodicCallback +from tornado.websocket import websocket_connect +from tornado.websocket import WebSocketHandler +from traitlets.config.configurable import LoggingConfigurable + +from ..base.handlers import APIHandler +from ..base.handlers import JupyterHandler +from ..utils import url_path_join +from .managers import GatewayClient + +# Keepalive ping interval (default: 30 seconds) +GATEWAY_WS_PING_INTERVAL_SECS = int(os.getenv("GATEWAY_WS_PING_INTERVAL_SECS", 30)) + + +class WebSocketChannelsHandler(WebSocketHandler, JupyterHandler): + + session = None + gateway = None + kernel_id = None + ping_callback = None + + def check_origin(self, origin=None): + return JupyterHandler.check_origin(self, origin) + + def set_default_headers(self): + """Undo the set_default_headers in JupyterHandler which doesn't make sense for websockets""" + pass + + def get_compression_options(self): + # use deflate compress websocket + return {} + + def authenticate(self): + """Run before finishing the GET request + + Extend this method to add logic that should fire before + the websocket finishes completing. 
+ """ + # authenticate the request before opening the websocket + if self.get_current_user() is None: + self.log.warning("Couldn't authenticate WebSocket connection") + raise web.HTTPError(403) + + if self.get_argument("session_id", False): + self.session.session = cast_unicode(self.get_argument("session_id")) + else: + self.log.warning("No session ID specified") + + def initialize(self): + self.log.debug("Initializing websocket connection %s", self.request.path) + self.session = Session(config=self.config) + self.gateway = GatewayWebSocketClient(gateway_url=GatewayClient.instance().url) + + async def get(self, kernel_id, *args, **kwargs): + self.authenticate() + self.kernel_id = cast_unicode(kernel_id, "ascii") + await super(WebSocketChannelsHandler, self).get(kernel_id=kernel_id, *args, **kwargs) + + def send_ping(self): + if self.ws_connection is None and self.ping_callback is not None: + self.ping_callback.stop() + return + + self.ping(b"") + + def open(self, kernel_id, *args, **kwargs): + """Handle web socket connection open to notebook server and delegate to gateway web socket handler""" + self.ping_callback = PeriodicCallback(self.send_ping, GATEWAY_WS_PING_INTERVAL_SECS * 1000) + self.ping_callback.start() + + self.gateway.on_open( + kernel_id=kernel_id, + message_callback=self.write_message, + compression_options=self.get_compression_options(), + ) + + def on_message(self, message): + """Forward message to gateway web socket handler.""" + self.gateway.on_message(message) + + def write_message(self, message, binary=False): + """Send message back to notebook client. 
This is called via callback from self.gateway._read_messages.""" + if self.ws_connection: # prevent WebSocketClosedError + if isinstance(message, bytes): + binary = True + super(WebSocketChannelsHandler, self).write_message(message, binary=binary) + elif self.log.isEnabledFor(logging.DEBUG): + msg_summary = WebSocketChannelsHandler._get_message_summary(json_decode(utf8(message))) + self.log.debug( + "Notebook client closed websocket connection - message dropped: {}".format( + msg_summary + ) + ) + + def on_close(self): + self.log.debug("Closing websocket connection %s", self.request.path) + self.gateway.on_close() + super(WebSocketChannelsHandler, self).on_close() + + @staticmethod + def _get_message_summary(message): + summary = [] + message_type = message["msg_type"] + summary.append("type: {}".format(message_type)) + + if message_type == "status": + summary.append(", state: {}".format(message["content"]["execution_state"])) + elif message_type == "error": + summary.append( + ", {}:{}:{}".format( + message["content"]["ename"], + message["content"]["evalue"], + message["content"]["traceback"], + ) + ) + else: + summary.append(", ...") # don't display potentially sensitive data + + return "".join(summary) + + +class GatewayWebSocketClient(LoggingConfigurable): + """Proxy web socket connection to a kernel/enterprise gateway.""" + + def __init__(self, **kwargs): + super(GatewayWebSocketClient, self).__init__(**kwargs) + self.kernel_id = None + self.ws = None + self.ws_future = Future() + self.disconnected = False + self.retry = 0 + + async def _connect(self, kernel_id, message_callback): + # websocket is initialized before connection + self.ws = None + self.kernel_id = kernel_id + ws_url = url_path_join( + GatewayClient.instance().ws_url, + GatewayClient.instance().kernels_endpoint, + url_escape(kernel_id), + "channels", + ) + self.log.info("Connecting to {}".format(ws_url)) + kwargs = {} + kwargs = GatewayClient.instance().load_connection_args(**kwargs) + + request 
= HTTPRequest(ws_url, **kwargs)
        self.ws_future = websocket_connect(request)
        self.ws_future.add_done_callback(self._connection_done)

        # Start the reader as soon as the connect future resolves; the reader
        # loop itself checks self.ws / self.disconnected before consuming.
        loop = IOLoop.current()
        loop.add_future(self.ws_future, lambda future: self._read_messages(message_callback))

    def _connection_done(self, fut):
        """Callback run when the websocket connect future completes.

        On success, stores the live connection and resets the retry counter;
        otherwise logs a warning noting the kernel may be left running on the
        gateway.
        """
        if (
            not self.disconnected and fut.exception() is None
        ):  # prevent concurrent.futures._base.CancelledError
            self.ws = fut.result()
            self.retry = 0
            self.log.debug("Connection is ready: ws: {}".format(self.ws))
        else:
            self.log.warning(
                "Websocket connection has been closed via client disconnect or due to error. "
                "Kernel with ID '{}' may not be terminated on GatewayClient: {}".format(
                    self.kernel_id, GatewayClient.instance().url
                )
            )

    def _disconnect(self):
        """Mark this client disconnected and close (or cancel) the websocket."""
        self.disconnected = True
        if self.ws is not None:
            # Close connection
            self.ws.close()
        elif not self.ws_future.done():
            # Cancel pending connection. Since future.cancel() is a noop on tornado, we'll track cancellation locally
            self.ws_future.cancel()
            self.log.debug(
                "_disconnect: future cancelled, disconnected: {}".format(self.disconnected)
            )

    async def _read_messages(self, callback):
        """Read messages from gateway server.

        Runs until the connection drops or the client is disconnected; each
        received message is forwarded to ``callback`` (the notebook-side
        ``write_message``).
        """
        while self.ws is not None:
            message = None
            if not self.disconnected:
                try:
                    message = await self.ws.read_message()
                except Exception as e:
                    self.log.error(
                        "Exception reading message from websocket: {}".format(e)
                    )  # , exc_info=True)
                if message is None:
                    # read_message() returns None when the connection is closed.
                    if not self.disconnected:
                        self.log.warning("Lost connection to Gateway: {}".format(self.kernel_id))
                    break
                callback(
                    message
                )  # pass back to notebook client (see self.on_open and WebSocketChannelsHandler.open)
            else:  # ws cancelled - stop reading
                break

        # NOTE(esevan): if websocket is not disconnected by client, try to reconnect.
+ if not self.disconnected and self.retry < GatewayClient.instance().gateway_retry_max: + jitter = random.randint(10, 100) * 0.01 + retry_interval = ( + min( + GatewayClient.instance().gateway_retry_interval * (2**self.retry), + GatewayClient.instance().gateway_retry_interval_max, + ) + + jitter + ) + self.retry += 1 + self.log.info( + "Attempting to re-establish the connection to Gateway in %s secs (%s/%s): %s", + retry_interval, + self.retry, + GatewayClient.instance().gateway_retry_max, + self.kernel_id, + ) + await asyncio.sleep(retry_interval) + loop = IOLoop.current() + loop.spawn_callback(self._connect, self.kernel_id, callback) + + def on_open(self, kernel_id, message_callback, **kwargs): + """Web socket connection open against gateway server.""" + loop = IOLoop.current() + loop.spawn_callback(self._connect, kernel_id, message_callback) + + def on_message(self, message): + """Send message to gateway server.""" + if self.ws is None: + loop = IOLoop.current() + loop.add_future(self.ws_future, lambda future: self._write_message(message)) + else: + self._write_message(message) + + def _write_message(self, message): + """Send message to gateway server.""" + try: + if not self.disconnected and self.ws is not None: + self.ws.write_message(message) + except Exception as e: + self.log.error( + "Exception writing message to websocket: {}".format(e) + ) # , exc_info=True) + + def on_close(self): + """Web socket closed event.""" + self._disconnect() + + +class GatewayResourceHandler(APIHandler): + """Retrieves resources for specific kernelspec definitions from kernel/enterprise gateway.""" + + @web.authenticated + async def get(self, kernel_name, path, include_body=True): + ksm = self.kernel_spec_manager + kernel_spec_res = await ksm.get_kernel_spec_resource(kernel_name, path) + if kernel_spec_res is None: + self.log.warning( + "Kernelspec resource '{}' for '{}' not found. 
Gateway may not support" + " resource serving.".format(path, kernel_name) + ) + else: + self.set_header("Content-Type", mimetypes.guess_type(path)[0]) + self.finish(kernel_spec_res) + + +from ..services.kernels.handlers import _kernel_id_regex +from ..services.kernelspecs.handlers import kernel_name_regex + +default_handlers = [ + (r"/api/kernels/%s/channels" % _kernel_id_regex, WebSocketChannelsHandler), + (r"/kernelspecs/%s/(?P.*)" % kernel_name_regex, GatewayResourceHandler), +] diff --git a/server/jupyter_server/gateway/managers.py b/server/jupyter_server/gateway/managers.py new file mode 100644 index 0000000..9462229 --- /dev/null +++ b/server/jupyter_server/gateway/managers.py @@ -0,0 +1,718 @@ +# Copyright (c) Jupyter Development Team. +# Distributed under the terms of the Modified BSD License. +import datetime +import json +import os +from logging import Logger +from queue import Queue +from threading import Thread +from typing import Dict + +import websocket +from jupyter_client.asynchronous.client import AsyncKernelClient +from jupyter_client.clientabc import KernelClientABC +from jupyter_client.kernelspec import KernelSpecManager +from jupyter_client.manager import AsyncKernelManager +from jupyter_client.managerabc import KernelManagerABC +from tornado import web +from tornado.escape import json_decode +from tornado.escape import json_encode +from tornado.escape import url_escape +from tornado.escape import utf8 +from traitlets import default +from traitlets import DottedObjectName +from traitlets import Instance +from traitlets import Type + +from .._tz import UTC +from ..services.kernels.kernelmanager import AsyncMappingKernelManager +from ..services.sessions.sessionmanager import SessionManager +from ..utils import ensure_async +from ..utils import url_path_join +from .gateway_client import gateway_request +from .gateway_client import GatewayClient + + +class GatewayMappingKernelManager(AsyncMappingKernelManager): + """Kernel manager that supports 
remote kernels hosted by Jupyter Kernel or Enterprise Gateway.""" + + # We'll maintain our own set of kernel ids + _kernels: Dict[str, "GatewayKernelManager"] = {} + + @default("kernel_manager_class") + def _default_kernel_manager_class(self): + return "jupyter_server.gateway.managers.GatewayKernelManager" + + @default("shared_context") + def _default_shared_context(self): + return False # no need to share zmq contexts + + def __init__(self, **kwargs): + super().__init__(**kwargs) + self.kernels_url = url_path_join( + GatewayClient.instance().url, GatewayClient.instance().kernels_endpoint + ) + + def remove_kernel(self, kernel_id): + """Complete override since we want to be more tolerant of missing keys""" + try: + return self._kernels.pop(kernel_id) + except KeyError: + pass + + async def start_kernel(self, kernel_id=None, path=None, **kwargs): + """Start a kernel for a session and return its kernel_id. + + Parameters + ---------- + kernel_id : uuid + The uuid to associate the new kernel with. If this + is not None, this kernel will be persistent whenever it is + requested. + path : API path + The API path (unicode, '/' delimited) for the cwd. + Will be transformed to an OS path relative to root_dir. + """ + self.log.info(f"Request start kernel: kernel_id={kernel_id}, path='{path}'") + + if kernel_id is None: + if path is not None: + kwargs["cwd"] = self.cwd_for_path(path) + + km = self.kernel_manager_factory(parent=self, log=self.log) + await km.start_kernel(**kwargs) + kernel_id = km.kernel_id + self._kernels[kernel_id] = km + + # Initialize culling if not already + if not self._initialized_culler: + self.initialize_culler() + + return kernel_id + + async def kernel_model(self, kernel_id): + """Return a dictionary of kernel information described in the + JSON standard model. + + Parameters + ---------- + kernel_id : uuid + The uuid of the kernel. 
+ """ + model = None + km = self.get_kernel(kernel_id) + if km: + model = km.kernel + return model + + async def list_kernels(self, **kwargs): + """Get a list of running kernels from the Gateway server. + + We'll use this opportunity to refresh the models in each of + the kernels we're managing. + """ + self.log.debug(f"Request list kernels: {self.kernels_url}") + response = await gateway_request(self.kernels_url, method="GET") + kernels = json_decode(response.body) + # Refresh our models to those we know about, and filter + # the return value with only our kernels. + kernel_models = {} + for model in kernels: + kid = model["id"] + if kid in self._kernels: + await self._kernels[kid].refresh_model(model) + kernel_models[kid] = model + # Remove any of our kernels that may have been culled on the gateway server + our_kernels = self._kernels.copy() + culled_ids = [] + for kid, km in our_kernels.items(): + if kid not in kernel_models: + self.log.warn(f"Kernel {kid} no longer active - probably culled on Gateway server.") + self._kernels.pop(kid, None) + culled_ids.append(kid) # TODO: Figure out what do with these. + return list(kernel_models.values()) + + async def shutdown_kernel(self, kernel_id, now=False, restart=False): + """Shutdown a kernel by its kernel uuid. + + Parameters + ========== + kernel_id : uuid + The id of the kernel to shutdown. + now : bool + Shutdown the kernel immediately (True) or gracefully (False) + restart : bool + The purpose of this shutdown is to restart the kernel (True) + """ + km = self.get_kernel(kernel_id) + await km.shutdown_kernel(now=now, restart=restart) + self.remove_kernel(kernel_id) + + async def restart_kernel(self, kernel_id, now=False, **kwargs): + """Restart a kernel by its kernel uuid. + + Parameters + ========== + kernel_id : uuid + The id of the kernel to restart. 
+ """ + km = self.get_kernel(kernel_id) + await km.restart_kernel(now=now, **kwargs) + + async def interrupt_kernel(self, kernel_id, **kwargs): + """Interrupt a kernel by its kernel uuid. + + Parameters + ========== + kernel_id : uuid + The id of the kernel to interrupt. + """ + km = self.get_kernel(kernel_id) + await km.interrupt_kernel() + + async def shutdown_all(self, now=False): + """Shutdown all kernels.""" + for kernel_id in self._kernels: + km = self.get_kernel(kernel_id) + await km.shutdown_kernel(now=now) + self.remove_kernel(kernel_id) + + async def cull_kernels(self): + """Override cull_kernels so we can be sure their state is current.""" + await self.list_kernels() + await super().cull_kernels() + + +class GatewayKernelSpecManager(KernelSpecManager): + def __init__(self, **kwargs): + super().__init__(**kwargs) + base_endpoint = url_path_join( + GatewayClient.instance().url, GatewayClient.instance().kernelspecs_endpoint + ) + + self.base_endpoint = GatewayKernelSpecManager._get_endpoint_for_user_filter(base_endpoint) + self.base_resource_endpoint = url_path_join( + GatewayClient.instance().url, + GatewayClient.instance().kernelspecs_resource_endpoint, + ) + + @staticmethod + def _get_endpoint_for_user_filter(default_endpoint): + kernel_user = os.environ.get("KERNEL_USERNAME") + if kernel_user: + return "?user=".join([default_endpoint, kernel_user]) + return default_endpoint + + def _get_kernelspecs_endpoint_url(self, kernel_name=None): + """Builds a url for the kernels endpoint + + Parameters + ---------- + kernel_name : kernel name (optional) + """ + if kernel_name: + return url_path_join(self.base_endpoint, url_escape(kernel_name)) + + return self.base_endpoint + + async def get_all_specs(self): + fetched_kspecs = await self.list_kernel_specs() + + # get the default kernel name and compare to that of this server. + # If different log a warning and reset the default. 
However, the + # caller of this method will still return this server's value until + # the next fetch of kernelspecs - at which time they'll match. + km = self.parent.kernel_manager + remote_default_kernel_name = fetched_kspecs.get("default") + if remote_default_kernel_name != km.default_kernel_name: + self.log.info( + f"Default kernel name on Gateway server ({remote_default_kernel_name}) differs from " + f"Notebook server ({km.default_kernel_name}). Updating to Gateway server's value." + ) + km.default_kernel_name = remote_default_kernel_name + + remote_kspecs = fetched_kspecs.get("kernelspecs") + return remote_kspecs + + async def list_kernel_specs(self): + """Get a list of kernel specs.""" + kernel_spec_url = self._get_kernelspecs_endpoint_url() + self.log.debug(f"Request list kernel specs at: {kernel_spec_url}") + response = await gateway_request(kernel_spec_url, method="GET") + kernel_specs = json_decode(response.body) + return kernel_specs + + async def get_kernel_spec(self, kernel_name, **kwargs): + """Get kernel spec for kernel_name. + + Parameters + ---------- + kernel_name : str + The name of the kernel. + """ + kernel_spec_url = self._get_kernelspecs_endpoint_url(kernel_name=str(kernel_name)) + self.log.debug(f"Request kernel spec at: {kernel_spec_url}") + try: + response = await gateway_request(kernel_spec_url, method="GET") + except web.HTTPError as error: + if error.status_code == 404: + # Convert not found to KeyError since that's what the Notebook handler expects + # message is not used, but might as well make it useful for troubleshooting + raise KeyError( + "kernelspec {kernel_name} not found on Gateway server at: {gateway_url}".format( + kernel_name=kernel_name, + gateway_url=GatewayClient.instance().url, + ) + ) from error + else: + raise + else: + kernel_spec = json_decode(response.body) + + return kernel_spec + + async def get_kernel_spec_resource(self, kernel_name, path): + """Get kernel spec for kernel_name. 
+ + Parameters + ---------- + kernel_name : str + The name of the kernel. + path : str + The name of the desired resource + """ + kernel_spec_resource_url = url_path_join( + self.base_resource_endpoint, str(kernel_name), str(path) + ) + self.log.debug(f"Request kernel spec resource '{path}' at: {kernel_spec_resource_url}") + try: + response = await gateway_request(kernel_spec_resource_url, method="GET") + except web.HTTPError as error: + if error.status_code == 404: + kernel_spec_resource = None + else: + raise + else: + kernel_spec_resource = response.body + return kernel_spec_resource + + +class GatewaySessionManager(SessionManager): + kernel_manager = Instance("jupyter_server.gateway.managers.GatewayMappingKernelManager") + + async def kernel_culled(self, kernel_id): + """Checks if the kernel is still considered alive and returns true if its not found.""" + kernel = None + try: + km = self.kernel_manager.get_kernel(kernel_id) + kernel = await km.refresh_model() + except Exception: # Let exceptions here reflect culled kernel + pass + return kernel is None + + +"""KernelManager class to manage a kernel running on a Gateway Server via the REST API""" + + +class GatewayKernelManager(AsyncKernelManager): + """Manages a single kernel remotely via a Gateway Server.""" + + kernel_id = None + kernel = None + + @default("cache_ports") + def _default_cache_ports(self): + return False # no need to cache ports here + + def __init__(self, **kwargs): + super().__init__(**kwargs) + self.kernels_url = url_path_join( + GatewayClient.instance().url, GatewayClient.instance().kernels_endpoint + ) + self.kernel_url = self.kernel = self.kernel_id = None + # simulate busy/activity markers: + self.execution_state = self.last_activity = None + + @property + def has_kernel(self): + """Has a kernel been started that we are managing.""" + return self.kernel is not None + + client_class = DottedObjectName("jupyter_server.gateway.managers.GatewayKernelClient") + client_factory = 
Type(klass="jupyter_server.gateway.managers.GatewayKernelClient") + + # -------------------------------------------------------------------------- + # create a Client connected to our Kernel + # -------------------------------------------------------------------------- + + def client(self, **kwargs): + """Create a client configured to connect to our kernel""" + kw = {} + kw.update(self.get_connection_info(session=True)) + kw.update( + dict( + connection_file=self.connection_file, + parent=self, + ) + ) + kw["kernel_id"] = self.kernel_id + + # add kwargs last, for manual overrides + kw.update(kwargs) + return self.client_factory(**kw) + + async def refresh_model(self, model=None): + """Refresh the kernel model. + + Parameters + ---------- + model : dict + The model from which to refresh the kernel. If None, the kernel + model is fetched from the Gateway server. + """ + if model is None: + self.log.debug("Request kernel at: %s" % self.kernel_url) + try: + response = await gateway_request(self.kernel_url, method="GET") + except web.HTTPError as error: + if error.status_code == 404: + self.log.warning("Kernel not found at: %s" % self.kernel_url) + model = None + else: + raise + else: + model = json_decode(response.body) + self.log.debug("Kernel retrieved: %s" % model) + + if model: # Update activity markers + self.last_activity = datetime.datetime.strptime( + model["last_activity"], "%Y-%m-%dT%H:%M:%S.%fZ" + ).replace(tzinfo=UTC) + self.execution_state = model["execution_state"] + if isinstance(self.parent, AsyncMappingKernelManager): + # Update connections only if there's a mapping kernel manager parent for + # this kernel manager. The current kernel manager instance may not have + # an parent instance if, say, a server extension is using another application + # (e.g., papermill) that uses a KernelManager instance directly. 
+ self.parent._kernel_connections[self.kernel_id] = int(model["connections"]) + + self.kernel = model + return model + + # -------------------------------------------------------------------------- + # Kernel management + # -------------------------------------------------------------------------- + + async def start_kernel(self, **kwargs): + """Starts a kernel via HTTP in an asynchronous manner. + + Parameters + ---------- + `**kwargs` : optional + keyword arguments that are passed down to build the kernel_cmd + and launching the kernel (e.g. Popen kwargs). + """ + kernel_id = kwargs.get("kernel_id") + + if kernel_id is None: + kernel_name = kwargs.get("kernel_name", "python3") + self.log.debug("Request new kernel at: %s" % self.kernels_url) + + # Let KERNEL_USERNAME take precedent over http_user config option. + if os.environ.get("KERNEL_USERNAME") is None and GatewayClient.instance().http_user: + os.environ["KERNEL_USERNAME"] = GatewayClient.instance().http_user + + kernel_env = { + k: v + for (k, v) in dict(os.environ).items() + if k.startswith("KERNEL_") or k in GatewayClient.instance().env_whitelist.split(",") + } + + # Add any env entries in this request + kernel_env.update(kwargs.get("env", {})) + + # Convey the full path to where this notebook file is located. 
+ if kwargs.get("cwd") is not None and kernel_env.get("KERNEL_WORKING_DIR") is None: + kernel_env["KERNEL_WORKING_DIR"] = kwargs["cwd"] + + json_body = json_encode({"name": kernel_name, "env": kernel_env}) + + response = await gateway_request(self.kernels_url, method="POST", body=json_body) + self.kernel = json_decode(response.body) + self.kernel_id = self.kernel["id"] + self.log.info( + "GatewayKernelManager started kernel: {}, args: {}".format(self.kernel_id, kwargs) + ) + else: + self.kernel_id = kernel_id + self.kernel = await self.refresh_model() + self.log.info("GatewayKernelManager using existing kernel: {}".format(self.kernel_id)) + + self.kernel_url = url_path_join(self.kernels_url, url_escape(str(self.kernel_id))) + + async def shutdown_kernel(self, now=False, restart=False): + """Attempts to stop the kernel process cleanly via HTTP.""" + + if self.has_kernel: + self.log.debug("Request shutdown kernel at: %s", self.kernel_url) + response = await gateway_request(self.kernel_url, method="DELETE") + self.log.debug("Shutdown kernel response: %d %s", response.code, response.reason) + + async def restart_kernel(self, **kw): + """Restarts a kernel via HTTP.""" + if self.has_kernel: + kernel_url = self.kernel_url + "/restart" + self.log.debug("Request restart kernel at: %s", kernel_url) + response = await gateway_request(kernel_url, method="POST", body=json_encode({})) + self.log.debug("Restart kernel response: %d %s", response.code, response.reason) + + async def interrupt_kernel(self): + """Interrupts the kernel via an HTTP request.""" + if self.has_kernel: + kernel_url = self.kernel_url + "/interrupt" + self.log.debug("Request interrupt kernel at: %s", kernel_url) + response = await gateway_request(kernel_url, method="POST", body=json_encode({})) + self.log.debug("Interrupt kernel response: %d %s", response.code, response.reason) + + async def is_alive(self): + """Is the kernel process still running?""" + if self.has_kernel: + # Go ahead and issue a request to 
get the kernel + self.kernel = await self.refresh_model() + return True + else: # we don't have a kernel + return False + + def cleanup_resources(self, restart=False): + """Clean up resources when the kernel is shut down""" + pass + + +KernelManagerABC.register(GatewayKernelManager) + + +class ChannelQueue(Queue): + + channel_name: str = None + + def __init__(self, channel_name: str, channel_socket: websocket.WebSocket, log: Logger): + super().__init__() + self.channel_name = channel_name + self.channel_socket = channel_socket + self.log = log + + async def get_msg(self, *args, **kwargs) -> dict: + timeout = kwargs.get("timeout", 1) + msg = self.get(timeout=timeout) + self.log.debug( + "Received message on channel: {}, msg_id: {}, msg_type: {}".format( + self.channel_name, msg["msg_id"], msg["msg_type"] if msg else "null" + ) + ) + self.task_done() + return msg + + def send(self, msg: dict) -> None: + message = json.dumps(msg, default=ChannelQueue.serialize_datetime).replace(" None: + pass + + def stop(self) -> None: + if not self.empty(): + # If unprocessed messages are detected, drain the queue collecting non-status + # messages. If any remain that are not 'shutdown_reply' and this is not iopub + # go ahead and issue a warning. + msgs = [] + while self.qsize(): + msg = self.get_nowait() + if msg["msg_type"] != "status": + msgs.append(msg["msg_type"]) + if self.channel_name == "iopub" and "shutdown_reply" in msgs: + return + if len(msgs): + self.log.warning( + "Stopping channel '{}' with {} unprocessed non-status messages: {}.".format( + self.channel_name, len(msgs), msgs + ) + ) + + def is_alive(self) -> bool: + return self.channel_socket is not None + + +class HBChannelQueue(ChannelQueue): + def is_beating(self) -> bool: + # Just use the is_alive status for now + return self.is_alive() + + +class GatewayKernelClient(AsyncKernelClient): + """Communicates with a single kernel indirectly via a websocket to a gateway server. 
+ + There are five channels associated with each kernel: + + * shell: for request/reply calls to the kernel. + * iopub: for the kernel to publish results to frontends. + * hb: for monitoring the kernel's heartbeat. + * stdin: for frontends to reply to raw_input calls in the kernel. + * control: for kernel management calls to the kernel. + + The messages that can be sent on these channels are exposed as methods of the + client (KernelClient.execute, complete, history, etc.). These methods only + send the message, they don't wait for a reply. To get results, use e.g. + :meth:`get_shell_msg` to fetch messages from the shell channel. + """ + + # flag for whether execute requests should be allowed to call raw_input: + allow_stdin = False + _channels_stopped = False + _channel_queues = {} + + def __init__(self, **kwargs): + super().__init__(**kwargs) + self.kernel_id = kwargs["kernel_id"] + self.channel_socket = None + self.response_router = None + + # -------------------------------------------------------------------------- + # Channel management methods + # -------------------------------------------------------------------------- + + async def start_channels(self, shell=True, iopub=True, stdin=True, hb=True, control=True): + """Starts the channels for this kernel. + + For this class, we establish a websocket connection to the destination + and setup the channel-based queues on which applicable messages will + be posted. + """ + + ws_url = url_path_join( + GatewayClient.instance().ws_url, + GatewayClient.instance().kernels_endpoint, + url_escape(self.kernel_id), + "channels", + ) + # Gather cert info in case where ssl is desired... 
+ ssl_options = dict() + ssl_options["ca_certs"] = GatewayClient.instance().ca_certs + ssl_options["certfile"] = GatewayClient.instance().client_cert + ssl_options["keyfile"] = GatewayClient.instance().client_key + + self.channel_socket = websocket.create_connection( + ws_url, + timeout=GatewayClient.instance().KERNEL_LAUNCH_TIMEOUT, + enable_multithread=True, + sslopt=ssl_options, + ) + self.response_router = Thread(target=self._route_responses) + self.response_router.start() + + await ensure_async( + super().start_channels(shell=shell, iopub=iopub, stdin=stdin, hb=hb, control=control) + ) + + def stop_channels(self): + """Stops all the running channels for this kernel. + + For this class, we close the websocket connection and destroy the + channel-based queues. + """ + super().stop_channels() + self._channels_stopped = True + self.log.debug("Closing websocket connection") + + self.channel_socket.close() + self.response_router.join() + + if self._channel_queues: + self._channel_queues.clear() + self._channel_queues = None + + # Channels are implemented via a ChannelQueue that is used to send and receive messages + + @property + def shell_channel(self): + """Get the shell channel object for this kernel.""" + if self._shell_channel is None: + self.log.debug("creating shell channel queue") + self._shell_channel = ChannelQueue("shell", self.channel_socket, self.log) + self._channel_queues["shell"] = self._shell_channel + return self._shell_channel + + @property + def iopub_channel(self): + """Get the iopub channel object for this kernel.""" + if self._iopub_channel is None: + self.log.debug("creating iopub channel queue") + self._iopub_channel = ChannelQueue("iopub", self.channel_socket, self.log) + self._channel_queues["iopub"] = self._iopub_channel + return self._iopub_channel + + @property + def stdin_channel(self): + """Get the stdin channel object for this kernel.""" + if self._stdin_channel is None: + self.log.debug("creating stdin channel queue") + 
self._stdin_channel = ChannelQueue("stdin", self.channel_socket, self.log) + self._channel_queues["stdin"] = self._stdin_channel + return self._stdin_channel + + @property + def hb_channel(self): + """Get the hb channel object for this kernel.""" + if self._hb_channel is None: + self.log.debug("creating hb channel queue") + self._hb_channel = HBChannelQueue("hb", self.channel_socket, self.log) + self._channel_queues["hb"] = self._hb_channel + return self._hb_channel + + @property + def control_channel(self): + """Get the control channel object for this kernel.""" + if self._control_channel is None: + self.log.debug("creating control channel queue") + self._control_channel = ChannelQueue("control", self.channel_socket, self.log) + self._channel_queues["control"] = self._control_channel + return self._control_channel + + def _route_responses(self): + """ + Reads responses from the websocket and routes each to the appropriate channel queue based + on the message's channel. It does this for the duration of the class's lifetime until the + channels are stopped, at which time the socket is closed (unblocking the router) and + the thread terminates. If shutdown happens to occur while processing a response (unlikely), + termination takes place via the loop control boolean. 
+ """ + try: + while not self._channels_stopped: + raw_message = self.channel_socket.recv() + if not raw_message: + break + response_message = json_decode(utf8(raw_message)) + channel = response_message["channel"] + self._channel_queues[channel].put_nowait(response_message) + + except websocket.WebSocketConnectionClosedException: + pass # websocket closure most likely due to shutdown + + except BaseException as be: + if not self._channels_stopped: + self.log.warning("Unexpected exception encountered ({})".format(be)) + + self.log.debug("Response router thread exiting...") + + +KernelClientABC.register(GatewayKernelClient) diff --git a/server/jupyter_server/i18n/README.md b/server/jupyter_server/i18n/README.md new file mode 100644 index 0000000..a36d026 --- /dev/null +++ b/server/jupyter_server/i18n/README.md @@ -0,0 +1,132 @@ +# Implementation Notes for Internationalization of Jupyter Notebook + +The implementation of i18n features for jupyter notebook is still a work-in-progress: + +- User interface strings are (mostly) handled +- Console messages are not handled (their usefulness in a translated environment is questionable) +- Tooling has to be refined + +However… + +## How the language is selected ? + +1. `jupyter notebook` command reads the `LANG` environment variable at startup, + (`xx_XX` or just `xx` form, where `xx` is the language code you're wanting to + run in). + +Hint: if running Windows, you can set it in PowerShell with `${Env:LANG} = "xx_XX"`. +if running Ubuntu 14, you should set environment variable `LANGUAGE="xx_XX"`. + +2. The preferred language for web pages in your browser settings (`xx`) is + also used. At the moment, it has to be first in the list. 
+
+## Contributing and managing translations
+
+### Requirements
+
+- _pybabel_ (could be installed with `pip install babel`)
+- _po2json_ (could be installed with `npm install -g po2json`)
+
+**All i18n-related commands are done from the related directory:**
+
+    cd notebook/i18n/
+
+### Message extraction
+
+The translatable material for notebook is split into 3 `.pot` files, as follows:
+
+- _notebook/i18n/notebook.pot_ - Console and startup messages, basically anything that is
+  produced by Python code.
+- _notebook/i18n/nbui.pot_ - User interface strings, as extracted from the Jinja2 templates
+  in _notebook/templates/\*.html_
+- _notebook/i18n/nbjs.pot_ - JavaScript strings and dialogs, which contain much of the visible
+  user interface for Jupyter notebook.
+
+To extract the messages from the source code whenever new material is added, use the
+`pybabel` command:
+
+```shell
+pybabel extract -F babel_notebook.cfg -o notebook.pot --no-wrap --project Jupyter .
+pybabel extract -F babel_nbui.cfg -o nbui.pot --no-wrap --project Jupyter .
+pybabel extract -F babel_nbjs.cfg -o nbjs.pot --no-wrap --project Jupyter .
+```
+
+After this is complete you have 3 `.pot` files that you can give to a translator for your favorite language.
+
+### Messages compilation
+
+After the source material has been translated, you should have 3 `.po` files with the same base names
+as the `.pot` files above. Put them in `notebook/i18n/${LANG}/LC_MESSAGES`, where `${LANG}` is the language
+code for your desired language ( i.e. German = "de", Japanese = "ja", etc. ).
+
+_notebook.po_ and _nbui.po_ need to be converted from `.po` to `.mo` format for
+use at runtime.
+ +```shell +pybabel compile -D notebook -f -l ${LANG} -i ${LANG}/LC_MESSAGES/notebook.po -o ${LANG}/LC_MESSAGES/notebook.mo +pybabel compile -D nbui -f -l ${LANG} -i ${LANG}/LC_MESSAGES/nbui.po -o ${LANG}/LC_MESSAGES/nbui.mo +``` + +_nbjs.po_ needs to be converted to JSON for use within the JavaScript code, with _po2json_, as follows: + + po2json -p -F -f jed1.x -d nbjs ${LANG}/LC_MESSAGES/nbjs.po ${LANG}/LC_MESSAGES/nbjs.json + +When new languages get added, their language codes should be added to _notebook/i18n/nbjs.json_ +under the `supported_languages` element. + +### Tips for Jupyter developers + +The biggest "mistake" I found while doing i18n enablement was the habit of constructing UI messages +from English "piece parts". For example, code like: + +```javascript +var msg = 'Enter a new ' + type + 'name:'; +``` + +where `type` is either "file", "directory", or "notebook".... + +is problematic when doing translations, because the surrounding text may need to vary +depending on the inserted word. In this case, you need to switch it and use complete phrases, +as follows: + +```javascript +var rename_msg = function (type) { + switch (type) { + case 'file': + return _('Enter a new file name:'); + case 'directory': + return _('Enter a new directory name:'); + case 'notebook': + return _('Enter a new notebook name:'); + default: + return _('Enter a new name:'); + } +}; +``` + +Also you need to remember that adding an "s" or "es" to an English word to +create the plural form doesn't translate well. Some languages have as many as 5 or 6 different +plural forms for differing numbers, so using an API such as ngettext() is necessary in order +to handle these cases properly. + +### Known issues and future evolutions + +1. Right now there are two different places where the desired language is set. At startup time, the Jupyter console's messages pay attention to the setting of the `${LANG}` environment variable + as set in the shell at startup time. 
Unfortunately, this is also the time where the Jinja2 + environment is set up, which means that the template stuff will always come from this setting. + We really want to be paying attention to the browser's settings for the stuff that happens in the + browser, so we need to be able to retrieve this information after the browser is started and somehow + communicate this back to Jinja2. So far, I haven't yet figured out how to do this, which means that if the \${LANG} at startup doesn't match the browser's settings, you could potentially get a mix + of languages in the UI ( never a good thing ). + +2. We will need to decide if console messages should be translatable, and enable them if desired. +3. The keyboard shortcut editor was implemented after the i18n work was completed, so that portion + does not have translation support at this time. +4. Babel's documentation has instructions on how to integrate messages extraction + into your _setup.py_ so that eventually we can just do: + + ./setup.py extract_messages + +I hope to get this working at some point in the near future. 5. The conversions from `.po` to `.mo` probably can and should be done using `setup.py install`. + +Any questions or comments please let me know @JCEmmons on github (emmo@us.ibm.com) diff --git a/server/jupyter_server/i18n/__init__.py b/server/jupyter_server/i18n/__init__.py new file mode 100644 index 0000000..2ffa7ad --- /dev/null +++ b/server/jupyter_server/i18n/__init__.py @@ -0,0 +1,101 @@ +"""Server functions for loading translations +""" +import errno +import io +import json +import re +from collections import defaultdict +from os.path import dirname +from os.path import join as pjoin + +I18N_DIR = dirname(__file__) +# Cache structure: +# {'nbjs': { # Domain +# 'zh-CN': { # Language code +# : +# ... +# } +# }} +TRANSLATIONS_CACHE = {"nbjs": {}} + + +_accept_lang_re = re.compile( + r""" +(?P[a-zA-Z]{1,8}(-[a-zA-Z]{1,8})?) +(\s*;\s*q\s*=\s* + (?P[01](.\d+)?) 
+)?""", + re.VERBOSE, +) + + +def parse_accept_lang_header(accept_lang): + """Parses the 'Accept-Language' HTTP header. + + Returns a list of language codes in *ascending* order of preference + (with the most preferred language last). + """ + by_q = defaultdict(list) + for part in accept_lang.split(","): + m = _accept_lang_re.match(part.strip()) + if not m: + continue + lang, qvalue = m.group("lang", "qvalue") + # Browser header format is zh-CN, gettext uses zh_CN + lang = lang.replace("-", "_") + if qvalue is None: + qvalue = 1.0 + else: + qvalue = float(qvalue) + if qvalue == 0: + continue # 0 means not accepted + by_q[qvalue].append(lang) + + res = [] + for qvalue, langs in sorted(by_q.items()): + res.extend(sorted(langs)) + return res + + +def load(language, domain="nbjs"): + """Load translations from an nbjs.json file""" + try: + f = io.open(pjoin(I18N_DIR, language, "LC_MESSAGES", "nbjs.json"), encoding="utf-8") + except IOError as e: + if e.errno != errno.ENOENT: + raise + return {} + + with f: + data = json.load(f) + return data["locale_data"][domain] + + +def cached_load(language, domain="nbjs"): + """Load translations for one language, using in-memory cache if available""" + domain_cache = TRANSLATIONS_CACHE[domain] + try: + return domain_cache[language] + except KeyError: + data = load(language, domain) + domain_cache[language] = data + return data + + +def combine_translations(accept_language, domain="nbjs"): + """Combine translations for multiple accepted languages. + + Returns data re-packaged in jed1.x format. + """ + lang_codes = parse_accept_lang_header(accept_language) + combined = {} + for language in lang_codes: + if language == "en": + # en is default, all translations are in frontend. 
+ combined.clear() + else: + combined.update(cached_load(language, domain)) + + combined[""] = {"domain": "nbjs"} + + return {"domain": domain, "locale_data": {domain: combined}} diff --git a/server/jupyter_server/i18n/babel_nbui.cfg b/server/jupyter_server/i18n/babel_nbui.cfg new file mode 100644 index 0000000..271554a --- /dev/null +++ b/server/jupyter_server/i18n/babel_nbui.cfg @@ -0,0 +1,4 @@ +[jinja2: notebook/templates/**.html] + encoding = utf-8 +[extractors] + jinja2 = jinja2.ext:babel_extract diff --git a/server/jupyter_server/i18n/babel_notebook.cfg b/server/jupyter_server/i18n/babel_notebook.cfg new file mode 100644 index 0000000..d4e3cf9 --- /dev/null +++ b/server/jupyter_server/i18n/babel_notebook.cfg @@ -0,0 +1,2 @@ +[python: notebook/*.py] +[python: notebook/services/contents/*.py] diff --git a/server/jupyter_server/i18n/nbjs.json b/server/jupyter_server/i18n/nbjs.json new file mode 100644 index 0000000..a263a46 --- /dev/null +++ b/server/jupyter_server/i18n/nbjs.json @@ -0,0 +1,11 @@ +{ + "domain": "nbjs", + "supported_languages": ["zh-CN"], + "locale_data": { + "nbjs": { + "": { + "domain": "nbjs" + } + } + } +} diff --git a/server/jupyter_server/i18n/nbui.pot b/server/jupyter_server/i18n/nbui.pot new file mode 100644 index 0000000..7ca038e --- /dev/null +++ b/server/jupyter_server/i18n/nbui.pot @@ -0,0 +1,731 @@ +# Translations template for Jupyter. +# Copyright (C) 2017 ORGANIZATION +# This file is distributed under the same license as the Jupyter project. +# FIRST AUTHOR , 2017. 
+# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: Kennen VERSION\n" +"Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" +"POT-Creation-Date: 2017-07-07 12:48-0500\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME \n" +"Language-Team: LANGUAGE \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=utf-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Generated-By: Babel 2.3.4\n" + +#: notebook/templates/404.html:3 +msgid "You are requesting a page that does not exist!" +msgstr "" + +#: notebook/templates/edit.html:37 +msgid "current mode" +msgstr "" + +#: notebook/templates/edit.html:48 notebook/templates/notebook.html:78 +msgid "File" +msgstr "" + +#: notebook/templates/edit.html:50 notebook/templates/tree.html:57 +msgid "New" +msgstr "" + +#: notebook/templates/edit.html:51 +msgid "Save" +msgstr "" + +#: notebook/templates/edit.html:52 notebook/templates/tree.html:36 +msgid "Rename" +msgstr "" + +#: notebook/templates/edit.html:53 notebook/templates/tree.html:38 +msgid "Download" +msgstr "" + +#: notebook/templates/edit.html:56 notebook/templates/notebook.html:131 +#: notebook/templates/tree.html:41 +msgid "Edit" +msgstr "" + +#: notebook/templates/edit.html:58 +msgid "Find" +msgstr "" + +#: notebook/templates/edit.html:59 +msgid "Find & Replace" +msgstr "" + +#: notebook/templates/edit.html:61 +msgid "Key Map" +msgstr "" + +#: notebook/templates/edit.html:62 +msgid "Default" +msgstr "" + +#: notebook/templates/edit.html:63 +msgid "Sublime Text" +msgstr "" + +#: notebook/templates/edit.html:68 notebook/templates/notebook.html:159 +#: notebook/templates/tree.html:40 +msgid "View" +msgstr "" + +#: notebook/templates/edit.html:70 notebook/templates/notebook.html:162 +msgid "Show/Hide the logo and notebook title (above menu bar)" +msgstr "" + +#: notebook/templates/edit.html:71 notebook/templates/notebook.html:163 +msgid "Toggle Header" +msgstr "" + +#: notebook/templates/edit.html:72 notebook/templates/notebook.html:171 +msgid "Toggle Line 
Numbers" +msgstr "" + +#: notebook/templates/edit.html:75 +msgid "Language" +msgstr "" + +#: notebook/templates/error.html:23 +msgid "The error was:" +msgstr "" + +#: notebook/templates/login.html:24 +msgid "Password or token:" +msgstr "" + +#: notebook/templates/login.html:26 +msgid "Password:" +msgstr "" + +#: notebook/templates/login.html:31 +msgid "Log in" +msgstr "" + +#: notebook/templates/login.html:39 +msgid "No login available, you shouldn't be seeing this page." +msgstr "" + +#: notebook/templates/logout.html:24 +#, python-format +msgid "Proceed to the dashboard" +msgstr "" + +#: notebook/templates/logout.html:26 +#, python-format +msgid "Proceed to the login page" +msgstr "" + +#: notebook/templates/notebook.html:62 +msgid "Menu" +msgstr "" + +#: notebook/templates/notebook.html:65 notebook/templates/notebook.html:254 +msgid "Kernel" +msgstr "" + +#: notebook/templates/notebook.html:68 +msgid "This notebook is read-only" +msgstr "" + +#: notebook/templates/notebook.html:81 +msgid "New Notebook" +msgstr "" + +#: notebook/templates/notebook.html:85 +msgid "Opens a new window with the Dashboard view" +msgstr "" + +#: notebook/templates/notebook.html:86 +msgid "Open..." +msgstr "" + +#: notebook/templates/notebook.html:90 +msgid "Open a copy of this notebook's contents and start a new kernel" +msgstr "" + +#: notebook/templates/notebook.html:91 +msgid "Make a Copy..." +msgstr "" + +#: notebook/templates/notebook.html:92 +msgid "Rename..." 
+msgstr "" + +#: notebook/templates/notebook.html:93 +msgid "Save and Checkpoint" +msgstr "" + +#: notebook/templates/notebook.html:96 +msgid "Revert to Checkpoint" +msgstr "" + +#: notebook/templates/notebook.html:106 +msgid "Print Preview" +msgstr "" + +#: notebook/templates/notebook.html:107 +msgid "Download as" +msgstr "" + +#: notebook/templates/notebook.html:109 +msgid "Notebook (.ipynb)" +msgstr "" + +#: notebook/templates/notebook.html:110 +msgid "Script" +msgstr "" + +#: notebook/templates/notebook.html:111 +msgid "HTML (.html)" +msgstr "" + +#: notebook/templates/notebook.html:112 +msgid "Markdown (.md)" +msgstr "" + +#: notebook/templates/notebook.html:113 +msgid "reST (.rst)" +msgstr "" + +#: notebook/templates/notebook.html:114 +msgid "LaTeX (.tex)" +msgstr "" + +#: notebook/templates/notebook.html:115 +msgid "PDF via LaTeX (.pdf)" +msgstr "" + +#: notebook/templates/notebook.html:118 +msgid "Deploy as" +msgstr "" + +#: notebook/templates/notebook.html:123 +msgid "Trust the output of this notebook" +msgstr "" + +#: notebook/templates/notebook.html:124 +msgid "Trust Notebook" +msgstr "" + +#: notebook/templates/notebook.html:127 +msgid "Shutdown this notebook's kernel, and close this window" +msgstr "" + +#: notebook/templates/notebook.html:128 +msgid "Close and Halt" +msgstr "" + +#: notebook/templates/notebook.html:133 +msgid "Cut Cells" +msgstr "" + +#: notebook/templates/notebook.html:134 +msgid "Copy Cells" +msgstr "" + +#: notebook/templates/notebook.html:135 +msgid "Paste Cells Above" +msgstr "" + +#: notebook/templates/notebook.html:136 +msgid "Paste Cells Below" +msgstr "" + +#: notebook/templates/notebook.html:137 +msgid "Paste Cells & Replace" +msgstr "" + +#: notebook/templates/notebook.html:138 +msgid "Delete Cells" +msgstr "" + +#: notebook/templates/notebook.html:139 +msgid "Undo Delete Cells" +msgstr "" + +#: notebook/templates/notebook.html:141 +msgid "Split Cell" +msgstr "" + +#: notebook/templates/notebook.html:142 +msgid "Merge Cell 
Above" +msgstr "" + +#: notebook/templates/notebook.html:143 +msgid "Merge Cell Below" +msgstr "" + +#: notebook/templates/notebook.html:145 +msgid "Move Cell Up" +msgstr "" + +#: notebook/templates/notebook.html:146 +msgid "Move Cell Down" +msgstr "" + +#: notebook/templates/notebook.html:148 +msgid "Edit Notebook Metadata" +msgstr "" + +#: notebook/templates/notebook.html:150 +msgid "Find and Replace" +msgstr "" + +#: notebook/templates/notebook.html:152 +msgid "Cut Cell Attachments" +msgstr "" + +#: notebook/templates/notebook.html:153 +msgid "Copy Cell Attachments" +msgstr "" + +#: notebook/templates/notebook.html:154 +msgid "Paste Cell Attachments" +msgstr "" + +#: notebook/templates/notebook.html:156 +msgid "Insert Image" +msgstr "" + +#: notebook/templates/notebook.html:166 +msgid "Show/Hide the action icons (below menu bar)" +msgstr "" + +#: notebook/templates/notebook.html:167 +msgid "Toggle Toolbar" +msgstr "" + +#: notebook/templates/notebook.html:170 +msgid "Show/Hide line numbers in cells" +msgstr "" + +#: notebook/templates/notebook.html:174 +msgid "Cell Toolbar" +msgstr "" + +#: notebook/templates/notebook.html:179 +msgid "Insert" +msgstr "" + +#: notebook/templates/notebook.html:182 +msgid "Insert an empty Code cell above the currently active cell" +msgstr "" + +#: notebook/templates/notebook.html:183 +msgid "Insert Cell Above" +msgstr "" + +#: notebook/templates/notebook.html:185 +msgid "Insert an empty Code cell below the currently active cell" +msgstr "" + +#: notebook/templates/notebook.html:186 +msgid "Insert Cell Below" +msgstr "" + +#: notebook/templates/notebook.html:189 +msgid "Cell" +msgstr "" + +#: notebook/templates/notebook.html:191 +msgid "Run this cell, and move cursor to the next one" +msgstr "" + +#: notebook/templates/notebook.html:192 +msgid "Run Cells" +msgstr "" + +#: notebook/templates/notebook.html:193 +msgid "Run this cell, select below" +msgstr "" + +#: notebook/templates/notebook.html:194 +msgid "Run Cells and Select Below" 
+msgstr "" + +#: notebook/templates/notebook.html:195 +msgid "Run this cell, insert below" +msgstr "" + +#: notebook/templates/notebook.html:196 +msgid "Run Cells and Insert Below" +msgstr "" + +#: notebook/templates/notebook.html:197 +msgid "Run all cells in the notebook" +msgstr "" + +#: notebook/templates/notebook.html:198 +msgid "Run All" +msgstr "" + +#: notebook/templates/notebook.html:199 +msgid "Run all cells above (but not including) this cell" +msgstr "" + +#: notebook/templates/notebook.html:200 +msgid "Run All Above" +msgstr "" + +#: notebook/templates/notebook.html:201 +msgid "Run this cell and all cells below it" +msgstr "" + +#: notebook/templates/notebook.html:202 +msgid "Run All Below" +msgstr "" + +#: notebook/templates/notebook.html:205 +msgid "All cells in the notebook have a cell type. By default, new cells are created as 'Code' cells" +msgstr "" + +#: notebook/templates/notebook.html:206 +msgid "Cell Type" +msgstr "" + +#: notebook/templates/notebook.html:209 +msgid "Contents will be sent to the kernel for execution, and output will display in the footer of cell" +msgstr "" + +#: notebook/templates/notebook.html:212 +msgid "Contents will be rendered as HTML and serve as explanatory text" +msgstr "" + +#: notebook/templates/notebook.html:213 notebook/templates/notebook.html:298 +msgid "Markdown" +msgstr "" + +#: notebook/templates/notebook.html:215 +msgid "Contents will pass through nbconvert unmodified" +msgstr "" + +#: notebook/templates/notebook.html:216 +msgid "Raw NBConvert" +msgstr "" + +#: notebook/templates/notebook.html:220 +msgid "Current Outputs" +msgstr "" + +#: notebook/templates/notebook.html:223 +msgid "Hide/Show the output of the current cell" +msgstr "" + +#: notebook/templates/notebook.html:224 notebook/templates/notebook.html:240 +msgid "Toggle" +msgstr "" + +#: notebook/templates/notebook.html:227 +msgid "Scroll the output of the current cell" +msgstr "" + +#: notebook/templates/notebook.html:228 
notebook/templates/notebook.html:244 +msgid "Toggle Scrolling" +msgstr "" + +#: notebook/templates/notebook.html:231 +msgid "Clear the output of the current cell" +msgstr "" + +#: notebook/templates/notebook.html:232 notebook/templates/notebook.html:248 +msgid "Clear" +msgstr "" + +#: notebook/templates/notebook.html:236 +msgid "All Output" +msgstr "" + +#: notebook/templates/notebook.html:239 +msgid "Hide/Show the output of all cells" +msgstr "" + +#: notebook/templates/notebook.html:243 +msgid "Scroll the output of all cells" +msgstr "" + +#: notebook/templates/notebook.html:247 +msgid "Clear the output of all cells" +msgstr "" + +#: notebook/templates/notebook.html:257 +msgid "Send Keyboard Interrupt (CTRL-C) to the Kernel" +msgstr "" + +#: notebook/templates/notebook.html:258 +msgid "Interrupt" +msgstr "" + +#: notebook/templates/notebook.html:261 +msgid "Restart the Kernel" +msgstr "" + +#: notebook/templates/notebook.html:262 +msgid "Restart" +msgstr "" + +#: notebook/templates/notebook.html:265 +msgid "Restart the Kernel and clear all output" +msgstr "" + +#: notebook/templates/notebook.html:266 +msgid "Restart & Clear Output" +msgstr "" + +#: notebook/templates/notebook.html:269 +msgid "Restart the Kernel and re-run the notebook" +msgstr "" + +#: notebook/templates/notebook.html:270 +msgid "Restart & Run All" +msgstr "" + +#: notebook/templates/notebook.html:273 +msgid "Reconnect to the Kernel" +msgstr "" + +#: notebook/templates/notebook.html:274 +msgid "Reconnect" +msgstr "" + +#: notebook/templates/notebook.html:282 +msgid "Change kernel" +msgstr "" + +#: notebook/templates/notebook.html:287 +msgid "Help" +msgstr "" + +#: notebook/templates/notebook.html:290 +msgid "A quick tour of the notebook user interface" +msgstr "" + +#: notebook/templates/notebook.html:290 +msgid "User Interface Tour" +msgstr "" + +#: notebook/templates/notebook.html:291 +msgid "Opens a tooltip with all keyboard shortcuts" +msgstr "" + +#: notebook/templates/notebook.html:291 
+msgid "Keyboard Shortcuts" +msgstr "" + +#: notebook/templates/notebook.html:292 +msgid "Opens a dialog allowing you to edit Keyboard shortcuts" +msgstr "" + +#: notebook/templates/notebook.html:292 +msgid "Edit Keyboard Shortcuts" +msgstr "" + +#: notebook/templates/notebook.html:297 +msgid "Notebook Help" +msgstr "" + +#: notebook/templates/notebook.html:303 +msgid "Opens in a new window" +msgstr "" + +#: notebook/templates/notebook.html:319 +msgid "About Kennen Notebook" +msgstr "" + +#: notebook/templates/notebook.html:319 +msgid "About" +msgstr "" + +#: notebook/templates/page.html:114 +msgid "Jupyter Notebook requires JavaScript." +msgstr "" + +#: notebook/templates/page.html:115 +msgid "Please enable it to proceed. " +msgstr "" + +#: notebook/templates/page.html:121 +msgid "dashboard" +msgstr "" + +#: notebook/templates/page.html:132 +msgid "Logout" +msgstr "" + +#: notebook/templates/page.html:134 +msgid "Login" +msgstr "" + +#: notebook/templates/tree.html:23 +msgid "Files" +msgstr "" + +#: notebook/templates/tree.html:24 +msgid "Running" +msgstr "" + +#: notebook/templates/tree.html:25 +msgid "Clusters" +msgstr "" + +#: notebook/templates/tree.html:32 +msgid "Select items to perform actions on them." 
+msgstr "" + +#: notebook/templates/tree.html:35 +msgid "Duplicate selected" +msgstr "" + +#: notebook/templates/tree.html:35 +msgid "Duplicate" +msgstr "" + +#: notebook/templates/tree.html:36 +msgid "Rename selected" +msgstr "" + +#: notebook/templates/tree.html:37 +msgid "Move selected" +msgstr "" + +#: notebook/templates/tree.html:37 +msgid "Move" +msgstr "" + +#: notebook/templates/tree.html:38 +msgid "Download selected" +msgstr "" + +#: notebook/templates/tree.html:39 +msgid "Shutdown selected notebook(s)" +msgstr "" + +#: notebook/templates/notebook.html:278 +#: notebook/templates/tree.html:39 +msgid "Shutdown" +msgstr "" + +#: notebook/templates/tree.html:40 +msgid "View selected" +msgstr "" + +#: notebook/templates/tree.html:41 +msgid "Edit selected" +msgstr "" + +#: notebook/templates/tree.html:42 +msgid "Delete selected" +msgstr "" + +#: notebook/templates/tree.html:50 +msgid "Click to browse for a file to upload." +msgstr "" + +#: notebook/templates/tree.html:51 +msgid "Upload" +msgstr "" + +#: notebook/templates/tree.html:65 +msgid "Text File" +msgstr "" + +#: notebook/templates/tree.html:68 +msgid "Folder" +msgstr "" + +#: notebook/templates/tree.html:72 +msgid "Terminal" +msgstr "" + +#: notebook/templates/tree.html:76 +msgid "Terminals Unavailable" +msgstr "" + +#: notebook/templates/tree.html:82 +msgid "Refresh notebook list" +msgstr "" + +#: notebook/templates/tree.html:90 +msgid "Select All / None" +msgstr "" + +#: notebook/templates/tree.html:93 +msgid "Select..." 
+msgstr "" + +#: notebook/templates/tree.html:98 +msgid "Select All Folders" +msgstr "" + +#: notebook/templates/tree.html:98 +msgid " Folders" +msgstr "" + +#: notebook/templates/tree.html:99 +msgid "Select All Notebooks" +msgstr "" + +#: notebook/templates/tree.html:99 +msgid " All Notebooks" +msgstr "" + +#: notebook/templates/tree.html:100 +msgid "Select Running Notebooks" +msgstr "" + +#: notebook/templates/tree.html:100 +msgid " Running" +msgstr "" + +#: notebook/templates/tree.html:101 +msgid "Select All Files" +msgstr "" + +#: notebook/templates/tree.html:101 +msgid " Files" +msgstr "" + +#: notebook/templates/tree.html:114 +msgid "Last Modified" +msgstr "" + +#: notebook/templates/tree.html:120 +msgid "Name" +msgstr "" + +#: notebook/templates/tree.html:130 +msgid "Currently running Kennen processes" +msgstr "" + +#: notebook/templates/tree.html:134 +msgid "Refresh running list" +msgstr "" + +#: notebook/templates/tree.html:150 +msgid "There are no terminals running." +msgstr "" + +#: notebook/templates/tree.html:152 +msgid "Terminals are unavailable." +msgstr "" + +#: notebook/templates/tree.html:162 +msgid "Notebooks" +msgstr "" + +#: notebook/templates/tree.html:169 +msgid "There are no notebooks running." +msgstr "" + +#: notebook/templates/tree.html:178 +msgid "Clusters tab is now provided by IPython parallel." +msgstr "" + +#: notebook/templates/tree.html:179 +msgid "See 'IPython parallel' for installation details." +msgstr "" diff --git a/server/jupyter_server/i18n/notebook.pot b/server/jupyter_server/i18n/notebook.pot new file mode 100644 index 0000000..272e514 --- /dev/null +++ b/server/jupyter_server/i18n/notebook.pot @@ -0,0 +1,442 @@ +# Translations template for Jupyter. +# Copyright (C) 2017 ORGANIZATION +# This file is distributed under the same license as the Jupyter project. +# FIRST AUTHOR , 2017. 
+# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: Kennen VERSION\n" +"Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" +"POT-Creation-Date: 2017-07-08 21:52-0500\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME \n" +"Language-Team: LANGUAGE \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=utf-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Generated-By: Babel 2.3.4\n" + +#: jupyter_server/serverapp.py:53 +msgid "The Kennen Server requires tornado >= 4.0" +msgstr "" + +#: jupyter_server/serverapp.py:57 +msgid "The Kennen Server requires tornado >= 4.0, but you have < 1.1.0" +msgstr "" + +#: jupyter_server/serverapp.py:59 +#, python-format +msgid "The Kennen Server requires tornado >= 4.0, but you have %s" +msgstr "" + +#: jupyter_server/serverapp.py:389 +msgid "List currently running Kennen servers." +msgstr "" + +#: jupyter_server/serverapp.py:393 +msgid "Produce machine-readable JSON output." +msgstr "" + +#: jupyter_server/serverapp.py:397 +msgid "If True, each line of output will be a JSON object with the details from the server info file." +msgstr "" + +#: jupyter_server/serverapp.py:402 +msgid "Currently running servers:" +msgstr "" + +#: jupyter_server/serverapp.py:419 +msgid "Don't open the kennen_server in a browser after startup." +msgstr "" + +#: jupyter_server/serverapp.py:423 +msgid "DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib." +msgstr "" + +#: jupyter_server/serverapp.py:439 +msgid "Allow the server to be run from root user." +msgstr "" + +#: jupyter_server/serverapp.py:470 +msgid "" +"The Kennen Server.\n" +" \n" +" This launches a Tornado-based Kennen Server." +msgstr "" + +#: jupyter_server/serverapp.py:540 +msgid "Set the Access-Control-Allow-Credentials: true header" +msgstr "" + +#: jupyter_server/serverapp.py:544 +msgid "Whether to allow the user to run the Jupyter server as root." 
+msgstr "" + +#: jupyter_server/serverapp.py:548 +msgid "The default URL to redirect to from `/`" +msgstr "" + +#: jupyter_server/serverapp.py:552 +msgid "The IP address the Jupyter server will listen on." +msgstr "" + +#: jupyter_server/serverapp.py:565 +#, python-format +msgid "" +"Cannot bind to localhost, using 127.0.0.1 as default ip\n" +"%s" +msgstr "" + +#: jupyter_server/serverapp.py:579 +msgid "The port the Kennen server will listen on." +msgstr "" + +#: jupyter_server/serverapp.py:583 +msgid "The number of additional ports to try if the specified port is not available." +msgstr "" + +#: jupyter_server/serverapp.py:587 +msgid "The full path to an SSL/TLS certificate file." +msgstr "" + +#: jupyter_server/serverapp.py:591 +msgid "The full path to a private key file for usage with SSL/TLS." +msgstr "" + +#: jupyter_server/serverapp.py:595 +msgid "The full path to a certificate authority certificate for SSL/TLS client authentication." +msgstr "" + +#: jupyter_server/serverapp.py:599 +msgid "The file where the cookie secret is stored." +msgstr "" + +#: jupyter_server/serverapp.py:628 +#, python-format +msgid "Writing Kennen server cookie secret to %s" +msgstr "" + +#: jupyter_server/serverapp.py:635 +#, python-format +msgid "Could not set permissions on %s" +msgstr "" + +#: jupyter_server/serverapp.py:640 +msgid "" +"Token used for authenticating first-time connections to the server.\n" +"\n" +" When no password is enabled,\n" +" the default is to generate a new, random token.\n" +"\n" +" Setting to an empty string disables authentication altogether, which is NOT RECOMMENDED.\n" +" " +msgstr "" + +#: jupyter_server/serverapp.py:650 +msgid "" +"One-time token used for opening a browser.\n" +" Once used, this token cannot be used again.\n" +" " +msgstr "" + +#: jupyter_server/serverapp.py:726 +msgid "" +"Specify Where to open the server on startup. 
This is the\n" +" `new` argument passed to the standard library method `webbrowser.open`.\n" +" The behaviour is not guaranteed, but depends on browser support. Valid\n" +" values are:\n" +" 2 opens a new tab,\n" +" 1 opens a new window,\n" +" 0 opens in an existing window.\n" +" See the `webbrowser.open` documentation for details.\n" +" " +msgstr "" + +#: jupyter_server/serverapp.py:742 +msgid "" +"\n" +" webapp_settings is deprecated, use tornado_settings.\n" +msgstr "" + +#: jupyter_server/serverapp.py:746 +msgid "Supply overrides for the tornado.web.Application that the Jupyter server uses." +msgstr "" + +#: jupyter_server/serverapp.py:750 +msgid "" +"\n" +" Set the tornado compression options for websocket connections.\n" +"\n" +" This value will be returned from :meth:`WebSocketHandler.get_compression_options`.\n" +" None (default) will disable compression.\n" +" A dict (even an empty one) will enable compression.\n" +"\n" +" See the tornado docs for WebSocketHandler.get_compression_options for details.\n" +" " +msgstr "" + +#: jupyter_server/serverapp.py:761 +msgid "Supply overrides for terminado. Currently only supports \"shell_command\"." +msgstr "" + +#: jupyter_server/serverapp.py:764 +msgid "Extra keyword arguments to pass to `set_secure_cookie`. See tornado's set_secure_cookie docs for details." +msgstr "" + +#: jupyter_server/serverapp.py:768 +msgid "" +"Supply SSL options for the tornado HTTPServer.\n" +" See the tornado docs for details." +msgstr "" + +#: jupyter_server/serverapp.py:772 +msgid "Supply extra arguments that will be passed to Jinja environment." +msgstr "" + +#: jupyter_server/serverapp.py:776 +msgid "Extra variables to supply to jinja templates when rendering." 
+msgstr "" + +#: jupyter_server/serverapp.py:816 +msgid "base_project_url is deprecated, use base_url" +msgstr "" + +#: jupyter_server/serverapp.py:832 +msgid "Path to search for custom.js, css" +msgstr "" + +#: jupyter_server/serverapp.py:844 +msgid "" +"Extra paths to search for serving jinja templates.\n" +"\n" +" Can be used to override templates from kennen_server.templates." +msgstr "" + +#: jupyter_server/serverapp.py:900 +#, python-format +msgid "Using MathJax: %s" +msgstr "" + +#: jupyter_server/serverapp.py:903 +msgid "The MathJax.js configuration file that is to be used." +msgstr "" + +#: jupyter_server/serverapp.py:908 +#, python-format +msgid "Using MathJax configuration file: %s" +msgstr "" + +#: jupyter_server/serverapp.py:920 +msgid "The kernel manager class to use." +msgstr "" + +#: jupyter_server/serverapp.py:926 +msgid "The session manager class to use." +msgstr "" + +#: jupyter_server/serverapp.py:932 +msgid "The config manager class to use" +msgstr "" + +#: jupyter_server/serverapp.py:953 +msgid "The login handler class to use." +msgstr "" + +#: jupyter_server/serverapp.py:960 +msgid "The logout handler class to use." +msgstr "" + +#: jupyter_server/serverapp.py:964 +msgid "Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-For headerssent by the upstream reverse proxy. Necessary if the proxy handles SSL" +msgstr "" + +#: jupyter_server/serverapp.py:976 +msgid "" +"\n" +" DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib.\n" +" " +msgstr "" + +#: jupyter_server/serverapp.py:988 +msgid "Support for specifying --pylab on the command line has been removed." +msgstr "" + +#: jupyter_server/serverapp.py:990 +msgid "Please use `%pylab{0}` or `%matplotlib{0}` in the notebook itself." +msgstr "" + +#: jupyter_server/serverapp.py:995 +msgid "The directory to use for notebooks and kernels." 
+msgstr "" + +#: jupyter_server/serverapp.py:1018 +#, python-format +msgid "No such notebook dir: '%r'" +msgstr "" + +#: jupyter_server/serverapp.py:1036 +msgid "server_extensions is deprecated, use jpserver_extensions" +msgstr "" + +#: jupyter_server/serverapp.py:1040 +msgid "Dict of Python modules to load as notebook server extensions. Entry values can be used to enable and disable the loading ofthe extensions. The extensions will be loaded in alphabetical order." +msgstr "" + +#: jupyter_server/serverapp.py:1049 +msgid "Reraise exceptions encountered loading server extensions?" +msgstr "" + +#: jupyter_server/serverapp.py:1052 +msgid "" +"(msgs/sec)\n" +" Maximum rate at which messages can be sent on iopub before they are\n" +" limited." +msgstr "" + +#: jupyter_server/serverapp.py:1056 +msgid "" +"(bytes/sec)\n" +" Maximum rate at which stream output can be sent on iopub before they are\n" +" limited." +msgstr "" + +#: jupyter_server/serverapp.py:1060 +msgid "" +"(sec) Time window used to \n" +" check the message and data rate limits." +msgstr "" + +#: jupyter_server/serverapp.py:1071 +#, python-format +msgid "No such file or directory: %s" +msgstr "" + +#: jupyter_server/serverapp.py:1141 +msgid "Notebook servers are configured to only be run with a password." +msgstr "" + +#: jupyter_server/serverapp.py:1142 +msgid "Hint: run the following command to set a password" +msgstr "" + +#: jupyter_server/serverapp.py:1143 +msgid "\t$ python -m jupyter_server.auth password" +msgstr "" + +#: jupyter_server/serverapp.py:1181 +#, python-format +msgid "The port %i is already in use, trying another port." +msgstr "" + +#: jupyter_server/serverapp.py:1184 +#, python-format +msgid "Permission to listen on port %i denied" +msgstr "" + +#: jupyter_server/serverapp.py:1193 +msgid "ERROR: the Jupyter server could not be started because no available port could be found." 
+msgstr "" + +#: jupyter_server/serverapp.py:1199 +msgid "[all ip addresses on your system]" +msgstr "" + +#: jupyter_server/serverapp.py:1223 +#, python-format +msgid "Terminals not available (error was %s)" +msgstr "" + +#: jupyter_server/serverapp.py:1259 +msgid "interrupted" +msgstr "" + +#: jupyter_server/serverapp.py:1261 +msgid "y" +msgstr "" + +#: jupyter_server/serverapp.py:1262 +msgid "n" +msgstr "" + +#: jupyter_server/serverapp.py:1263 +#, python-format +msgid "Shutdown this notebook server (%s/[%s])? " +msgstr "" + +#: jupyter_server/serverapp.py:1269 +msgid "Shutdown confirmed" +msgstr "" + +#: jupyter_server/serverapp.py:1273 +msgid "No answer for 5s:" +msgstr "" + +#: jupyter_server/serverapp.py:1274 +msgid "resuming operation..." +msgstr "" + +#: jupyter_server/serverapp.py:1282 +#, python-format +msgid "received signal %s, stopping" +msgstr "" + +#: jupyter_server/serverapp.py:1338 +#, python-format +msgid "Error loading server extension %s" +msgstr "" + +#: jupyter_server/serverapp.py:1369 +#, python-format +msgid "Shutting down %d kernels" +msgstr "" + +#: jupyter_server/serverapp.py:1375 +#, python-format +msgid "%d active kernel" +msgid_plural "%d active kernels" +msgstr[0] "" +msgstr[1] "" + +#: jupyter_server/serverapp.py:1379 +#, python-format +msgid "" +"The Jupyter Notebook is running at:\n" +"\r" +"%s" +msgstr "" + +#: jupyter_server/serverapp.py:1426 +msgid "Running as root is not recommended. Use --allow-root to bypass." +msgstr "" + +#: jupyter_server/serverapp.py:1432 +msgid "Use Control-C to stop this server and shut down all kernels (twice to skip confirmation)." +msgstr "" + +#: jupyter_server/serverapp.py:1434 +msgid "Welcome to Project Jupyter! Explore the various tools available and their corresponding documentation. If you are interested in contributing to the platform, please visit the communityresources section at http://jupyter.org/community.html." 
+msgstr ""
+
+#: jupyter_server/serverapp.py:1445
+#, python-format
+msgid "No web browser found: %s."
+msgstr ""
+
+#: jupyter_server/serverapp.py:1450
+#, python-format
+msgid "%s does not exist"
+msgstr ""
+
+#: jupyter_server/serverapp.py:1484
+msgid "Interrupted..."
+msgstr ""
+
+#: jupyter_server/services/contents/filemanager.py:506
+#, python-format
+msgid "Serving notebooks from local directory: %s"
+msgstr ""
+
+#: jupyter_server/services/contents/manager.py:68
+msgid "Untitled"
+msgstr "" diff --git a/server/jupyter_server/i18n/zh_CN/LC_MESSAGES/nbui.po b/server/jupyter_server/i18n/zh_CN/LC_MESSAGES/nbui.po new file mode 100644 index 0000000..a8ccac2 --- /dev/null +++ b/server/jupyter_server/i18n/zh_CN/LC_MESSAGES/nbui.po @@ -0,0 +1,731 @@ +# Translations template for Jupyter. +# Copyright (C) 2017 ORGANIZATION +# This file is distributed under the same license as the Jupyter project. +# FIRST AUTHOR , 2017. +# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: Kennen VERSION\n" +"Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" +"POT-Creation-Date: 2017-08-25 02:53-0400\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME \n" +"Language-Team: LANGUAGE \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=utf-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Generated-By: Babel 2.5.0\n" + +#: notebook/templates/404.html:3 +msgid "You are requesting a page that does not exist!" +msgstr "请求的页面不存在!" 
+ +#: notebook/templates/edit.html:37 +msgid "current mode" +msgstr "当前模式" + +#: notebook/templates/edit.html:48 notebook/templates/notebook.html:78 +msgid "File" +msgstr "文件" + +#: notebook/templates/edit.html:50 notebook/templates/tree.html:57 +msgid "New" +msgstr "新建" + +#: notebook/templates/edit.html:51 +msgid "Save" +msgstr "保存" + +#: notebook/templates/edit.html:52 notebook/templates/tree.html:36 +msgid "Rename" +msgstr "重命名" + +#: notebook/templates/edit.html:53 notebook/templates/tree.html:38 +msgid "Download" +msgstr "下载" + +#: notebook/templates/edit.html:56 notebook/templates/notebook.html:131 +#: notebook/templates/tree.html:41 +msgid "Edit" +msgstr "编辑" + +#: notebook/templates/edit.html:58 +msgid "Find" +msgstr "查找" + +#: notebook/templates/edit.html:59 +msgid "Find & Replace" +msgstr "查找 & 替换" + +#: notebook/templates/edit.html:61 +msgid "Key Map" +msgstr "键值对" + +#: notebook/templates/edit.html:62 +msgid "Default" +msgstr "默认" + +#: notebook/templates/edit.html:63 +msgid "Sublime Text" +msgstr "代码编辑器" + +#: notebook/templates/edit.html:68 notebook/templates/notebook.html:159 +#: notebook/templates/tree.html:40 +msgid "View" +msgstr "查看" + +#: notebook/templates/edit.html:70 notebook/templates/notebook.html:162 +msgid "Show/Hide the logo and notebook title (above menu bar)" +msgstr "显示/隐藏 标题和logo" + +#: notebook/templates/edit.html:71 notebook/templates/notebook.html:163 +msgid "Toggle Header" +msgstr "切换Header" + +#: notebook/templates/edit.html:72 notebook/templates/notebook.html:171 +msgid "Toggle Line Numbers" +msgstr "切换行号" + +#: notebook/templates/edit.html:75 +msgid "Language" +msgstr "语言" + +#: notebook/templates/error.html:23 +msgid "The error was:" +msgstr "错误:" + +#: notebook/templates/login.html:24 +msgid "Password or token:" +msgstr "密码或者token:" + +#: notebook/templates/login.html:26 +msgid "Password:" +msgstr "密码:" + +#: notebook/templates/login.html:31 +msgid "Log in" +msgstr "登录" + +#: notebook/templates/login.html:39 +msgid "No 
login available, you shouldn't be seeing this page." +msgstr "登录功能不可用, 您不应该看到此页面." + +#: notebook/templates/logout.html:31 +#, python-format +msgid "Proceed to the dashboard" +msgstr "进入 指示板" + +#: notebook/templates/logout.html:33 +#, python-format +msgid "Proceed to the login page" +msgstr "进入 登录页面" + +#: notebook/templates/notebook.html:62 +msgid "Menu" +msgstr "菜单" + +#: notebook/templates/notebook.html:65 notebook/templates/notebook.html:254 +msgid "Kernel" +msgstr "服务" + +#: notebook/templates/notebook.html:68 +msgid "This notebook is read-only" +msgstr "这个代码是只读的" + +#: notebook/templates/notebook.html:81 +msgid "New Notebook" +msgstr "新建代码" + +#: notebook/templates/notebook.html:85 +msgid "Opens a new window with the Dashboard view" +msgstr "以仪表盘视角打开新的窗口" + +#: notebook/templates/notebook.html:86 +msgid "Open..." +msgstr "打开..." + +#: notebook/templates/notebook.html:90 +msgid "Open a copy of this notebook's contents and start a new kernel" +msgstr "打开代码内容的副本并启动一个新的服务" + +#: notebook/templates/notebook.html:91 +msgid "Make a Copy..." +msgstr "复制..." + +#: notebook/templates/notebook.html:92 +msgid "Rename..." +msgstr "重命名..." 
+ +#: notebook/templates/notebook.html:93 +msgid "Save and Checkpoint" +msgstr "保存" + +#: notebook/templates/notebook.html:96 +msgid "Revert to Checkpoint" +msgstr "恢复" + +#: notebook/templates/notebook.html:106 +msgid "Print Preview" +msgstr "打印预览" + +#: notebook/templates/notebook.html:107 +msgid "Download as" +msgstr "下载" + +#: notebook/templates/notebook.html:109 +msgid "Notebook (.ipynb)" +msgstr "代码(.ipynb)" + +#: notebook/templates/notebook.html:110 +msgid "Script" +msgstr "脚本" + +#: notebook/templates/notebook.html:111 +msgid "HTML (.html)" +msgstr "" + +#: notebook/templates/notebook.html:112 +msgid "Markdown (.md)" +msgstr "" + +#: notebook/templates/notebook.html:113 +msgid "reST (.rst)" +msgstr "" + +#: notebook/templates/notebook.html:114 +msgid "LaTeX (.tex)" +msgstr "" + +#: notebook/templates/notebook.html:115 +msgid "PDF via LaTeX (.pdf)" +msgstr "" + +#: notebook/templates/notebook.html:118 +msgid "Deploy as" +msgstr "部署在" + +#: notebook/templates/notebook.html:123 +msgid "Trust the output of this notebook" +msgstr "信任代码的输出" + +#: notebook/templates/notebook.html:124 +msgid "Trust Notebook" +msgstr "信任代码" + +#: notebook/templates/notebook.html:127 +msgid "Shutdown this notebook's kernel, and close this window" +msgstr "关闭代码服务并关闭窗口" + +#: notebook/templates/notebook.html:128 +msgid "Close and Halt" +msgstr "关闭" + +#: notebook/templates/notebook.html:133 +msgid "Cut Cells" +msgstr "剪切代码块" + +#: notebook/templates/notebook.html:134 +msgid "Copy Cells" +msgstr "复制代码块" + +#: notebook/templates/notebook.html:135 +msgid "Paste Cells Above" +msgstr "粘贴到上面" + +#: notebook/templates/notebook.html:136 +msgid "Paste Cells Below" +msgstr "粘贴到下面" + +#: notebook/templates/notebook.html:137 +msgid "Paste Cells & Replace" +msgstr "粘贴代码块 & 替换" + +#: notebook/templates/notebook.html:138 +msgid "Delete Cells" +msgstr "删除代码块" + +#: notebook/templates/notebook.html:139 +msgid "Undo Delete Cells" +msgstr "撤销删除" + +#: notebook/templates/notebook.html:141 +msgid "Split 
Cell" +msgstr "分割代码块" + +#: notebook/templates/notebook.html:142 +msgid "Merge Cell Above" +msgstr "合并上面的代码块" + +#: notebook/templates/notebook.html:143 +msgid "Merge Cell Below" +msgstr "合并下面的代码块" + +#: notebook/templates/notebook.html:145 +msgid "Move Cell Up" +msgstr "上移代码块" + +#: notebook/templates/notebook.html:146 +msgid "Move Cell Down" +msgstr "下移代码块" + +#: notebook/templates/notebook.html:148 +msgid "Edit Notebook Metadata" +msgstr "编辑界面元数据" + +#: notebook/templates/notebook.html:150 +msgid "Find and Replace" +msgstr "查找并替换" + +#: notebook/templates/notebook.html:152 +msgid "Cut Cell Attachments" +msgstr "剪切附件" + +#: notebook/templates/notebook.html:153 +msgid "Copy Cell Attachments" +msgstr "复制附件" + +#: notebook/templates/notebook.html:154 +msgid "Paste Cell Attachments" +msgstr "粘贴附件" + +#: notebook/templates/notebook.html:156 +msgid "Insert Image" +msgstr "插入图片" + +#: notebook/templates/notebook.html:166 +msgid "Show/Hide the action icons (below menu bar)" +msgstr "显示/隐藏 操作图标" + +#: notebook/templates/notebook.html:167 +msgid "Toggle Toolbar" +msgstr "" + +#: notebook/templates/notebook.html:170 +msgid "Show/Hide line numbers in cells" +msgstr "显示/隐藏行号" + +#: notebook/templates/notebook.html:174 +msgid "Cell Toolbar" +msgstr "单元格工具栏" + +#: notebook/templates/notebook.html:179 +msgid "Insert" +msgstr "插入" + +#: notebook/templates/notebook.html:182 +msgid "Insert an empty Code cell above the currently active cell" +msgstr "在当前活动单元上插入一个空的代码单元格" + +#: notebook/templates/notebook.html:183 +msgid "Insert Cell Above" +msgstr "插入单元格上面" + +#: notebook/templates/notebook.html:185 +msgid "Insert an empty Code cell below the currently active cell" +msgstr "在当前活动单元下面插入一个空的代码单元格" + +#: notebook/templates/notebook.html:186 +msgid "Insert Cell Below" +msgstr "插入单元格下面" + +#: notebook/templates/notebook.html:189 +msgid "Cell" +msgstr "单元格" + +#: notebook/templates/notebook.html:191 +msgid "Run this cell, and move cursor to the next one" +msgstr "运行这个单元格,并将光标移到下一个" + +#: 
notebook/templates/notebook.html:192 +msgid "Run Cells" +msgstr "运行所有单元格" + +#: notebook/templates/notebook.html:193 +msgid "Run this cell, select below" +msgstr "运行此单元,选择以下选项" + +#: notebook/templates/notebook.html:194 +msgid "Run Cells and Select Below" +msgstr "运行单元格并自动选择下一个" + +#: notebook/templates/notebook.html:195 +msgid "Run this cell, insert below" +msgstr "运行单元格并选择以下" + +#: notebook/templates/notebook.html:196 +msgid "Run Cells and Insert Below" +msgstr "运行单元格并在下面插入" + +#: notebook/templates/notebook.html:197 +msgid "Run all cells in the notebook" +msgstr "运行所有的单元格" + +#: notebook/templates/notebook.html:198 +msgid "Run All" +msgstr "运行所有" + +#: notebook/templates/notebook.html:199 +msgid "Run all cells above (but not including) this cell" +msgstr "运行上面的所有单元(但不包括)这个单元格" + +#: notebook/templates/notebook.html:200 +msgid "Run All Above" +msgstr "运行上面的代码块" + +#: notebook/templates/notebook.html:201 +msgid "Run this cell and all cells below it" +msgstr "运行当前及以下代码块" + +#: notebook/templates/notebook.html:202 +msgid "Run All Below" +msgstr "运行下面的代码块" + +#: notebook/templates/notebook.html:205 +msgid "All cells in the notebook have a cell type. By default, new cells are created as 'Code' cells" +msgstr "代码里的所有单元格都有一个类型. 默认情况下, 新单元被创建为'Code'单元格" + +#: notebook/templates/notebook.html:206 +msgid "Cell Type" +msgstr "单元格类型" + +#: notebook/templates/notebook.html:209 +msgid "Contents will be sent to the kernel for execution, and output will display in the footer of cell" +msgstr "内容将被发送到内核以执行, 输出将显示在单元格的页脚." 
+ +#: notebook/templates/notebook.html:212 +msgid "Contents will be rendered as HTML and serve as explanatory text" +msgstr "内容将以HTML形式呈现, 并作为解释性文本" + +#: notebook/templates/notebook.html:213 notebook/templates/notebook.html:298 +msgid "Markdown" +msgstr "标签" + +#: notebook/templates/notebook.html:215 +msgid "Contents will pass through nbconvert unmodified" +msgstr "内容将通过未经修改的nbconvert" + +#: notebook/templates/notebook.html:216 +msgid "Raw NBConvert" +msgstr "原生 NBConvert" + +#: notebook/templates/notebook.html:220 +msgid "Current Outputs" +msgstr "当前输出" + +#: notebook/templates/notebook.html:223 +msgid "Hide/Show the output of the current cell" +msgstr "隐藏/显示当前单元格输出" + +#: notebook/templates/notebook.html:224 notebook/templates/notebook.html:240 +msgid "Toggle" +msgstr "切换" + +#: notebook/templates/notebook.html:227 +msgid "Scroll the output of the current cell" +msgstr "滚动当前单元格的输出" + +#: notebook/templates/notebook.html:228 notebook/templates/notebook.html:244 +msgid "Toggle Scrolling" +msgstr "切换滚动" + +#: notebook/templates/notebook.html:231 +msgid "Clear the output of the current cell" +msgstr "清除当前单元格的输出" + +#: notebook/templates/notebook.html:232 notebook/templates/notebook.html:248 +msgid "Clear" +msgstr "清空" + +#: notebook/templates/notebook.html:236 +msgid "All Output" +msgstr "所有输出" + +#: notebook/templates/notebook.html:239 +msgid "Hide/Show the output of all cells" +msgstr "隐藏/显示 所有代码块的输出" + +#: notebook/templates/notebook.html:243 +msgid "Scroll the output of all cells" +msgstr "滚动所有单元格的输出" + +#: notebook/templates/notebook.html:247 +msgid "Clear the output of all cells" +msgstr "清空所有代码块的输出" + +#: notebook/templates/notebook.html:257 +msgid "Send Keyboard Interrupt (CTRL-C) to the Kernel" +msgstr "按下CTRL-C 中断服务" + +#: notebook/templates/notebook.html:258 +msgid "Interrupt" +msgstr "中断" + +#: notebook/templates/notebook.html:261 +msgid "Restart the Kernel" +msgstr "重启服务" + +#: notebook/templates/notebook.html:262 +msgid "Restart" +msgstr "重启" + +#: 
notebook/templates/notebook.html:265 +msgid "Restart the Kernel and clear all output" +msgstr "重启服务并清空所有输出" + +#: notebook/templates/notebook.html:266 +msgid "Restart & Clear Output" +msgstr "重启 & 清空输出" + +#: notebook/templates/notebook.html:269 +msgid "Restart the Kernel and re-run the notebook" +msgstr "重启服务并且重新运行代码" + +#: notebook/templates/notebook.html:270 +msgid "Restart & Run All" +msgstr "重启 & 运行所有" + +#: notebook/templates/notebook.html:273 +msgid "Reconnect to the Kernel" +msgstr "重新连接服务" + +#: notebook/templates/notebook.html:274 +msgid "Reconnect" +msgstr "重连" + +#: notebook/templates/notebook.html:282 +msgid "Change kernel" +msgstr "改变服务" + +#: notebook/templates/notebook.html:287 +msgid "Help" +msgstr "帮助" + +#: notebook/templates/notebook.html:290 +msgid "A quick tour of the notebook user interface" +msgstr "快速浏览一下notebook用户界面" + +#: notebook/templates/notebook.html:290 +msgid "User Interface Tour" +msgstr "用户界面之旅" + +#: notebook/templates/notebook.html:291 +msgid "Opens a tooltip with all keyboard shortcuts" +msgstr "打开所有快捷键提示信息" + +#: notebook/templates/notebook.html:291 +msgid "Keyboard Shortcuts" +msgstr "快捷键" + +#: notebook/templates/notebook.html:292 +msgid "Opens a dialog allowing you to edit Keyboard shortcuts" +msgstr "打开对话框编辑快捷键" + +#: notebook/templates/notebook.html:292 +msgid "Edit Keyboard Shortcuts" +msgstr "编辑快捷键" + +#: notebook/templates/notebook.html:297 +msgid "Notebook Help" +msgstr "帮助" + +#: notebook/templates/notebook.html:303 +msgid "Opens in a new window" +msgstr "在新窗口打开" + +#: notebook/templates/notebook.html:319 +msgid "About Kennen Notebook" +msgstr "关于本程序" + +#: notebook/templates/notebook.html:319 +msgid "About" +msgstr "关于" + +#: notebook/templates/page.html:114 +msgid "Jupyter Notebook requires JavaScript." +msgstr "Jupyter Notebook需要的JavaScript." + +#: notebook/templates/page.html:115 +msgid "Please enable it to proceed. " +msgstr "请允许它继续." 
+ +#: notebook/templates/page.html:122 +msgid "dashboard" +msgstr "指示板" + +#: notebook/templates/page.html:135 +msgid "Logout" +msgstr "注销" + +#: notebook/templates/page.html:137 +msgid "Login" +msgstr "登录" + +#: notebook/templates/tree.html:23 +msgid "Files" +msgstr "文件" + +#: notebook/templates/tree.html:24 +msgid "Running" +msgstr "运行" + +#: notebook/templates/tree.html:25 +msgid "Clusters" +msgstr "集群" + +#: notebook/templates/tree.html:32 +msgid "Select items to perform actions on them." +msgstr "选择操作对象." + +#: notebook/templates/tree.html:35 +msgid "Duplicate selected" +msgstr "复制选择的对象" + +#: notebook/templates/tree.html:35 +msgid "Duplicate" +msgstr "复制" + +#: notebook/templates/tree.html:36 +msgid "Rename selected" +msgstr "重命名" + +#: notebook/templates/tree.html:37 +msgid "Move selected" +msgstr "移动" + +#: notebook/templates/tree.html:37 +msgid "Move" +msgstr "移动" + +#: notebook/templates/tree.html:38 +msgid "Download selected" +msgstr "下载" + +#: notebook/templates/tree.html:39 +msgid "Shutdown selected notebook(s)" +msgstr "停止运行选择的notebook(s)" + +#: notebook/templates/notebook.html:278 +#: notebook/templates/tree.html:39 +msgid "Shutdown" +msgstr "关闭" + +#: notebook/templates/tree.html:40 +msgid "View selected" +msgstr "查看" + +#: notebook/templates/tree.html:41 +msgid "Edit selected" +msgstr "编辑" + +#: notebook/templates/tree.html:42 +msgid "Delete selected" +msgstr "删除" + +#: notebook/templates/tree.html:50 +msgid "Click to browse for a file to upload." 
+msgstr "点击浏览文件上传" + +#: notebook/templates/tree.html:51 +msgid "Upload" +msgstr "上传" + +#: notebook/templates/tree.html:65 +msgid "Text File" +msgstr "文本文件" + +#: notebook/templates/tree.html:68 +msgid "Folder" +msgstr "文件夹" + +#: notebook/templates/tree.html:72 +msgid "Terminal" +msgstr "终端" + +#: notebook/templates/tree.html:76 +msgid "Terminals Unavailable" +msgstr "终端不可用" + +#: notebook/templates/tree.html:82 +msgid "Refresh notebook list" +msgstr "刷新笔记列表" + +#: notebook/templates/tree.html:90 +msgid "Select All / None" +msgstr "全选 / 全不选" + +#: notebook/templates/tree.html:93 +msgid "Select..." +msgstr "选择..." + +#: notebook/templates/tree.html:98 +msgid "Select All Folders" +msgstr "选择所有文件夹" + +#: notebook/templates/tree.html:98 +msgid " Folders" +msgstr "文件夹" + +#: notebook/templates/tree.html:99 +msgid "Select All Notebooks" +msgstr "选择所有笔记" + +#: notebook/templates/tree.html:99 +msgid " All Notebooks" +msgstr "所有笔记" + +#: notebook/templates/tree.html:100 +msgid "Select Running Notebooks" +msgstr "选择运行中的笔记" + +#: notebook/templates/tree.html:100 +msgid " Running" +msgstr "运行" + +#: notebook/templates/tree.html:101 +msgid "Select All Files" +msgstr "选择所有文件" + +#: notebook/templates/tree.html:101 +msgid " Files" +msgstr "文件" + +#: notebook/templates/tree.html:114 +msgid "Last Modified" +msgstr "最后修改" + +#: notebook/templates/tree.html:120 +msgid "Name" +msgstr "名字" + +#: notebook/templates/tree.html:130 +msgid "Currently running Jupyter processes" +msgstr "当前运行Jupyter" + +#: notebook/templates/tree.html:134 +msgid "Refresh running list" +msgstr "刷新运行列表" + +#: notebook/templates/tree.html:150 +msgid "There are no terminals running." +msgstr "没有终端运行" + +#: notebook/templates/tree.html:152 +msgid "Terminals are unavailable." +msgstr "终端不可用" + +#: notebook/templates/tree.html:162 +msgid "Notebooks" +msgstr "笔记" + +#: notebook/templates/tree.html:169 +msgid "There are no notebooks running." 
+msgstr "没有笔记正在运行" + +#: notebook/templates/tree.html:178 +msgid "Clusters tab is now provided by IPython parallel." +msgstr "集群标签现在由IPython并行提供." + +#: notebook/templates/tree.html:179 +msgid "See 'IPython parallel' for installation details." +msgstr "安装细节查看 'IPython parallel'." diff --git a/server/jupyter_server/i18n/zh_CN/LC_MESSAGES/notebook.po b/server/jupyter_server/i18n/zh_CN/LC_MESSAGES/notebook.po new file mode 100644 index 0000000..f09d563 --- /dev/null +++ b/server/jupyter_server/i18n/zh_CN/LC_MESSAGES/notebook.po @@ -0,0 +1,446 @@ +# Translations template for Jupyter. +# Copyright (C) 2017 ORGANIZATION +# This file is distributed under the same license as the Jupyter project. +# FIRST AUTHOR , 2017. +# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: Kennen VERSION\n" +"Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" +"POT-Creation-Date: 2017-08-25 02:53-0400\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME \n" +"Language-Team: LANGUAGE \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=utf-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Generated-By: Babel 2.5.0\n" + +#: notebook/serverapp.py:49 +msgid "The Kennen Notebook requires tornado >= 4.0" +msgstr "该程序要求 tornado 版本 >= 4.0" + +#: notebook/serverapp.py:53 +msgid "The Kennen Notebook requires tornado >= 4.0, but you have < 1.1.0" +msgstr "该程序要求 tornado 版本 >= 4.0, 可是现实却是 < 1.1.0" + +#: notebook/serverapp.py:55 +#, python-format +msgid "The Kennen Notebook requires tornado >= 4.0, but you have %s" +msgstr "该程序要求 tornado 版本 >= 4.0, 可是现实却是 %s" + +#: notebook/serverapp.py:206 +#, python-format +msgid "Alternatively use `%s` when working on the notebook's Javascript and LESS" +msgstr "在使用notebook的JavaScript和LESS时,可以替换使用 `%s` " + +#: notebook/serverapp.py:385 +msgid "List currently running notebook servers." +msgstr "列出当前运行的Notebook服务." + +#: notebook/serverapp.py:389 +msgid "Produce machine-readable JSON list output." +msgstr "生成机器可读的JSON输出." 
+ +#: notebook/serverapp.py:391 +msgid "Produce machine-readable JSON object on each line of output." +msgstr "当前运行的服务" + +#: notebook/serverapp.py:395 +msgid "If True, the output will be a JSON list of objects, one per active notebook server, each with the details from the relevant server info file." +msgstr "如果是正确的,输出将是一个对象的JSON列表,一个活动的笔记本服务器,每一个都有相关的服务器信息文件的详细信息。" + +#: notebook/serverapp.py:399 +msgid "If True, each line of output will be a JSON object with the details from the server info file. For a JSON list output, see the NbserverListApp.jsonlist configuration value" +msgstr "如果是正确的,每一行输出将是一个JSON对象,其中有来自服务器信息文件的详细信息。对于一个JSON列表输出,请参阅NbserverListApp。jsonlist配置值" + +#: notebook/serverapp.py:425 +msgid "Don't open the notebook in a browser after startup." +msgstr "在启动服务以后不在浏览器中打开一个窗口." + +#: notebook/serverapp.py:429 +msgid "DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib." +msgstr "" + +#: notebook/serverapp.py:445 +msgid "Allow the notebook to be run from root user." +msgstr "允许notebook在root用户下运行." + +#: notebook/serverapp.py:476 +msgid "" +"The Kennen HTML Notebook.\n" +" \n" +" This launches a Tornado based HTML Notebook Server that serves up an HTML5/Javascript Notebook client." +msgstr "The Jupyter HTML Notebook.\n \n 这将启动一个基于tornado的HTML笔记本服务器,它提供一个html5/javascript笔记本客户端。" + +#: notebook/serverapp.py:546 +msgid "Set the Access-Control-Allow-Credentials: true header" +msgstr "设置Access-Control-Allow-Credentials:true报头" + +#: notebook/serverapp.py:550 +msgid "Whether to allow the user to run the notebook as root." +msgstr "是否允许notebook在root用户下运行." + +#: notebook/serverapp.py:554 +msgid "The default URL to redirect to from `/`" +msgstr "从 `/` 重定向到的默认URL " + +#: notebook/serverapp.py:558 +msgid "The IP address the notebook server will listen on." +msgstr "notebook服务会监听的IP地址." 
+ +#: notebook/serverapp.py:571 +#, python-format +msgid "" +"Cannot bind to localhost, using 127.0.0.1 as default ip\n" +"%s" +msgstr "不能绑定到localhost, 使用127.0.0.1作为默认的IP \n %s" + +#: notebook/serverapp.py:585 +msgid "The port the notebook server will listen on." +msgstr "notebook服务会监听的IP端口." + +#: notebook/serverapp.py:589 +msgid "The number of additional ports to try if the specified port is not available." +msgstr "如果指定的端口不可用,则要尝试其他端口的数量." + +#: notebook/serverapp.py:593 +msgid "The full path to an SSL/TLS certificate file." +msgstr "SSL/TLS 认证文件所在全路径." + +#: notebook/serverapp.py:597 +msgid "The full path to a private key file for usage with SSL/TLS." +msgstr "SSL/TLS 私钥文件所在全路径." + +#: notebook/serverapp.py:601 +msgid "The full path to a certificate authority certificate for SSL/TLS client authentication." +msgstr "用于ssl/tls客户端身份验证的证书颁发证书的完整路径." + +#: notebook/serverapp.py:605 +msgid "The file where the cookie secret is stored." +msgstr "存放cookie密钥的文件被保存了." + +#: notebook/serverapp.py:634 +#, python-format +msgid "Writing notebook server cookie secret to %s" +msgstr "把notebook 服务cookie密码写入 %s" + +#: notebook/serverapp.py:641 +#, python-format +msgid "Could not set permissions on %s" +msgstr "不能在 %s 设置权限" + +#: notebook/serverapp.py:646 +msgid "" +"Token used for authenticating first-time connections to the server.\n" +"\n" +" When no password is enabled,\n" +" the default is to generate a new, random token.\n" +"\n" +" Setting to an empty string disables authentication altogether, which is NOT RECOMMENDED.\n" +" " +msgstr "" + +#: notebook/serverapp.py:656 +msgid "" +"One-time token used for opening a browser.\n" +" Once used, this token cannot be used again.\n" +" " +msgstr "" + +#: notebook/serverapp.py:732 +msgid "" +"Specify Where to open the notebook on startup. This is the\n" +" `new` argument passed to the standard library method `webbrowser.open`.\n" +" The behaviour is not guaranteed, but depends on browser support. 
Valid\n" +" values are:\n" +" 2 opens a new tab,\n" +" 1 opens a new window,\n" +" 0 opens in an existing window.\n" +" See the `webbrowser.open` documentation for details.\n" +" " +msgstr "" + +#: notebook/serverapp.py:752 +msgid "Supply overrides for the tornado.web.Application that the Jupyter notebook uses." +msgstr "" + +#: notebook/serverapp.py:756 +msgid "" +"\n" +" Set the tornado compression options for websocket connections.\n" +"\n" +" This value will be returned from :meth:`WebSocketHandler.get_compression_options`.\n" +" None (default) will disable compression.\n" +" A dict (even an empty one) will enable compression.\n" +"\n" +" See the tornado docs for WebSocketHandler.get_compression_options for details.\n" +" " +msgstr "" + +#: notebook/serverapp.py:767 +msgid "Supply overrides for terminado. Currently only supports \"shell_command\"." +msgstr "" + +#: notebook/serverapp.py:770 +msgid "Extra keyword arguments to pass to `set_secure_cookie`. See tornado's set_secure_cookie docs for details." +msgstr "" + +#: notebook/serverapp.py:774 +msgid "" +"Supply SSL options for the tornado HTTPServer.\n" +" See the tornado docs for details." +msgstr "" + +#: notebook/serverapp.py:778 +msgid "Supply extra arguments that will be passed to Jinja environment." +msgstr "" + +#: notebook/serverapp.py:782 +msgid "Extra variables to supply to jinja templates when rendering." +msgstr "" + +#: notebook/serverapp.py:838 +msgid "Path to search for custom.js, css" +msgstr "" + +#: notebook/serverapp.py:850 +msgid "" +"Extra paths to search for serving jinja templates.\n" +"\n" +" Can be used to override templates from notebook.templates." +msgstr "" + +#: notebook/serverapp.py:861 +msgid "extra paths to look for Javascript notebook extensions" +msgstr "" + +#: notebook/serverapp.py:906 +#, python-format +msgid "Using MathJax: %s" +msgstr "" + +#: notebook/serverapp.py:909 +msgid "The MathJax.js configuration file that is to be used." 
+msgstr "" + +#: notebook/serverapp.py:914 +#, python-format +msgid "Using MathJax configuration file: %s" +msgstr "" + +#: notebook/serverapp.py:920 +msgid "The notebook manager class to use." +msgstr "" + +#: notebook/serverapp.py:926 +msgid "The kernel manager class to use." +msgstr "" + +#: notebook/serverapp.py:932 +msgid "The session manager class to use." +msgstr "" + +#: notebook/serverapp.py:938 +msgid "The config manager class to use" +msgstr "" + +#: notebook/serverapp.py:959 +msgid "The login handler class to use." +msgstr "" + +#: notebook/serverapp.py:966 +msgid "The logout handler class to use." +msgstr "" + +#: notebook/serverapp.py:970 +msgid "Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-For headerssent by the upstream reverse proxy. Necessary if the proxy handles SSL" +msgstr "" + +#: notebook/serverapp.py:982 +msgid "" +"\n" +" DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib.\n" +" " +msgstr "" + +#: notebook/serverapp.py:994 +msgid "Support for specifying --pylab on the command line has been removed." +msgstr "" + +#: notebook/serverapp.py:996 +msgid "Please use `%pylab{0}` or `%matplotlib{0}` in the notebook itself." +msgstr "" + +#: notebook/serverapp.py:1001 +msgid "The directory to use for notebooks and kernels." +msgstr "用于笔记本和内核的目录。" + +#: notebook/serverapp.py:1024 +#, python-format +msgid "No such notebook dir: '%r'" +msgstr "没有找到路径: '%r' " + +#: notebook/serverapp.py:1046 +msgid "Dict of Python modules to load as notebook server extensions.Entry values can be used to enable and disable the loading ofthe extensions. The extensions will be loaded in alphabetical order." +msgstr "将Python模块作为笔记本服务器扩展加载。可以使用条目值来启用和禁用扩展的加载。这些扩展将以字母顺序加载。" + +#: notebook/serverapp.py:1055 +msgid "Reraise exceptions encountered loading server extensions?" +msgstr "重新运行的异常会遇到加载服务器扩展吗?" 
+msgstr "端口 %i 已经被占用, 正在尝试其他端口."
" +msgstr "关闭服务 (%s/[%s])" + +#: notebook/serverapp.py:1275 +msgid "Shutdown confirmed" +msgstr "关闭确定" + +#: notebook/serverapp.py:1279 +msgid "No answer for 5s:" +msgstr "5s 未响应" + +#: notebook/serverapp.py:1280 +msgid "resuming operation..." +msgstr "重启操作..." + +#: notebook/serverapp.py:1288 +#, python-format +msgid "received signal %s, stopping" +msgstr "接受信号 %s, 正在停止" + +#: notebook/serverapp.py:1344 +#, python-format +msgid "Error loading server extension %s" +msgstr "加载插件 %s 失败" + +#: notebook/serverapp.py:1375 +#, python-format +msgid "Shutting down %d kernel" +msgid_plural "Shutting down %d kernels" +msgstr[0] "关闭 %d 服务" +msgstr[1] "关闭 %d 服务" + +#: notebook/serverapp.py:1383 +#, python-format +msgid "%d active kernel" +msgid_plural "%d active kernels" +msgstr[0] "%d 活跃的服务" +msgstr[1] "%d 活跃的服务" + +#: notebook/serverapp.py:1387 +#, python-format +msgid "" +"The Jupyter Notebook is running at:\n" +"%s" +msgstr "本程序运行在: %s" + +#: notebook/serverapp.py:1434 +msgid "Running as root is not recommended. Use --allow-root to bypass." +msgstr "不建议以root身份运行.使用--allow-root绕过过." + +#: notebook/serverapp.py:1440 +msgid "Use Control-C to stop this server and shut down all kernels (twice to skip confirmation)." +msgstr "使用control-c停止此服务器并关闭所有内核(两次跳过确认)." + +#: notebook/serverapp.py:1442 +msgid "Welcome to Project Jupyter! Explore the various tools available and their corresponding documentation. If you are interested in contributing to the platform, please visit the communityresources section at http://jupyter.org/community.html." +msgstr "欢迎来到项目Jupyter! 探索可用的各种工具及其相应的文档. 如果你有兴趣对这个平台,请访问http://jupyter.org/community.html community resources部分." + +#: notebook/serverapp.py:1453 +#, python-format +msgid "No web browser found: %s." +msgstr "没有找到web浏览器: %s." + +#: notebook/serverapp.py:1458 +#, python-format +msgid "%s does not exist" +msgstr "%s 不存在" + +#: notebook/serverapp.py:1492 +msgid "Interrupted..." +msgstr "已经中断..." 
+    (r"/kernelspecs/%s/(?P<path>.*)" % kernel_name_regex, KernelSpecResourceHandler),
(c) Jupyter Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +# ----------------------------------------------------------------------------- +import json + +from tornado.log import access_log + +from .prometheus.log_functions import prometheus_log_method + + +def log_request(handler): + """log a bit more information about each request than tornado's default + + - move static file get success to debug-level (reduces noise) + - get proxied IP instead of proxy IP + - log referer for redirect and failed requests + - log user-agent for failed requests + """ + status = handler.get_status() + request = handler.request + try: + logger = handler.log + except AttributeError: + logger = access_log + + if status < 300 or status == 304: + # Successes (or 304 FOUND) are debug-level + log_method = logger.debug + elif status < 400: + log_method = logger.info + elif status < 500: + log_method = logger.warning + else: + log_method = logger.error + + request_time = 1000.0 * handler.request.request_time() + ns = dict( + status=status, + method=request.method, + ip=request.remote_ip, + uri=request.uri, + request_time=request_time, + ) + msg = "{status} {method} {uri} ({ip}) {request_time:.2f}ms" + if status >= 400: + # log bad referers + ns["referer"] = request.headers.get("Referer", "None") + msg = msg + " referer={referer}" + if status >= 500 and status != 502: + # log all headers if it caused an error + log_method(json.dumps(dict(request.headers), indent=2)) + log_method(msg.format(**ns)) + prometheus_log_method(handler) diff --git a/server/jupyter_server/nbconvert/__init__.py b/server/jupyter_server/nbconvert/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/server/jupyter_server/nbconvert/handlers.py b/server/jupyter_server/nbconvert/handlers.py new file mode 100644 index 0000000..84efaab --- /dev/null +++ b/server/jupyter_server/nbconvert/handlers.py @@ -0,0 
+1,197 @@ +"""Tornado handlers for nbconvert.""" +# Copyright (c) Jupyter Development Team. +# Distributed under the terms of the Modified BSD License. +import io +import os +import zipfile + +from anyio.to_thread import run_sync +from ipython_genutils import text +from ipython_genutils.py3compat import cast_bytes +from nbformat import from_dict +from tornado import web +from tornado.log import app_log + +from ..base.handlers import FilesRedirectHandler +from ..base.handlers import JupyterHandler +from ..base.handlers import path_regex +from jupyter_server.auth import authorized +from jupyter_server.utils import ensure_async + + +AUTH_RESOURCE = "nbconvert" + + +def find_resource_files(output_files_dir): + files = [] + for dirpath, dirnames, filenames in os.walk(output_files_dir): + files.extend([os.path.join(dirpath, f) for f in filenames]) + return files + + +def respond_zip(handler, name, output, resources): + """Zip up the output and resource files and respond with the zip file. + + Returns True if it has served a zip file, False if there are no resource + files, in which case we serve the plain output file. 
+ """ + # Check if we have resource files we need to zip + output_files = resources.get("outputs", None) + if not output_files: + return False + + # Headers + zip_filename = os.path.splitext(name)[0] + ".zip" + handler.set_attachment_header(zip_filename) + handler.set_header("Content-Type", "application/zip") + handler.set_header("Cache-Control", "no-store, no-cache, must-revalidate, max-age=0") + + # Prepare the zip file + buffer = io.BytesIO() + zipf = zipfile.ZipFile(buffer, mode="w", compression=zipfile.ZIP_DEFLATED) + output_filename = os.path.splitext(name)[0] + resources["output_extension"] + zipf.writestr(output_filename, cast_bytes(output, "utf-8")) + for filename, data in output_files.items(): + zipf.writestr(os.path.basename(filename), data) + zipf.close() + + handler.finish(buffer.getvalue()) + return True + + +def get_exporter(format, **kwargs): + """get an exporter, raising appropriate errors""" + # if this fails, will raise 500 + try: + from nbconvert.exporters.base import get_exporter + except ImportError as e: + raise web.HTTPError(500, "Could not import nbconvert: %s" % e) from e + + try: + Exporter = get_exporter(format) + except KeyError as e: + # should this be 400? + raise web.HTTPError(404, "No exporter for format: %s" % format) from e + + try: + return Exporter(**kwargs) + except Exception as e: + app_log.exception("Could not construct Exporter: %s", Exporter) + raise web.HTTPError(500, "Could not construct Exporter: %s" % e) from e + + +class NbconvertFileHandler(JupyterHandler): + + auth_resource = AUTH_RESOURCE + SUPPORTED_METHODS = ("GET",) + + @web.authenticated + @authorized + async def get(self, format, path): + self.check_xsrf_cookie() + exporter = get_exporter(format, config=self.config, log=self.log) + + path = path.strip("/") + # If the notebook relates to a real file (default contents manager), + # give its path to nbconvert. 
+ if hasattr(self.contents_manager, "_get_os_path"): + os_path = self.contents_manager._get_os_path(path) + ext_resources_dir, basename = os.path.split(os_path) + else: + ext_resources_dir = None + + model = await ensure_async(self.contents_manager.get(path=path)) + name = model["name"] + if model["type"] != "notebook": + # not a notebook, redirect to files + return FilesRedirectHandler.redirect_to_files(self, path) + + nb = model["content"] + + self.set_header("Last-Modified", model["last_modified"]) + + # create resources dictionary + mod_date = model["last_modified"].strftime(text.date_format) + nb_title = os.path.splitext(name)[0] + + resource_dict = { + "metadata": {"name": nb_title, "modified_date": mod_date}, + "config_dir": self.application.settings["config_dir"], + } + + if ext_resources_dir: + resource_dict["metadata"]["path"] = ext_resources_dir + + # Exporting can take a while, delegate to a thread so we don't block the event loop + try: + output, resources = await run_sync( + lambda: exporter.from_notebook_node(nb, resources=resource_dict) + ) + except Exception as e: + self.log.exception("nbconvert failed: %s", e) + raise web.HTTPError(500, "nbconvert failed: %s" % e) from e + + if respond_zip(self, name, output, resources): + return + + # Force download if requested + if self.get_argument("download", "false").lower() == "true": + filename = os.path.splitext(name)[0] + resources["output_extension"] + self.set_attachment_header(filename) + + # MIME type + if exporter.output_mimetype: + self.set_header("Content-Type", "%s; charset=utf-8" % exporter.output_mimetype) + + self.set_header("Cache-Control", "no-store, no-cache, must-revalidate, max-age=0") + self.finish(output) + + +class NbconvertPostHandler(JupyterHandler): + + SUPPORTED_METHODS = ("POST",) + auth_resource = AUTH_RESOURCE + + @web.authenticated + @authorized + async def post(self, format): + exporter = get_exporter(format, config=self.config) + + model = self.get_json_body() + name = 
model.get("name", "notebook.ipynb") + nbnode = from_dict(model["content"]) + + try: + output, resources = await run_sync( + lambda: exporter.from_notebook_node( + nbnode, + resources={ + "metadata": {"name": name[: name.rfind(".")]}, + "config_dir": self.application.settings["config_dir"], + }, + ) + ) + except Exception as e: + raise web.HTTPError(500, "nbconvert failed: %s" % e) from e + + if respond_zip(self, name, output, resources): + return + + # MIME type + if exporter.output_mimetype: + self.set_header("Content-Type", "%s; charset=utf-8" % exporter.output_mimetype) + + self.finish(output) + + +# ----------------------------------------------------------------------------- +# URL to handler mappings +# ----------------------------------------------------------------------------- + +_format_regex = r"(?P\w+)" + + +default_handlers = [ + (r"/nbconvert/%s" % _format_regex, NbconvertPostHandler), + (r"/nbconvert/%s%s" % (_format_regex, path_regex), NbconvertFileHandler), +] diff --git a/server/jupyter_server/prometheus/__init__.py b/server/jupyter_server/prometheus/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/server/jupyter_server/prometheus/log_functions.py b/server/jupyter_server/prometheus/log_functions.py new file mode 100644 index 0000000..1f36ade --- /dev/null +++ b/server/jupyter_server/prometheus/log_functions.py @@ -0,0 +1,24 @@ +from .metrics import HTTP_REQUEST_DURATION_SECONDS + + +def prometheus_log_method(handler): + """ + Tornado log handler for recording RED metrics. + + We record the following metrics: + Rate - the number of requests, per second, your services are serving. + Errors - the number of failed requests per second. + Duration - The amount of time each request takes expressed as a time interval. + + We use a fully qualified name of the handler as a label, + rather than every url path to reduce cardinality. 
+ + This function should be either the value of or called from a function + that is the 'log_function' tornado setting. This makes it get called + at the end of every request, allowing us to record the metrics we need. + """ + HTTP_REQUEST_DURATION_SECONDS.labels( + method=handler.request.method, + handler="{}.{}".format(handler.__class__.__module__, type(handler).__name__), + status_code=handler.get_status(), + ).observe(handler.request.request_time()) diff --git a/server/jupyter_server/prometheus/metrics.py b/server/jupyter_server/prometheus/metrics.py new file mode 100644 index 0000000..7b6746e --- /dev/null +++ b/server/jupyter_server/prometheus/metrics.py @@ -0,0 +1,37 @@ +""" +Prometheus metrics exported by Jupyter Server + +Read https://prometheus.io/docs/practices/naming/ for naming +conventions for metrics & labels. +""" + +try: + # Jupyter Notebook also defines these metrics. Re-defining them results in a ValueError. + # Try to de-duplicate by using the ones in Notebook if available. 
+ # See https://github.com/jupyter/jupyter_server/issues/209 + from notebook.prometheus.metrics import ( + HTTP_REQUEST_DURATION_SECONDS, + TERMINAL_CURRENTLY_RUNNING_TOTAL, + KERNEL_CURRENTLY_RUNNING_TOTAL, + ) + +except ImportError: + + from prometheus_client import Histogram, Gauge + + HTTP_REQUEST_DURATION_SECONDS = Histogram( + "http_request_duration_seconds", + "duration in seconds for all HTTP requests", + ["method", "handler", "status_code"], + ) + + TERMINAL_CURRENTLY_RUNNING_TOTAL = Gauge( + "terminal_currently_running_total", + "counter for how many terminals are running", + ) + + KERNEL_CURRENTLY_RUNNING_TOTAL = Gauge( + "kernel_currently_running_total", + "counter for how many kernels are running labeled by type", + ["type"], + ) diff --git a/server/jupyter_server/pytest_plugin.py b/server/jupyter_server/pytest_plugin.py new file mode 100644 index 0000000..45f3f73 --- /dev/null +++ b/server/jupyter_server/pytest_plugin.py @@ -0,0 +1,511 @@ +# Copyright (c) Jupyter Development Team. +# Distributed under the terms of the Modified BSD License. +import io +import json +import logging +import os +import shutil +import sys +import urllib.parse +from binascii import hexlify + +import jupyter_core.paths +import nbformat +import pytest +import tornado +from tornado.escape import url_escape +from traitlets.config import Config + +from jupyter_server.extension import serverextension +from jupyter_server.serverapp import ServerApp +from jupyter_server.services.contents.filemanager import FileContentsManager +from jupyter_server.services.contents.largefilemanager import LargeFileManager +from jupyter_server.utils import url_path_join + + +# List of dependencies needed for this plugin. +pytest_plugins = [ + "pytest_tornasync", + # Once the chunk below moves to Jupyter Core, we'll uncomment + # This plugin and use the fixtures directly from Jupyter Core. 
+ # "jupyter_core.pytest_plugin" +] + + +import asyncio + +if os.name == "nt" and sys.version_info >= (3, 7): + asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) + + +# ============ Move to Jupyter Core ============= + + +def mkdir(tmp_path, *parts): + path = tmp_path.joinpath(*parts) + if not path.exists(): + path.mkdir(parents=True) + return path + + +@pytest.fixture +def jp_home_dir(tmp_path): + """Provides a temporary HOME directory value.""" + return mkdir(tmp_path, "home") + + +@pytest.fixture +def jp_data_dir(tmp_path): + """Provides a temporary Jupyter data dir directory value.""" + return mkdir(tmp_path, "data") + + +@pytest.fixture +def jp_config_dir(tmp_path): + """Provides a temporary Jupyter config dir directory value.""" + return mkdir(tmp_path, "config") + + +@pytest.fixture +def jp_runtime_dir(tmp_path): + """Provides a temporary Jupyter runtime dir directory value.""" + return mkdir(tmp_path, "runtime") + + +@pytest.fixture +def jp_system_jupyter_path(tmp_path): + """Provides a temporary Jupyter system path value.""" + return mkdir(tmp_path, "share", "jupyter") + + +@pytest.fixture +def jp_env_jupyter_path(tmp_path): + """Provides a temporary Jupyter env system path value.""" + return mkdir(tmp_path, "env", "share", "jupyter") + + +@pytest.fixture +def jp_system_config_path(tmp_path): + """Provides a temporary Jupyter config path value.""" + return mkdir(tmp_path, "etc", "jupyter") + + +@pytest.fixture +def jp_env_config_path(tmp_path): + """Provides a temporary Jupyter env config path value.""" + return mkdir(tmp_path, "env", "etc", "jupyter") + + +@pytest.fixture +def jp_environ( + monkeypatch, + tmp_path, + jp_home_dir, + jp_data_dir, + jp_config_dir, + jp_runtime_dir, + jp_system_jupyter_path, + jp_system_config_path, + jp_env_jupyter_path, + jp_env_config_path, +): + """Configures a temporary environment based on Jupyter-specific environment variables.""" + monkeypatch.setenv("HOME", str(jp_home_dir)) + 
monkeypatch.setenv("PYTHONPATH", os.pathsep.join(sys.path)) + # monkeypatch.setenv("JUPYTER_NO_CONFIG", "1") + monkeypatch.setenv("JUPYTER_CONFIG_DIR", str(jp_config_dir)) + monkeypatch.setenv("JUPYTER_DATA_DIR", str(jp_data_dir)) + monkeypatch.setenv("JUPYTER_RUNTIME_DIR", str(jp_runtime_dir)) + monkeypatch.setattr(jupyter_core.paths, "SYSTEM_JUPYTER_PATH", [str(jp_system_jupyter_path)]) + monkeypatch.setattr(jupyter_core.paths, "ENV_JUPYTER_PATH", [str(jp_env_jupyter_path)]) + monkeypatch.setattr(jupyter_core.paths, "SYSTEM_CONFIG_PATH", [str(jp_system_config_path)]) + monkeypatch.setattr(jupyter_core.paths, "ENV_CONFIG_PATH", [str(jp_env_config_path)]) + + +# ================= End: Move to Jupyter core ================ + + +@pytest.fixture +def jp_server_config(): + """Allows tests to setup their specific configuration values.""" + return {} + + +@pytest.fixture +def jp_root_dir(tmp_path): + """Provides a temporary Jupyter root directory value.""" + return mkdir(tmp_path, "root_dir") + + +@pytest.fixture +def jp_template_dir(tmp_path): + """Provides a temporary Jupyter templates directory value.""" + return mkdir(tmp_path, "templates") + + +@pytest.fixture +def jp_argv(): + """Allows tests to setup specific argv values.""" + return [] + + +@pytest.fixture +def jp_extension_environ(jp_env_config_path, monkeypatch): + """Monkeypatch a Jupyter Extension's config path into each test's environment variable""" + monkeypatch.setattr(serverextension, "ENV_CONFIG_PATH", [str(jp_env_config_path)]) + + +@pytest.fixture +def jp_http_port(http_server_port): + """Returns the port value from the http_server_port fixture.""" + return http_server_port[-1] + + +@pytest.fixture +def jp_nbconvert_templates(jp_data_dir): + """Setups up a temporary directory consisting of the nbconvert templates.""" + + # Get path to nbconvert template directory *before* + # monkeypatching the paths env variable via the jp_environ fixture. 
+ possible_paths = jupyter_core.paths.jupyter_path("nbconvert", "templates") + nbconvert_path = None + for path in possible_paths: + if os.path.exists(path): + nbconvert_path = path + break + + nbconvert_target = jp_data_dir / "nbconvert" / "templates" + + # copy nbconvert templates to new tmp data_dir. + if nbconvert_path: + shutil.copytree(nbconvert_path, str(nbconvert_target)) + + +@pytest.fixture +def jp_logging_stream(): + """StringIO stream intended to be used by the core + Jupyter ServerApp logger's default StreamHandler. This + helps avoid collision with stdout which is hijacked + by Pytest. + """ + logging_stream = io.StringIO() + yield logging_stream + output = logging_stream.getvalue() + # If output exists, print it. + if output: + print(output) + return output + + +@pytest.fixture(scope="function") +def jp_configurable_serverapp( + jp_nbconvert_templates, # this fixture must preceed jp_environ + jp_environ, + jp_server_config, + jp_argv, + jp_http_port, + jp_base_url, + tmp_path, + jp_root_dir, + io_loop, + jp_logging_stream, +): + """Starts a Jupyter Server instance based on + the provided configuration values. + + The fixture is a factory; it can be called like + a function inside a unit test. Here's a basic + example of how use this fixture: + + .. code-block:: python + + def my_test(jp_configurable_serverapp): + + app = jp_configurable_serverapp(...) + ... 
+ """ + ServerApp.clear_instance() + + def _configurable_serverapp( + config=jp_server_config, + base_url=jp_base_url, + argv=jp_argv, + environ=jp_environ, + http_port=jp_http_port, + tmp_path=tmp_path, + root_dir=jp_root_dir, + **kwargs + ): + c = Config(config) + c.NotebookNotary.db_file = ":memory:" + token = hexlify(os.urandom(4)).decode("ascii") + app = ServerApp.instance( + # Set the log level to debug for testing purposes + log_level="DEBUG", + port=http_port, + port_retries=0, + open_browser=False, + root_dir=str(root_dir), + base_url=base_url, + config=c, + allow_root=True, + token=token, + **kwargs + ) + + app.init_signal = lambda: None + app.log.propagate = True + app.log.handlers = [] + # Initialize app without httpserver + app.initialize(argv=argv, new_httpserver=False) + # Reroute all logging StreamHandlers away from stdin/stdout since pytest hijacks + # these streams and closes them at unfortunate times. + stream_handlers = [h for h in app.log.handlers if isinstance(h, logging.StreamHandler)] + for handler in stream_handlers: + handler.setStream(jp_logging_stream) + app.log.propagate = True + app.log.handlers = [] + # Start app without ioloop + app.start_app() + return app + + return _configurable_serverapp + + +@pytest.fixture +def jp_ensure_app_fixture(request): + """Ensures that the 'app' fixture used by pytest-tornasync + is set to `jp_web_app`, the Tornado Web Application returned + by the ServerApp in Jupyter Server, provided by the jp_web_app + fixture in this module. + + Note, this hardcodes the `app_fixture` option from + pytest-tornasync to `jp_web_app`. If this value is configured + to something other than the default, it will raise an exception. + """ + app_option = request.config.getoption("app_fixture") + if app_option not in ["app", "jp_web_app"]: + raise Exception( + "jp_serverapp requires the `app-fixture` option " + "to be set to 'jp_web_app`. Try rerunning the " + "current tests with the option `--app-fixture " + "jp_web_app`." 
+ ) + elif app_option == "app": + # Manually set the app_fixture to `jp_web_app` if it's + # not set already. + request.config.option.app_fixture = "jp_web_app" + + +@pytest.fixture(scope="function") +def jp_serverapp(jp_ensure_app_fixture, jp_server_config, jp_argv, jp_configurable_serverapp): + """Starts a Jupyter Server instance based on the established configuration values.""" + app = jp_configurable_serverapp(config=jp_server_config, argv=jp_argv) + yield app + app.remove_server_info_file() + app.remove_browser_open_files() + + +@pytest.fixture +def jp_web_app(jp_serverapp): + """app fixture is needed by pytest_tornasync plugin""" + return jp_serverapp.web_app + + +@pytest.fixture +def jp_auth_header(jp_serverapp): + """Configures an authorization header using the token from the serverapp fixture.""" + return {"Authorization": "token {token}".format(token=jp_serverapp.token)} + + +@pytest.fixture +def jp_base_url(): + """Returns the base url to use for the test.""" + return "/a%40b/" + + +@pytest.fixture +def jp_fetch(jp_serverapp, http_server_client, jp_auth_header, jp_base_url): + """Sends an (asynchronous) HTTP request to a test server. + + The fixture is a factory; it can be called like + a function inside a unit test. Here's a basic + example of how use this fixture: + + .. code-block:: python + + async def my_test(jp_fetch): + + response = await jp_fetch("api", "spec.yaml") + ... + """ + + def client_fetch(*parts, headers=None, params=None, **kwargs): + if not headers: + headers = {} + if not params: + params = {} + # Handle URL strings + path_url = url_escape(url_path_join(*parts), plus=False) + base_path_url = url_path_join(jp_base_url, path_url) + params_url = urllib.parse.urlencode(params) + url = base_path_url + "?" + params_url + # Add auth keys to header + headers.update(jp_auth_header) + # Make request. 
+ return http_server_client.fetch(url, headers=headers, request_timeout=20, **kwargs) + + return client_fetch + + +@pytest.fixture +def jp_ws_fetch(jp_serverapp, http_server_client, jp_auth_header, jp_http_port, jp_base_url): + """Sends a websocket request to a test server. + + The fixture is a factory; it can be called like + a function inside a unit test. Here's a basic + example of how use this fixture: + + .. code-block:: python + + async def my_test(jp_fetch, jp_ws_fetch): + # Start a kernel + r = await jp_fetch( + 'api', 'kernels', + method='POST', + body=json.dumps({ + 'name': "python3" + }) + ) + kid = json.loads(r.body.decode())['id'] + + # Open a websocket connection. + ws = await jp_ws_fetch( + 'api', 'kernels', kid, 'channels' + ) + ... + """ + + def client_fetch(*parts, headers=None, params=None, **kwargs): + if not headers: + headers = {} + if not params: + params = {} + # Handle URL strings + path_url = url_escape(url_path_join(*parts), plus=False) + base_path_url = url_path_join(jp_base_url, path_url) + urlparts = urllib.parse.urlparse("ws://localhost:{}".format(jp_http_port)) + urlparts = urlparts._replace(path=base_path_url, query=urllib.parse.urlencode(params)) + url = urlparts.geturl() + # Add auth keys to header + headers.update(jp_auth_header) + # Make request. 
+ req = tornado.httpclient.HTTPRequest(url, headers=headers, connect_timeout=120) + return tornado.websocket.websocket_connect(req) + + return client_fetch + + +some_resource = "The very model of a modern major general" +sample_kernel_json = { + "argv": ["cat", "{connection_file}"], + "display_name": "Test kernel", +} + + +@pytest.fixture +def jp_kernelspecs(jp_data_dir): + """Configures some sample kernelspecs in the Jupyter data directory.""" + spec_names = ["sample", "sample 2", "bad"] + for name in spec_names: + sample_kernel_dir = jp_data_dir.joinpath("kernels", name) + sample_kernel_dir.mkdir(parents=True) + # Create kernel json file + sample_kernel_file = sample_kernel_dir.joinpath("kernel.json") + kernel_json = sample_kernel_json.copy() + if name == "bad": + kernel_json["argv"] = ["non_existent_path"] + sample_kernel_file.write_text(json.dumps(kernel_json)) + # Create resources text + sample_kernel_resources = sample_kernel_dir.joinpath("resource.txt") + sample_kernel_resources.write_text(some_resource) + + +@pytest.fixture(params=[True, False]) +def jp_contents_manager(request, tmp_path): + """Returns a FileContentsManager instance based on the use_atomic_writing parameter value.""" + return FileContentsManager(root_dir=str(tmp_path), use_atomic_writing=request.param) + + +@pytest.fixture +def jp_large_contents_manager(tmp_path): + """Returns a LargeFileManager instance.""" + return LargeFileManager(root_dir=str(tmp_path)) + + +@pytest.fixture +def jp_create_notebook(jp_root_dir): + """Creates a notebook in the test's home directory.""" + + def inner(nbpath): + nbpath = jp_root_dir.joinpath(nbpath) + # Check that the notebook has the correct file extension. + if nbpath.suffix != ".ipynb": + raise Exception("File extension for notebook must be .ipynb") + # If the notebook path has a parent directory, make sure it's created. + parent = nbpath.parent + parent.mkdir(parents=True, exist_ok=True) + # Create a notebook string and write to file. 
+ nb = nbformat.v4.new_notebook() + nbtext = nbformat.writes(nb, version=4) + nbpath.write_text(nbtext) + + return inner + + +@pytest.fixture(autouse=True) +def jp_server_cleanup(): + yield + ServerApp.clear_instance() + + +@pytest.fixture +def jp_cleanup_subprocesses(jp_serverapp): + """Clean up subprocesses started by a Jupyter Server, i.e. kernels and terminal.""" + + async def _(): + terminal_cleanup = jp_serverapp.web_app.settings["terminal_manager"].terminate_all + kernel_cleanup = jp_serverapp.kernel_manager.shutdown_all + + async def kernel_cleanup_steps(): + # Try a graceful shutdown with a timeout + try: + await asyncio.wait_for(kernel_cleanup(), timeout=15.0) + except asyncio.TimeoutError: + # Now force a shutdown + try: + await asyncio.wait_for(kernel_cleanup(now=True), timeout=15.0) + except asyncio.TimeoutError: + print(Exception("Kernel never shutdown!")) + except Exception as e: + print(e) + + if asyncio.iscoroutinefunction(terminal_cleanup): + try: + await terminal_cleanup() + except Exception as e: + print(e) + else: + try: + await terminal_cleanup() + except Exception as e: + print(e) + if asyncio.iscoroutinefunction(kernel_cleanup): + await kernel_cleanup_steps() + else: + try: + kernel_cleanup() + except Exception as e: + print(e) + + return _ diff --git a/server/jupyter_server/serverapp.py b/server/jupyter_server/serverapp.py new file mode 100644 index 0000000..c6b34ee --- /dev/null +++ b/server/jupyter_server/serverapp.py @@ -0,0 +1,2827 @@ +# coding: utf-8 +"""A tornado based Jupyter server.""" +# Copyright (c) Jupyter Development Team. +# Distributed under the terms of the Modified BSD License. 
+import binascii +import datetime +import errno +import gettext +import hashlib +import hmac +import inspect +import io +import ipaddress +import json +import logging +import mimetypes +import os +import pathlib +import random +import re +import select +import signal +import socket +import stat +import sys +import threading +import time +import urllib +import warnings +import webbrowser +from base64 import encodebytes + +try: + import resource +except ImportError: + # Windows + resource = None + +from jinja2 import Environment, FileSystemLoader + +from jupyter_core.paths import secure_write +from jupyter_server.transutils import trans, _i18n +from jupyter_server.utils import run_sync_in_loop, urljoin, pathname2url + +# the minimum viable tornado version: needs to be kept in sync with setup.py +MIN_TORNADO = (6, 1, 0) + +try: + import tornado + + assert tornado.version_info >= MIN_TORNADO +except (ImportError, AttributeError, AssertionError) as e: # pragma: no cover + raise ImportError(_i18n("The Jupyter Server requires tornado >=%s.%s.%s") % MIN_TORNADO) from e + +from tornado import httpserver +from tornado import ioloop +from tornado import web +from tornado.httputil import url_concat +from tornado.log import LogFormatter, app_log, access_log, gen_log + +if not sys.platform.startswith("win"): + from tornado.netutil import bind_unix_socket + +from jupyter_server import ( + DEFAULT_JUPYTER_SERVER_PORT, + DEFAULT_STATIC_FILES_PATH, + DEFAULT_TEMPLATE_PATH_LIST, + __version__, +) + +from jupyter_server.base.handlers import MainHandler, RedirectWithParams, Template404 +from jupyter_server.log import log_request +from jupyter_server.services.kernels.kernelmanager import ( + MappingKernelManager, + AsyncMappingKernelManager, +) +from jupyter_server.services.config import ConfigManager +from jupyter_server.services.contents.manager import ( + AsyncContentsManager, + ContentsManager, +) +from jupyter_server.services.contents.filemanager import ( + 
AsyncFileContentsManager, + FileContentsManager, +) +from jupyter_server.services.contents.largefilemanager import LargeFileManager +from jupyter_server.services.sessions.sessionmanager import SessionManager +from jupyter_server.gateway.managers import ( + GatewayMappingKernelManager, + GatewayKernelSpecManager, + GatewaySessionManager, + GatewayClient, +) +from jupyter_server.auth.authorizer import Authorizer, AllowAllAuthorizer + +from jupyter_server.auth.login import LoginHandler +from jupyter_server.auth.logout import LogoutHandler +from jupyter_server.base.handlers import FileFindHandler + +from traitlets.config import Config +from traitlets.config.application import catch_config_error, boolean_flag +from jupyter_core.application import ( + JupyterApp, + base_flags, + base_aliases, +) +from jupyter_core.paths import jupyter_config_path +from jupyter_client import KernelManager +from jupyter_client.kernelspec import KernelSpecManager +from jupyter_client.session import Session +from nbformat.sign import NotebookNotary +from traitlets import ( + Any, + Dict, + Unicode, + Integer, + List, + Bool, + Bytes, + Instance, + TraitError, + Type, + Float, + observe, + default, + validate, +) +from jupyter_core.paths import jupyter_runtime_dir +from jupyter_server._sysinfo import get_sys_info + +from jupyter_server._tz import utcnow +from jupyter_server.utils import ( + url_path_join, + check_pid, + url_escape, + pathname2url, + unix_socket_in_use, + urlencode_unix_socket_path, + fetch, +) + +from jupyter_server.extension.serverextension import ServerExtensionApp +from jupyter_server.extension.manager import ExtensionManager +from jupyter_server.extension.config import ExtensionConfigManager +from jupyter_server.traittypes import TypeFromClasses + +# Tolerate missing terminado package. 
+try: + from jupyter_server.terminal import TerminalManager + + terminado_available = True +except ImportError: + terminado_available = False + +# ----------------------------------------------------------------------------- +# Module globals +# ----------------------------------------------------------------------------- + +_examples = """ +jupyter server # start the server +jupyter server --certfile=mycert.pem # use SSL/TLS certificate +jupyter server password # enter a password to protect the server +""" + +JUPYTER_SERVICE_HANDLERS = dict( + auth=None, + api=["jupyter_server.services.api.handlers"], + config=["jupyter_server.services.config.handlers"], + contents=["jupyter_server.services.contents.handlers"], + files=["jupyter_server.files.handlers"], + kernels=["jupyter_server.services.kernels.handlers"], + kernelspecs=[ + "jupyter_server.kernelspecs.handlers", + "jupyter_server.services.kernelspecs.handlers", + ], + nbconvert=[ + "jupyter_server.nbconvert.handlers", + "jupyter_server.services.nbconvert.handlers", + ], + security=["jupyter_server.services.security.handlers"], + sessions=["jupyter_server.services.sessions.handlers"], + shutdown=["jupyter_server.services.shutdown"], + view=["jupyter_server.view.handlers"], +) + +# Added for backwards compatibility from classic notebook server. +DEFAULT_SERVER_PORT = DEFAULT_JUPYTER_SERVER_PORT + +# ----------------------------------------------------------------------------- +# Helper functions +# ----------------------------------------------------------------------------- + + +def random_ports(port, n): + """Generate a list of n random ports near the given port. + + The first 5 ports will be sequential, and the remaining n-5 will be + randomly selected in the range [port-2*n, port+2*n]. 
+ """ + for i in range(min(5, n)): + yield port + i + for i in range(n - 5): + yield max(1, port + random.randint(-2 * n, 2 * n)) + + +def load_handlers(name): + """Load the (URL pattern, handler) tuples for each component.""" + mod = __import__(name, fromlist=["default_handlers"]) + return mod.default_handlers + + +# ----------------------------------------------------------------------------- +# The Tornado web application +# ----------------------------------------------------------------------------- + + +class ServerWebApplication(web.Application): + def __init__( + self, + jupyter_app, + default_services, + kernel_manager, + contents_manager, + session_manager, + kernel_spec_manager, + config_manager, + extra_services, + log, + base_url, + default_url, + settings_overrides, + jinja_env_options, + authorizer=None, + ): + if authorizer is None: + warnings.warn( + "authorizer unspecified. Using permissive AllowAllAuthorizer." + " Specify an authorizer to avoid this message.", + RuntimeWarning, + stacklevel=2, + ) + authorizer = AllowAllAuthorizer(jupyter_app) + + settings = self.init_settings( + jupyter_app, + kernel_manager, + contents_manager, + session_manager, + kernel_spec_manager, + config_manager, + extra_services, + log, + base_url, + default_url, + settings_overrides, + jinja_env_options, + authorizer=authorizer, + ) + handlers = self.init_handlers(default_services, settings) + + super(ServerWebApplication, self).__init__(handlers, **settings) + + def init_settings( + self, + jupyter_app, + kernel_manager, + contents_manager, + session_manager, + kernel_spec_manager, + config_manager, + extra_services, + log, + base_url, + default_url, + settings_overrides, + jinja_env_options=None, + authorizer=None, + ): + + _template_path = settings_overrides.get( + "template_path", + jupyter_app.template_file_path, + ) + if isinstance(_template_path, str): + _template_path = (_template_path,) + template_path = [os.path.expanduser(path) for path in _template_path] + 
+ jenv_opt = {"autoescape": True} + jenv_opt.update(jinja_env_options if jinja_env_options else {}) + + env = Environment( + loader=FileSystemLoader(template_path), extensions=["jinja2.ext.i18n"], **jenv_opt + ) + sys_info = get_sys_info() + + # If the user is running the server in a git directory, make the assumption + # that this is a dev install and suggest to the developer `npm run build:watch`. + base_dir = os.path.realpath(os.path.join(__file__, "..", "..")) + dev_mode = os.path.exists(os.path.join(base_dir, ".git")) + + nbui = gettext.translation( + "nbui", + localedir=os.path.join(base_dir, "jupyter_server/i18n"), + fallback=True, + ) + env.install_gettext_translations(nbui, newstyle=False) + + if sys_info["commit_source"] == "repository": + # don't cache (rely on 304) when working from default branch + version_hash = "" + else: + # reset the cache on server restart + version_hash = datetime.datetime.now().strftime("%Y%m%d%H%M%S") + + now = utcnow() + + root_dir = contents_manager.root_dir + home = os.path.expanduser("~") + if root_dir.startswith(home + os.path.sep): + # collapse $HOME to ~ + root_dir = "~" + root_dir[len(home) :] + + settings = dict( + # basics + log_function=log_request, + base_url=base_url, + default_url=default_url, + template_path=template_path, + static_path=jupyter_app.static_file_path, + static_custom_path=jupyter_app.static_custom_path, + static_handler_class=FileFindHandler, + static_url_prefix=url_path_join(base_url, "/static/"), + static_handler_args={ + # don't cache custom.js + "no_cache_paths": [url_path_join(base_url, "static", "custom")], + }, + version_hash=version_hash, + # kernel message protocol over websoclet + kernel_ws_protocol=jupyter_app.kernel_ws_protocol, + # rate limits + limit_rate=jupyter_app.limit_rate, + iopub_msg_rate_limit=jupyter_app.iopub_msg_rate_limit, + iopub_data_rate_limit=jupyter_app.iopub_data_rate_limit, + rate_limit_window=jupyter_app.rate_limit_window, + # authentication + 
cookie_secret=jupyter_app.cookie_secret, + login_url=url_path_join(base_url, "/login"), + login_handler_class=jupyter_app.login_handler_class, + logout_handler_class=jupyter_app.logout_handler_class, + password=jupyter_app.password, + xsrf_cookies=True, + disable_check_xsrf=jupyter_app.disable_check_xsrf, + allow_remote_access=jupyter_app.allow_remote_access, + local_hostnames=jupyter_app.local_hostnames, + authenticate_prometheus=jupyter_app.authenticate_prometheus, + # managers + kernel_manager=kernel_manager, + contents_manager=contents_manager, + session_manager=session_manager, + kernel_spec_manager=kernel_spec_manager, + config_manager=config_manager, + authorizer=authorizer, + # handlers + extra_services=extra_services, + # Jupyter stuff + started=now, + # place for extensions to register activity + # so that they can prevent idle-shutdown + last_activity_times={}, + jinja_template_vars=jupyter_app.jinja_template_vars, + websocket_url=jupyter_app.websocket_url, + shutdown_button=jupyter_app.quit_button, + config=jupyter_app.config, + config_dir=jupyter_app.config_dir, + allow_password_change=jupyter_app.allow_password_change, + server_root_dir=root_dir, + jinja2_env=env, + terminals_available=terminado_available and jupyter_app.terminals_enabled, + serverapp=jupyter_app, + ) + + # allow custom overrides for the tornado web app. + settings.update(settings_overrides) + + if base_url and "xsrf_cookie_kwargs" not in settings: + # default: set xsrf cookie on base_url + settings["xsrf_cookie_kwargs"] = {"path": base_url} + return settings + + def init_handlers(self, default_services, settings): + """Load the (URL pattern, handler) tuples for each component.""" + # Order matters. The first handler to match the URL will handle the request. + handlers = [] + # load extra services specified by users before default handlers + for service in settings["extra_services"]: + handlers.extend(load_handlers(service)) + + # Add auth services. 
+ if "auth" in default_services: + handlers.extend([(r"/login", settings["login_handler_class"])]) + handlers.extend([(r"/logout", settings["logout_handler_class"])]) + + # Load default services. Raise exception if service not + # found in JUPYTER_SERVICE_HANLDERS. + for service in default_services: + if service in JUPYTER_SERVICE_HANDLERS: + locations = JUPYTER_SERVICE_HANDLERS[service] + if locations is not None: + for loc in locations: + handlers.extend(load_handlers(loc)) + else: + raise Exception( + "{} is not recognized as a jupyter_server " + "service. If this is a custom service, " + "try adding it to the " + "`extra_services` list.".format(service) + ) + + # Add extra handlers from contents manager. + handlers.extend(settings["contents_manager"].get_extra_handlers()) + + # If gateway mode is enabled, replace appropriate handlers to perform redirection + if GatewayClient.instance().gateway_enabled: + # for each handler required for gateway, locate its pattern + # in the current list and replace that entry... 
+ gateway_handlers = load_handlers("jupyter_server.gateway.handlers") + for i, gwh in enumerate(gateway_handlers): + for j, h in enumerate(handlers): + if gwh[0] == h[0]: + handlers[j] = (gwh[0], gwh[1]) + break + + # register base handlers last + handlers.extend(load_handlers("jupyter_server.base.handlers")) + + if settings["default_url"] != settings["base_url"]: + # set the URL that will be redirected from `/` + handlers.append( + ( + r"/?", + RedirectWithParams, + { + "url": settings["default_url"], + "permanent": False, # want 302, not 301 + }, + ) + ) + else: + handlers.append((r"/", MainHandler)) + + # prepend base_url onto the patterns that we match + new_handlers = [] + for handler in handlers: + pattern = url_path_join(settings["base_url"], handler[0]) + new_handler = tuple([pattern] + list(handler[1:])) + new_handlers.append(new_handler) + # add 404 on the end, which will catch everything that falls through + new_handlers.append((r"(.*)", Template404)) + return new_handlers + + def last_activity(self): + """Get a UTC timestamp for when the server last did something. + + Includes: API activity, kernel activity, kernel shutdown, and terminal + activity. + """ + sources = [ + self.settings["started"], + self.settings["kernel_manager"].last_kernel_activity, + ] + try: + sources.append(self.settings["api_last_activity"]) + except KeyError: + pass + try: + sources.append(self.settings["terminal_last_activity"]) + except KeyError: + pass + sources.extend(self.settings["last_activity_times"].values()) + return max(sources) + + +class JupyterPasswordApp(JupyterApp): + """Set a password for the Jupyter server. + + Setting a password secures the Jupyter server + and removes the need for token-based authentication. 
+ """ + + description = __doc__ + + def _config_file_default(self): + return os.path.join(self.config_dir, "jupyter_server_config.json") + + def start(self): + from jupyter_server.auth.security import set_password + + set_password(config_file=self.config_file) + self.log.info("Wrote hashed password to %s" % self.config_file) + + +def shutdown_server(server_info, timeout=5, log=None): + """Shutdown a Jupyter server in a separate process. + + *server_info* should be a dictionary as produced by list_running_servers(). + + Will first try to request shutdown using /api/shutdown . + On Unix, if the server is still running after *timeout* seconds, it will + send SIGTERM. After another timeout, it escalates to SIGKILL. + + Returns True if the server was stopped by any means, False if stopping it + failed (on Windows). + """ + + url = server_info["url"] + pid = server_info["pid"] + + if log: + log.debug("POST request to %sapi/shutdown", url) + + r = fetch(url, method="POST", headers={"Authorization": "token " + server_info["token"]}) + # Poll to see if it shut down. + for _ in range(timeout * 10): + if not check_pid(pid): + if log: + log.debug("Server PID %s is gone", pid) + return True + time.sleep(0.1) + + if sys.platform.startswith("win"): + return False + + if log: + log.debug("SIGTERM to PID %s", pid) + os.kill(pid, signal.SIGTERM) + + # Poll to see if it shut down. + for _ in range(timeout * 10): + if not check_pid(pid): + if log: + log.debug("Server PID %s is gone", pid) + return True + time.sleep(0.1) + + if log: + log.debug("SIGKILL to PID %s", pid) + os.kill(pid, signal.SIGKILL) + return True # SIGKILL cannot be caught + + +class JupyterServerStopApp(JupyterApp): + + version = __version__ + description = "Stop currently running Jupyter server for a given port" + + port = Integer( + DEFAULT_JUPYTER_SERVER_PORT, + config=True, + help="Port of the server to be killed. 
Default %s" % DEFAULT_JUPYTER_SERVER_PORT, + ) + + sock = Unicode("", config=True, help="UNIX socket of the server to be killed.") + + def parse_command_line(self, argv=None): + super(JupyterServerStopApp, self).parse_command_line(argv) + if self.extra_args: + try: + self.port = int(self.extra_args[0]) + except ValueError: + # self.extra_args[0] was not an int, so it must be a string (unix socket). + self.sock = self.extra_args[0] + + def shutdown_server(self, server): + return shutdown_server(server, log=self.log) + + def _shutdown_or_exit(self, target_endpoint, server): + print("Shutting down server on %s..." % target_endpoint) + if not self.shutdown_server(server): + sys.exit("Could not stop server on %s" % target_endpoint) + + @staticmethod + def _maybe_remove_unix_socket(socket_path): + try: + os.unlink(socket_path) + except (OSError, IOError): + pass + + def start(self): + servers = list(list_running_servers(self.runtime_dir, log=self.log)) + if not servers: + self.exit("There are no running servers (per %s)" % self.runtime_dir) + for server in servers: + if self.sock: + sock = server.get("sock", None) + if sock and sock == self.sock: + self._shutdown_or_exit(sock, server) + # Attempt to remove the UNIX socket after stopping. 
+ self._maybe_remove_unix_socket(sock) + return + elif self.port: + port = server.get("port", None) + if port == self.port: + self._shutdown_or_exit(port, server) + return + current_endpoint = self.sock or self.port + print( + "There is currently no server running on {}".format(current_endpoint), + file=sys.stderr, + ) + print("Ports/sockets currently in use:", file=sys.stderr) + for server in servers: + print(" - {}".format(server.get("sock") or server["port"]), file=sys.stderr) + self.exit(1) + + +class JupyterServerListApp(JupyterApp): + version = __version__ + description = _i18n("List currently running Kennen servers.") + + flags = dict( + jsonlist=( + {"JupyterServerListApp": {"jsonlist": True}}, + _i18n("Produce machine-readable JSON list output."), + ), + json=( + {"JupyterServerListApp": {"json": True}}, + _i18n("Produce machine-readable JSON object on each line of output."), + ), + ) + + jsonlist = Bool( + False, + config=True, + help=_i18n( + "If True, the output will be a JSON list of objects, one per " + "active Kennen server, each with the details from the " + "relevant server info file." + ), + ) + json = Bool( + False, + config=True, + help=_i18n( + "If True, each line of output will be a JSON object with the " + "details from the server info file. 
For a JSON list output, " + "see the KennenServerListApp.jsonlist configuration value" + ), + ) + + def start(self): + serverinfo_list = list(list_running_servers(self.runtime_dir, log=self.log)) + if self.jsonlist: + print(json.dumps(serverinfo_list, indent=2)) + elif self.json: + for serverinfo in serverinfo_list: + print(json.dumps(serverinfo)) + else: + print("Currently running servers:") + for serverinfo in serverinfo_list: + url = serverinfo["url"] + if serverinfo.get("token"): + url = url + "?token=%s" % serverinfo["token"] + print(url, "::", serverinfo["root_dir"]) + + +# ----------------------------------------------------------------------------- +# Aliases and Flags +# ----------------------------------------------------------------------------- + +flags = dict(base_flags) + +flags["allow-root"] = ( + {"ServerApp": {"allow_root": True}}, + _i18n("Allow the server to be run from root user."), +) +flags["no-browser"] = ( + {"ServerApp": {"open_browser": False}, "ExtensionApp": {"open_browser": False}}, + _i18n("Prevent the opening of the default url in the browser."), +) +flags["debug"] = ( + {"ServerApp": {"log_level": "DEBUG"}, "ExtensionApp": {"log_level": "DEBUG"}}, + _i18n("Set debug level for the extension and underlying server applications."), +) +flags["autoreload"] = ( + {"ServerApp": {"autoreload": True}}, + """Autoreload the webapp + Enable reloading of the tornado webapp and all imported Python packages + when any changes are made to any Python src files in server or + extensions. 
+ """, +) + + +# Add notebook manager flags +flags.update( + boolean_flag( + "script", + "FileContentsManager.save_script", + "DEPRECATED, IGNORED", + "DEPRECATED, IGNORED", + ) +) + +aliases = dict(base_aliases) + +aliases.update( + { + "ip": "ServerApp.ip", + "port": "ServerApp.port", + "port-retries": "ServerApp.port_retries", + "sock": "ServerApp.sock", + "sock-mode": "ServerApp.sock_mode", + "transport": "KernelManager.transport", + "keyfile": "ServerApp.keyfile", + "certfile": "ServerApp.certfile", + "client-ca": "ServerApp.client_ca", + "notebook-dir": "ServerApp.root_dir", + "preferred-dir": "ServerApp.preferred_dir", + "browser": "ServerApp.browser", + "pylab": "ServerApp.pylab", + "gateway-url": "GatewayClient.url", + } +) + +# ----------------------------------------------------------------------------- +# ServerApp +# ----------------------------------------------------------------------------- + + +class ServerApp(JupyterApp): + + name = "jupyter-server" + version = __version__ + description = _i18n( + """The Kennen Server. 
+ + This launches a Tornado-based Kennen Server.""" + ) + examples = _examples + + flags = Dict(flags) + aliases = Dict(aliases) + + classes = [ + KernelManager, + Session, + MappingKernelManager, + KernelSpecManager, + AsyncMappingKernelManager, + ContentsManager, + FileContentsManager, + AsyncContentsManager, + AsyncFileContentsManager, + NotebookNotary, + GatewayMappingKernelManager, + GatewayKernelSpecManager, + GatewaySessionManager, + GatewayClient, + Authorizer, + ] + if terminado_available: # Only necessary when terminado is available + classes.append(TerminalManager) + + subcommands = dict( + list=(JupyterServerListApp, JupyterServerListApp.description.splitlines()[0]), + stop=(JupyterServerStopApp, JupyterServerStopApp.description.splitlines()[0]), + password=(JupyterPasswordApp, JupyterPasswordApp.description.splitlines()[0]), + extension=(ServerExtensionApp, ServerExtensionApp.description.splitlines()[0]), + ) + + # A list of services whose handlers will be exposed. + # Subclasses can override this list to + # expose a subset of these handlers. + default_services = ( + "api", + "auth", + "config", + "contents", + "files", + "kernels", + "kernelspecs", + "nbconvert", + "security", + "sessions", + "shutdown", + "view", + ) + + _log_formatter_cls = LogFormatter + + @default("log_level") + def _default_log_level(self): + return logging.INFO + + @default("log_format") + def _default_log_format(self): + """override default log format to include date & time""" + return ( + "%(color)s[%(levelname)1.1s %(asctime)s.%(msecs).03d %(name)s]%(end_color)s %(message)s" + ) + + # file to be opened in the Jupyter server + file_to_run = Unicode("", help="Open the named file when the application is launched.").tag( + config=True + ) + + file_url_prefix = Unicode( + "notebooks", help="The URL prefix where files are opened directly." 
+ ).tag(config=True) + + # Network related information + allow_origin = Unicode( + "", + config=True, + help="""Set the Access-Control-Allow-Origin header + + Use '*' to allow any origin to access your server. + + Takes precedence over allow_origin_pat. + """, + ) + + allow_origin_pat = Unicode( + "", + config=True, + help="""Use a regular expression for the Access-Control-Allow-Origin header + + Requests from an origin matching the expression will get replies with: + + Access-Control-Allow-Origin: origin + + where `origin` is the origin of the request. + + Ignored if allow_origin is set. + """, + ) + + allow_credentials = Bool( + False, + config=True, + help=_i18n("Set the Access-Control-Allow-Credentials: true header"), + ) + + allow_root = Bool( + False, + config=True, + help=_i18n("Whether to allow the user to run the server as root."), + ) + + autoreload = Bool( + False, + config=True, + help=_i18n("Reload the webapp when changes are made to any Python src files."), + ) + + default_url = Unicode("/", config=True, help=_i18n("The default URL to redirect to from `/`")) + + ip = Unicode( + "localhost", + config=True, + help=_i18n("The IP address the Jupyter server will listen on."), + ) + + @default("ip") + def _default_ip(self): + """Return localhost if available, 127.0.0.1 otherwise. + + On some (horribly broken) systems, localhost cannot be bound. + """ + s = socket.socket() + try: + s.bind(("localhost", 0)) + except socket.error as e: + self.log.warning( + _i18n("Cannot bind to localhost, using 127.0.0.1 as default ip\n%s"), e + ) + return "127.0.0.1" + else: + s.close() + return "localhost" + + @validate("ip") + def _validate_ip(self, proposal): + value = proposal["value"] + if value == "*": + value = "" + return value + + custom_display_url = Unicode( + "", + config=True, + help=_i18n( + """Override URL shown to users. + + Replace actual URL, including protocol, address, port and base URL, + with the given value when displaying URL to the users. 
Do not change + the actual connection URL. If authentication token is enabled, the + token is added to the custom URL automatically. + + This option is intended to be used when the URL to display to the user + cannot be determined reliably by the Jupyter server (proxified + or containerized setups for example).""" + ), + ) + + port_env = "JUPYTER_PORT" + port_default_value = DEFAULT_JUPYTER_SERVER_PORT + + port = Integer( + config=True, + help=_i18n("The port the server will listen on (env: JUPYTER_PORT)."), + ) + + @default("port") + def port_default(self): + return int(os.getenv(self.port_env, self.port_default_value)) + + port_retries_env = "JUPYTER_PORT_RETRIES" + port_retries_default_value = 50 + port_retries = Integer( + port_retries_default_value, + config=True, + help=_i18n( + "The number of additional ports to try if the specified port is not " + "available (env: JUPYTER_PORT_RETRIES)." + ), + ) + + @default("port_retries") + def port_retries_default(self): + return int(os.getenv(self.port_retries_env, self.port_retries_default_value)) + + sock = Unicode("", config=True, help="The UNIX socket the Jupyter server will listen on.") + + sock_mode = Unicode( + "0600", + config=True, + help="The permissions mode for UNIX socket creation (default: 0600).", + ) + + @validate("sock_mode") + def _validate_sock_mode(self, proposal): + value = proposal["value"] + try: + converted_value = int(value.encode(), 8) + assert all( + ( + # Ensure the mode is at least user readable/writable. + bool(converted_value & stat.S_IRUSR), + bool(converted_value & stat.S_IWUSR), + # And isn't out of bounds. + converted_value <= 2**12, + ) + ) + except ValueError: + raise TraitError('invalid --sock-mode value: %s, please specify as e.g. 
"0600"' % value) + except AssertionError: + raise TraitError( + "invalid --sock-mode value: %s, must have u+rw (0600) at a minimum" % value + ) + return value + + certfile = Unicode( + "", + config=True, + help=_i18n("""The full path to an SSL/TLS certificate file."""), + ) + + keyfile = Unicode( + "", + config=True, + help=_i18n("""The full path to a private key file for usage with SSL/TLS."""), + ) + + client_ca = Unicode( + "", + config=True, + help=_i18n( + """The full path to a certificate authority certificate for SSL/TLS client authentication.""" + ), + ) + + cookie_secret_file = Unicode( + config=True, help=_i18n("""The file where the cookie secret is stored.""") + ) + + @default("cookie_secret_file") + def _default_cookie_secret_file(self): + return os.path.join(self.runtime_dir, "jupyter_cookie_secret") + + cookie_secret = Bytes( + b"", + config=True, + help="""The random bytes used to secure cookies. + By default this is a new random number every time you start the server. + Set it to a value in a config file to enable logins to persist across server sessions. + + Note: Cookie secrets should be kept private, do not share config files with + cookie_secret stored in plaintext (you can read the value from a file). 
+ """, + ) + + @default("cookie_secret") + def _default_cookie_secret(self): + if os.path.exists(self.cookie_secret_file): + with io.open(self.cookie_secret_file, "rb") as f: + key = f.read() + else: + key = encodebytes(os.urandom(32)) + self._write_cookie_secret_file(key) + h = hmac.new(key, digestmod=hashlib.sha256) + h.update(self.password.encode()) + return h.digest() + + def _write_cookie_secret_file(self, secret): + """write my secret to my secret_file""" + self.log.info(_i18n("Writing Jupyter server cookie secret to %s"), self.cookie_secret_file) + try: + with secure_write(self.cookie_secret_file, True) as f: + f.write(secret) + except OSError as e: + self.log.error( + _i18n("Failed to write cookie secret to %s: %s"), + self.cookie_secret_file, + e, + ) + + token = Unicode( + "", + help=_i18n( + """Token used for authenticating first-time connections to the server. + + The token can be read from the file referenced by JUPYTER_TOKEN_FILE or set directly + with the JUPYTER_TOKEN environment variable. + + When no password is enabled, + the default is to generate a new, random token. + + Setting to an empty string disables authentication altogether, which is NOT RECOMMENDED. + """ + ), + ).tag(config=True) + + _token_generated = True + + @default("token") + def _token_default(self): + if os.getenv("JUPYTER_TOKEN"): + self._token_generated = False + return os.getenv("JUPYTER_TOKEN") + if os.getenv("JUPYTER_TOKEN_FILE"): + self._token_generated = False + with io.open(os.getenv("JUPYTER_TOKEN_FILE"), "r") as token_file: + return token_file.read() + if self.password: + # no token if password is enabled + self._token_generated = False + return "" + else: + self._token_generated = True + return binascii.hexlify(os.urandom(24)).decode("ascii") + + min_open_files_limit = Integer( + config=True, + help=""" + Gets or sets a lower bound on the open file handles process resource + limit. 
This may need to be increased if you run into an + OSError: [Errno 24] Too many open files. + This is not applicable when running on Windows. + """, + allow_none=True, + ) + + @default("min_open_files_limit") + def _default_min_open_files_limit(self): + if resource is None: + # Ignoring min_open_files_limit because the limit cannot be adjusted (for example, on Windows) + return None + + soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE) + + DEFAULT_SOFT = 4096 + if hard >= DEFAULT_SOFT: + return DEFAULT_SOFT + + self.log.debug( + "Default value for min_open_files_limit is ignored (hard=%r, soft=%r)", + hard, + soft, + ) + + return soft + + max_body_size = Integer( + 512 * 1024 * 1024, + config=True, + help=""" + Sets the maximum allowed size of the client request body, specified in + the Content-Length request header field. If the size in a request + exceeds the configured value, a malformed HTTP message is returned to + the client. + + Note: max_body_size is applied even in streaming mode. + """, + ) + + max_buffer_size = Integer( + 512 * 1024 * 1024, + config=True, + help=""" + Gets or sets the maximum amount of memory, in bytes, that is allocated + for use by the buffer manager. + """, + ) + + @observe("token") + def _token_changed(self, change): + self._token_generated = False + + password = Unicode( + "", + config=True, + help="""Hashed password to use for web authentication. + + To generate, type in a python/IPython shell: + + from jupyter_server.auth import passwd; passwd() + + The string should be of the form type:salt:hashed-password. + """, + ) + + password_required = Bool( + False, + config=True, + help="""Forces users to use a password for the Jupyter server. + This is useful in a multi user environment, for instance when + everybody in the LAN can access each other's machine through ssh. + + In such a case, serving on localhost is not secure since + any user can connect to the Jupyter server via ssh. 
+ + """, + ) + + allow_password_change = Bool( + True, + config=True, + help="""Allow password to be changed at login for the Jupyter server. + + While logging in with a token, the Jupyter server UI will give the opportunity to + the user to enter a new password at the same time that will replace + the token login mechanism. + + This can be set to false to prevent changing password from the UI/API. + """, + ) + + disable_check_xsrf = Bool( + False, + config=True, + help="""Disable cross-site-request-forgery protection + + Jupyter notebook 4.3.1 introduces protection from cross-site request forgeries, + requiring API requests to either: + + - originate from pages served by this server (validated with XSRF cookie and token), or + - authenticate with a token + + Some anonymous compute resources still desire the ability to run code, + completely without authentication. + These services can disable all authentication and security checks, + with the full knowledge of what that implies. + """, + ) + + allow_remote_access = Bool( + config=True, + help="""Allow requests where the Host header doesn't point to a local server + + By default, requests get a 403 forbidden response if the 'Host' header + shows that the browser thinks it's on a non-local domain. + Setting this option to True disables this check. + + This protects against 'DNS rebinding' attacks, where a remote web server + serves you a page and then changes its DNS to send later requests to a + local IP, bypassing same-origin checks. + + Local IP addresses (such as 127.0.0.1 and ::1) are allowed as local, + along with hostnames configured in local_hostnames. 
+ """, + ) + + @default("allow_remote_access") + def _default_allow_remote(self): + """Disallow remote access if we're listening only on loopback addresses""" + + # if blank, self.ip was configured to "*" meaning bind to all interfaces, + # see _valdate_ip + if self.ip == "": + return True + + try: + addr = ipaddress.ip_address(self.ip) + except ValueError: + # Address is a hostname + for info in socket.getaddrinfo(self.ip, self.port, 0, socket.SOCK_STREAM): + addr = info[4][0] + + try: + parsed = ipaddress.ip_address(addr.split("%")[0]) + except ValueError: + self.log.warning("Unrecognised IP address: %r", addr) + continue + + # Macs map localhost to 'fe80::1%lo0', a link local address + # scoped to the loopback interface. For now, we'll assume that + # any scoped link-local address is effectively local. + if not (parsed.is_loopback or (("%" in addr) and parsed.is_link_local)): + return True + return False + else: + return not addr.is_loopback + + use_redirect_file = Bool( + True, + config=True, + help="""Disable launching browser by redirect file + For versions of notebook > 5.7.2, a security feature measure was added that + prevented the authentication token used to launch the browser from being visible. + This feature makes it difficult for other users on a multi-user system from + running code in your Jupyter session as you. + However, some environments (like Windows Subsystem for Linux (WSL) and Chromebooks), + launching a browser using a redirect file can lead the browser failing to load. + This is because of the difference in file structures/paths between the runtime and + the browser. + + Disabling this setting to False will disable this behavior, allowing the browser + to launch by using a URL and visible token (as before). + """, + ) + + local_hostnames = List( + Unicode(), + ["localhost"], + config=True, + help="""Hostnames to allow as local when allow_remote_access is False. 
+ + Local IP addresses (such as 127.0.0.1 and ::1) are automatically accepted + as local as well. + """, + ) + + open_browser = Bool( + False, + config=True, + help="""Whether to open in a browser after starting. + The specific browser used is platform dependent and + determined by the python standard library `webbrowser` + module, unless it is overridden using the --browser + (ServerApp.browser) configuration option. + """, + ) + + browser = Unicode( + "", + config=True, + help="""Specify what command to use to invoke a web + browser when starting the server. If not specified, the + default browser will be determined by the `webbrowser` + standard library module, which allows setting of the + BROWSER environment variable to override it. + """, + ) + + webbrowser_open_new = Integer( + 2, + config=True, + help=_i18n( + """Specify where to open the server on startup. This is the + `new` argument passed to the standard library method `webbrowser.open`. + The behaviour is not guaranteed, but depends on browser support. Valid + values are: + + - 2 opens a new tab, + - 1 opens a new window, + - 0 opens in an existing window. + + See the `webbrowser.open` documentation for details. + """ + ), + ) + + tornado_settings = Dict( + config=True, + help=_i18n( + "Supply overrides for the tornado.web.Application that the " "Jupyter server uses." + ), + ) + + websocket_compression_options = Any( + None, + config=True, + help=_i18n( + """ + Set the tornado compression options for websocket connections. + + This value will be returned from :meth:`WebSocketHandler.get_compression_options`. + None (default) will disable compression. + A dict (even an empty one) will enable compression. + + See the tornado docs for WebSocketHandler.get_compression_options for details. + """ + ), + ) + terminado_settings = Dict( + config=True, + help=_i18n('Supply overrides for terminado. 
Currently only supports "shell_command".'), + ) + + cookie_options = Dict( + config=True, + help=_i18n( + "Extra keyword arguments to pass to `set_secure_cookie`." + " See tornado's set_secure_cookie docs for details." + ), + ) + get_secure_cookie_kwargs = Dict( + config=True, + help=_i18n( + "Extra keyword arguments to pass to `get_secure_cookie`." + " See tornado's get_secure_cookie docs for details." + ), + ) + ssl_options = Dict( + allow_none=True, + config=True, + help=_i18n( + """Supply SSL options for the tornado HTTPServer. + See the tornado docs for details.""" + ), + ) + + jinja_environment_options = Dict( + config=True, + help=_i18n("Supply extra arguments that will be passed to Jinja environment."), + ) + + jinja_template_vars = Dict( + config=True, + help=_i18n("Extra variables to supply to jinja templates when rendering."), + ) + + base_url = Unicode( + "/", + config=True, + help="""The base URL for the Jupyter server. + + Leading and trailing slashes can be omitted, + and will automatically be added. + """, + ) + + @validate("base_url") + def _update_base_url(self, proposal): + value = proposal["value"] + if not value.startswith("/"): + value = "/" + value + if not value.endswith("/"): + value = value + "/" + return value + + extra_static_paths = List( + Unicode(), + config=True, + help="""Extra paths to search for serving static files. 
+ + This allows adding javascript/css to be available from the Jupyter server machine, + or overriding individual files in the IPython""", + ) + + @property + def static_file_path(self): + """return extra paths + the default location""" + return self.extra_static_paths + [DEFAULT_STATIC_FILES_PATH] + + static_custom_path = List(Unicode(), help=_i18n("""Path to search for custom.js, css""")) + + @default("static_custom_path") + def _default_static_custom_path(self): + return [os.path.join(d, "custom") for d in (self.config_dir, DEFAULT_STATIC_FILES_PATH)] + + extra_template_paths = List( + Unicode(), + config=True, + help=_i18n( + """Extra paths to search for serving jinja templates. + + Can be used to override templates from jupyter_server.templates.""" + ), + ) + + @property + def template_file_path(self): + """return extra paths + the default locations""" + return self.extra_template_paths + DEFAULT_TEMPLATE_PATH_LIST + + extra_services = List( + Unicode(), + config=True, + help=_i18n( + """handlers that should be loaded at higher priority than the default services""" + ), + ) + + websocket_url = Unicode( + "", + config=True, + help="""The base URL for websockets, + if it differs from the HTTP server (hint: it almost certainly doesn't). + + Should be in the form of an HTTP origin: ws[s]://hostname[:port] + """, + ) + + quit_button = Bool( + True, + config=True, + help="""If True, display controls to shut down the Jupyter server, such as menu items or buttons.""", + ) + + # REMOVE in VERSION 2.0 + # Temporarily allow content managers to inherit from the 'notebook' + # package. We will deprecate this in the next major release. 
+ contents_manager_class = TypeFromClasses( + default_value=LargeFileManager, + klasses=[ + "jupyter_server.services.contents.manager.ContentsManager", + "notebook.services.contents.manager.ContentsManager", + ], + config=True, + help=_i18n("The content manager class to use."), + ) + + # Throws a deprecation warning to notebook based contents managers. + @observe("contents_manager_class") + def _observe_contents_manager_class(self, change): + new = change["new"] + # If 'new' is a class, get a string representing the import + # module path. + if inspect.isclass(new): + new = new.__module__ + + if new.startswith("notebook"): + self.log.warning( + "The specified 'contents_manager_class' class inherits a manager from the " + "'notebook' package. This is not guaranteed to work in future " + "releases of Jupyter Server. Instead, consider switching the " + "manager to inherit from the 'jupyter_server' managers. " + "Jupyter Server will temporarily allow 'notebook' managers " + "until its next major release (2.x)." + ) + + kernel_manager_class = Type( + default_value=AsyncMappingKernelManager, + klass=MappingKernelManager, + config=True, + help=_i18n("The kernel manager class to use."), + ) + + session_manager_class = Type( + default_value=SessionManager, + config=True, + help=_i18n("The session manager class to use."), + ) + + config_manager_class = Type( + default_value=ConfigManager, + config=True, + help=_i18n("The config manager class to use"), + ) + + kernel_spec_manager = Instance(KernelSpecManager, allow_none=True) + + kernel_spec_manager_class = Type( + default_value=KernelSpecManager, + config=True, + help=""" + The kernel spec manager class to use. Should be a subclass + of `jupyter_client.kernelspec.KernelSpecManager`. + + The Api of KernelSpecManager is provisional and might change + without warning between this version of Jupyter and the next stable one. 
+ """, + ) + + login_handler_class = Type( + default_value=LoginHandler, + klass=web.RequestHandler, + config=True, + help=_i18n("The login handler class to use."), + ) + + logout_handler_class = Type( + default_value=LogoutHandler, + klass=web.RequestHandler, + config=True, + help=_i18n("The logout handler class to use."), + ) + + authorizer_class = Type( + default_value=AllowAllAuthorizer, + klass=Authorizer, + config=True, + help=_i18n("The authorizer class to use."), + ) + + trust_xheaders = Bool( + False, + config=True, + help=( + _i18n( + "Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-For headers" + "sent by the upstream reverse proxy. Necessary if the proxy handles SSL" + ) + ), + ) + + info_file = Unicode() + + @default("info_file") + def _default_info_file(self): + info_file = "jpserver-%s.json" % os.getpid() + return os.path.join(self.runtime_dir, info_file) + + browser_open_file = Unicode() + + @default("browser_open_file") + def _default_browser_open_file(self): + basename = "jpserver-%s-open.html" % os.getpid() + return os.path.join(self.runtime_dir, basename) + + browser_open_file_to_run = Unicode() + + @default("browser_open_file_to_run") + def _default_browser_open_file_to_run(self): + basename = "jpserver-file-to-run-%s-open.html" % os.getpid() + return os.path.join(self.runtime_dir, basename) + + pylab = Unicode( + "disabled", + config=True, + help=_i18n( + """ + DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib. 
+ """ + ), + ) + + @observe("pylab") + def _update_pylab(self, change): + """when --pylab is specified, display a warning and exit""" + if change["new"] != "warn": + backend = " %s" % change["new"] + else: + backend = "" + self.log.error( + _i18n("Support for specifying --pylab on the command line has been removed.") + ) + self.log.error( + _i18n("Please use `%pylab{0}` or `%matplotlib{0}` in the notebook itself.").format( + backend + ) + ) + self.exit(1) + + notebook_dir = Unicode(config=True, help=_i18n("DEPRECATED, use root_dir.")) + + @observe("notebook_dir") + def _update_notebook_dir(self, change): + if self._root_dir_set: + # only use deprecated config if new config is not set + return + self.log.warning(_i18n("notebook_dir is deprecated, use root_dir")) + self.root_dir = change["new"] + + root_dir = Unicode(config=True, help=_i18n("The directory to use for notebooks and kernels.")) + _root_dir_set = False + + @default("root_dir") + def _default_root_dir(self): + if self.file_to_run: + self._root_dir_set = True + return os.path.dirname(os.path.abspath(self.file_to_run)) + else: + return os.getcwd() + + def _normalize_dir(self, value): + # Strip any trailing slashes + # *except* if it's root + _, path = os.path.splitdrive(value) + if path == os.sep: + return value + value = value.rstrip(os.sep) + if not os.path.isabs(value): + # If we receive a non-absolute path, make it absolute. 
+ value = os.path.abspath(value) + return value + + @validate("root_dir") + def _root_dir_validate(self, proposal): + value = self._normalize_dir(proposal["value"]) + if not os.path.isdir(value): + raise TraitError(trans.gettext("No such directory: '%r'") % value) + return value + + preferred_dir = Unicode( + config=True, + help=trans.gettext("Preferred starting directory to use for notebooks and kernels."), + ) + + @default("preferred_dir") + def _default_prefered_dir(self): + return self.root_dir + + @validate("preferred_dir") + def _preferred_dir_validate(self, proposal): + value = self._normalize_dir(proposal["value"]) + if not os.path.isdir(value): + raise TraitError(trans.gettext("No such preferred dir: '%r'") % value) + + # preferred_dir must be equal or a subdir of root_dir + if not value.startswith(self.root_dir): + raise TraitError( + trans.gettext("preferred_dir must be equal or a subdir of root_dir: '%r'") % value + ) + + return value + + @observe("root_dir") + def _root_dir_changed(self, change): + self._root_dir_set = True + if not self.preferred_dir.startswith(change["new"]): + self.log.warning( + trans.gettext("Value of preferred_dir updated to use value of root_dir") + ) + self.preferred_dir = change["new"] + + @observe("server_extensions") + def _update_server_extensions(self, change): + self.log.warning(_i18n("server_extensions is deprecated, use jpserver_extensions")) + self.server_extensions = change["new"] + + jpserver_extensions = Dict( + default_value={}, + value_trait=Bool(), + config=True, + help=( + _i18n( + "Dict of Python modules to load as Jupyter server extensions." + "Entry values can be used to enable and disable the loading of" + "the extensions. The extensions will be loaded in alphabetical " + "order." 
+ ) + ), + ) + + reraise_server_extension_failures = Bool( + False, + config=True, + help=_i18n("Reraise exceptions encountered loading server extensions?"), + ) + + kernel_ws_protocol = Unicode( + None, + allow_none=True, + config=True, + help=_i18n( + "Preferred kernel message protocol over websocket to use (default: None). " + "If an empty string is passed, select the legacy protocol. If None, " + "the selected protocol will depend on what the front-end supports " + "(usually the most recent protocol supported by the back-end and the " + "front-end)." + ), + ) + + limit_rate = Bool( + True, + config=True, + help=_i18n( + "Whether to limit the rate of IOPub messages (default: True). " + "If True, use iopub_msg_rate_limit, iopub_data_rate_limit and/or rate_limit_window " + "to tune the rate." + ), + ) + + iopub_msg_rate_limit = Float( + 1000, + config=True, + help=_i18n( + """(msgs/sec) + Maximum rate at which messages can be sent on iopub before they are + limited.""" + ), + ) + + iopub_data_rate_limit = Float( + 1000000, + config=True, + help=_i18n( + """(bytes/sec) + Maximum rate at which stream output can be sent on iopub before they are + limited.""" + ), + ) + + rate_limit_window = Float( + 3, + config=True, + help=_i18n( + """(sec) Time window used to + check the message and data rate limits.""" + ), + ) + + shutdown_no_activity_timeout = Integer( + 0, + config=True, + help=( + "Shut down the server after N seconds with no kernels or " + "terminals running and no activity. " + "This can be used together with culling idle kernels " + "(MappingKernelManager.cull_idle_timeout) to " + "shutdown the Jupyter server when it's not in use. This is not " + "precisely timed: it may shut down up to a minute later. " + "0 (the default) disables this automatic shutdown." + ), + ) + + terminals_enabled = Bool( + True, + config=True, + help=_i18n( + """Set to False to disable terminals. + + This does *not* make the server more secure by itself. 
+ Anything the user can in a terminal, they can also do in a notebook. + + Terminals may also be automatically disabled if the terminado package + is not available. + """ + ), + ) + + # Since use of terminals is also a function of whether the terminado package is + # available, this variable holds the "final indication" of whether terminal functionality + # should be considered (particularly during shutdown/cleanup). It is enabled only + # once both the terminals "service" can be initialized and terminals_enabled is True. + # Note: this variable is slightly different from 'terminals_available' in the web settings + # in that this variable *could* remain false if terminado is available, yet the terminal + # service's initialization still fails. As a result, this variable holds the truth. + terminals_available = False + + authenticate_prometheus = Bool( + True, + help="""" + Require authentication to access prometheus metrics. + """, + config=True, + ) + + _starter_app = Instance( + default_value=None, + allow_none=True, + klass="jupyter_server.extension.application.ExtensionApp", + ) + + @property + def starter_app(self): + """Get the Extension that started this server.""" + return self._starter_app + + def parse_command_line(self, argv=None): + + super(ServerApp, self).parse_command_line(argv) + + if self.extra_args: + arg0 = self.extra_args[0] + f = os.path.abspath(arg0) + self.argv.remove(arg0) + if not os.path.exists(f): + self.log.critical(_i18n("No such file or directory: %s"), f) + self.exit(1) + + # Use config here, to ensure that it takes higher priority than + # anything that comes from the config dirs. + c = Config() + if os.path.isdir(f): + c.ServerApp.root_dir = f + elif os.path.isfile(f): + c.ServerApp.file_to_run = f + self.update_config(c) + + def init_configurables(self): + + # If gateway server is configured, replace appropriate managers to perform redirection. To make + # this determination, instantiate the GatewayClient config singleton. 
+        self.gateway_config = GatewayClient.instance(parent=self)
+
+        if self.gateway_config.gateway_enabled:
+            self.kernel_manager_class = (
+                "jupyter_server.gateway.managers.GatewayMappingKernelManager"
+            )
+            self.session_manager_class = "jupyter_server.gateway.managers.GatewaySessionManager"
+            self.kernel_spec_manager_class = (
+                "jupyter_server.gateway.managers.GatewayKernelSpecManager"
+            )
+
+        self.kernel_spec_manager = self.kernel_spec_manager_class(
+            parent=self,
+        )
+        self.kernel_manager = self.kernel_manager_class(
+            parent=self,
+            log=self.log,
+            connection_dir=self.runtime_dir,
+            kernel_spec_manager=self.kernel_spec_manager,
+        )
+        self.contents_manager = self.contents_manager_class(
+            parent=self,
+            log=self.log,
+        )
+        self.session_manager = self.session_manager_class(
+            parent=self,
+            log=self.log,
+            kernel_manager=self.kernel_manager,
+            contents_manager=self.contents_manager,
+        )
+        self.config_manager = self.config_manager_class(
+            parent=self,
+            log=self.log,
+        )
+        self.authorizer = self.authorizer_class(parent=self, log=self.log)
+
+    def init_logging(self):
+        # This prevents double log messages because tornado uses a root logger that
+        # self.log is a child of. The logging module dispatches log messages to a log
+        # and all of its ancestors until propagate is set to False.
+        self.log.propagate = False
+
+        for log in app_log, access_log, gen_log:
+            # consistent log output name (ServerApp instead of tornado.access, etc.)
+ log.name = self.log.name + # hook up tornado 3's loggers to our app handlers + logger = logging.getLogger("tornado") + logger.propagate = True + logger.parent = self.log + logger.setLevel(self.log.level) + + def init_webapp(self): + """initialize tornado webapp""" + self.tornado_settings["allow_origin"] = self.allow_origin + self.tornado_settings["websocket_compression_options"] = self.websocket_compression_options + if self.allow_origin_pat: + self.tornado_settings["allow_origin_pat"] = re.compile(self.allow_origin_pat) + self.tornado_settings["allow_credentials"] = self.allow_credentials + self.tornado_settings["autoreload"] = self.autoreload + self.tornado_settings["cookie_options"] = self.cookie_options + self.tornado_settings["get_secure_cookie_kwargs"] = self.get_secure_cookie_kwargs + self.tornado_settings["token"] = self.token + + # ensure default_url starts with base_url + if not self.default_url.startswith(self.base_url): + self.default_url = url_path_join(self.base_url, self.default_url) + + if self.password_required and (not self.password): + self.log.critical( + _i18n("Jupyter servers are configured to only be run with a password.") + ) + self.log.critical(_i18n("Hint: run the following command to set a password")) + self.log.critical(_i18n("\t$ python -m jupyter_server.auth password")) + sys.exit(1) + + # Socket options validation. + if self.sock: + if self.port != DEFAULT_JUPYTER_SERVER_PORT: + self.log.critical( + ("Options --port and --sock are mutually exclusive. Aborting."), + ) + sys.exit(1) + else: + # Reset the default port if we're using a UNIX socket. + self.port = 0 + + if self.open_browser: + # If we're bound to a UNIX socket, we can't reliably connect from a browser. 
+ self.log.info( + ("Ignoring --ServerApp.open_browser due to --sock being used."), + ) + + if self.file_to_run: + self.log.critical( + ("Options --ServerApp.file_to_run and --sock are mutually exclusive."), + ) + sys.exit(1) + + if sys.platform.startswith("win"): + self.log.critical( + ( + "Option --sock is not supported on Windows, but got value of %s. Aborting." + % self.sock + ), + ) + sys.exit(1) + + self.web_app = ServerWebApplication( + self, + self.default_services, + self.kernel_manager, + self.contents_manager, + self.session_manager, + self.kernel_spec_manager, + self.config_manager, + self.extra_services, + self.log, + self.base_url, + self.default_url, + self.tornado_settings, + self.jinja_environment_options, + authorizer=self.authorizer, + ) + if self.certfile: + self.ssl_options["certfile"] = self.certfile + if self.keyfile: + self.ssl_options["keyfile"] = self.keyfile + if self.client_ca: + self.ssl_options["ca_certs"] = self.client_ca + if not self.ssl_options: + # could be an empty dict or None + # None indicates no SSL config + self.ssl_options = None + else: + # SSL may be missing, so only import it if it's to be used + import ssl + + # PROTOCOL_TLS selects the highest ssl/tls protocol version that both the client and + # server support. When PROTOCOL_TLS is not available use PROTOCOL_SSLv23. 
+ self.ssl_options.setdefault( + "ssl_version", getattr(ssl, "PROTOCOL_TLS", ssl.PROTOCOL_SSLv23) + ) + if self.ssl_options.get("ca_certs", False): + self.ssl_options.setdefault("cert_reqs", ssl.CERT_REQUIRED) + ssl_options = self.ssl_options + + self.login_handler_class.validate_security(self, ssl_options=self.ssl_options) + + def init_resources(self): + """initialize system resources""" + if resource is None: + self.log.debug( + "Ignoring min_open_files_limit because the limit cannot be adjusted (for example, on Windows)" + ) + return + + old_soft, old_hard = resource.getrlimit(resource.RLIMIT_NOFILE) + soft = self.min_open_files_limit + hard = old_hard + if old_soft < soft: + if hard < soft: + hard = soft + self.log.debug( + "Raising open file limit: soft {}->{}; hard {}->{}".format( + old_soft, soft, old_hard, hard + ) + ) + resource.setrlimit(resource.RLIMIT_NOFILE, (soft, hard)) + + def _get_urlparts(self, path=None, include_token=False): + """Constructs a urllib named tuple, ParseResult, + with default values set by server config. + The returned tuple can be manipulated using the `_replace` method. + """ + if self.sock: + scheme = "http+unix" + netloc = urlencode_unix_socket_path(self.sock) + else: + # Handle nonexplicit hostname. + if self.ip in ("", "0.0.0.0", "::"): + ip = "%s" % socket.gethostname() + else: + ip = "[{}]".format(self.ip) if ":" in self.ip else self.ip + netloc = "{ip}:{port}".format(ip=ip, port=self.port) + if self.certfile: + scheme = "https" + else: + scheme = "http" + if not path: + path = self.default_url + query = None + if include_token: + if self.token: # Don't log full token if it came from config + token = self.token if self._token_generated else "..." + query = urllib.parse.urlencode({"token": token}) + # Build the URL Parts to dump. 
+ urlparts = urllib.parse.ParseResult( + scheme=scheme, + netloc=netloc, + path=path, + params=None, + query=query, + fragment=None, + ) + return urlparts + + @property + def public_url(self): + parts = self._get_urlparts(include_token=True) + # Update with custom pieces. + if self.custom_display_url: + # Parse custom display_url + custom = urllib.parse.urlparse(self.custom_display_url)._asdict() + # Get pieces that are matter (non None) + custom_updates = {key: item for key, item in custom.items() if item} + # Update public URL parts with custom pieces. + parts = parts._replace(**custom_updates) + return parts.geturl() + + @property + def local_url(self): + parts = self._get_urlparts(include_token=True) + # Update with custom pieces. + if not self.sock: + parts = parts._replace(netloc="127.0.0.1:{port}".format(port=self.port)) + return parts.geturl() + + @property + def display_url(self): + """Human readable string with URLs for interacting + with the running Jupyter Server + """ + url = self.public_url + "\n or " + self.local_url + return url + + @property + def connection_url(self): + urlparts = self._get_urlparts(path=self.base_url) + return urlparts.geturl() + + def init_terminals(self): + if not self.terminals_enabled: + return + + try: + from jupyter_server.terminal import initialize + + initialize( + self.web_app, + self.root_dir, + self.connection_url, + self.terminado_settings, + ) + self.terminals_available = True + except ImportError as e: + self.log.warning(_i18n("Terminals not available (error was %s)"), e) + + def init_signal(self): + if not sys.platform.startswith("win") and sys.stdin and sys.stdin.isatty(): + signal.signal(signal.SIGINT, self._handle_sigint) + signal.signal(signal.SIGTERM, self._signal_stop) + if hasattr(signal, "SIGUSR1"): + # Windows doesn't support SIGUSR1 + signal.signal(signal.SIGUSR1, self._signal_info) + if hasattr(signal, "SIGINFO"): + # only on BSD-based systems + signal.signal(signal.SIGINFO, self._signal_info) + + def 
_handle_sigint(self, sig, frame): + """SIGINT handler spawns confirmation dialog""" + # register more forceful signal handler for ^C^C case + signal.signal(signal.SIGINT, self._signal_stop) + # request confirmation dialog in bg thread, to avoid + # blocking the App + thread = threading.Thread(target=self._confirm_exit) + thread.daemon = True + thread.start() + + def _restore_sigint_handler(self): + """callback for restoring original SIGINT handler""" + signal.signal(signal.SIGINT, self._handle_sigint) + + def _confirm_exit(self): + """confirm shutdown on ^C + + A second ^C, or answering 'y' within 5s will cause shutdown, + otherwise original SIGINT handler will be restored. + + This doesn't work on Windows. + """ + info = self.log.info + info(_i18n("interrupted")) + # Check if answer_yes is set + if self.answer_yes: + self.log.critical(_i18n("Shutting down...")) + # schedule stop on the main thread, + # since this might be called from a signal handler + self.stop(from_signal=True) + return + print(self.running_server_info()) + yes = _i18n("y") + no = _i18n("n") + sys.stdout.write(_i18n("Shutdown this Jupyter server (%s/[%s])? 
") % (yes, no)) + sys.stdout.flush() + r, w, x = select.select([sys.stdin], [], [], 5) + if r: + line = sys.stdin.readline() + if line.lower().startswith(yes) and no not in line.lower(): + self.log.critical(_i18n("Shutdown confirmed")) + # schedule stop on the main thread, + # since this might be called from a signal handler + self.stop(from_signal=True) + return + else: + print(_i18n("No answer for 5s:"), end=" ") + print(_i18n("resuming operation...")) + # no answer, or answer is no: + # set it back to original SIGINT handler + # use IOLoop.add_callback because signal.signal must be called + # from main thread + self.io_loop.add_callback_from_signal(self._restore_sigint_handler) + + def _signal_stop(self, sig, frame): + self.log.critical(_i18n("received signal %s, stopping"), sig) + self.stop(from_signal=True) + + def _signal_info(self, sig, frame): + print(self.running_server_info()) + + def init_components(self): + """Check the components submodule, and warn if it's unclean""" + # TODO: this should still check, but now we use bower, not git submodule + pass + + def find_server_extensions(self): + """ + Searches Jupyter paths for jpserver_extensions. + """ + + # Walk through all config files looking for jpserver_extensions. + # + # Each extension will likely have a JSON config file enabling itself in + # the "jupyter_server_config.d" directory. Find each of these and + # merge there results in order of precedence. + # + # Load server extensions with ConfigManager. + # This enables merging on keys, which we want for extension enabling. + # Regular config loading only merges at the class level, + # so each level clobbers the previous. 
+ config_paths = jupyter_config_path() + if self.config_dir not in config_paths: + # add self.config_dir to the front, if set manually + config_paths.insert(0, self.config_dir) + manager = ExtensionConfigManager(read_config_path=config_paths) + extensions = manager.get_jpserver_extensions() + + for modulename, enabled in sorted(extensions.items()): + if modulename not in self.jpserver_extensions: + self.config.ServerApp.jpserver_extensions.update({modulename: enabled}) + self.jpserver_extensions.update({modulename: enabled}) + + def init_server_extensions(self): + """ + If an extension's metadata includes an 'app' key, + the value must be a subclass of ExtensionApp. An instance + of the class will be created at this step. The config for + this instance will inherit the ServerApp's config object + and load its own config. + """ + # Create an instance of the ExtensionManager. + self.extension_manager = ExtensionManager(log=self.log, serverapp=self) + self.extension_manager.from_jpserver_extensions(self.jpserver_extensions) + self.extension_manager.link_all_extensions() + + def load_server_extensions(self): + """Load any extensions specified by config. + + Import the module, then call the load_jupyter_server_extension function, + if one exists. + + The extension API is experimental, and may change in future releases. + """ + self.extension_manager.load_all_extensions() + + def init_mime_overrides(self): + # On some Windows machines, an application has registered incorrect + # mimetypes in the registry. + # Tornado uses this when serving .css and .js files, causing browsers to + # reject these files. 
We know the mimetype always needs to be text/css for css + # and application/javascript for JS, so we override it here + # and explicitly tell the mimetypes to not trust the Windows registry + if os.name == "nt": + # do not trust windows registry, which regularly has bad info + mimetypes.init(files=[]) + # ensure css, js are correct, which are required for pages to function + mimetypes.add_type("text/css", ".css") + mimetypes.add_type("application/javascript", ".js") + # for python <3.8 + mimetypes.add_type("application/wasm", ".wasm") + + def shutdown_no_activity(self): + """Shutdown server on timeout when there are no kernels or terminals.""" + km = self.kernel_manager + if len(km) != 0: + return # Kernels still running + + if self.terminals_available: + term_mgr = self.web_app.settings["terminal_manager"] + if term_mgr.terminals: + return # Terminals still running + + seconds_since_active = (utcnow() - self.web_app.last_activity()).total_seconds() + self.log.debug("No activity for %d seconds.", seconds_since_active) + if seconds_since_active > self.shutdown_no_activity_timeout: + self.log.info( + "No kernels or terminals for %d seconds; shutting down.", + seconds_since_active, + ) + self.stop() + + def init_shutdown_no_activity(self): + if self.shutdown_no_activity_timeout > 0: + self.log.info( + "Will shut down after %d seconds with no kernels or terminals.", + self.shutdown_no_activity_timeout, + ) + pc = ioloop.PeriodicCallback(self.shutdown_no_activity, 60000) + pc.start() + + @property + def http_server(self): + """An instance of Tornado's HTTPServer class for the Server Web Application.""" + try: + return self._http_server + except AttributeError as e: + raise AttributeError( + "An HTTPServer instance has not been created for the " + "Server Web Application. To create an HTTPServer for this " + "application, call `.init_httpserver()`." 
+ ) from e + + def init_httpserver(self): + """Creates an instance of a Tornado HTTPServer for the Server Web Application + and sets the http_server attribute. + """ + # Check that a web_app has been initialized before starting a server. + if not hasattr(self, "web_app"): + raise AttributeError( + "A tornado web application has not be initialized. " + "Try calling `.init_webapp()` first." + ) + + # Create an instance of the server. + self._http_server = httpserver.HTTPServer( + self.web_app, + ssl_options=self.ssl_options, + xheaders=self.trust_xheaders, + max_body_size=self.max_body_size, + max_buffer_size=self.max_buffer_size, + ) + + success = self._bind_http_server() + if not success: + self.log.critical( + _i18n( + "ERROR: the Jupyter server could not be started because " + "no available port could be found." + ) + ) + self.exit(1) + + def _bind_http_server(self): + return self._bind_http_server_unix() if self.sock else self._bind_http_server_tcp() + + def _bind_http_server_unix(self): + if unix_socket_in_use(self.sock): + self.log.warning(_i18n("The socket %s is already in use.") % self.sock) + return False + + try: + sock = bind_unix_socket(self.sock, mode=int(self.sock_mode.encode(), 8)) + self.http_server.add_socket(sock) + except socket.error as e: + if e.errno == errno.EADDRINUSE: + self.log.warning(_i18n("The socket %s is already in use.") % self.sock) + return False + elif e.errno in (errno.EACCES, getattr(errno, "WSAEACCES", errno.EACCES)): + self.log.warning(_i18n("Permission to listen on sock %s denied") % self.sock) + return False + else: + raise + else: + return True + + def _bind_http_server_tcp(self): + success = None + for port in random_ports(self.port, self.port_retries + 1): + try: + self.http_server.listen(port, self.ip) + except socket.error as e: + if e.errno == errno.EADDRINUSE: + if self.port_retries: + self.log.info( + _i18n("The port %i is already in use, trying another port.") % port + ) + else: + self.log.info(_i18n("The port %i is 
already in use.") % port) + continue + elif e.errno in ( + errno.EACCES, + getattr(errno, "WSAEACCES", errno.EACCES), + ): + self.log.warning(_i18n("Permission to listen on port %i denied.") % port) + continue + else: + raise + else: + self.port = port + success = True + break + if not success: + if self.port_retries: + self.log.critical( + _i18n( + "ERROR: the Jupyter server could not be started because " + "no available port could be found." + ) + ) + else: + self.log.critical( + _i18n( + "ERROR: the Jupyter server could not be started because " + "port %i is not available." + ) + % port + ) + self.exit(1) + return success + + @staticmethod + def _init_asyncio_patch(): + """set default asyncio policy to be compatible with tornado + + Tornado 6.0 is not compatible with default asyncio + ProactorEventLoop, which lacks basic *_reader methods. + Tornado 6.1 adds a workaround to add these methods in a thread, + but SelectorEventLoop should still be preferred + to avoid the extra thread for ~all of our events, + at least until asyncio adds *_reader methods + to proactor. + """ + if sys.platform.startswith("win") and sys.version_info >= (3, 8): + import asyncio + + try: + from asyncio import ( + WindowsProactorEventLoopPolicy, + WindowsSelectorEventLoopPolicy, + ) + except ImportError: + pass + # not affected + else: + if type(asyncio.get_event_loop_policy()) is WindowsProactorEventLoopPolicy: + # prefer Selector to Proactor for tornado + pyzmq + asyncio.set_event_loop_policy(WindowsSelectorEventLoopPolicy()) + + @catch_config_error + def initialize( + self, + argv=None, + find_extensions=True, + new_httpserver=True, + starter_extension=None, + ): + """Initialize the Server application class, configurables, web application, and http server. + + Parameters + ---------- + argv : list or None + CLI arguments to parse. + find_extensions : bool + If True, find and load extensions listed in Jupyter config paths. 
If False, + only load extensions that are passed to ServerApp directy through + the `argv`, `config`, or `jpserver_extensions` arguments. + new_httpserver : bool + If True, a tornado HTTPServer instance will be created and configured for the Server Web + Application. This will set the http_server attribute of this class. + starter_extension : str + If given, it references the name of an extension point that started the Server. + We will try to load configuration from extension point + """ + self._init_asyncio_patch() + # Parse command line, load ServerApp config files, + # and update ServerApp config. + super(ServerApp, self).initialize(argv=argv) + if self._dispatching: + return + # Then, use extensions' config loading mechanism to + # update config. ServerApp config takes precedence. + if find_extensions: + self.find_server_extensions() + self.init_logging() + self.init_server_extensions() + + # Special case the starter extension and load + # any server configuration is provides. + if starter_extension: + # Configure ServerApp based on named extension. + point = self.extension_manager.extension_points[starter_extension] + # Set starter_app property. + if point.app: + self._starter_app = point.app + # Load any configuration that comes from the Extension point. + self.update_config(Config(point.config)) + + # Initialize other pieces of the server. + self.init_resources() + self.init_configurables() + self.init_components() + self.init_webapp() + self.init_terminals() + self.init_signal() + self.init_ioloop() + self.load_server_extensions() + self.init_mime_overrides() + self.init_shutdown_no_activity() + if new_httpserver: + self.init_httpserver() + + async def cleanup_kernels(self): + """Shutdown all kernels. + + The kernels will shutdown themselves when this process no longer exists, + but explicit shutdown allows the KernelManagers to cleanup the connection files. 
+ """ + n_kernels = len(self.kernel_manager.list_kernel_ids()) + kernel_msg = trans.ngettext( + "Shutting down %d kernel", "Shutting down %d kernels", n_kernels + ) + self.log.info(kernel_msg % n_kernels) + await run_sync_in_loop(self.kernel_manager.shutdown_all()) + + async def cleanup_terminals(self): + """Shutdown all terminals. + + The terminals will shutdown themselves when this process no longer exists, + but explicit shutdown allows the TerminalManager to cleanup. + """ + if not self.terminals_available: + return + + terminal_manager = self.web_app.settings["terminal_manager"] + n_terminals = len(terminal_manager.list()) + terminal_msg = trans.ngettext( + "Shutting down %d terminal", "Shutting down %d terminals", n_terminals + ) + self.log.info(terminal_msg % n_terminals) + await run_sync_in_loop(terminal_manager.terminate_all()) + + async def cleanup_extensions(self): + """Call shutdown hooks in all extensions.""" + n_extensions = len(self.extension_manager.extension_apps) + extension_msg = trans.ngettext( + "Shutting down %d extension", "Shutting down %d extensions", n_extensions + ) + self.log.info(extension_msg % n_extensions) + await run_sync_in_loop(self.extension_manager.stop_all_extensions()) + + def running_server_info(self, kernel_count=True): + "Return the current working directory and the server url information" + info = self.contents_manager.info_string() + "\n" + if kernel_count: + n_kernels = len(self.kernel_manager.list_kernel_ids()) + kernel_msg = trans.ngettext("%d active kernel", "%d active kernels", n_kernels) + info += kernel_msg % n_kernels + info += "\n" + # Format the info so that the URL fits on a single line in 80 char display + info += _i18n( + "Jupyter Server {version} is running at:\n{url}".format( + version=ServerApp.version, url=self.display_url + ) + ) + if self.gateway_config.gateway_enabled: + info += ( + _i18n("\nKernels will be managed by the Gateway server running at:\n%s") + % self.gateway_config.url + ) + return info + 
+ def server_info(self): + """Return a JSONable dict of information about this server.""" + return { + "url": self.connection_url, + "hostname": self.ip if self.ip else "localhost", + "port": self.port, + "sock": self.sock, + "secure": bool(self.certfile), + "base_url": self.base_url, + "token": self.token, + "root_dir": os.path.abspath(self.root_dir), + "password": bool(self.password), + "pid": os.getpid(), + "version": ServerApp.version, + } + + def write_server_info_file(self): + """Write the result of server_info() to the JSON file info_file.""" + try: + with secure_write(self.info_file) as f: + json.dump(self.server_info(), f, indent=2, sort_keys=True) + except OSError as e: + self.log.error(_i18n("Failed to write server-info to %s: %s"), self.info_file, e) + + def remove_server_info_file(self): + """Remove the jpserver-.json file created for this server. + + Ignores the error raised when the file has already been removed. + """ + try: + os.unlink(self.info_file) + except OSError as e: + if e.errno != errno.ENOENT: + raise + + def _resolve_file_to_run_and_root_dir(self): + """Returns a relative path from file_to_run + to root_dir. If root_dir and file_to_run + are incompatible, i.e. on different subtrees, + crash the app and log a critical message. Note + that if root_dir is not configured and file_to_run + is configured, root_dir will be set to the parent + directory of file_to_run. + """ + rootdir_abspath = pathlib.Path(self.root_dir).resolve() + file_rawpath = pathlib.Path(self.file_to_run) + combined_path = (rootdir_abspath / file_rawpath).resolve() + is_child = str(combined_path).startswith(str(rootdir_abspath)) + + if is_child: + if combined_path.parent != rootdir_abspath: + self.log.debug( + "The `root_dir` trait is set to a directory that's not " + "the immediate parent directory of `file_to_run`. Note that " + "the server will start at `root_dir` and open the " + "the file from the relative path to the `root_dir`." 
+ ) + return str(combined_path.relative_to(rootdir_abspath)) + + self.log.critical( + "`root_dir` and `file_to_run` are incompatible. They " + "don't share the same subtrees. Make sure `file_to_run` " + "is on the same path as `root_dir`." + ) + self.exit(1) + + def _write_browser_open_file(self, url, fh): + if self.token: + url = url_concat(url, {"token": self.token}) + url = url_path_join(self.connection_url, url) + + jinja2_env = self.web_app.settings["jinja2_env"] + template = jinja2_env.get_template("browser-open.html") + fh.write(template.render(open_url=url, base_url=self.base_url)) + + def write_browser_open_files(self): + """Write an `browser_open_file` and `browser_open_file_to_run` files + + This can be used to open a file directly in a browser. + """ + # default_url contains base_url, but so does connection_url + self.write_browser_open_file() + + # Create a second browser open file if + # file_to_run is set. + if self.file_to_run: + # Make sure file_to_run and root_dir are compatible. + file_to_run_relpath = self._resolve_file_to_run_and_root_dir() + + file_open_url = url_escape( + url_path_join(self.file_url_prefix, *file_to_run_relpath.split(os.sep)) + ) + + with open(self.browser_open_file_to_run, "w", encoding="utf-8") as f: + self._write_browser_open_file(file_open_url, f) + + def write_browser_open_file(self): + """Write an jpserver--open.html file + + This can be used to open the notebook in a browser + """ + # default_url contains base_url, but so does connection_url + open_url = self.default_url[len(self.base_url) :] + + with open(self.browser_open_file, "w", encoding="utf-8") as f: + self._write_browser_open_file(open_url, f) + + def remove_browser_open_files(self): + """Remove the `browser_open_file` and `browser_open_file_to_run` files + created for this server. + + Ignores the error raised when the file has already been removed. 
+ """ + self.remove_browser_open_file() + try: + os.unlink(self.browser_open_file_to_run) + except OSError as e: + if e.errno != errno.ENOENT: + raise + + def remove_browser_open_file(self): + """Remove the jpserver--open.html file created for this server. + + Ignores the error raised when the file has already been removed. + """ + try: + os.unlink(self.browser_open_file) + except OSError as e: + if e.errno != errno.ENOENT: + raise + + def _prepare_browser_open(self): + if not self.use_redirect_file: + uri = self.default_url[len(self.base_url) :] + + if self.token: + uri = url_concat(uri, {"token": self.token}) + + if self.file_to_run: + # Create a separate, temporary open-browser-file + # pointing at a specific file. + open_file = self.browser_open_file_to_run + else: + # otherwise, just return the usual open browser file. + open_file = self.browser_open_file + + if self.use_redirect_file: + assembled_url = urljoin("file:", pathname2url(open_file)) + else: + assembled_url = url_path_join(self.connection_url, uri) + + return assembled_url, open_file + + def launch_browser(self): + try: + browser = webbrowser.get(self.browser or None) + except webbrowser.Error as e: + self.log.warning(_i18n("No web browser found: %s.") % e) + browser = None + + if not browser: + return + + assembled_url, _ = self._prepare_browser_open() + + b = lambda: browser.open(assembled_url, new=self.webbrowser_open_new) + threading.Thread(target=b).start() + + def start_app(self): + super(ServerApp, self).start() + + if not self.allow_root: + # check if we are running as root, and abort if it's not allowed + try: + uid = os.geteuid() + except AttributeError: + uid = -1 # anything nonzero here, since we can't check UID assume non-root + if uid == 0: + self.log.critical( + _i18n("Running as root is not recommended. 
Use --allow-root to bypass.") + ) + self.exit(1) + + info = self.log.info + for line in self.running_server_info(kernel_count=False).split("\n"): + info(line) + info( + _i18n( + "Use Control-C to stop this server and shut down all kernels (twice to skip confirmation)." + ) + ) + if "dev" in __version__: + info( + _i18n( + "Welcome to Project Kennen! Explore the various tools available" + " and their corresponding documentation. If you are interested" + " in contributing to the platform, please visit the community" + " resources section at https://jupyter.org/community.html." + ) + ) + + self.write_server_info_file() + self.write_browser_open_files() + + # Handle the browser opening. + if self.open_browser and not self.sock: + self.launch_browser() + + if self.token and self._token_generated: + # log full URL with generated token, so there's a copy/pasteable link + # with auth info. + if self.sock: + self.log.critical( + "\n".join( + [ + "\n", + "Kennen Server is listening on %s" % self.display_url, + "", + ( + "UNIX sockets are not browser-connectable, but you can tunnel to " + "the instance via e.g.`ssh -L 8888:%s -N user@this_host` and then " + "open e.g. %s in a browser." + ) + % (self.sock, self.connection_url), + ] + ) + ) + else: + self.log.critical( + "\n".join( + [ + "\n", + "To access the server, open this file in a browser:", + " %s" % urljoin("file:", pathname2url(self.browser_open_file)), + "Or copy and paste one of these URLs:", + " %s" % self.display_url, + ] + ) + ) + + async def _cleanup(self): + """General cleanup of files, extensions and kernels created + by this instance ServerApp. 
+ """ + self.remove_server_info_file() + self.remove_browser_open_files() + await self.cleanup_extensions() + await self.cleanup_kernels() + await self.cleanup_terminals() + + def start_ioloop(self): + """Start the IO Loop.""" + if sys.platform.startswith("win"): + # add no-op to wake every 5s + # to handle signals that may be ignored by the inner loop + pc = ioloop.PeriodicCallback(lambda: None, 5000) + pc.start() + try: + self.io_loop.start() + except KeyboardInterrupt: + self.log.info(_i18n("Interrupted...")) + + def init_ioloop(self): + """init self.io_loop so that an extension can use it by io_loop.call_later() to create background tasks""" + self.io_loop = ioloop.IOLoop.current() + + def start(self): + """Start the Jupyter server app, after initialization + + This method takes no arguments so all configuration and initialization + must be done prior to calling this method.""" + self.start_app() + self.start_ioloop() + + async def _stop(self): + """Cleanup resources and stop the IO Loop.""" + await self._cleanup() + self.io_loop.stop() + + def stop(self, from_signal=False): + """Cleanup resources and stop the server.""" + if hasattr(self, "_http_server"): + # Stop a server if its set. + self.http_server.stop() + if getattr(self, "io_loop", None): + # use IOLoop.add_callback because signal.signal must be called + # from main thread + if from_signal: + self.io_loop.add_callback_from_signal(self._stop) + else: + self.io_loop.add_callback(self._stop) + + +def list_running_servers(runtime_dir=None, log=None): + """Iterate over the server info files of running Jupyter servers. + + Given a runtime directory, find jpserver-* files in the security directory, + and yield dicts of their information, each one pertaining to + a currently running Jupyter server instance. 
+ """ + if runtime_dir is None: + runtime_dir = jupyter_runtime_dir() + + # The runtime dir might not exist + if not os.path.isdir(runtime_dir): + return + + for file_name in os.listdir(runtime_dir): + if re.match("jpserver-(.+).json", file_name): + with io.open(os.path.join(runtime_dir, file_name), encoding="utf-8") as f: + info = json.load(f) + + # Simple check whether that process is really still running + # Also remove leftover files from IPython 2.x without a pid field + if ("pid" in info) and check_pid(info["pid"]): + yield info + else: + # If the process has died, try to delete its info file + try: + os.unlink(os.path.join(runtime_dir, file_name)) + except OSError as e: + if log: + log.warning(_i18n("Deleting server info file failed: %s.") % e) + + +# ----------------------------------------------------------------------------- +# Main entry point +# ----------------------------------------------------------------------------- + +main = launch_new_instance = ServerApp.launch_instance diff --git a/server/jupyter_server/services/__init__.py b/server/jupyter_server/services/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/server/jupyter_server/services/api/__init__.py b/server/jupyter_server/services/api/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/server/jupyter_server/services/api/api.yaml b/server/jupyter_server/services/api/api.yaml new file mode 100644 index 0000000..87e901a --- /dev/null +++ b/server/jupyter_server/services/api/api.yaml @@ -0,0 +1,857 @@ +swagger: "2.0" +info: + title: Kennen Server API + description: Server API + version: "5" + contact: + name: Kennen Project + url: https://jupyter.org +# will be prefixed to all paths +basePath: / +produces: + - application/json +consumes: + - application/json +parameters: + kernel: + name: kernel_id + required: true + in: path + description: kernel uuid + type: string + format: uuid + session: + name: session + required: true + in: path + description: session 
uuid + type: string + format: uuid + path: + name: path + required: true + in: path + description: file path + type: string + checkpoint_id: + name: checkpoint_id + required: true + in: path + description: Checkpoint id for a file + type: string + section_name: + name: section_name + required: true + in: path + description: Name of config section + type: string + terminal_id: + name: terminal_id + required: true + in: path + description: ID of terminal session + type: string + +paths: + /api/contents/{path}: + parameters: + - $ref: "#/parameters/path" + get: + summary: Get contents of file or directory + description: "A client can optionally specify a type and/or format argument via URL parameter. When given, the Contents service shall return a model in the requested type and/or format. If the request cannot be satisfied, e.g. type=text is requested, but the file is binary, then the request shall fail with 400 and have a JSON response containing a 'reason' field, with the value 'bad format' or 'bad type', depending on what was requested." 
+ tags: + - contents + parameters: + - name: type + in: query + description: File type ('file', 'directory') + type: string + enum: + - file + - directory + - name: format + in: query + description: "How file content should be returned ('text', 'base64')" + type: string + enum: + - text + - base64 + - name: content + in: query + description: "Return content (0 for no content, 1 for return content)" + type: integer + responses: + 404: + description: No item found + 400: + description: Bad request + schema: + type: object + properties: + error: + type: string + description: Error condition + reason: + type: string + description: Explanation of error reason + 200: + description: Contents of file or directory + headers: + Last-Modified: + description: Last modified date for file + type: string + format: dateTime + schema: + $ref: "#/definitions/Contents" + 500: + description: Model key error + post: + summary: Create a new file in the specified path + description: "A POST to /api/contents/path creates a New untitled, empty file or directory. A POST to /api/contents/path with body {'copy_from': '/path/to/OtherNotebook.ipynb'} creates a new copy of OtherNotebook in path." 
+ tags: + - contents + parameters: + - name: model + in: body + description: Path of file to copy + schema: + type: object + properties: + copy_from: + type: string + ext: + type: string + type: + type: string + responses: + 201: + description: File created + headers: + Location: + description: URL for the new file + type: string + format: url + schema: + $ref: "#/definitions/Contents" + 404: + description: No item found + 400: + description: Bad request + schema: + type: object + properties: + error: + type: string + description: Error condition + reason: + type: string + description: Explanation of error reason + patch: + summary: Rename a file or directory without re-uploading content + tags: + - contents + parameters: + - name: path + in: body + required: true + description: New path for file or directory. + schema: + type: object + properties: + path: + type: string + format: path + description: New path for file or directory + responses: + 200: + description: Path updated + headers: + Location: + description: Updated URL for the file or directory + type: string + format: url + schema: + $ref: "#/definitions/Contents" + 400: + description: No data provided + schema: + type: object + properties: + error: + type: string + description: Error condition + reason: + type: string + description: Explanation of error reason + put: + summary: Save or upload file. + description: "Saves the file in the location specified by name and path. PUT is very similar to POST, but the requester specifies the name, whereas with POST, the server picks the name." 
+ tags: + - contents + parameters: + - name: model + in: body + description: New path for file or directory + schema: + type: object + properties: + name: + type: string + description: The new filename if changed + path: + type: string + description: New path for file or directory + type: + type: string + description: Path dtype ('notebook', 'file', 'directory') + format: + type: string + description: File format ('json', 'text', 'base64') + content: + type: string + description: The actual body of the document excluding directory type + responses: + 200: + description: File saved + headers: + Location: + description: Updated URL for the file or directory + type: string + format: url + schema: + $ref: "#/definitions/Contents" + 201: + description: Path created + headers: + Location: + description: URL for the file or directory + type: string + format: url + schema: + $ref: "#/definitions/Contents" + 400: + description: No data provided + schema: + type: object + properties: + error: + type: string + description: Error condition + reason: + type: string + description: Explanation of error reason + delete: + summary: Delete a file in the given path + tags: + - contents + responses: + 204: + description: File deleted + headers: + Location: + description: URL for the removed file + type: string + format: url + /api/contents/{path}/checkpoints: + parameters: + - $ref: "#/parameters/path" + get: + summary: Get a list of checkpoints for a file + description: List checkpoints for a given file. There will typically be zero or one results. 
+ tags: + - contents + responses: + 404: + description: No item found + 400: + description: Bad request + schema: + type: object + properties: + error: + type: string + description: Error condition + reason: + type: string + description: Explanation of error reason + 200: + description: List of checkpoints for a file + schema: + type: array + items: + $ref: "#/definitions/Checkpoints" + 500: + description: Model key error + post: + summary: Create a new checkpoint for a file + description: "Create a new checkpoint with the current state of a file. With the default FileContentsManager, only one checkpoint is supported, so creating new checkpoints clobbers existing ones." + tags: + - contents + responses: + 201: + description: Checkpoint created + headers: + Location: + description: URL for the checkpoint + type: string + format: url + schema: + $ref: "#/definitions/Checkpoints" + 404: + description: No item found + 400: + description: Bad request + schema: + type: object + properties: + error: + type: string + description: Error condition + reason: + type: string + description: Explanation of error reason + /api/contents/{path}/checkpoints/{checkpoint_id}: + post: + summary: Restore a file to a particular checkpointed state + parameters: + - $ref: "#/parameters/path" + - $ref: "#/parameters/checkpoint_id" + tags: + - contents + responses: + 204: + description: Checkpoint restored + 400: + description: Bad request + schema: + type: object + properties: + error: + type: string + description: Error condition + reason: + type: string + description: Explanation of error reason + delete: + summary: Delete a checkpoint + parameters: + - $ref: "#/parameters/path" + - $ref: "#/parameters/checkpoint_id" + tags: + - contents + responses: + 204: + description: Checkpoint deleted + /api/sessions/{session}: + parameters: + - $ref: "#/parameters/session" + get: + summary: Get session + tags: + - sessions + responses: + 200: + description: Session + schema: + $ref: 
"#/definitions/Session" + patch: + summary: "This can be used to rename the session." + tags: + - sessions + parameters: + - name: model + in: body + required: true + schema: + $ref: "#/definitions/Session" + responses: + 200: + description: Session + schema: + $ref: "#/definitions/Session" + 400: + description: No data provided + delete: + summary: Delete a session + tags: + - sessions + responses: + 204: + description: Session (and kernel) were deleted + 410: + description: "Kernel was deleted before the session, and the session was *not* deleted (TODO - check to make sure session wasn't deleted)" + /api/sessions: + get: + summary: List available sessions + tags: + - sessions + responses: + 200: + description: List of current sessions + schema: + type: array + items: + $ref: "#/definitions/Session" + post: + summary: "Create a new session, or return an existing session if a session of the same name already exists" + tags: + - sessions + parameters: + - name: session + in: body + schema: + $ref: "#/definitions/Session" + responses: + 201: + description: Session created or returned + schema: + $ref: "#/definitions/Session" + headers: + Location: + description: URL for session commands + type: string + format: url + 501: + description: Session not available + schema: + type: object + description: error message + properties: + message: + type: string + short_message: + type: string + + /api/kernels: + get: + summary: List the JSON data for all kernels that are currently running + tags: + - kernels + responses: + 200: + description: List of currently-running kernel uuids + schema: + type: array + items: + $ref: "#/definitions/Kernel" + post: + summary: Start a kernel and return the uuid + tags: + - kernels + parameters: + - name: options + in: body + schema: + type: object + required: + - name + properties: + name: + type: string + description: Kernel spec name (defaults to default kernel spec for server) + path: + type: string + description: API path from root to the 
cwd of the kernel + responses: + 201: + description: Kernel started + schema: + $ref: "#/definitions/Kernel" + headers: + Location: + description: Model for started kernel + type: string + format: url + /api/kernels/{kernel_id}: + parameters: + - $ref: "#/parameters/kernel" + get: + summary: Get kernel information + tags: + - kernels + responses: + 200: + description: Kernel information + schema: + $ref: "#/definitions/Kernel" + delete: + summary: Kill a kernel and delete the kernel id + tags: + - kernels + responses: + 204: + description: Kernel deleted + /api/kernels/{kernel_id}/interrupt: + parameters: + - $ref: "#/parameters/kernel" + post: + summary: Interrupt a kernel + tags: + - kernels + responses: + 204: + description: Kernel interrupted + /api/kernels/{kernel_id}/restart: + parameters: + - $ref: "#/parameters/kernel" + post: + summary: Restart a kernel + tags: + - kernels + responses: + 200: + description: Kernel restarted + headers: + Location: + description: URL for kernel commands + type: string + format: url + schema: + $ref: "#/definitions/Kernel" + + /api/kernelspecs: + get: + summary: Get kernel specs + tags: + - kernelspecs + responses: + 200: + description: Kernel specs + schema: + type: object + properties: + default: + type: string + description: Default kernel name + kernelspecs: + type: object + additionalProperties: + $ref: "#/definitions/KernelSpec" + /api/config/{section_name}: + get: + summary: Get a configuration section by name + parameters: + - $ref: "#/parameters/section_name" + tags: + - config + responses: + 200: + description: Configuration object + schema: + type: object + patch: + summary: Update a configuration section by name + tags: + - config + parameters: + - $ref: "#/parameters/section_name" + - name: configuration + in: body + schema: + type: object + responses: + 200: + description: Configuration object + schema: + type: object + + /api/terminals: + get: + summary: Get available terminals + tags: + - terminals + 
responses: + 200: + description: A list of all available terminal ids. + schema: + type: array + items: + $ref: "#/definitions/Terminal" + 403: + description: Forbidden to access + 404: + description: Not found + + post: + summary: Create a new terminal + tags: + - terminals + responses: + 200: + description: Succesfully created a new terminal + schema: + $ref: "#/definitions/Terminal" + 403: + description: Forbidden to access + 404: + description: Not found + + /api/terminals/{terminal_id}: + get: + summary: Get a terminal session corresponding to an id. + tags: + - terminals + parameters: + - $ref: "#/parameters/terminal_id" + responses: + 200: + description: Terminal session with given id + schema: + $ref: "#/definitions/Terminal" + 403: + description: Forbidden to access + 404: + description: Not found + + delete: + summary: Delete a terminal session corresponding to an id. + tags: + - terminals + parameters: + - $ref: "#/parameters/terminal_id" + responses: + 204: + description: Succesfully deleted terminal session + 403: + description: Forbidden to access + 404: + description: Not found + + /api/status: + get: + summary: Get the current status/activity of the server. + tags: + - status + responses: + 200: + description: The current status of the server + schema: + $ref: "#/definitions/APIStatus" + + /api/spec.yaml: + get: + summary: Get the current spec for the notebook server's APIs. + tags: + - api-spec + produces: + - text/x-yaml + responses: + 200: + description: The current spec for the notebook server's APIs. + schema: + type: file +definitions: + APIStatus: + description: | + Notebook server API status. + Added in notebook 5.0. + properties: + started: + type: string + description: | + ISO8601 timestamp indicating when the notebook server started. + last_activity: + type: string + description: | + ISO8601 timestamp indicating the last activity on the server, + either on the REST API or kernel activity. 
+ connections: + type: number + description: | + The total number of currently open connections to kernels. + kernels: + type: number + description: | + The total number of running kernels. + KernelSpec: + description: Kernel spec (contents of kernel.json) + properties: + name: + type: string + description: Unique name for kernel + KernelSpecFile: + $ref: "#/definitions/KernelSpecFile" + resources: + type: object + properties: + kernel.js: + type: string + format: filename + description: path for kernel.js file + kernel.css: + type: string + format: filename + description: path for kernel.css file + logo-*: + type: string + format: filename + description: path for logo file. Logo filenames are of the form `logo-widthxheight` + KernelSpecFile: + description: Kernel spec json file + required: + - argv + - display_name + - language + properties: + language: + type: string + description: The programming language which this kernel runs. This will be stored in notebook metadata. + argv: + type: array + description: "A list of command line arguments used to start the kernel. The text `{connection_file}` in any argument will be replaced with the path to the connection file." + items: + type: string + display_name: + type: string + description: "The kernel's name as it should be displayed in the UI. Unlike the kernel name used in the API, this can contain arbitrary unicode characters." + codemirror_mode: + type: string + description: Codemirror mode. Can be a string *or* a valid Codemirror mode object. This defaults to the string from the `language` property. + env: + type: object + description: A dictionary of environment variables to set for the kernel. These will be added to the current environment variables. + additionalProperties: + type: string + help_links: + type: array + description: Help items to be displayed in the help menu in the notebook UI.
+ items: + type: object + required: + - text + - url + properties: + text: + type: string + description: menu item link text + url: + type: string + format: URL + description: menu item link url + Kernel: + description: Kernel information + required: + - id + - name + properties: + id: + type: string + format: uuid + description: uuid of kernel + name: + type: string + description: kernel spec name + last_activity: + type: string + description: | + ISO 8601 timestamp for the last-seen activity on this kernel. + Use this in combination with execution_state == 'idle' to identify + which kernels have been idle since a given time. + Timestamps will be UTC, indicated 'Z' suffix. + Added in notebook server 5.0. + connections: + type: number + description: | + The number of active connections to this kernel. + execution_state: + type: string + description: | + Current execution state of the kernel (typically 'idle' or 'busy', but may be other values, such as 'starting'). + Added in notebook server 5.0. + Session: + description: A session + type: object + properties: + id: + type: string + format: uuid + path: + type: string + description: path to the session + name: + type: string + description: name of the session + type: + type: string + description: session type + kernel: + $ref: "#/definitions/Kernel" + Contents: + description: "A contents object. The content and format keys may be null if content is not contained. If type is 'file', then the mimetype will be null." 
+ type: object + required: + - type + - name + - path + - writable + - created + - last_modified + - mimetype + - format + - content + properties: + name: + type: string + description: "Name of file or directory, equivalent to the last part of the path" + path: + type: string + description: Full path for file or directory + type: + type: string + description: Type of content + enum: + - directory + - file + - notebook + writable: + type: boolean + description: indicates whether the requester has permission to edit the file + created: + type: string + description: Creation timestamp + format: dateTime + last_modified: + type: string + description: Last modified timestamp + format: dateTime + size: + type: integer + description: "The size of the file or notebook in bytes. If no size is provided, defaults to null." + mimetype: + type: string + description: "The mimetype of a file. If content is not null, and type is 'file', this will contain the mimetype of the file, otherwise this will be null." + content: + type: string + description: "The content, if requested (otherwise null). Will be an array if type is 'directory'" + format: + type: string + description: Format of content (one of null, 'text', 'base64', 'json') + Checkpoints: + description: A checkpoint object. + type: object + required: + - id + - last_modified + properties: + id: + type: string + description: Unique id for the checkpoint. + last_modified: + type: string + description: Last modified timestamp + format: dateTime + Terminal: + description: A Terminal object + type: object + required: + - name + properties: + name: + type: string + description: name of terminal + last_activity: + type: string + description: | + ISO 8601 timestamp for the last-seen activity on this terminal. Use + this to identify which terminals have been inactive since a given time. + Timestamps will be UTC, indicated 'Z' suffix. 
diff --git a/server/jupyter_server/services/api/handlers.py b/server/jupyter_server/services/api/handlers.py new file mode 100644 index 0000000..8974215 --- /dev/null +++ b/server/jupyter_server/services/api/handlers.py @@ -0,0 +1,63 @@ +"""Tornado handlers for api specifications.""" +# Copyright (c) Jupyter Development Team. +# Distributed under the terms of the Modified BSD License. +import json +import os + +from tornado import web + +from ...base.handlers import APIHandler +from ...base.handlers import JupyterHandler +from jupyter_server._tz import isoformat +from jupyter_server._tz import utcfromtimestamp +from jupyter_server.auth import authorized +from jupyter_server.utils import ensure_async + + +AUTH_RESOURCE = "api" + + +class APISpecHandler(web.StaticFileHandler, JupyterHandler): + auth_resource = AUTH_RESOURCE + + def initialize(self): + web.StaticFileHandler.initialize(self, path=os.path.dirname(__file__)) + + @web.authenticated + @authorized + def get(self): + self.log.warning("Serving api spec (experimental, incomplete)") + return web.StaticFileHandler.get(self, "api.yaml") + + def get_content_type(self): + return "text/x-yaml" + + +class APIStatusHandler(APIHandler): + + auth_resource = AUTH_RESOURCE + _track_activity = False + + @web.authenticated + @authorized + async def get(self): + # if started was missing, use unix epoch + started = self.settings.get("started", utcfromtimestamp(0)) + started = isoformat(started) + + kernels = await ensure_async(self.kernel_manager.list_kernels()) + total_connections = sum(k["connections"] for k in kernels) + last_activity = isoformat(self.application.last_activity()) + model = { + "started": started, + "last_activity": last_activity, + "kernels": len(kernels), + "connections": total_connections, + } + self.finish(json.dumps(model, sort_keys=True)) + + +default_handlers = [ + (r"/api/spec.yaml", APISpecHandler), + (r"/api/status", APIStatusHandler), +] diff --git 
a/server/jupyter_server/services/config/__init__.py b/server/jupyter_server/services/config/__init__.py new file mode 100644 index 0000000..9a2aee2 --- /dev/null +++ b/server/jupyter_server/services/config/__init__.py @@ -0,0 +1 @@ +from .manager import ConfigManager # noqa diff --git a/server/jupyter_server/services/config/handlers.py b/server/jupyter_server/services/config/handlers.py new file mode 100644 index 0000000..09bb88f --- /dev/null +++ b/server/jupyter_server/services/config/handlers.py @@ -0,0 +1,45 @@ +"""Tornado handlers for frontend config storage.""" +# Copyright (c) Jupyter Development Team. +# Distributed under the terms of the Modified BSD License. +import json + +from tornado import web + +from ...base.handlers import APIHandler +from jupyter_server.auth import authorized + + +AUTH_RESOURCE = "config" + + +class ConfigHandler(APIHandler): + auth_resource = AUTH_RESOURCE + + @web.authenticated + @authorized + def get(self, section_name): + self.set_header("Content-Type", "application/json") + self.finish(json.dumps(self.config_manager.get(section_name))) + + @web.authenticated + @authorized + def put(self, section_name): + data = self.get_json_body() # Will raise 400 if content is not valid JSON + self.config_manager.set(section_name, data) + self.set_status(204) + + @web.authenticated + @authorized + def patch(self, section_name): + new_data = self.get_json_body() + section = self.config_manager.update(section_name, new_data) + self.finish(json.dumps(section)) + + +# URL to handler mappings + +section_name_regex = r"(?P<section_name>\w+)" + +default_handlers = [ + (r"/api/config/%s" % section_name_regex, ConfigHandler), +] diff --git a/server/jupyter_server/services/config/manager.py b/server/jupyter_server/services/config/manager.py new file mode 100644 index 0000000..b252cf5 --- /dev/null +++ b/server/jupyter_server/services/config/manager.py @@ -0,0 +1,68 @@ +"""Manager to read and modify frontend config data in JSON files.
+""" +# Copyright (c) Jupyter Development Team. +# Distributed under the terms of the Modified BSD License. +import os.path + +from jupyter_core.paths import jupyter_config_dir +from jupyter_core.paths import jupyter_config_path +from traitlets import default +from traitlets import Instance +from traitlets import List +from traitlets import observe +from traitlets import Unicode +from traitlets.config import LoggingConfigurable + +from jupyter_server.config_manager import BaseJSONConfigManager +from jupyter_server.config_manager import recursive_update + + +class ConfigManager(LoggingConfigurable): + """Config Manager used for storing frontend config""" + + config_dir_name = Unicode("serverconfig", help="""Name of the config directory.""").tag( + config=True + ) + + # Public API + + def get(self, section_name): + """Get the config from all config sections.""" + config = {} + # step through back to front, to ensure front of the list is top priority + for p in self.read_config_path[::-1]: + cm = BaseJSONConfigManager(config_dir=p) + recursive_update(config, cm.get(section_name)) + return config + + def set(self, section_name, data): + """Set the config only to the user's config.""" + return self.write_config_manager.set(section_name, data) + + def update(self, section_name, new_data): + """Update the config only to the user's config.""" + return self.write_config_manager.update(section_name, new_data) + + # Private API + + read_config_path = List(Unicode()) + + @default("read_config_path") + def _default_read_config_path(self): + return [os.path.join(p, self.config_dir_name) for p in jupyter_config_path()] + + write_config_dir = Unicode() + + @default("write_config_dir") + def _default_write_config_dir(self): + return os.path.join(jupyter_config_dir(), self.config_dir_name) + + write_config_manager = Instance(BaseJSONConfigManager) + + @default("write_config_manager") + def _default_write_config_manager(self): + return 
BaseJSONConfigManager(config_dir=self.write_config_dir) + + @observe("write_config_dir") + def _update_write_config_dir(self, change): + self.write_config_manager = BaseJSONConfigManager(config_dir=self.write_config_dir) diff --git a/server/jupyter_server/services/contents/__init__.py b/server/jupyter_server/services/contents/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/server/jupyter_server/services/contents/checkpoints.py b/server/jupyter_server/services/contents/checkpoints.py new file mode 100644 index 0000000..7a5fa2e --- /dev/null +++ b/server/jupyter_server/services/contents/checkpoints.py @@ -0,0 +1,249 @@ +""" +Classes for managing Checkpoints. +""" +# Copyright (c) Jupyter Development Team. +# Distributed under the terms of the Modified BSD License. +from tornado.web import HTTPError +from traitlets.config.configurable import LoggingConfigurable + + +class Checkpoints(LoggingConfigurable): + """ + Base class for managing checkpoints for a ContentsManager. 
+ + Subclasses are required to implement: + + create_checkpoint(self, contents_mgr, path) + restore_checkpoint(self, contents_mgr, checkpoint_id, path) + rename_checkpoint(self, checkpoint_id, old_path, new_path) + delete_checkpoint(self, checkpoint_id, path) + list_checkpoints(self, path) + """ + + def create_checkpoint(self, contents_mgr, path): + """Create a checkpoint.""" + raise NotImplementedError("must be implemented in a subclass") + + def restore_checkpoint(self, contents_mgr, checkpoint_id, path): + """Restore a checkpoint""" + raise NotImplementedError("must be implemented in a subclass") + + def rename_checkpoint(self, checkpoint_id, old_path, new_path): + """Rename a single checkpoint from old_path to new_path.""" + raise NotImplementedError("must be implemented in a subclass") + + def delete_checkpoint(self, checkpoint_id, path): + """delete a checkpoint for a file""" + raise NotImplementedError("must be implemented in a subclass") + + def list_checkpoints(self, path): + """Return a list of checkpoints for a given file""" + raise NotImplementedError("must be implemented in a subclass") + + def rename_all_checkpoints(self, old_path, new_path): + """Rename all checkpoints for old_path to new_path.""" + for cp in self.list_checkpoints(old_path): + self.rename_checkpoint(cp["id"], old_path, new_path) + + def delete_all_checkpoints(self, path): + """Delete all checkpoints for the given path.""" + for checkpoint in self.list_checkpoints(path): + self.delete_checkpoint(checkpoint["id"], path) + + +class GenericCheckpointsMixin(object): + """ + Helper for creating Checkpoints subclasses that can be used with any + ContentsManager. 
+ + Provides a ContentsManager-agnostic implementation of `create_checkpoint` + and `restore_checkpoint` in terms of the following operations: + + - create_file_checkpoint(self, content, format, path) + - create_notebook_checkpoint(self, nb, path) + - get_file_checkpoint(self, checkpoint_id, path) + - get_notebook_checkpoint(self, checkpoint_id, path) + + To create a generic CheckpointManager, add this mixin to a class that + implement the above four methods plus the remaining Checkpoints API + methods: + + - delete_checkpoint(self, checkpoint_id, path) + - list_checkpoints(self, path) + - rename_checkpoint(self, checkpoint_id, old_path, new_path) + """ + + def create_checkpoint(self, contents_mgr, path): + model = contents_mgr.get(path, content=True) + type = model["type"] + if type == "notebook": + return self.create_notebook_checkpoint( + model["content"], + path, + ) + elif type == "file": + return self.create_file_checkpoint( + model["content"], + model["format"], + path, + ) + else: + raise HTTPError(500, "Unexpected type %s" % type) + + def restore_checkpoint(self, contents_mgr, checkpoint_id, path): + """Restore a checkpoint.""" + type = contents_mgr.get(path, content=False)["type"] + if type == "notebook": + model = self.get_notebook_checkpoint(checkpoint_id, path) + elif type == "file": + model = self.get_file_checkpoint(checkpoint_id, path) + else: + raise HTTPError(500, "Unexpected type %s" % type) + contents_mgr.save(model, path) + + # Required Methods + def create_file_checkpoint(self, content, format, path): + """Create a checkpoint of the current state of a file + + Returns a checkpoint model for the new checkpoint. + """ + raise NotImplementedError("must be implemented in a subclass") + + def create_notebook_checkpoint(self, nb, path): + """Create a checkpoint of the current state of a file + + Returns a checkpoint model for the new checkpoint. 
+ """ + raise NotImplementedError("must be implemented in a subclass") + + def get_file_checkpoint(self, checkpoint_id, path): + """Get the content of a checkpoint for a non-notebook file. + + Returns a dict of the form: + { + 'type': 'file', + 'content': , + 'format': {'text','base64'}, + } + """ + raise NotImplementedError("must be implemented in a subclass") + + def get_notebook_checkpoint(self, checkpoint_id, path): + """Get the content of a checkpoint for a notebook. + + Returns a dict of the form: + { + 'type': 'notebook', + 'content': , + } + """ + raise NotImplementedError("must be implemented in a subclass") + + +class AsyncCheckpoints(Checkpoints): + """ + Base class for managing checkpoints for a ContentsManager asynchronously. + """ + + async def create_checkpoint(self, contents_mgr, path): + """Create a checkpoint.""" + raise NotImplementedError("must be implemented in a subclass") + + async def restore_checkpoint(self, contents_mgr, checkpoint_id, path): + """Restore a checkpoint""" + raise NotImplementedError("must be implemented in a subclass") + + async def rename_checkpoint(self, checkpoint_id, old_path, new_path): + """Rename a single checkpoint from old_path to new_path.""" + raise NotImplementedError("must be implemented in a subclass") + + async def delete_checkpoint(self, checkpoint_id, path): + """delete a checkpoint for a file""" + raise NotImplementedError("must be implemented in a subclass") + + async def list_checkpoints(self, path): + """Return a list of checkpoints for a given file""" + raise NotImplementedError("must be implemented in a subclass") + + async def rename_all_checkpoints(self, old_path, new_path): + """Rename all checkpoints for old_path to new_path.""" + for cp in await self.list_checkpoints(old_path): + await self.rename_checkpoint(cp["id"], old_path, new_path) + + async def delete_all_checkpoints(self, path): + """Delete all checkpoints for the given path.""" + for checkpoint in await self.list_checkpoints(path): + 
await self.delete_checkpoint(checkpoint["id"], path) + + +class AsyncGenericCheckpointsMixin(GenericCheckpointsMixin): + """ + Helper for creating Asynchronous Checkpoints subclasses that can be used with any + ContentsManager. + """ + + async def create_checkpoint(self, contents_mgr, path): + model = await contents_mgr.get(path, content=True) + type = model["type"] + if type == "notebook": + return await self.create_notebook_checkpoint( + model["content"], + path, + ) + elif type == "file": + return await self.create_file_checkpoint( + model["content"], + model["format"], + path, + ) + else: + raise HTTPError(500, "Unexpected type %s" % type) + + async def restore_checkpoint(self, contents_mgr, checkpoint_id, path): + """Restore a checkpoint.""" + type = (await contents_mgr.get(path, content=False))["type"] + if type == "notebook": + model = await self.get_notebook_checkpoint(checkpoint_id, path) + elif type == "file": + model = await self.get_file_checkpoint(checkpoint_id, path) + else: + raise HTTPError(500, "Unexpected type %s" % type) + await contents_mgr.save(model, path) + + # Required Methods + async def create_file_checkpoint(self, content, format, path): + """Create a checkpoint of the current state of a file + + Returns a checkpoint model for the new checkpoint. + """ + raise NotImplementedError("must be implemented in a subclass") + + async def create_notebook_checkpoint(self, nb, path): + """Create a checkpoint of the current state of a file + + Returns a checkpoint model for the new checkpoint. + """ + raise NotImplementedError("must be implemented in a subclass") + + async def get_file_checkpoint(self, checkpoint_id, path): + """Get the content of a checkpoint for a non-notebook file.
+ + Returns a dict of the form: + { + 'type': 'file', + 'content': , + 'format': {'text','base64'}, + } + """ + raise NotImplementedError("must be implemented in a subclass") + + async def get_notebook_checkpoint(self, checkpoint_id, path): + """Get the content of a checkpoint for a notebook. + + Returns a dict of the form: + { + 'type': 'notebook', + 'content': , + } + """ + raise NotImplementedError("must be implemented in a subclass") diff --git a/server/jupyter_server/services/contents/filecheckpoints.py b/server/jupyter_server/services/contents/filecheckpoints.py new file mode 100644 index 0000000..b4c89f7 --- /dev/null +++ b/server/jupyter_server/services/contents/filecheckpoints.py @@ -0,0 +1,329 @@ +""" +File-based Checkpoints implementations. +""" +import os +import shutil + +from anyio.to_thread import run_sync +from jupyter_core.utils import ensure_dir_exists +from tornado.web import HTTPError +from traitlets import Unicode + +from .checkpoints import AsyncCheckpoints +from .checkpoints import AsyncGenericCheckpointsMixin +from .checkpoints import Checkpoints +from .checkpoints import GenericCheckpointsMixin +from .fileio import AsyncFileManagerMixin +from .fileio import FileManagerMixin +from jupyter_server import _tz as tz + + +class FileCheckpoints(FileManagerMixin, Checkpoints): + """ + A Checkpoints that caches checkpoints for files in adjacent + directories. + + Only works with FileContentsManager. Use GenericFileCheckpoints if + you want file-based checkpoints with another ContentsManager. + """ + + checkpoint_dir = Unicode( + ".ipynb_checkpoints", + config=True, + help="""The directory name in which to keep file checkpoints + + This is a path relative to the file's own directory. 
+ + By default, it is .ipynb_checkpoints + """, + ) + + root_dir = Unicode(config=True) + + def _root_dir_default(self): + try: + return self.parent.root_dir + except AttributeError: + return os.getcwd() + + # ContentsManager-dependent checkpoint API + def create_checkpoint(self, contents_mgr, path): + """Create a checkpoint.""" + checkpoint_id = "checkpoint" + src_path = contents_mgr._get_os_path(path) + dest_path = self.checkpoint_path(checkpoint_id, path) + self._copy(src_path, dest_path) + return self.checkpoint_model(checkpoint_id, dest_path) + + def restore_checkpoint(self, contents_mgr, checkpoint_id, path): + """Restore a checkpoint.""" + src_path = self.checkpoint_path(checkpoint_id, path) + dest_path = contents_mgr._get_os_path(path) + self._copy(src_path, dest_path) + + # ContentsManager-independent checkpoint API + def rename_checkpoint(self, checkpoint_id, old_path, new_path): + """Rename a checkpoint from old_path to new_path.""" + old_cp_path = self.checkpoint_path(checkpoint_id, old_path) + new_cp_path = self.checkpoint_path(checkpoint_id, new_path) + if os.path.isfile(old_cp_path): + self.log.debug( + "Renaming checkpoint %s -> %s", + old_cp_path, + new_cp_path, + ) + with self.perm_to_403(): + shutil.move(old_cp_path, new_cp_path) + + def delete_checkpoint(self, checkpoint_id, path): + """delete a file's checkpoint""" + path = path.strip("/") + cp_path = self.checkpoint_path(checkpoint_id, path) + if not os.path.isfile(cp_path): + self.no_such_checkpoint(path, checkpoint_id) + + self.log.debug("unlinking %s", cp_path) + with self.perm_to_403(): + os.unlink(cp_path) + + def list_checkpoints(self, path): + """list the checkpoints for a given file + + This contents manager currently only supports one checkpoint per file. 
+ """ + path = path.strip("/") + checkpoint_id = "checkpoint" + os_path = self.checkpoint_path(checkpoint_id, path) + if not os.path.isfile(os_path): + return [] + else: + return [self.checkpoint_model(checkpoint_id, os_path)] + + # Checkpoint-related utilities + def checkpoint_path(self, checkpoint_id, path): + """find the path to a checkpoint""" + path = path.strip("/") + parent, name = ("/" + path).rsplit("/", 1) + parent = parent.strip("/") + basename, ext = os.path.splitext(name) + filename = "{name}-{checkpoint_id}{ext}".format( + name=basename, + checkpoint_id=checkpoint_id, + ext=ext, + ) + os_path = self._get_os_path(path=parent) + cp_dir = os.path.join(os_path, self.checkpoint_dir) + with self.perm_to_403(): + ensure_dir_exists(cp_dir) + cp_path = os.path.join(cp_dir, filename) + return cp_path + + def checkpoint_model(self, checkpoint_id, os_path): + """construct the info dict for a given checkpoint""" + stats = os.stat(os_path) + last_modified = tz.utcfromtimestamp(stats.st_mtime) + info = dict( + id=checkpoint_id, + last_modified=last_modified, + ) + return info + + # Error Handling + def no_such_checkpoint(self, path, checkpoint_id): + raise HTTPError(404, "Checkpoint does not exist: %s@%s" % (path, checkpoint_id)) + + +class AsyncFileCheckpoints(FileCheckpoints, AsyncFileManagerMixin, AsyncCheckpoints): + async def create_checkpoint(self, contents_mgr, path): + """Create a checkpoint.""" + checkpoint_id = "checkpoint" + src_path = contents_mgr._get_os_path(path) + dest_path = self.checkpoint_path(checkpoint_id, path) + await self._copy(src_path, dest_path) + return await self.checkpoint_model(checkpoint_id, dest_path) + + async def restore_checkpoint(self, contents_mgr, checkpoint_id, path): + """Restore a checkpoint.""" + src_path = self.checkpoint_path(checkpoint_id, path) + dest_path = contents_mgr._get_os_path(path) + await self._copy(src_path, dest_path) + + async def checkpoint_model(self, checkpoint_id, os_path): + """construct the info dict 
for a given checkpoint""" + stats = await run_sync(os.stat, os_path) + last_modified = tz.utcfromtimestamp(stats.st_mtime) + info = dict( + id=checkpoint_id, + last_modified=last_modified, + ) + return info + + # ContentsManager-independent checkpoint API + async def rename_checkpoint(self, checkpoint_id, old_path, new_path): + """Rename a checkpoint from old_path to new_path.""" + old_cp_path = self.checkpoint_path(checkpoint_id, old_path) + new_cp_path = self.checkpoint_path(checkpoint_id, new_path) + if os.path.isfile(old_cp_path): + self.log.debug( + "Renaming checkpoint %s -> %s", + old_cp_path, + new_cp_path, + ) + with self.perm_to_403(): + await run_sync(shutil.move, old_cp_path, new_cp_path) + + async def delete_checkpoint(self, checkpoint_id, path): + """delete a file's checkpoint""" + path = path.strip("/") + cp_path = self.checkpoint_path(checkpoint_id, path) + if not os.path.isfile(cp_path): + self.no_such_checkpoint(path, checkpoint_id) + + self.log.debug("unlinking %s", cp_path) + with self.perm_to_403(): + await run_sync(os.unlink, cp_path) + + async def list_checkpoints(self, path): + """list the checkpoints for a given file + + This contents manager currently only supports one checkpoint per file. + """ + path = path.strip("/") + checkpoint_id = "checkpoint" + os_path = self.checkpoint_path(checkpoint_id, path) + if not os.path.isfile(os_path): + return [] + else: + return [await self.checkpoint_model(checkpoint_id, os_path)] + + +class GenericFileCheckpoints(GenericCheckpointsMixin, FileCheckpoints): + """ + Local filesystem Checkpoints that works with any conforming + ContentsManager. 
+ """ + + def create_file_checkpoint(self, content, format, path): + """Create a checkpoint from the current content of a file.""" + path = path.strip("/") + # only the one checkpoint ID: + checkpoint_id = "checkpoint" + os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) + self.log.debug("creating checkpoint for %s", path) + with self.perm_to_403(): + self._save_file(os_checkpoint_path, content, format=format) + + # return the checkpoint info + return self.checkpoint_model(checkpoint_id, os_checkpoint_path) + + def create_notebook_checkpoint(self, nb, path): + """Create a checkpoint from the current content of a notebook.""" + path = path.strip("/") + # only the one checkpoint ID: + checkpoint_id = "checkpoint" + os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) + self.log.debug("creating checkpoint for %s", path) + with self.perm_to_403(): + self._save_notebook(os_checkpoint_path, nb) + + # return the checkpoint info + return self.checkpoint_model(checkpoint_id, os_checkpoint_path) + + def get_notebook_checkpoint(self, checkpoint_id, path): + """Get a checkpoint for a notebook.""" + path = path.strip("/") + self.log.info("restoring %s from checkpoint %s", path, checkpoint_id) + os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) + + if not os.path.isfile(os_checkpoint_path): + self.no_such_checkpoint(path, checkpoint_id) + + return { + "type": "notebook", + "content": self._read_notebook( + os_checkpoint_path, + as_version=4, + ), + } + + def get_file_checkpoint(self, checkpoint_id, path): + """Get a checkpoint for a file.""" + path = path.strip("/") + self.log.info("restoring %s from checkpoint %s", path, checkpoint_id) + os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) + + if not os.path.isfile(os_checkpoint_path): + self.no_such_checkpoint(path, checkpoint_id) + + content, format = self._read_file(os_checkpoint_path, format=None) + return { + "type": "file", + "content": content, + "format": format, + } + + 
+class AsyncGenericFileCheckpoints(AsyncGenericCheckpointsMixin, AsyncFileCheckpoints): + """ + Asynchronous Local filesystem Checkpoints that works with any conforming + ContentsManager. + """ + + async def create_file_checkpoint(self, content, format, path): + """Create a checkpoint from the current content of a file.""" + path = path.strip("/") + # only the one checkpoint ID: + checkpoint_id = "checkpoint" + os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) + self.log.debug("creating checkpoint for %s", path) + with self.perm_to_403(): + await self._save_file(os_checkpoint_path, content, format=format) + + # return the checkpoint info + return await self.checkpoint_model(checkpoint_id, os_checkpoint_path) + + async def create_notebook_checkpoint(self, nb, path): + """Create a checkpoint from the current content of a notebook.""" + path = path.strip("/") + # only the one checkpoint ID: + checkpoint_id = "checkpoint" + os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) + self.log.debug("creating checkpoint for %s", path) + with self.perm_to_403(): + await self._save_notebook(os_checkpoint_path, nb) + + # return the checkpoint info + return await self.checkpoint_model(checkpoint_id, os_checkpoint_path) + + async def get_notebook_checkpoint(self, checkpoint_id, path): + """Get a checkpoint for a notebook.""" + path = path.strip("/") + self.log.info("restoring %s from checkpoint %s", path, checkpoint_id) + os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) + + if not os.path.isfile(os_checkpoint_path): + self.no_such_checkpoint(path, checkpoint_id) + + return { + "type": "notebook", + "content": await self._read_notebook( + os_checkpoint_path, + as_version=4, + ), + } + + async def get_file_checkpoint(self, checkpoint_id, path): + """Get a checkpoint for a file.""" + path = path.strip("/") + self.log.info("restoring %s from checkpoint %s", path, checkpoint_id) + os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) + + if 
not os.path.isfile(os_checkpoint_path): + self.no_such_checkpoint(path, checkpoint_id) + + content, format = await self._read_file(os_checkpoint_path, format=None) + return { + "type": "file", + "content": content, + "format": format, + } diff --git a/server/jupyter_server/services/contents/fileio.py b/server/jupyter_server/services/contents/fileio.py new file mode 100644 index 0000000..4910433 --- /dev/null +++ b/server/jupyter_server/services/contents/fileio.py @@ -0,0 +1,431 @@ +""" +Utilities for file-based Contents/Checkpoints managers. +""" +# Copyright (c) Jupyter Development Team. +# Distributed under the terms of the Modified BSD License. +import errno +import io +import os +import shutil +from base64 import decodebytes +from base64 import encodebytes +from contextlib import contextmanager +from functools import partial + +import nbformat +from anyio.to_thread import run_sync +from tornado.web import HTTPError +from traitlets import Bool +from traitlets.config import Configurable + +from jupyter_server.utils import to_api_path +from jupyter_server.utils import to_os_path + + +def replace_file(src, dst): + """replace dst with src""" + os.replace(src, dst) + + +async def async_replace_file(src, dst): + """replace dst with src asynchronously""" + await run_sync(os.replace, src, dst) + + +def copy2_safe(src, dst, log=None): + """copy src to dst + + like shutil.copy2, but log errors in copystat instead of raising + """ + shutil.copyfile(src, dst) + try: + shutil.copystat(src, dst) + except OSError: + if log: + log.debug("copystat on %s failed", dst, exc_info=True) + + +async def async_copy2_safe(src, dst, log=None): + """copy src to dst asynchronously + + like shutil.copy2, but log errors in copystat instead of raising + """ + await run_sync(shutil.copyfile, src, dst) + try: + await run_sync(shutil.copystat, src, dst) + except OSError: + if log: + log.debug("copystat on %s failed", dst, exc_info=True) + + +def path_to_intermediate(path): + """Name of the 
intermediate file used in atomic writes. + + The .~ prefix will make Dropbox ignore the temporary file.""" + dirname, basename = os.path.split(path) + return os.path.join(dirname, ".~" + basename) + + +def path_to_invalid(path): + """Name of invalid file after a failed atomic write and subsequent read.""" + dirname, basename = os.path.split(path) + return os.path.join(dirname, basename + ".invalid") + + +@contextmanager +def atomic_writing(path, text=True, encoding="utf-8", log=None, **kwargs): + """Context manager to write to a file only if the entire write is successful. + + This works by copying the previous file contents to a temporary file in the + same directory, and renaming that file back to the target if the context + exits with an error. If the context is successful, the new data is synced to + disk and the temporary file is removed. + + Parameters + ---------- + path : str + The target file to write to. + text : bool, optional + Whether to open the file in text mode (i.e. to write unicode). Default is + True. + encoding : str, optional + The encoding to use for files opened in text mode. Default is UTF-8. + **kwargs + Passed to :func:`io.open`. + """ + # realpath doesn't work on Windows: https://bugs.python.org/issue9949 + # Luckily, we only need to resolve the file itself being a symlink, not + # any of its directories, so this will suffice: + if os.path.islink(path): + path = os.path.join(os.path.dirname(path), os.readlink(path)) + + tmp_path = path_to_intermediate(path) + + if os.path.isfile(path): + copy2_safe(path, tmp_path, log=log) + + if text: + # Make sure that text files have Unix linefeeds by default + kwargs.setdefault("newline", "\n") + fileobj = io.open(path, "w", encoding=encoding, **kwargs) + else: + fileobj = io.open(path, "wb", **kwargs) + + try: + yield fileobj + except: + # Failed! 
Move the backup file back to the real path to avoid corruption + fileobj.close() + replace_file(tmp_path, path) + raise + + # Flush to disk + fileobj.flush() + os.fsync(fileobj.fileno()) + fileobj.close() + + # Written successfully, now remove the backup copy + if os.path.isfile(tmp_path): + os.remove(tmp_path) + + +@contextmanager +def _simple_writing(path, text=True, encoding="utf-8", log=None, **kwargs): + """Context manager to write file without doing atomic writing + (for weird filesystem eg: nfs). + + Parameters + ---------- + path : str + The target file to write to. + text : bool, optional + Whether to open the file in text mode (i.e. to write unicode). Default is + True. + encoding : str, optional + The encoding to use for files opened in text mode. Default is UTF-8. + **kwargs + Passed to :func:`io.open`. + """ + # realpath doesn't work on Windows: https://bugs.python.org/issue9949 + # Luckily, we only need to resolve the file itself being a symlink, not + # any of its directories, so this will suffice: + if os.path.islink(path): + path = os.path.join(os.path.dirname(path), os.readlink(path)) + + if text: + # Make sure that text files have Unix linefeeds by default + kwargs.setdefault("newline", "\n") + fileobj = io.open(path, "w", encoding=encoding, **kwargs) + else: + fileobj = io.open(path, "wb", **kwargs) + + try: + yield fileobj + except: + fileobj.close() + raise + + fileobj.close() + + +class FileManagerMixin(Configurable): + """ + Mixin for ContentsAPI classes that interact with the filesystem. + + Provides facilities for reading, writing, and copying files. + + Shared by FileContentsManager and FileCheckpoints. + + Note + ---- + Classes using this mixin must provide the following attributes: + + root_dir : unicode + A directory against against which API-style paths are to be resolved. 
+ + log : logging.Logger + """ + + use_atomic_writing = Bool( + True, + config=True, + help="""By default notebooks are saved on disk on a temporary file and then if succefully written, it replaces the old ones. + This procedure, namely 'atomic_writing', causes some bugs on file system whitout operation order enforcement (like some networked fs). + If set to False, the new notebook is written directly on the old one which could fail (eg: full filesystem or quota )""", + ) + + @contextmanager + def open(self, os_path, *args, **kwargs): + """wrapper around io.open that turns permission errors into 403""" + with self.perm_to_403(os_path): + with io.open(os_path, *args, **kwargs) as f: + yield f + + @contextmanager + def atomic_writing(self, os_path, *args, **kwargs): + """wrapper around atomic_writing that turns permission errors to 403. + Depending on flag 'use_atomic_writing', the wrapper perform an actual atomic writing or + simply writes the file (whatever an old exists or not)""" + with self.perm_to_403(os_path): + if self.use_atomic_writing: + with atomic_writing(os_path, *args, log=self.log, **kwargs) as f: + yield f + else: + with _simple_writing(os_path, *args, log=self.log, **kwargs) as f: + yield f + + @contextmanager + def perm_to_403(self, os_path=""): + """context manager for turning permission errors into 403.""" + try: + yield + except (OSError, IOError) as e: + if e.errno in {errno.EPERM, errno.EACCES}: + # make 403 error message without root prefix + # this may not work perfectly on unicode paths on Python 2, + # but nobody should be doing that anyway. 
+ if not os_path: + os_path = e.filename or "unknown file" + path = to_api_path(os_path, root=self.root_dir) + raise HTTPError(403, "Permission denied: %s" % path) from e + else: + raise + + def _copy(self, src, dest): + """copy src to dest + + like shutil.copy2, but log errors in copystat + """ + copy2_safe(src, dest, log=self.log) + + def _get_os_path(self, path): + """Given an API path, return its file system path. + + Parameters + ---------- + path : string + The relative API path to the named file. + + Returns + ------- + path : string + Native, absolute OS path to for a file. + + Raises + ------ + 404: if path is outside root + """ + root = os.path.abspath(self.root_dir) + os_path = to_os_path(path, root) + if not (os.path.abspath(os_path) + os.path.sep).startswith(root): + raise HTTPError(404, "%s is outside root contents directory" % path) + return os_path + + def _read_notebook(self, os_path, as_version=4): + """Read a notebook from an os path.""" + with self.open(os_path, "r", encoding="utf-8") as f: + try: + return nbformat.read(f, as_version=as_version) + except Exception as e: + e_orig = e + + # If use_atomic_writing is enabled, we'll guess that it was also + # enabled when this notebook was written and look for a valid + # atomic intermediate. + tmp_path = path_to_intermediate(os_path) + + if not self.use_atomic_writing or not os.path.exists(tmp_path): + raise HTTPError( + 400, + "Unreadable Notebook: %s %r" % (os_path, e_orig), + ) + + # Move the bad file aside, restore the intermediate, and try again. + invalid_file = path_to_invalid(os_path) + replace_file(os_path, invalid_file) + replace_file(tmp_path, os_path) + return self._read_notebook(os_path, as_version) + + def _save_notebook(self, os_path, nb): + """Save a notebook to an os_path.""" + with self.atomic_writing(os_path, encoding="utf-8") as f: + nbformat.write(nb, f, version=nbformat.NO_CONVERT) + + def _read_file(self, os_path, format): + """Read a non-notebook file. 
+ + os_path: The path to be read. + format: + If 'text', the contents will be decoded as UTF-8. + If 'base64', the raw bytes contents will be encoded as base64. + If not specified, try to decode as UTF-8, and fall back to base64 + """ + if not os.path.isfile(os_path): + raise HTTPError(400, "Cannot read non-file %s" % os_path) + + with self.open(os_path, "rb") as f: + bcontent = f.read() + + if format is None or format == "text": + # Try to interpret as unicode if format is unknown or if unicode + # was explicitly requested. + try: + return bcontent.decode("utf8"), "text" + except UnicodeError as e: + if format == "text": + raise HTTPError( + 400, + "%s is not UTF-8 encoded" % os_path, + reason="bad format", + ) from e + return encodebytes(bcontent).decode("ascii"), "base64" + + def _save_file(self, os_path, content, format): + """Save content of a generic file.""" + if format not in {"text", "base64"}: + raise HTTPError( + 400, + "Must specify format of file contents as 'text' or 'base64'", + ) + try: + if format == "text": + bcontent = content.encode("utf8") + else: + b64_bytes = content.encode("ascii") + bcontent = decodebytes(b64_bytes) + except Exception as e: + raise HTTPError(400, "Encoding error saving %s: %s" % (os_path, e)) from e + + with self.atomic_writing(os_path, text=False) as f: + f.write(bcontent) + + +class AsyncFileManagerMixin(FileManagerMixin): + """ + Mixin for ContentsAPI classes that interact with the filesystem asynchronously. 
+ """ + + async def _copy(self, src, dest): + """copy src to dest + + like shutil.copy2, but log errors in copystat + """ + await async_copy2_safe(src, dest, log=self.log) + + async def _read_notebook(self, os_path, as_version=4): + """Read a notebook from an os path.""" + with self.open(os_path, "r", encoding="utf-8") as f: + try: + return await run_sync(partial(nbformat.read, as_version=as_version), f) + except Exception as e: + e_orig = e + + # If use_atomic_writing is enabled, we'll guess that it was also + # enabled when this notebook was written and look for a valid + # atomic intermediate. + tmp_path = path_to_intermediate(os_path) + + if not self.use_atomic_writing or not os.path.exists(tmp_path): + raise HTTPError( + 400, + "Unreadable Notebook: %s %r" % (os_path, e_orig), + ) + + # Move the bad file aside, restore the intermediate, and try again. + invalid_file = path_to_invalid(os_path) + await async_replace_file(os_path, invalid_file) + await async_replace_file(tmp_path, os_path) + return await self._read_notebook(os_path, as_version) + + async def _save_notebook(self, os_path, nb): + """Save a notebook to an os_path.""" + with self.atomic_writing(os_path, encoding="utf-8") as f: + await run_sync(partial(nbformat.write, version=nbformat.NO_CONVERT), nb, f) + + async def _read_file(self, os_path, format): + """Read a non-notebook file. + + os_path: The path to be read. + format: + If 'text', the contents will be decoded as UTF-8. + If 'base64', the raw bytes contents will be encoded as base64. + If not specified, try to decode as UTF-8, and fall back to base64 + """ + if not os.path.isfile(os_path): + raise HTTPError(400, "Cannot read non-file %s" % os_path) + + with self.open(os_path, "rb") as f: + bcontent = await run_sync(f.read) + + if format is None or format == "text": + # Try to interpret as unicode if format is unknown or if unicode + # was explicitly requested. 
+ try: + return bcontent.decode("utf8"), "text" + except UnicodeError as e: + if format == "text": + raise HTTPError( + 400, + "%s is not UTF-8 encoded" % os_path, + reason="bad format", + ) from e + return encodebytes(bcontent).decode("ascii"), "base64" + + async def _save_file(self, os_path, content, format): + """Save content of a generic file.""" + if format not in {"text", "base64"}: + raise HTTPError( + 400, + "Must specify format of file contents as 'text' or 'base64'", + ) + try: + if format == "text": + bcontent = content.encode("utf8") + else: + b64_bytes = content.encode("ascii") + bcontent = decodebytes(b64_bytes) + except Exception as e: + raise HTTPError(400, "Encoding error saving %s: %s" % (os_path, e)) from e + + with self.atomic_writing(os_path, text=False) as f: + await run_sync(f.write, bcontent) diff --git a/server/jupyter_server/services/contents/filemanager.py b/server/jupyter_server/services/contents/filemanager.py new file mode 100644 index 0000000..5540875 --- /dev/null +++ b/server/jupyter_server/services/contents/filemanager.py @@ -0,0 +1,908 @@ +"""A contents manager that uses the local file system for storage.""" +# Copyright (c) Jupyter Development Team. +# Distributed under the terms of the Modified BSD License. 
+import errno +import mimetypes +import os +import shutil +import stat +import sys +from datetime import datetime + +import nbformat +from anyio.to_thread import run_sync +from ipython_genutils.importstring import import_item +from jupyter_core.paths import exists +from jupyter_core.paths import is_file_hidden +from jupyter_core.paths import is_hidden +from send2trash import send2trash +from tornado import web +from traitlets import Any +from traitlets import Bool +from traitlets import default +from traitlets import TraitError +from traitlets import Unicode +from traitlets import validate + +from .filecheckpoints import AsyncFileCheckpoints +from .filecheckpoints import FileCheckpoints +from .fileio import AsyncFileManagerMixin +from .fileio import FileManagerMixin +from .manager import AsyncContentsManager +from .manager import ContentsManager +from jupyter_server import _tz as tz +from jupyter_server.base.handlers import AuthenticatedFileHandler +from jupyter_server.transutils import _i18n + +try: + from os.path import samefile +except ImportError: + # windows + py2 + from jupyter_server.utils import samefile_simple as samefile + +_script_exporter = None + + +class FileContentsManager(FileManagerMixin, ContentsManager): + + root_dir = Unicode(config=True) + + @default("root_dir") + def _default_root_dir(self): + try: + return self.parent.root_dir + except AttributeError: + return os.getcwd() + + post_save_hook = Any( + None, + config=True, + allow_none=True, + help="""Python callable or importstring thereof + + to be called on the path of a file just saved. + + This can be used to process the file on disk, + such as converting the notebook to a script or HTML via nbconvert. 
+ + It will be called as (all arguments passed by keyword):: + + hook(os_path=os_path, model=model, contents_manager=instance) + + - path: the filesystem path to the file just written + - model: the model representing the file + - contents_manager: this ContentsManager instance + """, + ) + + @validate("post_save_hook") + def _validate_post_save_hook(self, proposal): + value = proposal["value"] + if isinstance(value, str): + value = import_item(value) + if not callable(value): + raise TraitError("post_save_hook must be callable") + return value + + def run_post_save_hook(self, model, os_path): + """Run the post-save hook if defined, and log errors""" + if self.post_save_hook: + try: + self.log.debug("Running post-save hook on %s", os_path) + self.post_save_hook(os_path=os_path, model=model, contents_manager=self) + except Exception as e: + self.log.error("Post-save hook failed o-n %s", os_path, exc_info=True) + raise web.HTTPError( + 500, "Unexpected error while running post hook save: %s" % e + ) from e + + @validate("root_dir") + def _validate_root_dir(self, proposal): + """Do a bit of validation of the root_dir.""" + value = proposal["value"] + if not os.path.isabs(value): + # If we receive a non-absolute path, make it absolute. + value = os.path.abspath(value) + if not os.path.isdir(value): + raise TraitError("%r is not a directory" % value) + return value + + @default("checkpoints_class") + def _checkpoints_class_default(self): + return FileCheckpoints + + delete_to_trash = Bool( + True, + config=True, + help="""If True (default), deleting files will send them to the + platform's trash/recycle bin, where they can be recovered. If False, + deleting files really deletes them.""", + ) + + always_delete_dir = Bool( + False, + config=True, + help="""If True, deleting a non-empty directory will always be allowed. + WARNING this may result in files being permanently removed; e.g. 
on Windows, + if the data size is too big for the trash/recycle bin the directory will be permanently + deleted. If False (default), the non-empty directory will be sent to the trash only + if safe. And if ``delete_to_trash`` is True, the directory won't be deleted.""", + ) + + @default("files_handler_class") + def _files_handler_class_default(self): + return AuthenticatedFileHandler + + @default("files_handler_params") + def _files_handler_params_default(self): + return {"path": self.root_dir} + + def is_hidden(self, path): + """Does the API style path correspond to a hidden directory or file? + + Parameters + ---------- + path : string + The path to check. This is an API path (`/` separated, + relative to root_dir). + + Returns + ------- + hidden : bool + Whether the path exists and is hidden. + """ + path = path.strip("/") + os_path = self._get_os_path(path=path) + return is_hidden(os_path, self.root_dir) + + def is_writable(self, path): + """Does the API style path correspond to a writable directory or file? + + Parameters + ---------- + path : string + The path to check. This is an API path (`/` separated, + relative to root_dir). + + Returns + ------- + hidden : bool + Whether the path exists and is writable. + """ + path = path.strip("/") + os_path = self._get_os_path(path=path) + try: + return os.access(os_path, os.W_OK) + except OSError: + self.log.error("Failed to check write permissions on %s", os_path) + return False + + def file_exists(self, path): + """Returns True if the file exists, else returns False. + + API-style wrapper for os.path.isfile + + Parameters + ---------- + path : string + The relative path to the file (with '/' as separator) + + Returns + ------- + exists : bool + Whether the file exists. + """ + path = path.strip("/") + os_path = self._get_os_path(path) + return os.path.isfile(os_path) + + def dir_exists(self, path): + """Does the API-style path refer to an extant directory? 
+ + API-style wrapper for os.path.isdir + + Parameters + ---------- + path : string + The path to check. This is an API path (`/` separated, + relative to root_dir). + + Returns + ------- + exists : bool + Whether the path is indeed a directory. + """ + path = path.strip("/") + os_path = self._get_os_path(path=path) + return os.path.isdir(os_path) + + def exists(self, path): + """Returns True if the path exists, else returns False. + + API-style wrapper for os.path.exists + + Parameters + ---------- + path : string + The API path to the file (with '/' as separator) + + Returns + ------- + exists : bool + Whether the target exists. + """ + path = path.strip("/") + os_path = self._get_os_path(path=path) + return exists(os_path) + + def _base_model(self, path): + """Build the common base of a contents model""" + os_path = self._get_os_path(path) + info = os.lstat(os_path) + + try: + # size of file + size = info.st_size + except (ValueError, OSError): + self.log.warning("Unable to get size.") + size = None + + try: + last_modified = tz.utcfromtimestamp(info.st_mtime) + except (ValueError, OSError): + # Files can rarely have an invalid timestamp + # https://github.com/jupyter/notebook/issues/2539 + # https://github.com/jupyter/notebook/issues/2757 + # Use the Unix epoch as a fallback so we don't crash. + self.log.warning("Invalid mtime %s for %s", info.st_mtime, os_path) + last_modified = datetime(1970, 1, 1, 0, 0, tzinfo=tz.UTC) + + try: + created = tz.utcfromtimestamp(info.st_ctime) + except (ValueError, OSError): # See above + self.log.warning("Invalid ctime %s for %s", info.st_ctime, os_path) + created = datetime(1970, 1, 1, 0, 0, tzinfo=tz.UTC) + + # Create the base model. 
+ model = {} + model["name"] = path.rsplit("/", 1)[-1] + model["path"] = path + model["last_modified"] = last_modified + model["created"] = created + model["content"] = None + model["format"] = None + model["mimetype"] = None + model["size"] = size + model["writable"] = self.is_writable(path) + + return model + + def _dir_model(self, path, content=True): + """Build a model for a directory + + if content is requested, will include a listing of the directory + """ + os_path = self._get_os_path(path) + + four_o_four = "directory does not exist: %r" % path + + if not os.path.isdir(os_path): + raise web.HTTPError(404, four_o_four) + elif is_hidden(os_path, self.root_dir) and not self.allow_hidden: + self.log.info("Refusing to serve hidden directory %r, via 404 Error", os_path) + raise web.HTTPError(404, four_o_four) + + model = self._base_model(path) + model["type"] = "directory" + model["size"] = None + if content: + model["content"] = contents = [] + os_dir = self._get_os_path(path) + for name in os.listdir(os_dir): + try: + os_path = os.path.join(os_dir, name) + except UnicodeDecodeError as e: + self.log.warning("failed to decode filename '%s': %s", name, e) + continue + + try: + st = os.lstat(os_path) + except OSError as e: + # skip over broken symlinks in listing + if e.errno == errno.ENOENT: + self.log.warning("%s doesn't exist", os_path) + elif e.errno != errno.EACCES: # Don't provide clues about protected files + self.log.warning("Error stat-ing %s: %s", os_path, e) + continue + + if ( + not stat.S_ISLNK(st.st_mode) + and not stat.S_ISREG(st.st_mode) + and not stat.S_ISDIR(st.st_mode) + ): + self.log.debug("%s not a regular file", os_path) + continue + + try: + if self.should_list(name): + if self.allow_hidden or not is_file_hidden(os_path, stat_res=st): + contents.append(self.get(path="%s/%s" % (path, name), content=False)) + except OSError as e: + # ELOOP: recursive symlink, also don't show failure due to permissions + if e.errno not in [errno.ELOOP, 
errno.EACCES]: + self.log.warning( + "Unknown error checking if file %r is hidden", + os_path, + exc_info=True, + ) + + model["format"] = "json" + + return model + + def _file_model(self, path, content=True, format=None): + """Build a model for a file + + if content is requested, include the file contents. + + format: + If 'text', the contents will be decoded as UTF-8. + If 'base64', the raw bytes contents will be encoded as base64. + If not specified, try to decode as UTF-8, and fall back to base64 + """ + model = self._base_model(path) + model["type"] = "file" + + os_path = self._get_os_path(path) + model["mimetype"] = mimetypes.guess_type(os_path)[0] + + if content: + content, format = self._read_file(os_path, format) + if model["mimetype"] is None: + default_mime = { + "text": "text/plain", + "base64": "application/octet-stream", + }[format] + model["mimetype"] = default_mime + + model.update( + content=content, + format=format, + ) + + return model + + def _notebook_model(self, path, content=True): + """Build a notebook model + + if content is requested, the notebook content will be populated + as a JSON structure (not double-serialized) + """ + model = self._base_model(path) + model["type"] = "notebook" + os_path = self._get_os_path(path) + + if content: + nb = self._read_notebook(os_path, as_version=4) + self.mark_trusted_cells(nb, path) + model["content"] = nb + model["format"] = "json" + self.validate_notebook_model(model) + + return model + + def get(self, path, content=True, type=None, format=None): + """Takes a path for an entity and returns its model + + Parameters + ---------- + path : str + the API path that describes the relative path for the target + content : bool + Whether to include the contents in the reply + type : str, optional + The requested type - 'file', 'notebook', or 'directory'. + Will raise HTTPError 400 if the content doesn't match. + format : str, optional + The requested format for file contents. 'text' or 'base64'. 
+ Ignored if this returns a notebook or directory model. + + Returns + ------- + model : dict + the contents model. If content=True, returns the contents + of the file or directory as well. + """ + path = path.strip("/") + + if not self.exists(path): + raise web.HTTPError(404, "No such file or directory: %s" % path) + + os_path = self._get_os_path(path) + if os.path.isdir(os_path): + if type not in (None, "directory"): + raise web.HTTPError( + 400, + "%s is a directory, not a %s" % (path, type), + reason="bad type", + ) + model = self._dir_model(path, content=content) + elif type == "notebook" or (type is None and path.endswith(".ipynb")): + model = self._notebook_model(path, content=content) + else: + if type == "directory": + raise web.HTTPError(400, "%s is not a directory" % path, reason="bad type") + model = self._file_model(path, content=content, format=format) + return model + + def _save_directory(self, os_path, model, path=""): + """create a directory""" + if is_hidden(os_path, self.root_dir) and not self.allow_hidden: + raise web.HTTPError(400, "Cannot create hidden directory %r" % os_path) + if not os.path.exists(os_path): + with self.perm_to_403(): + os.mkdir(os_path) + elif not os.path.isdir(os_path): + raise web.HTTPError(400, "Not a directory: %s" % (os_path)) + else: + self.log.debug("Directory %r already exists", os_path) + + def save(self, model, path=""): + """Save the file model and return the model with no content.""" + path = path.strip("/") + + self.run_pre_save_hook(model=model, path=path) + + if "type" not in model: + raise web.HTTPError(400, "No file type provided") + if "content" not in model and model["type"] != "directory": + raise web.HTTPError(400, "No file content provided") + + os_path = self._get_os_path(path) + self.log.debug("Saving %s", os_path) + + try: + if model["type"] == "notebook": + nb = nbformat.from_dict(model["content"]) + self.check_and_sign(nb, path) + self._save_notebook(os_path, nb) + # One checkpoint should always 
exist for notebooks. + if not self.checkpoints.list_checkpoints(path): + self.create_checkpoint(path) + elif model["type"] == "file": + # Missing format will be handled internally by _save_file. + self._save_file(os_path, model["content"], model.get("format")) + elif model["type"] == "directory": + self._save_directory(os_path, model, path) + else: + raise web.HTTPError(400, "Unhandled contents type: %s" % model["type"]) + except web.HTTPError: + raise + except Exception as e: + self.log.error("Error while saving file: %s %s", path, e, exc_info=True) + raise web.HTTPError(500, "Unexpected error while saving file: %s %s" % (path, e)) from e + + validation_message = None + if model["type"] == "notebook": + self.validate_notebook_model(model) + validation_message = model.get("message", None) + + model = self.get(path, content=False) + if validation_message: + model["message"] = validation_message + + self.run_post_save_hook(model=model, os_path=os_path) + + return model + + def delete_file(self, path): + """Delete file at path.""" + path = path.strip("/") + os_path = self._get_os_path(path) + rm = os.unlink + if not os.path.exists(os_path): + raise web.HTTPError(404, "File or directory does not exist: %s" % os_path) + + def _check_trash(os_path): + if sys.platform in {"win32", "darwin"}: + return True + + # It's a bit more nuanced than this, but until we can better + # distinguish errors from send2trash, assume that we can only trash + # files on the same partition as the home directory. + file_dev = os.stat(os_path).st_dev + home_dev = os.stat(os.path.expanduser("~")).st_dev + return file_dev == home_dev + + def is_non_empty_dir(os_path): + if os.path.isdir(os_path): + # A directory containing only leftover checkpoints is + # considered empty. 
+ cp_dir = getattr(self.checkpoints, "checkpoint_dir", None) + if set(os.listdir(os_path)) - {cp_dir}: + return True + + return False + + if self.delete_to_trash: + if not self.always_delete_dir and sys.platform == "win32" and is_non_empty_dir(os_path): + # send2trash can really delete files on Windows, so disallow + # deleting non-empty files. See Github issue 3631. + raise web.HTTPError(400, "Directory %s not empty" % os_path) + if _check_trash(os_path): + # Looking at the code in send2trash, I don't think the errors it + # raises let us distinguish permission errors from other errors in + # code. So for now, the "look before you leap" approach is used. + if not self.is_writable(path): + raise web.HTTPError(403, "Permission denied: %s" % path) + self.log.debug("Sending %s to trash", os_path) + send2trash(os_path) + return + else: + self.log.warning( + "Skipping trash for %s, on different device " "to home directory", + os_path, + ) + + if os.path.isdir(os_path): + # Don't permanently delete non-empty directories. + if not self.always_delete_dir and is_non_empty_dir(os_path): + raise web.HTTPError(400, "Directory %s not empty" % os_path) + self.log.debug("Removing directory %s", os_path) + with self.perm_to_403(): + shutil.rmtree(os_path) + else: + self.log.debug("Unlinking file %s", os_path) + with self.perm_to_403(): + rm(os_path) + + def rename_file(self, old_path, new_path): + """Rename a file.""" + old_path = old_path.strip("/") + new_path = new_path.strip("/") + if new_path == old_path: + return + + new_os_path = self._get_os_path(new_path) + old_os_path = self._get_os_path(old_path) + + # Should we proceed with the move? 
+ if os.path.exists(new_os_path) and not samefile(old_os_path, new_os_path): + raise web.HTTPError(409, "File already exists: %s" % new_path) + + # Move the file + try: + with self.perm_to_403(): + shutil.move(old_os_path, new_os_path) + except web.HTTPError: + raise + except Exception as e: + raise web.HTTPError(500, "Unknown error renaming file: %s %s" % (old_path, e)) from e + + def info_string(self): + return _i18n("Serving notebooks from local directory: %s") % self.root_dir + + def get_kernel_path(self, path, model=None): + """Return the initial API path of a kernel associated with a given notebook""" + if self.dir_exists(path): + return path + if "/" in path: + parent_dir = path.rsplit("/", 1)[0] + else: + parent_dir = "" + return parent_dir + + +class AsyncFileContentsManager(FileContentsManager, AsyncFileManagerMixin, AsyncContentsManager): + @default("checkpoints_class") + def _checkpoints_class_default(self): + return AsyncFileCheckpoints + + async def _dir_model(self, path, content=True): + """Build a model for a directory + + if content is requested, will include a listing of the directory + """ + os_path = self._get_os_path(path) + + four_o_four = "directory does not exist: %r" % path + + if not os.path.isdir(os_path): + raise web.HTTPError(404, four_o_four) + elif is_hidden(os_path, self.root_dir) and not self.allow_hidden: + self.log.info("Refusing to serve hidden directory %r, via 404 Error", os_path) + raise web.HTTPError(404, four_o_four) + + model = self._base_model(path) + model["type"] = "directory" + model["size"] = None + if content: + model["content"] = contents = [] + os_dir = self._get_os_path(path) + dir_contents = await run_sync(os.listdir, os_dir) + for name in dir_contents: + try: + os_path = os.path.join(os_dir, name) + except UnicodeDecodeError as e: + self.log.warning("failed to decode filename '%s': %s", name, e) + continue + + try: + st = await run_sync(os.lstat, os_path) + except OSError as e: + # skip over broken symlinks in 
listing + if e.errno == errno.ENOENT: + self.log.warning("%s doesn't exist", os_path) + elif e.errno != errno.EACCES: # Don't provide clues about protected files + self.log.warning("Error stat-ing %s: %s", os_path, e) + continue + + if ( + not stat.S_ISLNK(st.st_mode) + and not stat.S_ISREG(st.st_mode) + and not stat.S_ISDIR(st.st_mode) + ): + self.log.debug("%s not a regular file", os_path) + continue + + try: + if self.should_list(name): + if self.allow_hidden or not is_file_hidden(os_path, stat_res=st): + contents.append( + await self.get(path="%s/%s" % (path, name), content=False) + ) + except OSError as e: + # ELOOP: recursive symlink, also don't show failure due to permissions + if e.errno not in [errno.ELOOP, errno.EACCES]: + self.log.warning( + "Unknown error checking if file %r is hidden", + os_path, + exc_info=True, + ) + + model["format"] = "json" + + return model + + async def _file_model(self, path, content=True, format=None): + """Build a model for a file + + if content is requested, include the file contents. + + format: + If 'text', the contents will be decoded as UTF-8. + If 'base64', the raw bytes contents will be encoded as base64. 
+ If not specified, try to decode as UTF-8, and fall back to base64 + """ + model = self._base_model(path) + model["type"] = "file" + + os_path = self._get_os_path(path) + model["mimetype"] = mimetypes.guess_type(os_path)[0] + + if content: + content, format = await self._read_file(os_path, format) + if model["mimetype"] is None: + default_mime = { + "text": "text/plain", + "base64": "application/octet-stream", + }[format] + model["mimetype"] = default_mime + + model.update( + content=content, + format=format, + ) + + return model + + async def _notebook_model(self, path, content=True): + """Build a notebook model + + if content is requested, the notebook content will be populated + as a JSON structure (not double-serialized) + """ + model = self._base_model(path) + model["type"] = "notebook" + os_path = self._get_os_path(path) + + if content: + nb = await self._read_notebook(os_path, as_version=4) + self.mark_trusted_cells(nb, path) + model["content"] = nb + model["format"] = "json" + self.validate_notebook_model(model) + + return model + + async def get(self, path, content=True, type=None, format=None): + """Takes a path for an entity and returns its model + + Parameters + ---------- + path : str + the API path that describes the relative path for the target + content : bool + Whether to include the contents in the reply + type : str, optional + The requested type - 'file', 'notebook', or 'directory'. + Will raise HTTPError 400 if the content doesn't match. + format : str, optional + The requested format for file contents. 'text' or 'base64'. + Ignored if this returns a notebook or directory model. + + Returns + ------- + model : dict + the contents model. If content=True, returns the contents + of the file or directory as well. 
+ """ + path = path.strip("/") + + if not self.exists(path): + raise web.HTTPError(404, "No such file or directory: %s" % path) + + os_path = self._get_os_path(path) + if os.path.isdir(os_path): + if type not in (None, "directory"): + raise web.HTTPError( + 400, + "%s is a directory, not a %s" % (path, type), + reason="bad type", + ) + model = await self._dir_model(path, content=content) + elif type == "notebook" or (type is None and path.endswith(".ipynb")): + model = await self._notebook_model(path, content=content) + else: + if type == "directory": + raise web.HTTPError(400, "%s is not a directory" % path, reason="bad type") + model = await self._file_model(path, content=content, format=format) + return model + + async def _save_directory(self, os_path, model, path=""): + """create a directory""" + if is_hidden(os_path, self.root_dir) and not self.allow_hidden: + raise web.HTTPError(400, "Cannot create hidden directory %r" % os_path) + if not os.path.exists(os_path): + with self.perm_to_403(): + await run_sync(os.mkdir, os_path) + elif not os.path.isdir(os_path): + raise web.HTTPError(400, "Not a directory: %s" % (os_path)) + else: + self.log.debug("Directory %r already exists", os_path) + + async def save(self, model, path=""): + """Save the file model and return the model with no content.""" + path = path.strip("/") + + os_path = self._get_os_path(path) + self.log.debug("Saving %s", os_path) + self.run_pre_save_hook(model=model, path=path) + + if "type" not in model: + raise web.HTTPError(400, "No file type provided") + if "content" not in model and model["type"] != "directory": + raise web.HTTPError(400, "No file content provided") + + try: + if model["type"] == "notebook": + nb = nbformat.from_dict(model["content"]) + self.check_and_sign(nb, path) + await self._save_notebook(os_path, nb) + # One checkpoint should always exist for notebooks. 
+ if not (await self.checkpoints.list_checkpoints(path)): + await self.create_checkpoint(path) + elif model["type"] == "file": + # Missing format will be handled internally by _save_file. + await self._save_file(os_path, model["content"], model.get("format")) + elif model["type"] == "directory": + await self._save_directory(os_path, model, path) + else: + raise web.HTTPError(400, "Unhandled contents type: %s" % model["type"]) + except web.HTTPError: + raise + except Exception as e: + self.log.error("Error while saving file: %s %s", path, e, exc_info=True) + raise web.HTTPError(500, "Unexpected error while saving file: %s %s" % (path, e)) from e + + validation_message = None + if model["type"] == "notebook": + self.validate_notebook_model(model) + validation_message = model.get("message", None) + + model = await self.get(path, content=False) + if validation_message: + model["message"] = validation_message + + self.run_post_save_hook(model=model, os_path=os_path) + + return model + + async def delete_file(self, path): + """Delete file at path.""" + path = path.strip("/") + os_path = self._get_os_path(path) + rm = os.unlink + if not os.path.exists(os_path): + raise web.HTTPError(404, "File or directory does not exist: %s" % os_path) + + async def _check_trash(os_path): + if sys.platform in {"win32", "darwin"}: + return True + + # It's a bit more nuanced than this, but until we can better + # distinguish errors from send2trash, assume that we can only trash + # files on the same partition as the home directory. + file_dev = (await run_sync(os.stat, os_path)).st_dev + home_dev = (await run_sync(os.stat, os.path.expanduser("~"))).st_dev + return file_dev == home_dev + + async def is_non_empty_dir(os_path): + if os.path.isdir(os_path): + # A directory containing only leftover checkpoints is + # considered empty. 
+ cp_dir = getattr(self.checkpoints, "checkpoint_dir", None) + dir_contents = set(await run_sync(os.listdir, os_path)) + if dir_contents - {cp_dir}: + return True + + return False + + if self.delete_to_trash: + if ( + not self.always_delete_dir + and sys.platform == "win32" + and await is_non_empty_dir(os_path) + ): + # send2trash can really delete files on Windows, so disallow + # deleting non-empty files. See Github issue 3631. + raise web.HTTPError(400, "Directory %s not empty" % os_path) + if await _check_trash(os_path): + # Looking at the code in send2trash, I don't think the errors it + # raises let us distinguish permission errors from other errors in + # code. So for now, the "look before you leap" approach is used. + if not self.is_writable(path): + raise web.HTTPError(403, "Permission denied: %s" % path) + self.log.debug("Sending %s to trash", os_path) + send2trash(os_path) + return + else: + self.log.warning( + "Skipping trash for %s, on different device " "to home directory", + os_path, + ) + + if os.path.isdir(os_path): + # Don't permanently delete non-empty directories. + if not self.always_delete_dir and await is_non_empty_dir(os_path): + raise web.HTTPError(400, "Directory %s not empty" % os_path) + self.log.debug("Removing directory %s", os_path) + with self.perm_to_403(): + await run_sync(shutil.rmtree, os_path) + else: + self.log.debug("Unlinking file %s", os_path) + with self.perm_to_403(): + await run_sync(rm, os_path) + + async def rename_file(self, old_path, new_path): + """Rename a file.""" + old_path = old_path.strip("/") + new_path = new_path.strip("/") + if new_path == old_path: + return + + new_os_path = self._get_os_path(new_path) + old_os_path = self._get_os_path(old_path) + + # Should we proceed with the move? 
+ if os.path.exists(new_os_path) and not samefile(old_os_path, new_os_path): + raise web.HTTPError(409, "File already exists: %s" % new_path) + + # Move the file + try: + with self.perm_to_403(): + await run_sync(shutil.move, old_os_path, new_os_path) + except web.HTTPError: + raise + except Exception as e: + raise web.HTTPError(500, "Unknown error renaming file: %s %s" % (old_path, e)) from e diff --git a/server/jupyter_server/services/contents/handlers.py b/server/jupyter_server/services/contents/handlers.py new file mode 100644 index 0000000..83db1f9 --- /dev/null +++ b/server/jupyter_server/services/contents/handlers.py @@ -0,0 +1,340 @@ +"""Tornado handlers for the contents web service. + +Preliminary documentation at https://github.com/ipython/ipython/wiki/IPEP-27%3A-Contents-Service +""" +# Copyright (c) Jupyter Development Team. +# Distributed under the terms of the Modified BSD License. +import json + +try: + from jupyter_client.jsonutil import json_default +except ImportError: + from jupyter_client.jsonutil import date_default as json_default +from tornado import web + +from jupyter_server.base.handlers import APIHandler +from jupyter_server.base.handlers import JupyterHandler +from jupyter_server.base.handlers import path_regex +from jupyter_server.utils import ensure_async +from jupyter_server.utils import url_escape +from jupyter_server.utils import url_path_join +from jupyter_server.auth import authorized + + +AUTH_RESOURCE = "contents" + + +def validate_model(model, expect_content): + """ + Validate a model returned by a ContentsManager method. + + If expect_content is True, then we expect non-null entries for 'content' + and 'format'. 
+ """ + required_keys = { + "name", + "path", + "type", + "writable", + "created", + "last_modified", + "mimetype", + "content", + "format", + } + missing = required_keys - set(model.keys()) + if missing: + raise web.HTTPError( + 500, + "Missing Model Keys: {missing}".format(missing=missing), + ) + + maybe_none_keys = ["content", "format"] + if expect_content: + errors = [key for key in maybe_none_keys if model[key] is None] + if errors: + raise web.HTTPError( + 500, + "Keys unexpectedly None: {keys}".format(keys=errors), + ) + else: + errors = {key: model[key] for key in maybe_none_keys if model[key] is not None} + if errors: + raise web.HTTPError( + 500, + "Keys unexpectedly not None: {keys}".format(keys=errors), + ) + + +class ContentsAPIHandler(APIHandler): + auth_resource = AUTH_RESOURCE + + +class ContentsHandler(ContentsAPIHandler): + def location_url(self, path): + """Return the full URL location of a file. + + Parameters + ---------- + path : unicode + The API path of the file, such as "foo/bar.txt". + """ + return url_path_join(self.base_url, "api", "contents", url_escape(path)) + + def _finish_model(self, model, location=True): + """Finish a JSON request with a model, setting relevant headers, etc.""" + if location: + location = self.location_url(model["path"]) + self.set_header("Location", location) + self.set_header("Last-Modified", model["last_modified"]) + self.set_header("Content-Type", "application/json") + self.finish(json.dumps(model, default=json_default)) + + @web.authenticated + @authorized + async def get(self, path=""): + """Return a model for a file or directory. + + A directory model contains a list of models (without content) + of the files and directories it contains. 
+ """ + path = path or "" + type = self.get_query_argument("type", default=None) + if type not in {None, "directory", "file", "notebook"}: + raise web.HTTPError(400, "Type %r is invalid" % type) + + format = self.get_query_argument("format", default=None) + if format not in {None, "text", "base64"}: + raise web.HTTPError(400, "Format %r is invalid" % format) + content = self.get_query_argument("content", default="1") + if content not in {"0", "1"}: + raise web.HTTPError(400, "Content %r is invalid" % content) + content = int(content) + + model = await ensure_async( + self.contents_manager.get( + path=path, + type=type, + format=format, + content=content, + ) + ) + validate_model(model, expect_content=content) + self._finish_model(model, location=False) + + @web.authenticated + @authorized + async def patch(self, path=""): + """PATCH renames a file or directory without re-uploading content.""" + cm = self.contents_manager + model = self.get_json_body() + if model is None: + raise web.HTTPError(400, "JSON body missing") + model = await ensure_async(cm.update(model, path)) + validate_model(model, expect_content=False) + self._finish_model(model) + + async def _copy(self, copy_from, copy_to=None): + """Copy a file, optionally specifying a target directory.""" + self.log.info( + "Copying {copy_from} to {copy_to}".format( + copy_from=copy_from, + copy_to=copy_to or "", + ) + ) + model = await ensure_async(self.contents_manager.copy(copy_from, copy_to)) + self.set_status(201) + validate_model(model, expect_content=False) + self._finish_model(model) + + async def _upload(self, model, path): + """Handle upload of a new file to path""" + self.log.info("Uploading file to %s", path) + model = await ensure_async(self.contents_manager.new(model, path)) + self.set_status(201) + validate_model(model, expect_content=False) + self._finish_model(model) + + async def _new_untitled(self, path, type="", ext=""): + """Create a new, empty untitled entity""" + self.log.info("Creating new 
%s in %s", type or "file", path) + model = await ensure_async( + self.contents_manager.new_untitled(path=path, type=type, ext=ext) + ) + self.set_status(201) + validate_model(model, expect_content=False) + self._finish_model(model) + + async def _save(self, model, path): + """Save an existing file.""" + chunk = model.get("chunk", None) + if not chunk or chunk == -1: # Avoid tedious log information + self.log.info("Saving file at %s", path) + model = await ensure_async(self.contents_manager.save(model, path)) + validate_model(model, expect_content=False) + self._finish_model(model) + + @web.authenticated + @authorized + async def post(self, path=""): + """Create a new file in the specified path. + + POST creates new files. The server always decides on the name. + + POST /api/contents/path + New untitled, empty file or directory. + POST /api/contents/path + with body {"copy_from" : "/path/to/OtherNotebook.ipynb"} + New copy of OtherNotebook in path + """ + + cm = self.contents_manager + + file_exists = await ensure_async(cm.file_exists(path)) + if file_exists: + raise web.HTTPError(400, "Cannot POST to files, use PUT instead.") + + dir_exists = await ensure_async(cm.dir_exists(path)) + if not dir_exists: + raise web.HTTPError(404, "No such directory: %s" % path) + + model = self.get_json_body() + + if model is not None: + copy_from = model.get("copy_from") + ext = model.get("ext", "") + type = model.get("type", "") + if copy_from: + await self._copy(copy_from, path) + else: + await self._new_untitled(path, type=type, ext=ext) + else: + await self._new_untitled(path) + + @web.authenticated + @authorized + async def put(self, path=""): + """Saves the file in the location specified by name and path. + + PUT is very similar to POST, but the requester specifies the name, + whereas with POST, the server picks the name. + + PUT /api/contents/path/Name.ipynb + Save notebook at ``path/Name.ipynb``. Notebook structure is specified + in `content` key of JSON request body. 
If content is not specified, + create a new empty notebook. + """ + model = self.get_json_body() + if model: + if model.get("copy_from"): + raise web.HTTPError(400, "Cannot copy with PUT, only POST") + exists = await ensure_async(self.contents_manager.file_exists(path)) + if exists: + await self._save(model, path) + else: + await self._upload(model, path) + else: + await self._new_untitled(path) + + @web.authenticated + @authorized + async def delete(self, path=""): + """delete a file in the given path""" + cm = self.contents_manager + self.log.warning("delete %s", path) + await ensure_async(cm.delete(path)) + self.set_status(204) + self.finish() + + +class CheckpointsHandler(ContentsAPIHandler): + @web.authenticated + @authorized + async def get(self, path=""): + """get lists checkpoints for a file""" + cm = self.contents_manager + checkpoints = await ensure_async(cm.list_checkpoints(path)) + data = json.dumps(checkpoints, default=json_default) + self.finish(data) + + @web.authenticated + @authorized + async def post(self, path=""): + """post creates a new checkpoint""" + cm = self.contents_manager + checkpoint = await ensure_async(cm.create_checkpoint(path)) + data = json.dumps(checkpoint, default=json_default) + location = url_path_join( + self.base_url, + "api/contents", + url_escape(path), + "checkpoints", + url_escape(checkpoint["id"]), + ) + self.set_header("Location", location) + self.set_status(201) + self.finish(data) + + +class ModifyCheckpointsHandler(ContentsAPIHandler): + @web.authenticated + @authorized + async def post(self, path, checkpoint_id): + """post restores a file from a checkpoint""" + cm = self.contents_manager + await ensure_async(cm.restore_checkpoint(checkpoint_id, path)) + self.set_status(204) + self.finish() + + @web.authenticated + @authorized + async def delete(self, path, checkpoint_id): + """delete clears a checkpoint for a given file""" + cm = self.contents_manager + await ensure_async(cm.delete_checkpoint(checkpoint_id, path)) + 
self.set_status(204) + self.finish() + + +class NotebooksRedirectHandler(JupyterHandler): + """Redirect /api/notebooks to /api/contents""" + + SUPPORTED_METHODS = ("GET", "PUT", "PATCH", "POST", "DELETE") + + def get(self, path): + self.log.warning("/api/notebooks is deprecated, use /api/contents") + self.redirect(url_path_join(self.base_url, "api/contents", url_escape(path))) + + put = patch = post = delete = get + + +class TrustNotebooksHandler(JupyterHandler): + """Handles trust/signing of notebooks""" + + @web.authenticated + @authorized(resource=AUTH_RESOURCE) + async def post(self, path=""): + cm = self.contents_manager + await ensure_async(cm.trust_notebook(path)) + self.set_status(201) + self.finish() + + +# ----------------------------------------------------------------------------- +# URL to handler mappings +# ----------------------------------------------------------------------------- + + +_checkpoint_id_regex = r"(?P<checkpoint_id>[\w-]+)" + + +default_handlers = [ + (r"/api/contents%s/checkpoints" % path_regex, CheckpointsHandler), + ( + r"/api/contents%s/checkpoints/%s" % (path_regex, _checkpoint_id_regex), + ModifyCheckpointsHandler, + ), + (r"/api/contents%s/trust" % path_regex, TrustNotebooksHandler), + (r"/api/contents%s" % path_regex, ContentsHandler), + (r"/api/notebooks/?(.*)", NotebooksRedirectHandler), +] diff --git a/server/jupyter_server/services/contents/largefilemanager.py b/server/jupyter_server/services/contents/largefilemanager.py new file mode 100644 index 0000000..3b404ab --- /dev/null +++ b/server/jupyter_server/services/contents/largefilemanager.py @@ -0,0 +1,150 @@ +import base64 +import io +import os + +from anyio.to_thread import run_sync +from tornado import web + +from jupyter_server.services.contents.filemanager import AsyncFileContentsManager +from jupyter_server.services.contents.filemanager import FileContentsManager + + +class LargeFileManager(FileContentsManager): + """Handle large file upload.""" + + def save(self, model, path=""): 
+ """Save the file model and return the model with no content.""" + chunk = model.get("chunk", None) + if chunk is not None: + path = path.strip("/") + + self.run_pre_save_hook(model=model, path=path) + + if "type" not in model: + raise web.HTTPError(400, "No file type provided") + if model["type"] != "file": + raise web.HTTPError( + 400, + 'File type "{}" is not supported for large file transfer'.format(model["type"]), + ) + if "content" not in model and model["type"] != "directory": + raise web.HTTPError(400, "No file content provided") + + os_path = self._get_os_path(path) + self.log.debug("Saving %s", os_path) + + try: + if chunk == 1: + super(LargeFileManager, self)._save_file( + os_path, model["content"], model.get("format") + ) + else: + self._save_large_file(os_path, model["content"], model.get("format")) + except web.HTTPError: + raise + except Exception as e: + self.log.error("Error while saving file: %s %s", path, e, exc_info=True) + raise web.HTTPError( + 500, "Unexpected error while saving file: %s %s" % (path, e) + ) from e + + model = self.get(path, content=False) + + # Last chunk + if chunk == -1: + self.run_post_save_hook(model=model, os_path=os_path) + return model + else: + return super(LargeFileManager, self).save(model, path) + + def _save_large_file(self, os_path, content, format): + """Save content of a generic file.""" + if format not in {"text", "base64"}: + raise web.HTTPError( + 400, + "Must specify format of file contents as 'text' or 'base64'", + ) + try: + if format == "text": + bcontent = content.encode("utf8") + else: + b64_bytes = content.encode("ascii") + bcontent = base64.b64decode(b64_bytes) + except Exception as e: + raise web.HTTPError(400, "Encoding error saving %s: %s" % (os_path, e)) from e + + with self.perm_to_403(os_path): + if os.path.islink(os_path): + os_path = os.path.join(os.path.dirname(os_path), os.readlink(os_path)) + with io.open(os_path, "ab") as f: + f.write(bcontent) + + +class 
AsyncLargeFileManager(AsyncFileContentsManager): + """Handle large file upload asynchronously""" + + async def save(self, model, path=""): + """Save the file model and return the model with no content.""" + chunk = model.get("chunk", None) + if chunk is not None: + path = path.strip("/") + + os_path = self._get_os_path(path) + self.log.debug("Saving %s", os_path) + self.run_pre_save_hook(model=model, path=path) + + if "type" not in model: + raise web.HTTPError(400, "No file type provided") + if model["type"] != "file": + raise web.HTTPError( + 400, + 'File type "{}" is not supported for large file transfer'.format(model["type"]), + ) + if "content" not in model and model["type"] != "directory": + raise web.HTTPError(400, "No file content provided") + + try: + if chunk == 1: + await super(AsyncLargeFileManager, self)._save_file( + os_path, model["content"], model.get("format") + ) + else: + await self._save_large_file(os_path, model["content"], model.get("format")) + except web.HTTPError: + raise + except Exception as e: + self.log.error("Error while saving file: %s %s", path, e, exc_info=True) + raise web.HTTPError( + 500, "Unexpected error while saving file: %s %s" % (path, e) + ) from e + + model = await self.get(path, content=False) + + # Last chunk + if chunk == -1: + self.run_post_save_hook(model=model, os_path=os_path) + return model + else: + return await super(AsyncLargeFileManager, self).save(model, path) + + async def _save_large_file(self, os_path, content, format): + """Save content of a generic file.""" + if format not in {"text", "base64"}: + raise web.HTTPError( + 400, + "Must specify format of file contents as 'text' or 'base64'", + ) + try: + if format == "text": + bcontent = content.encode("utf8") + else: + b64_bytes = content.encode("ascii") + bcontent = base64.b64decode(b64_bytes) + except Exception as e: + raise web.HTTPError(400, "Encoding error saving %s: %s" % (os_path, e)) from e + + with self.perm_to_403(os_path): + if 
os.path.islink(os_path): + os_path = os.path.join(os.path.dirname(os_path), os.readlink(os_path)) + with io.open(os_path, "ab") as f: + await run_sync(f.write, bcontent) diff --git a/server/jupyter_server/services/contents/manager.py b/server/jupyter_server/services/contents/manager.py new file mode 100644 index 0000000..3e47a63 --- /dev/null +++ b/server/jupyter_server/services/contents/manager.py @@ -0,0 +1,875 @@ +"""A base class for contents managers.""" +# Copyright (c) Jupyter Development Team. +# Distributed under the terms of the Modified BSD License. +import itertools +import json +import re +from fnmatch import fnmatch + +from ipython_genutils.importstring import import_item +from nbformat import sign +from nbformat import validate as validate_nb +from nbformat import ValidationError +from nbformat.v4 import new_notebook +from tornado.web import HTTPError +from tornado.web import RequestHandler +from traitlets import Any +from traitlets import Bool +from traitlets import default +from traitlets import Dict +from traitlets import Instance +from traitlets import List +from traitlets import TraitError +from traitlets import Type +from traitlets import Unicode +from traitlets import validate +from traitlets.config.configurable import LoggingConfigurable + +from ...files.handlers import FilesHandler +from .checkpoints import AsyncCheckpoints +from .checkpoints import Checkpoints +from jupyter_server.transutils import _i18n +from jupyter_server.utils import ensure_async + + +copy_pat = re.compile(r"\-Copy\d*\.") + + +class ContentsManager(LoggingConfigurable): + """Base class for serving files and directories. + + This serves any text or binary file, + as well as directories, + with special handling for JSON notebook documents. + + Most APIs take a path argument, + which is always an API-style unicode path, + and always refers to a directory. 
+ + - unicode, not url-escaped + - '/'-separated + - leading and trailing '/' will be stripped + - if unspecified, path defaults to '', + indicating the root path. + + """ + + root_dir = Unicode("/", config=True) + + allow_hidden = Bool(False, config=True, help="Allow access to hidden files") + + notary = Instance(sign.NotebookNotary) + + def _notary_default(self): + return sign.NotebookNotary(parent=self) + + hide_globs = List( + Unicode(), + [ + "__pycache__", + "*.pyc", + "*.pyo", + ".DS_Store", + "*.so", + "*.dylib", + "*~", + ], + config=True, + help=""" + Glob patterns to hide in file and directory listings. + """, + ) + + untitled_notebook = Unicode( + _i18n("Untitled"), + config=True, + help="The base name used when creating untitled notebooks.", + ) + + untitled_file = Unicode( + "untitled", config=True, help="The base name used when creating untitled files." + ) + + untitled_directory = Unicode( + "Untitled Folder", + config=True, + help="The base name used when creating untitled directories.", + ) + + pre_save_hook = Any( + None, + config=True, + allow_none=True, + help="""Python callable or importstring thereof + + To be called on a contents model prior to save. + + This can be used to process the structure, + such as removing notebook outputs or other side effects that + should not be saved. + + It will be called as (all arguments passed by keyword):: + + hook(path=path, model=model, contents_manager=self) + + - model: the model to be saved. Includes file contents. + Modifying this dict will affect the file that is stored. 
+ - path: the API path of the save destination + - contents_manager: this ContentsManager instance + """, + ) + + @validate("pre_save_hook") + def _validate_pre_save_hook(self, proposal): + value = proposal["value"] + if isinstance(value, str): + value = import_item(self.pre_save_hook) + if not callable(value): + raise TraitError("pre_save_hook must be callable") + return value + + def run_pre_save_hook(self, model, path, **kwargs): + """Run the pre-save hook if defined, and log errors""" + if self.pre_save_hook: + try: + self.log.debug("Running pre-save hook on %s", path) + self.pre_save_hook(model=model, path=path, contents_manager=self, **kwargs) + except HTTPError: + # allow custom HTTPErrors to raise, + # rejecting the save with a message. + raise + except Exception: + # unhandled errors don't prevent saving, + # which could cause frustrating data loss + self.log.error("Pre-save hook failed on %s", path, exc_info=True) + + checkpoints_class = Type(Checkpoints, config=True) + checkpoints = Instance(Checkpoints, config=True) + checkpoints_kwargs = Dict(config=True) + + @default("checkpoints") + def _default_checkpoints(self): + return self.checkpoints_class(**self.checkpoints_kwargs) + + @default("checkpoints_kwargs") + def _default_checkpoints_kwargs(self): + return dict( + parent=self, + log=self.log, + ) + + files_handler_class = Type( + FilesHandler, + klass=RequestHandler, + allow_none=True, + config=True, + help="""handler class to use when serving raw file requests. + + Default is a fallback that talks to the ContentsManager API, + which may be inefficient, especially for large files. + + Local files-based ContentsManagers can use a StaticFileHandler subclass, + which will be much more efficient. + + Access to these files should be Authenticated. + """, + ) + + files_handler_params = Dict( + config=True, + help="""Extra parameters to pass to files_handler_class. 
+ + For example, StaticFileHandlers generally expect a `path` argument + specifying the root directory from which to serve files. + """, + ) + + def get_extra_handlers(self): + """Return additional handlers + + Default: self.files_handler_class on /files/.* + """ + handlers = [] + if self.files_handler_class: + handlers.append((r"/files/(.*)", self.files_handler_class, self.files_handler_params)) + return handlers + + # ContentsManager API part 1: methods that must be + # implemented in subclasses. + + def dir_exists(self, path): + """Does a directory exist at the given path? + + Like os.path.isdir + + Override this method in subclasses. + + Parameters + ---------- + path : string + The path to check + + Returns + ------- + exists : bool + Whether the path does indeed exist. + """ + raise NotImplementedError + + def is_hidden(self, path): + """Is path a hidden directory or file? + + Parameters + ---------- + path : string + The path to check. This is an API path (`/` separated, + relative to root dir). + + Returns + ------- + hidden : bool + Whether the path is hidden. + + """ + raise NotImplementedError + + def file_exists(self, path=""): + """Does a file exist at the given path? + + Like os.path.isfile + + Override this method in subclasses. + + Parameters + ---------- + path : string + The API path of a file to check for. + + Returns + ------- + exists : bool + Whether the file exists. + """ + raise NotImplementedError("must be implemented in a subclass") + + def exists(self, path): + """Does a file or directory exist at the given path? + + Like os.path.exists + + Parameters + ---------- + path : string + The API path of a file or directory to check for. + + Returns + ------- + exists : bool + Whether the target exists. 
+ """ + return self.file_exists(path) or self.dir_exists(path) + + def get(self, path, content=True, type=None, format=None): + """Get a file or directory model.""" + raise NotImplementedError("must be implemented in a subclass") + + def save(self, model, path): + """ + Save a file or directory model to path. + + Should return the saved model with no content. Save implementations + should call self.run_pre_save_hook(model=model, path=path) prior to + writing any data. + """ + raise NotImplementedError("must be implemented in a subclass") + + def delete_file(self, path): + """Delete the file or directory at path.""" + raise NotImplementedError("must be implemented in a subclass") + + def rename_file(self, old_path, new_path): + """Rename a file or directory.""" + raise NotImplementedError("must be implemented in a subclass") + + # ContentsManager API part 2: methods that have useable default + # implementations, but can be overridden in subclasses. + + def delete(self, path): + """Delete a file/directory and any associated checkpoints.""" + path = path.strip("/") + if not path: + raise HTTPError(400, "Can't delete root") + self.delete_file(path) + self.checkpoints.delete_all_checkpoints(path) + + def rename(self, old_path, new_path): + """Rename a file and any checkpoints associated with that file.""" + self.rename_file(old_path, new_path) + self.checkpoints.rename_all_checkpoints(old_path, new_path) + + def update(self, model, path): + """Update the file's path + + For use in PATCH requests, to enable renaming a file without + re-uploading its contents. Only used for renaming at the moment. 
+ """ + path = path.strip("/") + new_path = model.get("path", path).strip("/") + if path != new_path: + self.rename(path, new_path) + model = self.get(new_path, content=False) + return model + + def info_string(self): + return "Serving contents" + + def get_kernel_path(self, path, model=None): + """Return the API path for the kernel + + KernelManagers can turn this value into a filesystem path, + or ignore it altogether. + + The default value here will start kernels in the directory of the + notebook server. FileContentsManager overrides this to use the + directory containing the notebook. + """ + return "" + + def increment_filename(self, filename, path="", insert=""): + """Increment a filename until it is unique. + + Parameters + ---------- + filename : unicode + The name of a file, including extension + path : unicode + The API path of the target's directory + insert : unicode + The characters to insert after the base filename + + Returns + ------- + name : unicode + A filename that is unique, based on the input filename. + """ + # Extract the full suffix from the filename (e.g. 
.tar.gz) + path = path.strip("/") + basename, dot, ext = filename.rpartition(".") + if ext != "ipynb": + basename, dot, ext = filename.partition(".") + + suffix = dot + ext + + for i in itertools.count(): + if i: + insert_i = "{}{}".format(insert, i) + else: + insert_i = "" + name = "{basename}{insert}{suffix}".format( + basename=basename, insert=insert_i, suffix=suffix + ) + if not self.exists("{}/{}".format(path, name)): + break + return name + + def validate_notebook_model(self, model): + """Add failed-validation message to model""" + try: + validate_nb(model["content"]) + except ValidationError as e: + model["message"] = "Notebook validation failed: {}:\n{}".format( + e.message, + json.dumps(e.instance, indent=1, default=lambda obj: ""), + ) + return model + + def new_untitled(self, path="", type="", ext=""): + """Create a new untitled file or directory in path + + path must be a directory + + File extension can be specified. + + Use `new` to create files with a fully specified path (including filename). + """ + path = path.strip("/") + if not self.dir_exists(path): + raise HTTPError(404, "No such directory: %s" % path) + + model = {} + if type: + model["type"] = type + + if ext == ".ipynb": + model.setdefault("type", "notebook") + else: + model.setdefault("type", "file") + + insert = "" + if model["type"] == "directory": + untitled = self.untitled_directory + insert = " " + elif model["type"] == "notebook": + untitled = self.untitled_notebook + ext = ".ipynb" + elif model["type"] == "file": + untitled = self.untitled_file + else: + raise HTTPError(400, "Unexpected model type: %r" % model["type"]) + + name = self.increment_filename(untitled + ext, path, insert=insert) + path = "{0}/{1}".format(path, name) + return self.new(model, path) + + def new(self, model=None, path=""): + """Create a new file or directory and return its model with no content. + + To create a new untitled entity in a directory, use `new_untitled`. 
+ """ + path = path.strip("/") + if model is None: + model = {} + + if path.endswith(".ipynb"): + model.setdefault("type", "notebook") + else: + model.setdefault("type", "file") + + # no content, not a directory, so fill out new-file model + if "content" not in model and model["type"] != "directory": + if model["type"] == "notebook": + model["content"] = new_notebook() + model["format"] = "json" + else: + model["content"] = "" + model["type"] = "file" + model["format"] = "text" + + model = self.save(model, path) + return model + + def copy(self, from_path, to_path=None): + """Copy an existing file and return its new model. + + If to_path not specified, it will be the parent directory of from_path. + If to_path is a directory, filename will increment `from_path-Copy#.ext`. + Considering multi-part extensions, the Copy# part will be placed before the first dot for all the extensions except `ipynb`. + For easier manual searching in case of notebooks, the Copy# part will be placed before the last dot. + + from_path must be a full path to a file. 
+ """ + path = from_path.strip("/") + if to_path is not None: + to_path = to_path.strip("/") + + if "/" in path: + from_dir, from_name = path.rsplit("/", 1) + else: + from_dir = "" + from_name = path + + model = self.get(path) + model.pop("path", None) + model.pop("name", None) + if model["type"] == "directory": + raise HTTPError(400, "Can't copy directories") + + if to_path is None: + to_path = from_dir + if self.dir_exists(to_path): + name = copy_pat.sub(".", from_name) + to_name = self.increment_filename(name, to_path, insert="-Copy") + to_path = "{0}/{1}".format(to_path, to_name) + + model = self.save(model, to_path) + return model + + def log_info(self): + self.log.info(self.info_string()) + + def trust_notebook(self, path): + """Explicitly trust a notebook + + Parameters + ---------- + path : string + The path of a notebook + """ + model = self.get(path) + nb = model["content"] + self.log.warning("Trusting notebook %s", path) + self.notary.mark_cells(nb, True) + self.check_and_sign(nb, path) + + def check_and_sign(self, nb, path=""): + """Check for trusted cells, and sign the notebook. + + Called as a part of saving notebooks. + + Parameters + ---------- + nb : dict + The notebook dict + path : string + The notebook's path (for logging) + """ + if self.notary.check_cells(nb): + self.notary.sign(nb) + else: + self.log.warning("Notebook %s is not trusted", path) + + def mark_trusted_cells(self, nb, path=""): + """Mark cells as trusted if the notebook signature matches. + + Called as a part of loading notebooks. 
+ + Parameters + ---------- + nb : dict + The notebook object (in current nbformat) + path : string + The notebook's path (for logging) + """ + trusted = self.notary.check_signature(nb) + if not trusted: + self.log.warning("Notebook %s is not trusted", path) + self.notary.mark_cells(nb, trusted) + + def should_list(self, name): + """Should this file/directory name be displayed in a listing?""" + return not any(fnmatch(name, glob) for glob in self.hide_globs) + + # Part 3: Checkpoints API + def create_checkpoint(self, path): + """Create a checkpoint.""" + return self.checkpoints.create_checkpoint(self, path) + + def restore_checkpoint(self, checkpoint_id, path): + """ + Restore a checkpoint. + """ + self.checkpoints.restore_checkpoint(self, checkpoint_id, path) + + def list_checkpoints(self, path): + return self.checkpoints.list_checkpoints(path) + + def delete_checkpoint(self, checkpoint_id, path): + return self.checkpoints.delete_checkpoint(checkpoint_id, path) + + +class AsyncContentsManager(ContentsManager): + """Base class for serving files and directories asynchronously.""" + + checkpoints_class = Type(AsyncCheckpoints, config=True) + checkpoints = Instance(AsyncCheckpoints, config=True) + checkpoints_kwargs = Dict(config=True) + + @default("checkpoints") + def _default_checkpoints(self): + return self.checkpoints_class(**self.checkpoints_kwargs) + + @default("checkpoints_kwargs") + def _default_checkpoints_kwargs(self): + return dict( + parent=self, + log=self.log, + ) + + # ContentsManager API part 1: methods that must be + # implemented in subclasses. + + async def dir_exists(self, path): + """Does a directory exist at the given path? + + Like os.path.isdir + + Override this method in subclasses. + + Parameters + ---------- + path : string + The path to check + + Returns + ------- + exists : bool + Whether the path does indeed exist. + """ + raise NotImplementedError + + async def is_hidden(self, path): + """Is path a hidden directory or file? 
+ + Parameters + ---------- + path : string + The path to check. This is an API path (`/` separated, + relative to root dir). + + Returns + ------- + hidden : bool + Whether the path is hidden. + + """ + raise NotImplementedError + + async def file_exists(self, path=""): + """Does a file exist at the given path? + + Like os.path.isfile + + Override this method in subclasses. + + Parameters + ---------- + path : string + The API path of a file to check for. + + Returns + ------- + exists : bool + Whether the file exists. + """ + raise NotImplementedError("must be implemented in a subclass") + + async def exists(self, path): + """Does a file or directory exist at the given path? + + Like os.path.exists + + Parameters + ---------- + path : string + The API path of a file or directory to check for. + + Returns + ------- + exists : bool + Whether the target exists. + """ + return await ensure_async(self.file_exists(path)) or await ensure_async( + self.dir_exists(path) + ) + + async def get(self, path, content=True, type=None, format=None): + """Get a file or directory model.""" + raise NotImplementedError("must be implemented in a subclass") + + async def save(self, model, path): + """ + Save a file or directory model to path. + + Should return the saved model with no content. Save implementations + should call self.run_pre_save_hook(model=model, path=path) prior to + writing any data. + """ + raise NotImplementedError("must be implemented in a subclass") + + async def delete_file(self, path): + """Delete the file or directory at path.""" + raise NotImplementedError("must be implemented in a subclass") + + async def rename_file(self, old_path, new_path): + """Rename a file or directory.""" + raise NotImplementedError("must be implemented in a subclass") + + # ContentsManager API part 2: methods that have useable default + # implementations, but can be overridden in subclasses. 
+ + async def delete(self, path): + """Delete a file/directory and any associated checkpoints.""" + path = path.strip("/") + if not path: + raise HTTPError(400, "Can't delete root") + + await self.delete_file(path) + await self.checkpoints.delete_all_checkpoints(path) + + async def rename(self, old_path, new_path): + """Rename a file and any checkpoints associated with that file.""" + await self.rename_file(old_path, new_path) + await self.checkpoints.rename_all_checkpoints(old_path, new_path) + + async def update(self, model, path): + """Update the file's path + + For use in PATCH requests, to enable renaming a file without + re-uploading its contents. Only used for renaming at the moment. + """ + path = path.strip("/") + new_path = model.get("path", path).strip("/") + if path != new_path: + await self.rename(path, new_path) + model = await self.get(new_path, content=False) + return model + + async def increment_filename(self, filename, path="", insert=""): + """Increment a filename until it is unique. + + Parameters + ---------- + filename : unicode + The name of a file, including extension + path : unicode + The API path of the target's directory + insert : unicode + The characters to insert after the base filename + + Returns + ------- + name : unicode + A filename that is unique, based on the input filename. + """ + # Extract the full suffix from the filename (e.g. 
.tar.gz) + path = path.strip("/") + basename, dot, ext = filename.rpartition(".") + if ext != "ipynb": + basename, dot, ext = filename.partition(".") + + suffix = dot + ext + + for i in itertools.count(): + if i: + insert_i = "{}{}".format(insert, i) + else: + insert_i = "" + name = "{basename}{insert}{suffix}".format( + basename=basename, insert=insert_i, suffix=suffix + ) + file_exists = await ensure_async(self.exists("{}/{}".format(path, name))) + if not file_exists: + break + return name + + async def new_untitled(self, path="", type="", ext=""): + """Create a new untitled file or directory in path + + path must be a directory + + File extension can be specified. + + Use `new` to create files with a fully specified path (including filename). + """ + path = path.strip("/") + dir_exists = await ensure_async(self.dir_exists(path)) + if not dir_exists: + raise HTTPError(404, "No such directory: %s" % path) + + model = {} + if type: + model["type"] = type + + if ext == ".ipynb": + model.setdefault("type", "notebook") + else: + model.setdefault("type", "file") + + insert = "" + if model["type"] == "directory": + untitled = self.untitled_directory + insert = " " + elif model["type"] == "notebook": + untitled = self.untitled_notebook + ext = ".ipynb" + elif model["type"] == "file": + untitled = self.untitled_file + else: + raise HTTPError(400, "Unexpected model type: %r" % model["type"]) + + name = await self.increment_filename(untitled + ext, path, insert=insert) + path = "{0}/{1}".format(path, name) + return await self.new(model, path) + + async def new(self, model=None, path=""): + """Create a new file or directory and return its model with no content. + + To create a new untitled entity in a directory, use `new_untitled`. 
+ """ + path = path.strip("/") + if model is None: + model = {} + + if path.endswith(".ipynb"): + model.setdefault("type", "notebook") + else: + model.setdefault("type", "file") + + # no content, not a directory, so fill out new-file model + if "content" not in model and model["type"] != "directory": + if model["type"] == "notebook": + model["content"] = new_notebook() + model["format"] = "json" + else: + model["content"] = "" + model["type"] = "file" + model["format"] = "text" + + model = await self.save(model, path) + return model + + async def copy(self, from_path, to_path=None): + """Copy an existing file and return its new model. + + If to_path not specified, it will be the parent directory of from_path. + If to_path is a directory, filename will increment `from_path-Copy#.ext`. + Considering multi-part extensions, the Copy# part will be placed before the first dot for all the extensions except `ipynb`. + For easier manual searching in case of notebooks, the Copy# part will be placed before the last dot. + + from_path must be a full path to a file. 
+ """ + path = from_path.strip("/") + if to_path is not None: + to_path = to_path.strip("/") + + if "/" in path: + from_dir, from_name = path.rsplit("/", 1) + else: + from_dir = "" + from_name = path + + model = await self.get(path) + model.pop("path", None) + model.pop("name", None) + if model["type"] == "directory": + raise HTTPError(400, "Can't copy directories") + if to_path is None: + to_path = from_dir + if await ensure_async(self.dir_exists(to_path)): + name = copy_pat.sub(".", from_name) + to_name = await self.increment_filename(name, to_path, insert="-Copy") + to_path = "{0}/{1}".format(to_path, to_name) + + model = await self.save(model, to_path) + return model + + async def trust_notebook(self, path): + """Explicitly trust a notebook + + Parameters + ---------- + path : string + The path of a notebook + """ + model = await self.get(path) + nb = model["content"] + self.log.warning("Trusting notebook %s", path) + self.notary.mark_cells(nb, True) + self.check_and_sign(nb, path) + + # Part 3: Checkpoints API + async def create_checkpoint(self, path): + """Create a checkpoint.""" + return await self.checkpoints.create_checkpoint(self, path) + + async def restore_checkpoint(self, checkpoint_id, path): + """ + Restore a checkpoint. 
+ """ + await self.checkpoints.restore_checkpoint(self, checkpoint_id, path) + + async def list_checkpoints(self, path): + return await self.checkpoints.list_checkpoints(path) + + async def delete_checkpoint(self, checkpoint_id, path): + return await self.checkpoints.delete_checkpoint(checkpoint_id, path) diff --git a/server/jupyter_server/services/kernels/__init__.py b/server/jupyter_server/services/kernels/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/server/jupyter_server/services/kernels/handlers.py b/server/jupyter_server/services/kernels/handlers.py new file mode 100644 index 0000000..84d14fd --- /dev/null +++ b/server/jupyter_server/services/kernels/handlers.py @@ -0,0 +1,785 @@ +"""Tornado handlers for kernels. + +Preliminary documentation at https://github.com/ipython/ipython/wiki/IPEP-16%3A-Notebook-multi-directory-dashboard-and-URL-mapping#kernels-api +""" +# Copyright (c) Jupyter Development Team. +# Distributed under the terms of the Modified BSD License. 
+import json +from textwrap import dedent +from traceback import format_tb + +from ipython_genutils.py3compat import cast_unicode +from jupyter_client import protocol_version as client_protocol_version + +try: + from jupyter_client.jsonutil import json_default +except ImportError: + from jupyter_client.jsonutil import date_default as json_default +from tornado import gen +from tornado import web +from tornado.concurrent import Future +from tornado.ioloop import IOLoop + +from ...base.handlers import APIHandler +from ...base.zmqhandlers import AuthenticatedZMQStreamHandler +from ...base.zmqhandlers import ( + deserialize_binary_message, + serialize_msg_to_ws_v1, + deserialize_msg_from_ws_v1, +) +from jupyter_server.utils import ensure_async +from jupyter_server.utils import url_escape +from jupyter_server.utils import url_path_join +from jupyter_server.auth import authorized + + +AUTH_RESOURCE = "kernels" + + +class KernelsAPIHandler(APIHandler): + auth_resource = AUTH_RESOURCE + + +class MainKernelHandler(KernelsAPIHandler): + @web.authenticated + @authorized + async def get(self): + km = self.kernel_manager + kernels = await ensure_async(km.list_kernels()) + self.finish(json.dumps(kernels, default=json_default)) + + @web.authenticated + @authorized + async def post(self): + km = self.kernel_manager + model = self.get_json_body() + if model is None: + model = {"name": km.default_kernel_name} + else: + model.setdefault("name", km.default_kernel_name) + + kernel_id = await km.start_kernel(kernel_name=model["name"], path=model.get("path")) + model = await ensure_async(km.kernel_model(kernel_id)) + location = url_path_join(self.base_url, "api", "kernels", url_escape(kernel_id)) + self.set_header("Location", location) + self.set_status(201) + self.finish(json.dumps(model, default=json_default)) + + +class KernelHandler(KernelsAPIHandler): + @web.authenticated + @authorized + async def get(self, kernel_id): + km = self.kernel_manager + model = await 
ensure_async(km.kernel_model(kernel_id)) + self.finish(json.dumps(model, default=json_default)) + + @web.authenticated + @authorized + async def delete(self, kernel_id): + km = self.kernel_manager + await ensure_async(km.shutdown_kernel(kernel_id)) + self.set_status(204) + self.finish() + + +class KernelActionHandler(KernelsAPIHandler): + @web.authenticated + @authorized + async def post(self, kernel_id, action): + km = self.kernel_manager + if action == "interrupt": + await ensure_async(km.interrupt_kernel(kernel_id)) + self.set_status(204) + if action == "restart": + + try: + await km.restart_kernel(kernel_id) + except Exception as e: + message = "Exception restarting kernel" + self.log.error(message, exc_info=True) + traceback = format_tb(e.__traceback__) + self.write(json.dumps(dict(message=message, traceback=traceback))) + self.set_status(500) + else: + model = await ensure_async(km.kernel_model(kernel_id)) + self.write(json.dumps(model, default=json_default)) + self.finish() + + +class ZMQChannelsHandler(AuthenticatedZMQStreamHandler): + """There is one ZMQChannelsHandler per running kernel and it oversees all + the sessions. + """ + + auth_resource = AUTH_RESOURCE + + # class-level registry of open sessions + # allows checking for conflict on session-id, + # which is used as a zmq identity and must be unique. 
+ _open_sessions = {} + + @property + def kernel_info_timeout(self): + km_default = self.kernel_manager.kernel_info_timeout + return self.settings.get("kernel_info_timeout", km_default) + + @property + def limit_rate(self): + return self.settings.get("limit_rate", True) + + @property + def iopub_msg_rate_limit(self): + return self.settings.get("iopub_msg_rate_limit", 0) + + @property + def iopub_data_rate_limit(self): + return self.settings.get("iopub_data_rate_limit", 0) + + @property + def rate_limit_window(self): + return self.settings.get("rate_limit_window", 1.0) + + def __repr__(self): + return "%s(%s)" % ( + self.__class__.__name__, + getattr(self, "kernel_id", "uninitialized"), + ) + + def create_stream(self): + km = self.kernel_manager + identity = self.session.bsession + for channel in ("iopub", "shell", "control", "stdin"): + meth = getattr(km, "connect_" + channel) + self.channels[channel] = stream = meth(self.kernel_id, identity=identity) + stream.channel = channel + + def nudge(self): + """Nudge the zmq connections with kernel_info_requests + Returns a Future that will resolve when we have received + a shell or control reply and at least one iopub message, + ensuring that zmq subscriptions are established, + sockets are fully connected, and kernel is responsive. + Keeps retrying kernel_info_request until these are both received. + """ + kernel = self.kernel_manager.get_kernel(self.kernel_id) + + # Do not nudge busy kernels as kernel info requests sent to shell are + # queued behind execution requests. + # nudging in this case would cause a potentially very long wait + # before connections are opened, + # plus it is *very* unlikely that a busy kernel will not finish + # establishing its zmq subscriptions before processing the next request. 
+ if getattr(kernel, "execution_state") == "busy": + self.log.debug("Nudge: not nudging busy kernel %s", self.kernel_id) + f = Future() + f.set_result(None) + return f + # Use a transient shell channel to prevent leaking + # shell responses to the front-end. + shell_channel = kernel.connect_shell() + # Use a transient control channel to prevent leaking + # control responses to the front-end. + control_channel = kernel.connect_control() + # The IOPub used by the client, whose subscriptions we are verifying. + iopub_channel = self.channels["iopub"] + + info_future = Future() + iopub_future = Future() + both_done = gen.multi([info_future, iopub_future]) + + def finish(_=None): + """Ensure all futures are resolved + which in turn triggers cleanup + """ + for f in (info_future, iopub_future): + if not f.done(): + f.set_result(None) + + def cleanup(_=None): + """Common cleanup""" + loop.remove_timeout(nudge_handle) + iopub_channel.stop_on_recv() + if not shell_channel.closed(): + shell_channel.close() + if not control_channel.closed(): + control_channel.close() + + # trigger cleanup when both message futures are resolved + both_done.add_done_callback(cleanup) + + def on_shell_reply(msg): + self.log.debug("Nudge: shell info reply received: %s", self.kernel_id) + if not info_future.done(): + self.log.debug("Nudge: resolving shell future: %s", self.kernel_id) + info_future.set_result(None) + + def on_control_reply(msg): + self.log.debug("Nudge: control info reply received: %s", self.kernel_id) + if not info_future.done(): + self.log.debug("Nudge: resolving control future: %s", self.kernel_id) + info_future.set_result(None) + + def on_iopub(msg): + self.log.debug("Nudge: IOPub received: %s", self.kernel_id) + if not iopub_future.done(): + iopub_channel.stop_on_recv() + self.log.debug("Nudge: resolving iopub future: %s", self.kernel_id) + iopub_future.set_result(None) + + iopub_channel.on_recv(on_iopub) + shell_channel.on_recv(on_shell_reply) + 
control_channel.on_recv(on_control_reply) + loop = IOLoop.current() + + # Nudge the kernel with kernel info requests until we get an IOPub message + def nudge(count): + count += 1 + + # NOTE: this close check appears to never be True during on_open, + # even when the peer has closed the connection + if self.ws_connection is None or self.ws_connection.is_closing(): + self.log.debug("Nudge: cancelling on closed websocket: %s", self.kernel_id) + finish() + return + + # check for stopped kernel + if self.kernel_id not in self.kernel_manager: + self.log.debug("Nudge: cancelling on stopped kernel: %s", self.kernel_id) + finish() + return + + # check for closed zmq socket + if shell_channel.closed(): + self.log.debug("Nudge: cancelling on closed zmq socket: %s", self.kernel_id) + finish() + return + + # check for closed zmq socket + if control_channel.closed(): + self.log.debug("Nudge: cancelling on closed zmq socket: %s", self.kernel_id) + finish() + return + + if not both_done.done(): + log = self.log.warning if count % 10 == 0 else self.log.debug + log("Nudge: attempt %s on kernel %s" % (count, self.kernel_id)) + self.session.send(shell_channel, "kernel_info_request") + self.session.send(control_channel, "kernel_info_request") + nonlocal nudge_handle + nudge_handle = loop.call_later(0.5, nudge, count) + + nudge_handle = loop.call_later(0, nudge, count=0) + + # resolve with a timeout if we get no response + future = gen.with_timeout(loop.time() + self.kernel_info_timeout, both_done) + # ensure we have no dangling resources or unresolved Futures in case of timeout + future.add_done_callback(finish) + return future + + def request_kernel_info(self): + """send a request for kernel_info""" + km = self.kernel_manager + kernel = km.get_kernel(self.kernel_id) + try: + # check for previous request + future = kernel._kernel_info_future + except AttributeError: + self.log.debug("Requesting kernel info from %s", self.kernel_id) + # Create a kernel_info channel to query the kernel 
protocol version. + # This channel will be closed after the kernel_info reply is received. + if self.kernel_info_channel is None: + self.kernel_info_channel = km.connect_shell(self.kernel_id) + self.kernel_info_channel.on_recv(self._handle_kernel_info_reply) + self.session.send(self.kernel_info_channel, "kernel_info_request") + # store the future on the kernel, so only one request is sent + kernel._kernel_info_future = self._kernel_info_future + else: + if not future.done(): + self.log.debug("Waiting for pending kernel_info request") + future.add_done_callback(lambda f: self._finish_kernel_info(f.result())) + return self._kernel_info_future + + def _handle_kernel_info_reply(self, msg): + """process the kernel_info_reply + + enabling msg spec adaptation, if necessary + """ + idents, msg = self.session.feed_identities(msg) + try: + msg = self.session.deserialize(msg) + except: + self.log.error("Bad kernel_info reply", exc_info=True) + self._kernel_info_future.set_result({}) + return + else: + info = msg["content"] + self.log.debug("Received kernel info: %s", info) + if msg["msg_type"] != "kernel_info_reply" or "protocol_version" not in info: + self.log.error("Kernel info request failed, assuming current %s", info) + info = {} + self._finish_kernel_info(info) + + # close the kernel_info channel, we don't need it anymore + if self.kernel_info_channel: + self.kernel_info_channel.close() + self.kernel_info_channel = None + + def _finish_kernel_info(self, info): + """Finish handling kernel_info reply + + Set up protocol adaptation, if needed, + and signal that connection can continue. 
+ """ + protocol_version = info.get("protocol_version", client_protocol_version) + if protocol_version != client_protocol_version: + self.session.adapt_version = int(protocol_version.split(".")[0]) + self.log.info( + "Adapting from protocol version {protocol_version} (kernel {kernel_id}) to {client_protocol_version} (client).".format( + protocol_version=protocol_version, + kernel_id=self.kernel_id, + client_protocol_version=client_protocol_version, + ) + ) + if not self._kernel_info_future.done(): + self._kernel_info_future.set_result(info) + + def initialize(self): + super(ZMQChannelsHandler, self).initialize() + self.zmq_stream = None + self.channels = {} + self.kernel_id = None + self.kernel_info_channel = None + self._kernel_info_future = Future() + self._close_future = Future() + self.session_key = "" + + # Rate limiting code + self._iopub_window_msg_count = 0 + self._iopub_window_byte_count = 0 + self._iopub_msgs_exceeded = False + self._iopub_data_exceeded = False + # Queue of (time stamp, byte count) + # Allows you to specify that the byte count should be lowered + # by a delta amount at some point in the future. + self._iopub_window_byte_queue = [] + + async def pre_get(self): + # authenticate first + super(ZMQChannelsHandler, self).pre_get() + # check session collision: + await self._register_session() + # then request kernel info, waiting up to a certain time before giving up. + # We don't want to wait forever, because browsers don't take it well when + # servers never respond to websocket connection requests. 
+ kernel = self.kernel_manager.get_kernel(self.kernel_id) + + if hasattr(kernel, "ready"): + try: + await kernel.ready + except Exception as e: + kernel.execution_state = "dead" + kernel.reason = str(e) + raise web.HTTPError(500, str(e)) from e + + self.session.key = kernel.session.key + future = self.request_kernel_info() + + def give_up(): + """Don't wait forever for the kernel to reply""" + if future.done(): + return + self.log.warning("Timeout waiting for kernel_info reply from %s", self.kernel_id) + future.set_result({}) + + loop = IOLoop.current() + loop.add_timeout(loop.time() + self.kernel_info_timeout, give_up) + # actually wait for it + await future + + async def get(self, kernel_id): + self.kernel_id = cast_unicode(kernel_id, "ascii") + await super(ZMQChannelsHandler, self).get(kernel_id=kernel_id) + + async def _register_session(self): + """Ensure we aren't creating a duplicate session. + + If a previous identical session is still open, close it to avoid collisions. + This is likely due to a client reconnecting from a lost network connection, + where the socket on our side has not been cleaned up yet. 
+ """ + self.session_key = "%s:%s" % (self.kernel_id, self.session.session) + stale_handler = self._open_sessions.get(self.session_key) + if stale_handler: + self.log.warning("Replacing stale connection: %s", self.session_key) + await stale_handler.close() + if ( + self.kernel_id in self.kernel_manager + ): # only update open sessions if kernel is actively managed + self._open_sessions[self.session_key] = self + + def open(self, kernel_id): + super(ZMQChannelsHandler, self).open() + km = self.kernel_manager + km.notify_connect(kernel_id) + + # on new connections, flush the message buffer + buffer_info = km.get_buffer(kernel_id, self.session_key) + if buffer_info and buffer_info["session_key"] == self.session_key: + self.log.info("Restoring connection for %s", self.session_key) + if km.ports_changed(kernel_id): + # If the kernel's ports have changed (some restarts trigger this) + # then reset the channels so nudge() is using the correct iopub channel + self.create_stream() + else: + # The kernel's ports have not changed; use the channels captured in the buffer + self.channels = buffer_info["channels"] + + connected = self.nudge() + + def replay(value): + replay_buffer = buffer_info["buffer"] + if replay_buffer: + self.log.info("Replaying %s buffered messages", len(replay_buffer)) + for channel, msg_list in replay_buffer: + stream = self.channels[channel] + self._on_zmq_reply(stream, msg_list) + + connected.add_done_callback(replay) + else: + try: + self.create_stream() + connected = self.nudge() + except web.HTTPError as e: + # Do not log error if the kernel is already shutdown, + # as it's normal that it's not responding + try: + self.kernel_manager.get_kernel(kernel_id) + + self.log.error("Error opening stream: %s", e) + except KeyError: + pass + # WebSockets don't respond to traditional error codes so we + # close the connection. 
+ for channel, stream in self.channels.items(): + if not stream.closed(): + stream.close() + self.close() + return + + km.add_restart_callback(self.kernel_id, self.on_kernel_restarted) + km.add_restart_callback(self.kernel_id, self.on_restart_failed, "dead") + + def subscribe(value): + for channel, stream in self.channels.items(): + stream.on_recv_stream(self._on_zmq_reply) + + connected.add_done_callback(subscribe) + + return connected + + def on_message(self, ws_msg): + if not self.channels: + # already closed, ignore the message + self.log.debug("Received message on closed websocket %r", ws_msg) + return + + if self.selected_subprotocol == "v1.kernel.websocket.jupyter.org": + channel, msg_list = deserialize_msg_from_ws_v1(ws_msg) + msg = { + "header": None, + } + else: + if isinstance(ws_msg, bytes): + msg = deserialize_binary_message(ws_msg) + else: + msg = json.loads(ws_msg) + msg_list = [] + channel = msg.pop("channel", None) + + if channel is None: + self.log.warning("No channel specified, assuming shell: %s", msg) + channel = "shell" + if channel not in self.channels: + self.log.warning("No such channel: %r", channel) + return + am = self.kernel_manager.allowed_message_types + ignore_msg = False + if am: + msg["header"] = self.get_part("header", msg["header"], msg_list) + if msg["header"]["msg_type"] not in am: + self.log.warning( + 'Received message of type "%s", which is not allowed. Ignoring.' 
                    # (tail of the client->kernel dispatch method whose start is
                    # above this view: a disallowed msg_type warning is being
                    # formatted here, then the message is conditionally forwarded)
                    % msg["header"]["msg_type"]
                )
                ignore_msg = True
        if not ignore_msg:
            # Forward the client's message to the kernel on the proper channel.
            stream = self.channels[channel]
            if self.selected_subprotocol == "v1.kernel.websocket.jupyter.org":
                # v1 binary subprotocol: frames are already serialized, send raw.
                self.session.send_raw(stream, msg_list)
            else:
                self.session.send(stream, msg)

    def get_part(self, field, value, msg_list):
        """Lazily unpack one part ('header'/'parent_header'/'content') of a message.

        If *value* is already deserialized, return it unchanged; otherwise unpack
        the corresponding frame from *msg_list*.  Index 2 is skipped because it is
        the metadata frame, which this handler never needs to inspect.
        """
        if value is None:
            field2idx = {
                "header": 0,
                "parent_header": 1,
                "content": 3,
            }
            value = self.session.unpack(msg_list[field2idx[field]])
        return value

    def _on_zmq_reply(self, stream, msg_list):
        """Relay a kernel->client message, applying filtering and rate limiting."""
        idents, fed_msg_list = self.session.feed_identities(msg_list)

        if self.selected_subprotocol == "v1.kernel.websocket.jupyter.org":
            # v1 protocol: defer deserialization; parts are unpacked on demand
            # via get_part() only if filtering/rate limiting needs them.
            msg = {"header": None, "parent_header": None, "content": None}
        else:
            msg = self.session.deserialize(fed_msg_list)

        channel = getattr(stream, "channel", None)
        parts = fed_msg_list[1:]

        # May rewrite error content in place (see _on_error).
        self._on_error(channel, msg, parts)

        if self._limit_rate(channel, msg, parts):
            # Message dropped by the iopub rate limiter.
            return

        if self.selected_subprotocol == "v1.kernel.websocket.jupyter.org":
            super(ZMQChannelsHandler, self)._on_zmq_reply(stream, parts)
        else:
            super(ZMQChannelsHandler, self)._on_zmq_reply(stream, msg)

    def write_stderr(self, error_message, parent_header):
        """Send *error_message* to the client as a synthetic iopub stream message."""
        self.log.warning(error_message)
        err_msg = self.session.msg(
            "stream",
            content={"text": error_message + "\n", "name": "stderr"},
            parent=parent_header,
        )
        if self.selected_subprotocol == "v1.kernel.websocket.jupyter.org":
            bin_msg = serialize_msg_to_ws_v1(err_msg, "iopub", self.session.pack)
            self.write_message(bin_msg, binary=True)
        else:
            err_msg["channel"] = "iopub"
            self.write_message(json.dumps(err_msg, default=json_default))

    def _limit_rate(self, channel, msg, msg_list):
        """Sliding-window rate limiter for the iopub channel.

        Returns True if the message should be dropped.  Counts messages and
        stream bytes over the last ``rate_limit_window`` seconds; once a limit
        is exceeded the client is notified via write_stderr and messages are
        dropped until the rate falls below 80% of the limit (hysteresis).
        """
        if not (self.limit_rate and channel == "iopub"):
            return False

        msg["header"] = self.get_part("header", msg["header"], msg_list)

        msg_type = msg["header"]["msg_type"]
        if msg_type == "status":
            msg["content"] = self.get_part("content", msg["content"], msg_list)
            if msg["content"].get("execution_state") == "idle":
                # reset rate limit counter on status=idle,
                # to avoid 'Run All' hitting limits prematurely.
                self._iopub_window_byte_queue = []
                self._iopub_window_msg_count = 0
                self._iopub_window_byte_count = 0
                self._iopub_msgs_exceeded = False
                self._iopub_data_exceeded = False

        # status/comm_open/execute_input are exempt from limiting.
        if msg_type not in {"status", "comm_open", "execute_input"}:
            # Remove the counts queued for removal.
            now = IOLoop.current().time()
            while len(self._iopub_window_byte_queue) > 0:
                queued = self._iopub_window_byte_queue[0]
                if now >= queued[0]:
                    self._iopub_window_byte_count -= queued[1]
                    self._iopub_window_msg_count -= 1
                    del self._iopub_window_byte_queue[0]
                else:
                    # This part of the queue hasn't be reached yet, so we can
                    # abort the loop.
                    break

            # Increment the bytes and message count
            self._iopub_window_msg_count += 1
            if msg_type == "stream":
                byte_count = sum([len(x) for x in msg_list])
            else:
                byte_count = 0
            self._iopub_window_byte_count += byte_count

            # Queue a removal of the byte and message count for a time in the
            # future, when we are no longer interested in it.
            self._iopub_window_byte_queue.append((now + self.rate_limit_window, byte_count))

            # Check the limits, set the limit flags, and reset the
            # message and data counts.
            msg_rate = float(self._iopub_window_msg_count) / self.rate_limit_window
            data_rate = float(self._iopub_window_byte_count) / self.rate_limit_window

            # Check the msg rate
            if self.iopub_msg_rate_limit > 0 and msg_rate > self.iopub_msg_rate_limit:
                if not self._iopub_msgs_exceeded:
                    self._iopub_msgs_exceeded = True
                    msg["parent_header"] = self.get_part(
                        "parent_header", msg["parent_header"], msg_list
                    )
                    self.write_stderr(
                        dedent(
                            """\
                    IOPub message rate exceeded.
                    The Jupyter server will temporarily stop sending output
                    to the client in order to avoid crashing it.
                    To change this limit, set the config variable
                    `--ServerApp.iopub_msg_rate_limit`.

                    Current values:
                    ServerApp.iopub_msg_rate_limit={} (msgs/sec)
                    ServerApp.rate_limit_window={} (secs)
                    """.format(
                                self.iopub_msg_rate_limit, self.rate_limit_window
                            )
                        ),
                        msg["parent_header"],
                    )
            else:
                # resume once we've got some headroom below the limit
                if self._iopub_msgs_exceeded and msg_rate < (0.8 * self.iopub_msg_rate_limit):
                    self._iopub_msgs_exceeded = False
                    if not self._iopub_data_exceeded:
                        self.log.warning("iopub messages resumed")

            # Check the data rate
            if self.iopub_data_rate_limit > 0 and data_rate > self.iopub_data_rate_limit:
                if not self._iopub_data_exceeded:
                    self._iopub_data_exceeded = True
                    msg["parent_header"] = self.get_part(
                        "parent_header", msg["parent_header"], msg_list
                    )
                    self.write_stderr(
                        dedent(
                            """\
                    IOPub data rate exceeded.
                    The Jupyter server will temporarily stop sending output
                    to the client in order to avoid crashing it.
                    To change this limit, set the config variable
                    `--ServerApp.iopub_data_rate_limit`.

                    Current values:
                    ServerApp.iopub_data_rate_limit={} (bytes/sec)
                    ServerApp.rate_limit_window={} (secs)
                    """.format(
                                self.iopub_data_rate_limit, self.rate_limit_window
                            )
                        ),
                        msg["parent_header"],
                    )
            else:
                # resume once we've got some headroom below the limit
                if self._iopub_data_exceeded and data_rate < (0.8 * self.iopub_data_rate_limit):
                    self._iopub_data_exceeded = False
                    if not self._iopub_msgs_exceeded:
                        self.log.warning("iopub messages resumed")

            # If either of the limit flags are set, do not send the message.
            if self._iopub_msgs_exceeded or self._iopub_data_exceeded:
                # we didn't send it, remove the current message from the calculus
                self._iopub_window_msg_count -= 1
                self._iopub_window_byte_count -= byte_count
                self._iopub_window_byte_queue.pop(-1)
                return True

        return False

    def close(self):
        """Close the websocket; returns a future resolved when teardown finishes."""
        super(ZMQChannelsHandler, self).close()
        return self._close_future

    def on_close(self):
        """Tornado close callback: unregister, detach from the kernel, clean up streams."""
        self.log.debug("Websocket closed %s", self.session_key)
        # unregister myself as an open session (only if it's really me)
        if self._open_sessions.get(self.session_key) is self:
            self._open_sessions.pop(self.session_key)

        km = self.kernel_manager
        if self.kernel_id in km:
            km.notify_disconnect(self.kernel_id)
            km.remove_restart_callback(
                self.kernel_id,
                self.on_kernel_restarted,
            )
            km.remove_restart_callback(
                self.kernel_id,
                self.on_restart_failed,
                "dead",
            )

            # start buffering instead of closing if this was the last connection
            if km._kernel_connections[self.kernel_id] == 0:
                # Hand the ZMQ streams over to the manager's buffer so output
                # produced while no client is connected can be replayed later.
                km.start_buffering(self.kernel_id, self.session_key, self.channels)
                self._close_future.set_result(None)
                return

        # This method can be called twice, once by self.kernel_died and once
        # from the WebSocket close event. If the WebSocket connection is
        # closed before the ZMQ streams are setup, they could be None.
        for channel, stream in self.channels.items():
            if stream is not None and not stream.closed():
                stream.on_recv(None)
                stream.close()

        self.channels = {}
        self._close_future.set_result(None)

    def _send_status_message(self, status):
        """Push a synthetic iopub 'status' message (e.g. restarting/dead) to the client."""
        iopub = self.channels.get("iopub", None)
        if iopub and not iopub.closed():
            # flush IOPub before sending a restarting/dead status message
            # ensures proper ordering on the IOPub channel
            # that all messages from the stopped kernel have been delivered
            iopub.flush()
        msg = self.session.msg("status", {"execution_state": status})
        if self.selected_subprotocol == "v1.kernel.websocket.jupyter.org":
            bin_msg = serialize_msg_to_ws_v1(msg, "iopub", self.session.pack)
            self.write_message(bin_msg, binary=True)
        else:
            msg["channel"] = "iopub"
            self.write_message(json.dumps(msg, default=json_default))

    def on_kernel_restarted(self):
        # Restart-callback registered in on_open; informs the client.
        self.log.warning("kernel %s restarted", self.kernel_id)
        self._send_status_message("restarting")

    def on_restart_failed(self):
        # 'dead' restart-callback; informs the client the kernel is gone.
        self.log.error("kernel %s restarted failed!", self.kernel_id)
        self._send_status_message("dead")

    def _on_error(self, channel, msg, msg_list):
        """Scrub tracebacks from iopub 'error' messages when tracebacks are disallowed.

        Mutates *msg* (and, for the v1 protocol, the raw content frame in
        *msg_list*) in place.
        """
        if self.kernel_manager.allow_tracebacks:
            return

        if channel == "iopub":
            msg["header"] = self.get_part("header", msg["header"], msg_list)
            if msg["header"]["msg_type"] == "error":
                msg["content"] = self.get_part("content", msg["content"], msg_list)
                msg["content"]["ename"] = "ExecutionError"
                msg["content"]["evalue"] = "Execution error"
                msg["content"]["traceback"] = [self.kernel_manager.traceback_replacement_message]
                if self.selected_subprotocol == "v1.kernel.websocket.jupyter.org":
                    # Re-pack so the raw frame forwarded to the client matches.
                    msg_list[3] = self.session.pack(msg["content"])


# -----------------------------------------------------------------------------
# URL to handler mappings
# -----------------------------------------------------------------------------


# NOTE(review): the named-group syntax appears garbled in transit -- upstream
# uses r"(?P<kernel_id>\w+-...)" / r"(?P<action>restart|interrupt)"; as written
# "(?P" is invalid regex and tornado would fail to compile it.  Verify against
# the upstream patch before applying.
_kernel_id_regex = r"(?P\w+-\w+-\w+-\w+-\w+)"
_kernel_action_regex = r"(?Prestart|interrupt)"

default_handlers = [
    (r"/api/kernels", MainKernelHandler),
    (r"/api/kernels/%s" % _kernel_id_regex, KernelHandler),
    (
        r"/api/kernels/%s/%s" % (_kernel_id_regex, _kernel_action_regex),
        KernelActionHandler,
    ),
    (r"/api/kernels/%s/channels" % _kernel_id_regex, ZMQChannelsHandler),
]

# === patch: new file server/jupyter_server/services/kernels/kernelmanager.py ===
"""A MultiKernelManager for use in the Jupyter server

- raises HTTPErrors
- creates REST API models
"""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import asyncio
import os
from collections import defaultdict
from datetime import datetime
from datetime import timedelta
from functools import partial

from jupyter_client.multikernelmanager import AsyncMultiKernelManager
from jupyter_client.multikernelmanager import MultiKernelManager
from jupyter_client.session import Session
from jupyter_core.paths import exists
from tornado import web
from tornado.concurrent import Future
from tornado.ioloop import IOLoop
from tornado.ioloop import PeriodicCallback
from traitlets import Any
from traitlets import Bool
from traitlets import default
from traitlets import Dict
from traitlets import Float
from traitlets import Instance
from traitlets import Integer
from traitlets import List
from traitlets import TraitError
from traitlets import Unicode
from traitlets import validate

from jupyter_server._tz import isoformat
from jupyter_server._tz import utcnow
from jupyter_server.prometheus.metrics import KERNEL_CURRENTLY_RUNNING_TOTAL
from jupyter_server.utils import ensure_async
from jupyter_server.utils import to_os_path


class MappingKernelManager(MultiKernelManager):
    """A KernelManager that handles
    -
      File mapping
    - HTTP error handling
    - Kernel message filtering
    """

    @default("kernel_manager_class")
    def _default_kernel_manager_class(self):
        # Per-kernel manager class; the async subclass overrides this default.
        return "jupyter_client.ioloop.IOLoopKernelManager"

    kernel_argv = List(Unicode())

    # Absolute root directory used to resolve API paths into cwds.
    root_dir = Unicode(config=True)

    # kernel_id -> number of live websocket connections.
    _kernel_connections = Dict()

    # kernel_id -> ports captured at startup; used to detect port changes on restart.
    _kernel_ports = Dict()

    _culler_callback = None

    _initialized_culler = False

    @default("root_dir")
    def _default_root_dir(self):
        try:
            return self.parent.root_dir
        except AttributeError:
            return os.getcwd()

    @validate("root_dir")
    def _update_root_dir(self, proposal):
        """Do a bit of validation of the root dir."""
        value = proposal["value"]
        if not os.path.isabs(value):
            # If we receive a non-absolute path, make it absolute.
            value = os.path.abspath(value)
        if not exists(value) or not os.path.isdir(value):
            raise TraitError("kernel root dir %r is not a directory" % value)
        return value

    cull_idle_timeout = Integer(
        0,
        config=True,
        help="""Timeout (in seconds) after which a kernel is considered idle and ready to be culled.
        Values of 0 or lower disable culling. Very short timeouts may result in kernels being culled
        for users with poor network connections.""",
    )

    cull_interval_default = 300  # 5 minutes
    cull_interval = Integer(
        cull_interval_default,
        config=True,
        help="""The interval (in seconds) on which to check for idle kernels exceeding the cull timeout value.""",
    )

    cull_connected = Bool(
        False,
        config=True,
        help="""Whether to consider culling kernels which have one or more connections.
        Only effective if cull_idle_timeout > 0.""",
    )

    cull_busy = Bool(
        False,
        config=True,
        help="""Whether to consider culling kernels which are busy.
        Only effective if cull_idle_timeout > 0.""",
    )

    buffer_offline_messages = Bool(
        True,
        config=True,
        help="""Whether messages from kernels whose frontends have disconnected should be buffered in-memory.

        When True (default), messages are buffered and replayed on reconnect,
        avoiding lost messages due to interrupted connectivity.

        Disable if long-running kernels will produce too much output while
        no frontends are connected.
        """,
    )

    kernel_info_timeout = Float(
        60,
        config=True,
        help="""Timeout for giving up on a kernel (in seconds).

        On starting and restarting kernels, we check whether the
        kernel is running and responsive by sending kernel_info_requests.
        This sets the timeout in seconds for how long the kernel can take
        before being presumed dead.
        This affects the MappingKernelManager (which handles kernel restarts)
        and the ZMQChannelsHandler (which handles the startup).
        """,
    )

    # kernel_id -> {"buffer": [(channel, msg_parts)], "session_key": str, "channels": {}}
    _kernel_buffers = Any()

    @default("_kernel_buffers")
    def _default_kernel_buffers(self):
        return defaultdict(lambda: {"buffer": [], "session_key": "", "channels": {}})

    last_kernel_activity = Instance(
        datetime,
        help="The last activity on any kernel, including shutting down a kernel",
    )

    def __init__(self, **kwargs):
        # Pin the concrete superclass so the Async subclass (which inherits
        # from both this class and AsyncMultiKernelManager) can dispatch
        # explicitly without MRO surprises.
        self.pinned_superclass = MultiKernelManager
        self.pinned_superclass.__init__(self, **kwargs)
        self.last_kernel_activity = utcnow()

    allowed_message_types = List(
        trait=Unicode(),
        config=True,
        help="""White list of allowed kernel message types.
        When the list is empty, all message types are allowed.
        """,
    )

    allow_tracebacks = Bool(
        True, config=True, help=("Whether to send tracebacks to clients on exceptions.")
    )

    traceback_replacement_message = Unicode(
        "An exception occurred at runtime, which is not shown due to security reasons.",
        config=True,
        help=("Message to print when allow_tracebacks is False, and an exception occurs"),
    )

    # -------------------------------------------------------------------------
    # Methods for managing kernels and sessions
    # -------------------------------------------------------------------------

    def _handle_kernel_died(self, kernel_id):
        """notice that a kernel died"""
        self.log.warning("Kernel %s died, removing from map.", kernel_id)
        self.remove_kernel(kernel_id)

    def cwd_for_path(self, path):
        """Turn API path into absolute OS path."""
        os_path = to_os_path(path, self.root_dir)
        # in the case of documents and kernels not being on the same filesystem,
        # walk up to root_dir if the paths don't exist
        while not os.path.isdir(os_path) and os_path != self.root_dir:
            os_path = os.path.dirname(os_path)
        return os_path

    async def start_kernel(self, kernel_id=None, path=None, **kwargs):
        """Start a kernel for a session and return its kernel_id.

        Parameters
        ----------
        kernel_id : uuid
            The uuid to associate the new kernel with. If this
            is not None, this kernel will be persistent whenever it is
            requested.
        path : API path
            The API path (unicode, '/' delimited) for the cwd.
            Will be transformed to an OS path relative to root_dir.
        kernel_name : str
            The name identifying which kernel spec to launch. This is ignored if
            an existing kernel is returned, but it may be checked in the future.
        """
        if kernel_id is None or kernel_id not in self:
            if path is not None:
                kwargs["cwd"] = self.cwd_for_path(path)
            if kernel_id is not None:
                kwargs["kernel_id"] = kernel_id
            kernel_id = await ensure_async(self.pinned_superclass.start_kernel(self, **kwargs))
            self._kernel_connections[kernel_id] = 0
            # With pending kernels enabled, startup finishes in the background;
            # otherwise we wait for it here.
            fut = asyncio.ensure_future(self._finish_kernel_start(kernel_id))
            if not getattr(self, "use_pending_kernels", None):
                await fut
            # add busy/activity markers:
            kernel = self.get_kernel(kernel_id)
            kernel.execution_state = "starting"
            kernel.reason = ""
            kernel.last_activity = utcnow()
            self.log.info("Kernel started: %s" % kernel_id)
            self.log.debug("Kernel args: %r" % kwargs)

            # Increase the metric of number of kernels running
            # for the relevant kernel type by 1
            KERNEL_CURRENTLY_RUNNING_TOTAL.labels(type=self._kernels[kernel_id].kernel_name).inc()

        else:
            self.log.info("Using existing kernel: %s" % kernel_id)

        # Initialize culling if not already
        if not self._initialized_culler:
            self.initialize_culler()

        return kernel_id

    async def _finish_kernel_start(self, kernel_id):
        """Complete kernel startup: capture ports, begin activity watching, hook death."""
        km = self.get_kernel(kernel_id)
        if hasattr(km, "ready"):
            try:
                await km.ready
            except Exception:
                self.log.exception(km.ready.exception())
                return

        self._kernel_ports[kernel_id] = km.ports
        self.start_watching_activity(kernel_id)
        # register callback for failed auto-restart
        self.add_restart_callback(
            kernel_id,
            lambda: self._handle_kernel_died(kernel_id),
            "dead",
        )

    def ports_changed(self, kernel_id):
        """Used by ZMQChannelsHandler to determine how to coordinate nudge and replays.

        Ports are captured when starting a kernel (via MappingKernelManager).  Ports
        are considered changed (following restarts) if the referenced KernelManager
        is using a set of ports different from those captured at startup.  If changes
        are detected, the captured set is updated and a value of True is returned.

        NOTE: Use is exclusive to ZMQChannelsHandler because this object is a singleton
        instance while ZMQChannelsHandler instances are per WebSocket connection that
        can vary per kernel lifetime.
        """
        changed_ports = self._get_changed_ports(kernel_id)
        if changed_ports:
            # If changed, update captured ports and return True, else return False.
            self.log.debug(f"Port change detected for kernel: {kernel_id}")
            self._kernel_ports[kernel_id] = changed_ports
            return True
        return False

    def _get_changed_ports(self, kernel_id):
        """Internal method to test if a kernel's ports have changed and, if so, return their values.

        This method does NOT update the captured ports for the kernel as that can only be done
        by ZMQChannelsHandler, but instead returns the new list of ports if they are different
        than those captured at startup.  This enables the ability to conditionally restart
        activity monitoring immediately following a kernel's restart (if ports have changed).
        """
        # Get current ports and return comparison with ports captured at startup.
        km = self.get_kernel(kernel_id)
        if km.ports != self._kernel_ports[kernel_id]:
            return km.ports
        return None

    def start_buffering(self, kernel_id, session_key, channels):
        """Start buffering messages for a kernel

        Parameters
        ----------
        kernel_id : str
            The id of the kernel to stop buffering.
        session_key : str
            The session_key, if any, that should get the buffer.
            If the session_key matches the current buffered session_key,
            the buffer will be returned.
        channels : dict({'channel': ZMQStream})
            The zmq channels whose messages should be buffered.
        """

        if not self.buffer_offline_messages:
            # Buffering disabled: just close the streams.
            for channel, stream in channels.items():
                stream.close()
            return

        self.log.info("Starting buffering for %s", session_key)
        self._check_kernel_id(kernel_id)
        # clear previous buffering state
        self.stop_buffering(kernel_id)
        buffer_info = self._kernel_buffers[kernel_id]
        # record the session key because only one session can buffer
        buffer_info["session_key"] = session_key
        # TODO: the buffer should likely be a memory bounded queue, we're starting with a list to keep it simple
        buffer_info["buffer"] = []
        buffer_info["channels"] = channels

        # forward any future messages to the internal buffer
        def buffer_msg(channel, msg_parts):
            self.log.debug("Buffering msg on %s:%s", kernel_id, channel)
            buffer_info["buffer"].append((channel, msg_parts))

        for channel, stream in channels.items():
            stream.on_recv(partial(buffer_msg, channel))

    def get_buffer(self, kernel_id, session_key):
        """Get the buffer for a given kernel

        Parameters
        ----------
        kernel_id : str
            The id of the kernel to stop buffering.
        session_key : str, optional
            The session_key, if any, that should get the buffer.
            If the session_key matches the current buffered session_key,
            the buffer will be returned.
        """
        self.log.debug("Getting buffer for %s", kernel_id)
        if kernel_id not in self._kernel_buffers:
            return

        buffer_info = self._kernel_buffers[kernel_id]
        if buffer_info["session_key"] == session_key:
            # remove buffer
            self._kernel_buffers.pop(kernel_id)
            # only return buffer_info if it's a match
            return buffer_info
        else:
            # A different session reconnected: discard the stale buffer.
            self.stop_buffering(kernel_id)

    def stop_buffering(self, kernel_id):
        """Stop buffering kernel messages

        Parameters
        ----------
        kernel_id : str
            The id of the kernel to stop buffering.
        """
        self.log.debug("Clearing buffer for %s", kernel_id)
        self._check_kernel_id(kernel_id)

        if kernel_id not in self._kernel_buffers:
            return
        buffer_info = self._kernel_buffers.pop(kernel_id)
        # close buffering streams
        for stream in buffer_info["channels"].values():
            if not stream.closed():
                stream.on_recv(None)
                stream.close()

        msg_buffer = buffer_info["buffer"]
        if msg_buffer:
            self.log.info(
                "Discarding %s buffered messages for %s",
                len(msg_buffer),
                buffer_info["session_key"],
            )

    def shutdown_kernel(self, kernel_id, now=False, restart=False):
        """Shutdown a kernel by kernel_id"""
        self._check_kernel_id(kernel_id)
        self.stop_watching_activity(kernel_id)
        self.stop_buffering(kernel_id)
        # NOTE(review): _kernel_connections is popped here AND again after the
        # superclass shutdown below -- the first pop looks redundant; confirm
        # against upstream before changing.
        self._kernel_connections.pop(kernel_id, None)

        # Decrease the metric of number of kernels
        # running for the relevant kernel type by 1
        KERNEL_CURRENTLY_RUNNING_TOTAL.labels(type=self._kernels[kernel_id].kernel_name).dec()

        self.pinned_superclass.shutdown_kernel(self, kernel_id, now=now, restart=restart)
        # Unlike its async sibling method in AsyncMappingKernelManager, removing the kernel_id
        # from the connections dictionary isn't as problematic before the shutdown since the
        # method is synchronous.  However, we'll keep the relative call orders the same from
        # a maintenance perspective.
        self._kernel_connections.pop(kernel_id, None)
        self._kernel_ports.pop(kernel_id, None)

    async def restart_kernel(self, kernel_id, now=False):
        """Restart a kernel by kernel_id"""
        self._check_kernel_id(kernel_id)
        await ensure_async(self.pinned_superclass.restart_kernel(self, kernel_id, now=now))
        kernel = self.get_kernel(kernel_id)
        # return a Future that will resolve when the kernel has successfully restarted
        channel = kernel.connect_shell()
        future = Future()

        def finish():
            """Common cleanup when restart finishes/fails for any reason."""
            # NOTE: `loop` and `timeout` are assigned below; this relies on
            # late binding -- the callbacks only fire after they exist.
            if not channel.closed():
                channel.close()
            loop.remove_timeout(timeout)
            kernel.remove_restart_callback(on_restart_failed, "dead")

        def on_reply(msg):
            self.log.debug("Kernel info reply received: %s", kernel_id)
            finish()
            if not future.done():
                future.set_result(msg)

        def on_timeout():
            self.log.warning("Timeout waiting for kernel_info_reply: %s", kernel_id)
            finish()
            if not future.done():
                future.set_exception(TimeoutError("Timeout waiting for restart"))

        def on_restart_failed():
            self.log.warning("Restarting kernel failed: %s", kernel_id)
            finish()
            if not future.done():
                future.set_exception(RuntimeError("Restart failed"))

        kernel.add_restart_callback(on_restart_failed, "dead")
        # Probe the restarted kernel with a kernel_info_request on shell.
        kernel.session.send(channel, "kernel_info_request")
        channel.on_recv(on_reply)
        loop = IOLoop.current()
        timeout = loop.add_timeout(loop.time() + self.kernel_info_timeout, on_timeout)
        # Re-establish activity watching if ports have changed...
        if self._get_changed_ports(kernel_id) is not None:
            self.stop_watching_activity(kernel_id)
            self.start_watching_activity(kernel_id)
        return future

    def notify_connect(self, kernel_id):
        """Notice a new connection to a kernel"""
        if kernel_id in self._kernel_connections:
            self._kernel_connections[kernel_id] += 1

    def notify_disconnect(self, kernel_id):
        """Notice a disconnection from a kernel"""
        if kernel_id in self._kernel_connections:
            self._kernel_connections[kernel_id] -= 1

    def kernel_model(self, kernel_id):
        """Return a JSON-safe dict representing a kernel

        For use in representing kernels in the JSON APIs.
        """
        self._check_kernel_id(kernel_id)
        kernel = self._kernels[kernel_id]

        model = {
            "id": kernel_id,
            "name": kernel.kernel_name,
            "last_activity": isoformat(kernel.last_activity),
            "execution_state": kernel.execution_state,
            "connections": self._kernel_connections.get(kernel_id, 0),
        }
        if getattr(kernel, "reason", None):
            model["reason"] = kernel.reason
        return model

    def list_kernels(self):
        """Returns a list of kernel_id's of kernels running."""
        kernels = []
        kernel_ids = self.pinned_superclass.list_kernel_ids(self)
        for kernel_id in kernel_ids:
            try:
                model = self.kernel_model(kernel_id)
                kernels.append(model)
            except (web.HTTPError, KeyError):
                pass  # Probably due to a (now) non-existent kernel, continue building the list
        return kernels

    # override _check_kernel_id to raise 404 instead of KeyError
    def _check_kernel_id(self, kernel_id):
        """Check a that a kernel_id exists and raise 404 if not."""
        if kernel_id not in self:
            raise web.HTTPError(404, "Kernel does not exist: %s" % kernel_id)

    # monitoring activity:

    def start_watching_activity(self, kernel_id):
        """Start watching IOPub messages on a kernel for activity.

        - update last_activity on every message
        - record execution_state from status messages
        """
        kernel = self._kernels[kernel_id]
        # add busy/activity markers:
        kernel.execution_state = "starting"
        kernel.reason = ""
        kernel.last_activity = utcnow()
        kernel._activity_stream = kernel.connect_iopub()
        # Separate Session so deserialization here doesn't disturb the
        # kernel's own session counters.
        session = Session(
            config=kernel.session.config,
            key=kernel.session.key,
        )

        def record_activity(msg_list):
            """Record an IOPub message arriving from a kernel"""
            self.last_kernel_activity = kernel.last_activity = utcnow()

            idents, fed_msg_list = session.feed_identities(msg_list)
            msg = session.deserialize(fed_msg_list)

            msg_type = msg["header"]["msg_type"]
            if msg_type == "status":
                kernel.execution_state = msg["content"]["execution_state"]
                self.log.debug(
                    "activity on %s: %s (%s)",
                    kernel_id,
                    msg_type,
                    kernel.execution_state,
                )
            else:
                self.log.debug("activity on %s: %s", kernel_id, msg_type)

        kernel._activity_stream.on_recv(record_activity)

    def stop_watching_activity(self, kernel_id):
        """Stop watching IOPub messages on a kernel for activity."""
        kernel = self._kernels[kernel_id]
        if getattr(kernel, "_activity_stream", None):
            kernel._activity_stream.close()
            kernel._activity_stream = None

    def initialize_culler(self):
        """Start idle culler if 'cull_idle_timeout' is greater than zero.

        Regardless of that value, set flag that we've been here.
        """
        if not self._initialized_culler and self.cull_idle_timeout > 0:
            if self._culler_callback is None:
                loop = IOLoop.current()
                if self.cull_interval <= 0:  # handle case where user set invalid value
                    self.log.warning(
                        "Invalid value for 'cull_interval' detected (%s) - using default value (%s).",
                        self.cull_interval,
                        self.cull_interval_default,
                    )
                    self.cull_interval = self.cull_interval_default
                self._culler_callback = PeriodicCallback(
                    self.cull_kernels, 1000 * self.cull_interval
                )
                self.log.info(
                    "Culling kernels with idle durations > %s seconds at %s second intervals ...",
                    self.cull_idle_timeout,
                    self.cull_interval,
                )
                if self.cull_busy:
                    self.log.info("Culling kernels even if busy")
                if self.cull_connected:
                    self.log.info("Culling kernels even with connected clients")
                self._culler_callback.start()

        self._initialized_culler = True

    async def cull_kernels(self):
        """Periodic callback: check every kernel against the idle-cull criteria."""
        self.log.debug(
            "Polling every %s seconds for kernels idle > %s seconds...",
            self.cull_interval,
            self.cull_idle_timeout,
        )
        """Create a separate list of kernels to avoid conflicting updates while iterating"""
        for kernel_id in list(self._kernels):
            try:
                await self.cull_kernel_if_idle(kernel_id)
            except Exception as e:
                self.log.exception(
                    "The following exception was encountered while checking the idle duration of kernel %s: %s",
                    kernel_id,
                    e,
                )

    async def cull_kernel_if_idle(self, kernel_id):
        """Shut down *kernel_id* if it is dead, or idle past the configured limits."""
        kernel = self._kernels[kernel_id]

        # NOTE(review): getattr without a default raises AttributeError if
        # execution_state was never set (it is monkey-patched at startup);
        # confirm all code paths set it before relying on this.
        if getattr(kernel, "execution_state") == "dead":
            self.log.warning(
                "Culling '%s' dead kernel '%s' (%s).",
                kernel.execution_state,
                kernel.kernel_name,
                kernel_id,
            )
            await ensure_async(self.shutdown_kernel(kernel_id))
            return

        if hasattr(
            kernel, "last_activity"
        ):  # last_activity is monkey-patched, so ensure that has occurred
            self.log.debug(
                "kernel_id=%s, kernel_name=%s, last_activity=%s",
                kernel_id,
                kernel.kernel_name,
                kernel.last_activity,
            )
            dt_now = utcnow()
            dt_idle = dt_now - kernel.last_activity
            # Compute idle properties
            is_idle_time = dt_idle > timedelta(seconds=self.cull_idle_timeout)
            is_idle_execute = self.cull_busy or (kernel.execution_state != "busy")
            connections = self._kernel_connections.get(kernel_id, 0)
            is_idle_connected = self.cull_connected or not connections
            # Cull the kernel if all three criteria are met
            if is_idle_time and is_idle_execute and is_idle_connected:
                idle_duration = int(dt_idle.total_seconds())
                self.log.warning(
                    "Culling '%s' kernel '%s' (%s) with %d connections due to %s seconds of inactivity.",
                    kernel.execution_state,
                    kernel.kernel_name,
                    kernel_id,
                    connections,
                    idle_duration,
                )
                await ensure_async(self.shutdown_kernel(kernel_id))


# AsyncMappingKernelManager inherits as much as possible from MappingKernelManager,
# overriding only what is different.
class AsyncMappingKernelManager(MappingKernelManager, AsyncMultiKernelManager):
    @default("kernel_manager_class")
    def _default_kernel_manager_class(self):
        return "jupyter_client.ioloop.AsyncIOLoopKernelManager"

    def __init__(self, **kwargs):
        # Pin the async superclass explicitly (see MappingKernelManager.__init__).
        self.pinned_superclass = AsyncMultiKernelManager
        self.pinned_superclass.__init__(self, **kwargs)
        self.last_kernel_activity = utcnow()

    async def shutdown_kernel(self, kernel_id, now=False, restart=False):
        """Shutdown a kernel by kernel_id"""
        self._check_kernel_id(kernel_id)
        self.stop_watching_activity(kernel_id)
        self.stop_buffering(kernel_id)

        # Decrease the metric of number of kernels
        # running for the relevant kernel type by 1
        KERNEL_CURRENTLY_RUNNING_TOTAL.labels(type=self._kernels[kernel_id].kernel_name).dec()

        # Finish shutting down the kernel before clearing state to avoid a race condition.
        ret = await self.pinned_superclass.shutdown_kernel(
            self, kernel_id, now=now, restart=restart
        )
        self._kernel_connections.pop(kernel_id, None)
        self._kernel_ports.pop(kernel_id, None)
        return ret

# === patch: new (empty) file server/jupyter_server/services/kernelspecs/__init__.py ===
# === patch: new file server/jupyter_server/services/kernelspecs/handlers.py ===
"""Tornado handlers for kernel specifications.

Preliminary documentation at https://github.com/ipython/ipython/wiki/IPEP-25%3A-Registry-of-installed-kernels#rest-api
"""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import glob
import json
import os

pjoin = os.path.join

from tornado import web

from ...base.handlers import APIHandler
from ...utils import ensure_async, url_path_join, url_unescape
from jupyter_server.auth import authorized


AUTH_RESOURCE = "kernelspecs"


def kernelspec_model(handler, name, spec_dict, resource_dir):
    """Load a KernelSpec by name and return the REST API model"""
    d = {"name": name, "spec": spec_dict, "resources": {}}

    # Add resource files if they exist
    resource_dir = resource_dir  # NOTE(review): no-op self-assignment; safe to drop
    for resource in ["kernel.js", "kernel.css"]:
        if os.path.exists(pjoin(resource_dir, resource)):
            d["resources"][resource] = url_path_join(
                handler.base_url, "kernelspecs", name, resource
            )
    for logo_file in glob.glob(pjoin(resource_dir, "logo-*")):
        fname = os.path.basename(logo_file)
        no_ext, _ = os.path.splitext(fname)
        d["resources"][no_ext] = url_path_join(handler.base_url, "kernelspecs", name, fname)
    return d


def is_kernelspec_model(spec_dict):
    """Returns True if spec_dict is already in proper form.
This will occur when using a gateway.""" + return ( + isinstance(spec_dict, dict) + and "name" in spec_dict + and "spec" in spec_dict + and "resources" in spec_dict + ) + + +class KernelSpecsAPIHandler(APIHandler): + auth_resource = AUTH_RESOURCE + + +class MainKernelSpecHandler(KernelSpecsAPIHandler): + @web.authenticated + @authorized + async def get(self): + ksm = self.kernel_spec_manager + km = self.kernel_manager + model = {} + model["default"] = km.default_kernel_name + model["kernelspecs"] = specs = {} + kspecs = await ensure_async(ksm.get_all_specs()) + for kernel_name, kernel_info in kspecs.items(): + try: + if is_kernelspec_model(kernel_info): + d = kernel_info + else: + d = kernelspec_model( + self, + kernel_name, + kernel_info["spec"], + kernel_info["resource_dir"], + ) + except Exception: + self.log.error("Failed to load kernel spec: '%s'", kernel_name, exc_info=True) + continue + specs[kernel_name] = d + self.set_header("Content-Type", "application/json") + self.finish(json.dumps(model)) + + +class KernelSpecHandler(KernelSpecsAPIHandler): + @web.authenticated + @authorized + async def get(self, kernel_name): + ksm = self.kernel_spec_manager + kernel_name = url_unescape(kernel_name) + try: + spec = await ensure_async(ksm.get_kernel_spec(kernel_name)) + except KeyError as e: + raise web.HTTPError(404, "Kernel spec %s not found" % kernel_name) from e + if is_kernelspec_model(spec): + model = spec + else: + model = kernelspec_model(self, kernel_name, spec.to_dict(), spec.resource_dir) + self.set_header("Content-Type", "application/json") + self.finish(json.dumps(model)) + + +# URL to handler mappings + +kernel_name_regex = r"(?P[\w\.\-%]+)" + +default_handlers = [ + (r"/api/kernelspecs", MainKernelSpecHandler), + (r"/api/kernelspecs/%s" % kernel_name_regex, KernelSpecHandler), +] diff --git a/server/jupyter_server/services/nbconvert/__init__.py b/server/jupyter_server/services/nbconvert/__init__.py new file mode 100644 index 0000000..e69de29 diff --git 
a/server/jupyter_server/services/nbconvert/handlers.py b/server/jupyter_server/services/nbconvert/handlers.py new file mode 100644 index 0000000..d64c056 --- /dev/null +++ b/server/jupyter_server/services/nbconvert/handlers.py @@ -0,0 +1,53 @@ +import asyncio +import json + +from anyio.to_thread import run_sync +from tornado import web + +from ...base.handlers import APIHandler +from jupyter_server.auth import authorized + + +AUTH_RESOURCE = "nbconvert" + + +LOCK = asyncio.Lock() + + +class NbconvertRootHandler(APIHandler): + auth_resource = AUTH_RESOURCE + + @web.authenticated + @authorized + async def get(self): + try: + from nbconvert.exporters import base + except ImportError as e: + raise web.HTTPError(500, "Could not import nbconvert: %s" % e) from e + res = {} + # Some exporters use the filesystem when instantiating, delegate that + # to a thread so we don't block the event loop for it. + exporters = await run_sync(base.get_export_names) + for exporter_name in exporters: + try: + async with LOCK: + exporter_class = await run_sync(base.get_exporter, exporter_name) + except ValueError: + # I think the only way this will happen is if the entrypoint + # is uninstalled while this method is running + continue + # XXX: According to the docs, it looks like this should be set to None + # if the exporter shouldn't be exposed to the front-end and a friendly + # name if it should. However, none of the built-in exports have it defined. + # if not exporter_class.export_from_notebook: + # continue + res[exporter_name] = { + "output_mimetype": exporter_class.output_mimetype, + } + + self.finish(json.dumps(res)) + + +default_handlers = [ + (r"/api/nbconvert", NbconvertRootHandler), +] diff --git a/server/jupyter_server/services/security/__init__.py b/server/jupyter_server/services/security/__init__.py new file mode 100644 index 0000000..9cf0d47 --- /dev/null +++ b/server/jupyter_server/services/security/__init__.py @@ -0,0 +1,4 @@ +# URI for the CSP Report. 
# URI for the CSP Report. Included here to prevent a cyclic dependency:
# csp_report_uri is needed both by the BaseHandler (for setting the report-uri)
# and by the CSPReportHandler (which depends on the BaseHandler), so it lives
# in the package __init__ instead of either module.
csp_report_uri = r"/api/security/csp-report"
"""Tornado handlers for security logging."""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
from tornado import web

from . import csp_report_uri
from ...base.handlers import APIHandler
from jupyter_server.auth import authorized


AUTH_RESOURCE = "csp"


class CSPReportHandler(APIHandler):
    """Accepts a content security policy violation report"""

    auth_resource = AUTH_RESOURCE
    # CSP reports are browser housekeeping, not user actions; don't let them
    # keep an otherwise idle server looking "active".
    _track_activity = False

    def skip_check_origin(self):
        """Don't check origin when reporting origin-check violations!"""
        return True

    def check_xsrf_cookie(self):
        # don't check XSRF for CSP reports: browsers post them without our token
        return

    @web.authenticated
    @authorized
    def post(self):
        """Log a content security policy violation report"""
        report = self.request.body.decode("utf8", "replace")
        self.log.warning("Content security violation: %s", report)


default_handlers = [(csp_report_uri, CSPReportHandler)]
"""Tornado handlers for the sessions web service.

Preliminary documentation at https://github.com/ipython/ipython/wiki/IPEP-16%3A-Notebook-multi-directory-dashboard-and-URL-mapping#sessions-api
"""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import asyncio
import json

try:
    from jupyter_client.jsonutil import json_default
except ImportError:
    # Older jupyter_client releases only provide date_default.
    from jupyter_client.jsonutil import date_default as json_default
from jupyter_client.kernelspec import NoSuchKernel
from tornado import web

from ...base.handlers import APIHandler
from jupyter_server.utils import ensure_async
from jupyter_server.utils import url_path_join
from jupyter_server.auth import authorized


AUTH_RESOURCE = "sessions"


class SessionsAPIHandler(APIHandler):
    """Base class for Sessions API handlers; pins the authorization resource."""

    auth_resource = AUTH_RESOURCE


class SessionRootHandler(SessionsAPIHandler):
    @web.authenticated
    @authorized
    async def get(self):
        # Return a list of running sessions
        sm = self.session_manager
        sessions = await ensure_async(sm.list_sessions())
        self.finish(json.dumps(sessions, default=json_default))

    @web.authenticated
    @authorized
    async def post(self):
        # Creates a new session
        # (unless a session already exists for the named session)
        sm = self.session_manager

        model = self.get_json_body()
        if model is None:
            raise web.HTTPError(400, "No JSON data provided")

        # Legacy (pre-rename) clients nested path under "notebook";
        # normalize to the current flat schema.
        if "notebook" in model and "path" in model["notebook"]:
            self.log.warning("Sessions API changed, see updated swagger docs")
            model["path"] = model["notebook"]["path"]
            model["type"] = "notebook"

        try:
            path = model["path"]
        except KeyError as e:
            raise web.HTTPError(400, "Missing field in JSON data: path") from e

        try:
            mtype = model["type"]
        except KeyError as e:
            raise web.HTTPError(400, "Missing field in JSON data: type") from e

        name = model.get("name", None)
        kernel = model.get("kernel", {})
        kernel_name = kernel.get("name", None)
        kernel_id = kernel.get("id", None)

        if not kernel_id and not kernel_name:
            self.log.debug("No kernel specified, using default kernel")
            kernel_name = None

        # Idempotent create: an existing session at the same path is returned
        # as-is rather than duplicated.
        exists = await ensure_async(sm.session_exists(path=path))
        if exists:
            model = await sm.get_session(path=path)
        else:
            try:
                model = await sm.create_session(
                    path=path,
                    kernel_name=kernel_name,
                    kernel_id=kernel_id,
                    name=name,
                    type=mtype,
                )
            except NoSuchKernel:
                # 501: the server is up, but the requested kernelspec is not
                # installed; report it as a structured JSON body.
                msg = (
                    "The '%s' kernel is not available. Please pick another "
                    "suitable kernel instead, or install that kernel." % kernel_name
                )
                status_msg = "%s not found" % kernel_name
                self.log.warning("Kernel not found: %s" % kernel_name)
                self.set_status(501)
                self.finish(json.dumps(dict(message=msg, short_message=status_msg)))
                return
            except Exception as e:
                raise web.HTTPError(500, str(e)) from e

        location = url_path_join(self.base_url, "api", "sessions", model["id"])
        self.set_header("Location", location)
        self.set_status(201)
        self.finish(json.dumps(model, default=json_default))


class SessionHandler(SessionsAPIHandler):
    @web.authenticated
    @authorized
    async def get(self, session_id):
        # Returns the JSON model for a single session
        sm = self.session_manager
        model = await sm.get_session(session_id=session_id)
        self.finish(json.dumps(model, default=json_default))

    @web.authenticated
    @authorized
    async def patch(self, session_id):
        """Patch updates sessions:

        - path updates session to track renamed paths
        - kernel.name starts a new kernel with a given kernelspec
        """
        sm = self.session_manager
        km = self.kernel_manager
        model = self.get_json_body()
        if model is None:
            raise web.HTTPError(400, "No JSON data provided")

        # get the previous session model
        before = await sm.get_session(session_id=session_id)

        changes = {}
        # Same legacy-schema normalization as in SessionRootHandler.post.
        if "notebook" in model and "path" in model["notebook"]:
            self.log.warning("Sessions API changed, see updated swagger docs")
            model["path"] = model["notebook"]["path"]
            model["type"] = "notebook"
        if "path" in model:
            changes["path"] = model["path"]
        if "name" in model:
            changes["name"] = model["name"]
        if "type" in model:
            changes["type"] = model["type"]
        if "kernel" in model:
            # Kernel id takes precedence over name.
            if model["kernel"].get("id") is not None:
                kernel_id = model["kernel"]["id"]
                if kernel_id not in km:
                    raise web.HTTPError(400, "No such kernel: %s" % kernel_id)
                changes["kernel_id"] = kernel_id
            elif model["kernel"].get("name") is not None:
                kernel_name = model["kernel"]["name"]
                # Start the replacement kernel first; the session row is
                # repointed at it below via update_session.
                kernel_id = await sm.start_kernel_for_session(
                    session_id,
                    kernel_name=kernel_name,
                    name=before["name"],
                    path=before["path"],
                    type=before["type"],
                )
                changes["kernel_id"] = kernel_id

        await sm.update_session(session_id, **changes)
        model = await sm.get_session(session_id=session_id)

        if model["kernel"]["id"] != before["kernel"]["id"]:
            # kernel_id changed because we got a new kernel
            # shutdown the old one
            fut = asyncio.ensure_future(ensure_async(km.shutdown_kernel(before["kernel"]["id"])))
            # If we are not using pending kernels, wait for the kernel to shut down
            if not getattr(km, "use_pending_kernels", None):
                await fut
        self.finish(json.dumps(model, default=json_default))

    @web.authenticated
    @authorized
    async def delete(self, session_id):
        # Deletes the session with given session_id
        sm = self.session_manager
        try:
            await sm.delete_session(session_id)
        except KeyError as e:
            # the kernel was deleted but the session wasn't!
            raise web.HTTPError(410, "Kernel deleted before session") from e
        self.set_status(204)
        self.finish()


# -----------------------------------------------------------------------------
# URL to handler mappings
# -----------------------------------------------------------------------------

# Five dash-separated word groups: the canonical textual form of a UUID4.
_session_id_regex = r"(?P<session_id>\w+-\w+-\w+-\w+-\w+)"

default_handlers = [
    (r"/api/sessions/%s" % _session_id_regex, SessionHandler),
    (r"/api/sessions", SessionRootHandler),
]
"""A base class session manager."""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import pathlib
import uuid

try:
    import sqlite3
except ImportError:
    # fallback on pysqlite2 if Python was build without sqlite
    from pysqlite2 import dbapi2 as sqlite3

from tornado import web

from traitlets.config.configurable import LoggingConfigurable
from traitlets import Instance
from traitlets import Unicode
from traitlets import validate
from traitlets import TraitError

from jupyter_server.utils import ensure_async
from jupyter_server.traittypes import InstanceFromClasses


class SessionManager(LoggingConfigurable):
    """Tracks sessions (path/name/type and the kernel serving them) in SQLite.

    Rows live in a single ``session`` table, in-memory by default; set
    ``database_filepath`` to persist sessions across server restarts.
    """

    database_filepath = Unicode(
        default_value=":memory:",
        help=(
            "The filesystem path to SQLite Database file "
            "(e.g. /path/to/session_database.db). By default, the session "
            "database is stored in-memory (i.e. `:memory:` setting from sqlite3) "
            "and does not persist when the current Jupyter Server shuts down."
        ),
    ).tag(config=True)

    @validate("database_filepath")
    def _validate_database_filepath(self, proposal):
        """Reject directories and existing files that are not SQLite databases."""
        value = proposal["value"]
        if value == ":memory:":
            return value
        path = pathlib.Path(value)
        if path.exists():
            # Verify that the database path is not a directory.
            if path.is_dir():
                raise TraitError(
                    "`database_filepath` expected a file path, but the given path is a directory."
                )
            # Verify that database path is an SQLite 3 Database by checking its header.
            with open(value, "rb") as f:
                header = f.read(100)

            # An empty file is fine: sqlite3 will initialize it on connect.
            if not header.startswith(b"SQLite format 3") and header != b"":
                raise TraitError("The given file is not an SQLite database file.")
        return value

    kernel_manager = Instance("jupyter_server.services.kernels.kernelmanager.MappingKernelManager")
    contents_manager = InstanceFromClasses(
        [
            "jupyter_server.services.contents.manager.ContentsManager",
            "notebook.services.contents.manager.ContentsManager",
        ]
    )

    # Session database initialized below
    _cursor = None
    _connection = None
    # The only columns a caller may query or update by keyword.
    _columns = {"session_id", "path", "name", "type", "kernel_id"}

    @property
    def cursor(self):
        """Start a cursor and create a database called 'session'"""
        if self._cursor is None:
            self._cursor = self.connection.cursor()
            self._cursor.execute(
                """CREATE TABLE IF NOT EXISTS session
                (session_id, path, name, type, kernel_id)"""
            )
        return self._cursor

    @property
    def connection(self):
        """Start a database connection"""
        if self._connection is None:
            # Set isolation level to None to autocommit all changes to the database.
            self._connection = sqlite3.connect(self.database_filepath, isolation_level=None)
            # Row factory lets row_to_model address columns by name.
            self._connection.row_factory = sqlite3.Row
        return self._connection

    def close(self):
        """Close the sqlite connection"""
        # NOTE(review): only the cursor is released here; the connection object
        # itself is left open — confirm whether that is intentional.
        if self._cursor is not None:
            self._cursor.close()
            self._cursor = None

    def __del__(self):
        """Close connection once SessionManager closes"""
        self.close()

    async def session_exists(self, path):
        """Check to see if the session of a given name exists"""
        exists = False
        self.cursor.execute("SELECT * FROM session WHERE path=?", (path,))
        row = self.cursor.fetchone()
        if row is not None:
            # Note, although we found a row for the session, the associated kernel may have
            # been culled or died unexpectedly.  If that's the case, we should delete the
            # row, thereby terminating the session.  This can be done via a call to
            # row_to_model that tolerates that condition.  If row_to_model returns None,
            # we'll return false, since, at that point, the session doesn't exist anyway.
            model = await self.row_to_model(row, tolerate_culled=True)
            if model is not None:
                exists = True
        return exists

    def new_session_id(self):
        "Create a uuid for a new session"
        return str(uuid.uuid4())

    async def create_session(
        self, path=None, name=None, type=None, kernel_name=None, kernel_id=None
    ):
        """Creates a session and returns its model"""
        session_id = self.new_session_id()
        if kernel_id is not None and kernel_id in self.kernel_manager:
            # An existing, live kernel was requested — attach to it as-is.
            pass
        else:
            kernel_id = await self.start_kernel_for_session(
                session_id, path, name, type, kernel_name
            )
        result = await self.save_session(
            session_id, path=path, name=name, type=type, kernel_id=kernel_id
        )
        return result

    async def start_kernel_for_session(self, session_id, path, name, type, kernel_name):
        """Start a new kernel for a given session."""
        # allow contents manager to specify kernels cwd
        kernel_path = self.contents_manager.get_kernel_path(path=path)
        kernel_id = await self.kernel_manager.start_kernel(
            path=kernel_path, kernel_name=kernel_name
        )
        return kernel_id

    async def save_session(self, session_id, path=None, name=None, type=None, kernel_id=None):
        """Saves the items for the session with the given session_id

        Given a session_id (and any other of the arguments), this method
        creates a row in the sqlite session database that holds the information
        for a session.

        Parameters
        ----------
        session_id : str
            uuid for the session; this method must be given a session_id
        path : str
            the path for the given session
        name : str
            the name of the session
        type : str
            the type of the session
        kernel_id : str
            a uuid for the kernel associated with this session

        Returns
        -------
        model : dict
            a dictionary of the session model
        """
        self.cursor.execute(
            "INSERT INTO session VALUES (?,?,?,?,?)",
            (session_id, path, name, type, kernel_id),
        )
        result = await self.get_session(session_id=session_id)
        return result

    async def get_session(self, **kwargs):
        """Returns the model for a particular session.

        Takes a keyword argument and searches for the value in the session
        database, then returns the rest of the session's info.

        Parameters
        ----------
        **kwargs : keyword argument
            must be given one of the keywords and values from the session database
            (i.e. session_id, path, name, type, kernel_id)

        Returns
        -------
        model : dict
            returns a dictionary that includes all the information from the
            session described by the kwarg.

        Raises
        ------
        TypeError
            if no keyword is given or an unknown column is named
        tornado.web.HTTPError
            404 if no matching (live) session exists
        """
        if not kwargs:
            raise TypeError("must specify a column to query")

        conditions = []
        for column in kwargs.keys():
            if column not in self._columns:
                # BUG FIX: the format argument was previously passed as a second
                # exception arg ('%r', column), so the message rendered as a
                # tuple; format it into the string as update_session does.
                raise TypeError("No such column: %r" % column)
            conditions.append("%s=?" % column)

        query = "SELECT * FROM session WHERE %s" % (" AND ".join(conditions))

        self.cursor.execute(query, list(kwargs.values()))
        try:
            row = self.cursor.fetchone()
        except KeyError:
            # The kernel is missing, so the session just got deleted.
            row = None

        if row is None:
            q = []
            for key, value in kwargs.items():
                q.append("%s=%r" % (key, value))

            raise web.HTTPError(404, "Session not found: %s" % (", ".join(q)))

        try:
            model = await self.row_to_model(row)
        except KeyError as e:
            # row_to_model found the kernel dead and removed the row.
            raise web.HTTPError(404, "Session not found: %s" % str(e))
        return model

    async def update_session(self, session_id, **kwargs):
        """Updates the values in the session database.

        Changes the values of the session with the given session_id
        with the values from the keyword arguments.

        Parameters
        ----------
        session_id : str
            a uuid that identifies a session in the sqlite3 database
        **kwargs : str
            the key must correspond to a column title in session database,
            and the value replaces the current value in the session
            with session_id.
        """
        # Raises 404 if the session does not exist before we touch anything.
        await self.get_session(session_id=session_id)

        if not kwargs:
            # no changes
            return

        sets = []
        for column in kwargs.keys():
            if column not in self._columns:
                raise TypeError("No such column: %r" % column)
            sets.append("%s=?" % column)
        query = "UPDATE session SET %s WHERE session_id=?" % (", ".join(sets))
        self.cursor.execute(query, list(kwargs.values()) + [session_id])

    def kernel_culled(self, kernel_id):
        """Checks if the kernel is still considered alive and returns true if its not found."""
        return kernel_id not in self.kernel_manager

    async def row_to_model(self, row, tolerate_culled=False):
        """Takes sqlite database session row and turns it into a dictionary"""
        kernel_culled = await ensure_async(self.kernel_culled(row["kernel_id"]))
        if kernel_culled:
            # The kernel was culled or died without deleting the session.
            # We can't use delete_session here because that tries to find
            # and shut down the kernel - so we'll delete the row directly.
            #
            # If caller wishes to tolerate culled kernels, log a warning
            # and return None.  Otherwise, raise KeyError with a similar
            # message.
            self.cursor.execute("DELETE FROM session WHERE session_id=?", (row["session_id"],))
            msg = (
                "Kernel '{kernel_id}' appears to have been culled or died unexpectedly, "
                "invalidating session '{session_id}'. The session has been removed.".format(
                    kernel_id=row["kernel_id"], session_id=row["session_id"]
                )
            )
            if tolerate_culled:
                self.log.warning(msg + "  Continuing...")
                return
            raise KeyError(msg)

        kernel_model = await ensure_async(self.kernel_manager.kernel_model(row["kernel_id"]))
        model = {
            "id": row["session_id"],
            "path": row["path"],
            "name": row["name"],
            "type": row["type"],
            "kernel": kernel_model,
        }
        if row["type"] == "notebook":
            # Provide the deprecated API.
            model["notebook"] = {"path": row["path"], "name": row["name"]}
        return model

    async def list_sessions(self):
        """Returns a list of dictionaries containing all the information from
        the session database"""
        c = self.cursor.execute("SELECT * FROM session")
        result = []
        # We need to use fetchall() here, because row_to_model can delete rows,
        # which messes up the cursor if we're iterating over rows.
        for row in c.fetchall():
            try:
                model = await self.row_to_model(row)
                result.append(model)
            except KeyError:
                # Dead kernel: the row was purged by row_to_model; skip it.
                pass
        return result

    async def delete_session(self, session_id):
        """Deletes the row in the session database with given session_id"""
        # Raises KeyError (via row_to_model) if the kernel is already gone.
        session = await self.get_session(session_id=session_id)
        await ensure_async(self.kernel_manager.shutdown_kernel(session["kernel"]["id"]))
        self.cursor.execute("DELETE FROM session WHERE session_id=?", (session_id,))
"""HTTP handler to shut down the Jupyter server.
"""
from tornado import ioloop
from tornado import web

from jupyter_server.auth import authorized
from jupyter_server.base.handlers import JupyterHandler


AUTH_RESOURCE = "server"


class ShutdownHandler(JupyterHandler):
    """POST /api/shutdown — stop the running server cleanly."""

    auth_resource = AUTH_RESOURCE

    @web.authenticated
    @authorized
    async def post(self):
        """Release server resources, then halt the IO loop."""
        self.log.info("Shutting down on /api/shutdown request.")
        await self.serverapp._cleanup()
        ioloop.IOLoop.current().stop()


default_handlers = [
    (r"/api/shutdown", ShutdownHandler),
]
zQ7V>LTWo%8lDlY6r)z)Cp8PP6V5b82d3|{(3DZ*;W#sz-<`HRj`jU&q+%jW(aPy{8##RXGP8EB%kH07L>^Ru;>~s4**P&-t%S%|@ z44>skoju3&6!!oD)d7ZX{!022 z!YzXMSKm|`kC~TqN3NFfIJmyXcAT1(HQ4p)U&iRo-;q1izU2te{tB-8JP4t%6F)qIEE=_7u2F> z|MVEoB-4j$PO4Q)%kPSd8@P5m0uD-D@x@*Nuh&YI2=8^hL(Ju`?PNtoc$g&v=33_@ zSCQhSPFsdd4TFZc-4FP8KcAgA`rcRg&Bt$whbDF(QGZi^(%OY^7w!h8y&@66ZzzIt zNL)Ral8dEJ8RILG6VzHwIc=YjK*>i?rZ%p#}N7e8B@7lkdk`YCfj zP4>`^T}{zrlT|^Phmu}K9B8hu77AV&H+}NM^ue{=l!(;ZgxmnrkEHXriGP zTzS&*;L`@t8D%5k17r!|N^EVsZ6XTm!xsb!%gDLy6siA#~>GqY3U?fygoLaIxXiQSljc@VRCCMh%x55z_dI;y3-g z1K(d02m|-^m#!!Ppw?dpM_r}!(MjL1Oy>Qketxn6i|C15f8sd!Wj3{yp~;vbxN z?V{?iiF=b66sEW${Pbo?7-1WdIkWKMBu$!!Msf-hWxEz)Xl|#SBEKFAjI*xDh`EF* z_Ais}szGdVN?YCP7<_$fBHDKSWRZo>Qe)Xe5u>ss;ThCH4Kuxv!y&Y%a%JJWnLP}7 zV2iA#uR5RBy7hLfeW*gGDdR-dLf4q?cE{iC=I6`8>uEO=94$5(+x4W~b%jZ1>5Rs} zK~n@n&^LyZxoZBcE^YYT-;aa9WRq9J3meCmd~84~ue-?bjI}@rTEKA8%gerGvf`G6 zksxddM3@kA{#}V@LnH*lP$Yqr{#L0}G&8FE^$im3S(lD*C0?Hl`Y%TbOREB~@{0w% zNw5oUU8dhFi$LK5x)$UKZ0=Ubu8XSbk zkc#5p?TPFPgS`lSmc-dehK7N{ovw7rSm8?6o4WJ0lk^cyu9Zce-IlckoJNsv8pOIT z2W!L9c@LO?z~bZUqor_j$x_v``Fcc5+aR5u>8(RlpWp>HASIf2^xmjIk<(0Z91g3u}ep{{S$j|pbr`R(-yG8F3Vjjq#*yLUci z23-@;houk`04h-*BOxHqCCj#78Ll6cJ(xa(-@ArhvW;sbCkUGWguOP+KtL$cv5@N- z<4^A>>F%nZU-<>aG)w{so&UA=`abA=>@t|HT1D2Sbkl)c64NvLyP1X}&nnta!;eC< zn*+6ax(O0sz%>q=CUTmxp#OHgusyly{e4Smm$%|h$WFpKmBBJ-)XT&O%kWs&&vg)G zJ$eCWw6r)9MazA-SR{0nS_w7bDs`M;B300b1<8}hXjJXgGmL8sBHnkFlCU)oz}x%8 zg%n|7@NcmWqGtzGZIeatiy00*9quO<3HIj;40q{|zA}6hjvHgml|HuosgAFplG6#Z zX1gAIKEkI2FZe{6sxh#@SP(BYtz*SYIk{S+j(FlkhDj* zm;>os#>jbvNgejt*R=c!Ewk(8Dm(hQHj6x$(#O`4C?6f>9Go<|n_>JZ7YXj$Oal3< z>$YX{Ql&plo)HejmWzhdiWd$eG<7hrQ964%B)o#nV6=yiCjZ{DGlv)>rh8kikwQ>) z;$Gh0Ak7S#lH+0msmibz6I{C6IessS{1kIt9e*LD3RubOa&}0B;S~j<$4Go%Fe(Nk z2y!)0Pt7_~M99k}9*~y5%2olM!073CNHdlO_{`l(fq(c~KA&RNXJ=yj|Yu zPKdJK<-xv8^w*q&7nzrjw!b)8ubu68Cp>ktWh7RC#ktlnQ=|4YDIA2!L_nm$Bk#a3 zqUJ|wt(FH%6^ae0y5tq0btFHmu*`mT@VRDth+#~it=p~Z%t-jScB0B}-gC-%T8;pFF*(=3n|4Apd+DusC*8G~>FeF2~kS*mm!;c_0#mD8UuO()$W$9R*Q zdM%)^N=L^T(9$E+#RtE04`>k9&k7 
z2zDwGI*@Kyc1aL&^#r7(Z9`C!|EWr9y9De?(pu;@hWk=8_X;l^Mo2V>kVhYDUw}U{ z34cMek6noQaV*|opg2AHCPBSPyx)}=JVyD#({{#f7wn7EvLf|9w5 zTkP0ap0^+b8PcblB`a~Eq=t^`6!OOlxV0jxHCwb^wDsOLjdU$>!dDJ3DL@;$PM%~R8pO7*F3Ifb3 z`Ez9KAu6V;mt&dBwfX(h_ujb7bvqRNN_JNe|2kTDdkMjR2vug&U^-eiQgFUnQ|lLn zv253=!a6JIyCm=i6JdM-&W+0{8LPs21-L%b_`KM$EaE6;2sKpkK*;^%dVEc-9bw=> z8rZ#?X`>FT+gLqJ^j!CL`~BN8PU$u|B2fKk+R&Jn*|US$gx1L30PJC_Eh>?KjJFX* zHSW?vtTQh!uva3aide}N=6QEooE}O~Ar#Z&HLC+5sj~PHb^YUtQblJgb*?Ty^_B~LKe+m{NA6JOe<#n>&b7W!A^4nK z_asKjnyY8EzwxE!C3tlw0AeW92y4Lxl8Dtcbntv5qZj^EqJYA$zreiGHpZj!Fhp#D z%(_KKq$~)II$C52&Py|k=LT#+glHr+O;QMU#(3qE#fwrvUW(ZSPWP+h9VslWCE|CT z({qXZ(qQxnlJ^59P3g1pVTF&YyKSKNJ_dOsUwdtMIP_pLA=lY~rSI`q<{UY6^)ZGP z@^tMdXZY`|l&8V(Rp!B&SnE&I9fu5s&djnC?ZZEO<~E0}6=uCR(X}Haj59Rr5OkQp z;_R*oP{S{|kmVks1(4_X^T!-_F>l)0NGGH3^A-|(gZ;ZN1f7#;azX$;izy9}YkVWZ z^x^_L8KyTe(REQJ+y8~yD|YDjS>J(81Z!u0hT%mE;bB|(Il=kKJL<=mai1oS2SqTt zH`7EDVmX=vLZ1}LC^!-7L$;ruB{LLedn_!pL`{0op2t>4fCdQ=({uB8MF`ny#)-b# zyYgjLm7k4BKk^ojy{#|yzN_PSGb|KV0-XxDSS-Iw7hc(QT-TgIa+mN~2E;p%Rm|}9 z_7GA7GIoHIf!y|-kgh%oLswBtixl(L zki2pij0{H4Av}sUf{Oaw2no@bTmT@7D=)4?022E+uv0ON+c>#7V_{8@SL`P(A%Zu7 ze94}R5KBlUjKF$7;6w=it$}Fp9l^dcv9P07yMhC*`+QjHdry~$ zIrJR67lARuyuWHBpw+w-(0%{oPsZAB`=g>W<0Og4?}yBauxS6yqwJu>HT$I*L8$be}JSIU9|BPG=q41iQtW&5y5p= zEPNCJPX!WZoZufGxVJNKmKjNEh2T<{9|JIO)Hl7yLfhNi$Oa89@F+tUhMHu)yO>Zd8bU@MPMTxq_uZHshwPOY9fR!(jIlNJndp_0&y8le%z+A zB>CHTRY)fC75S8cG?8fs3akC& z7LbGhkqW|A&$AvBgpq-0Qja1ws`>r`L6FS7ruF<#vJa&P_mA}xwvaj*AcU;}(`k6= z*db&)%j|JO3o+@+M=X~E(S5a@d2GW3Ea}kzIy8uDCuAKN^i{S^%64teH{MaRi%DVb zgZhNRcD;Sxl2GdsBf3Ele|6WBaQOtE>(avo*FV)qR^-C6QPiY4h}VZ^lH35I@-9U> zVW+gnJek+HgBg(%JjJ)J>Ug%u?5)U#K7toRgrNq#asscY%Z2&IxDhldvKDbCN;*9R z`%b;GZVZ9XaM^XYg743%h>Zm76W2Z2NWo2{B;Zo?XcOQfUdZo`96ms zcOEI3@q^Jo^z138dFAvltT@8s?dc9)&D=u^ZG}`-crTh@fqKZ+d6?ZfPiG_vWe%Qs zA-3*k;9ZJM1eR~S#UKp9Mb8D?SGS!e9&(=LTaP*9nA#Tm@ncU7Aa>N#6 z2Y*HeFFeThr&&N;gwq8X$d*sE-z3ltQnHh1dLInrVV2VhP>n1->3U4+s$%xCgijx@UmooLnHqI2O$8b;6Vubog;Q}qx} 
zu-ZQ$6h1ZSCm=dApt!|+0I<9bhVEUpuJj%Tg!%!K=wdkR6aOq6v+NPTu~9i;HlUJ{ zP{pnFKd$NCRqA8*g_$x;BPdDfM68*#Afs-hVk5galpU@f6!P)i+%4Lki@72=sTN{8*<033a)PgKdIeXnl#W|uj{ zt$Zntv1(@bgKN8;?J^}1{C2bq2&eaCD+qXJOY+%guyIW=a=nv-bX+~FJO>mfcR6*g zbP-CnSUGU@P>%8==w9|1L}eihl>&>RIUi-G8R5=j@a8NSg|U2sig2YF2C zxLFbuugExDJ=UwHK<0I^>^c|frBJcAx{K$2SA7ENBO#Nq0q~#l7*}Rzh3~upu2Sfs$SrQx%2Gju@ebl&AD23rPYwadB1ySM7Ak z6NGBkG{F!gfxgP`5(@vvXdKitq>u3Ce5_0gLLbuET%5^UE^@mQjuz}rz!u6@M7<%| zlQ~^us{LNsd8%h^{>?BV*dCL@#*38)pU5D;Hx{lo6$ z%<@x157;r>Nqos(rTIL8Fd174x+LlB9t9iRqAU;5k%%K^PGj?ik)KYcCKLSPGLV@5 zY(Gr7-(mp4`4VVi6((PzJB@isQfP6O=Ck);oFuh8glR89vK8;Va2d9t6tvH}y62>9 zHnICr#98uH_p*?|pG-d(-J(CKKzb?#{@Ln$8?HgQ+3^q<#yxLGa?*2R%18j)=4vGI zJwx*06to}g>Od9T-6D37D)>?E73D#Ly-f4jQq*b}vQ`gvh9{OaIDXvXhAfK~=t7%l zDG@UCt`k-MpInW;k3P}Ha`MDhq3soma+>xtbq?8-764Y!i$YE!@2ztyEQs#=`4~Z~H5@TcI#yXWf?qn|x@KSa&ur7Bu(R*NB+=8xp z2CJuLoHZr=F|=xCxM0XQ$ZPa$so@SBHvr3cwHLv#Gj)(2wh?A%6HqZ`(ssML%UNuHuK%v4f)M7S837Om4A64-Q_NfIeNf(*^2-FFM1 z?p-pktF7cr?sYTI)G#Snh!}t$^3`-OFVV{B>LH17NE@B8>3o+;1k=9f83ie4QShZu zdGySSo*oiOUa;7=R4)L-o)At?xVfbh9*Rh=!M-AZ9;v<{U@#d* zD!j`En&t>p^F7Fa2Yc*JLgKex&ywh}U5gv3W?hg*fFz=dqh|PxK2Zr|5k#1iz|C%b zd2temjToIm5YYAZXD#be1_BCPlAV1Cl_Hg+o!3LgyvEf?U{7~ z(SSbnY0q`QU&u|ViAMn3`9SN%KCK0C^&Wd4Y1EO zv!>T4?&n=9B@}gX16mbuoVImWl~$4fXn{xCy#kLHUo(S~`8&L>ZI>@WwMkq1B-BOV zN16J4u#~p#X%O~?Np?Czkiu70cSHV6?P-w3wIfl}WXik^etDnT&dDB?CdG8c=$J9c zQh^GO0`!p}9k7N(%kn>@s3fRV_kz+>&;y8^ToQBN)UBqj1(Fv)Ui4icdw80hFf6-P zmSEbW-@SsZAD)W?<59veWc$yA&bC5o(DeYO^q#f`)_#v3ZkxbQt#7|VQ*n{ z-a>YvNCv z9H8a#KL)C~K$DXKz0$P=Yhn-;^n^9rPdo9Q4KJoiH9Ev&w326NDM<-+@KebJb&QfF z#a?QbwDk;}ZJ0|gHy>^|$P4sdI@$|DR;NXF{6TrhX;CSdp(;TPK42r0+Ut*LQe_>w z>)4Pfe^+dG-_Rpb;j;=t95qxr^6Zv)wGj*XT)Dl`>r7+j$W;Y<~Li$Vt7ZHaqDYO(`@<_zG1pC3W&Fooc4LG=*62Zwxr1~Tq<=Bkx~>|pSG{k}>Jh##avQ;Vbx-YH=dZ_bvA zLZ%{N4xDXiqEHRWCful{eagD6Uc0$!PRHsf)^hC*H^`r#j2%r=7@M8m<5SvmXy|u z9K$~sb;|4zU{1@z+PV06&c5zMaxua^8@kIt5dIXnxz8=+I;Lz04TPb{N$Z|rpJb~x z1#BO%QiYyL`&K=a-^j;0a>{>Md95=1sX~5KC;5=)mfgv@<#O3=W%5`c0|`AIb^ZHC 
zdr7dwYczdjcR9;W&c|Q-`X+o$&tjCmJ|${1|I-P7pSk(rZUvwvfVB5`5OV9xWQ+U0PUPnggwyz@#xfW3$CuYRK(ius z-0N+n({ddnWG`rTQLsZ^EkO?;*u+L6(n>oO84#tY-J}jD3Y~?b4+`ddrnrOsT;Vah z2jnTi=p2LsOTieJA9=dqtORW<`*$&bEFo8GVE$fTu-HZ&?s1fd_A8yhur2hl?Jp1N z^zaP0SXD;*ZP)1Z{u$C{D#G3elQox*%r70~KL%eOzd5^bDQud|LlH4wAbc}Epkd>bJ?utdhi3Rrb;&e1k!?WGFoZ$ld0s{qeM##uyJ4 z>HS_EL@u43eT&MTC+CTmN+%OzPS5Lc7~ZR4X+5JD zkV=1xsXAf8!@pzCXla35}-V&*m-t8bcg%f-k zLA@Mt9bnY3*r{Vo_F46=fYRP@Zlg!Vf?z{@0aBZ~&#|Wi#W@1^MZ`h=kotTf6|xku zZ0vbPieS)>7k4g=13^2cp`z5DdhvZnUgoPY#?{8zjydw#cD#Gz!&f?`PzDqQ^dON> zM+#&K7yh2%$2t7&!P~wIc7A3g8Y^@1n|&ki&9jGEls2Xo@Hv@t0+kU?NNpN?P-DE> zTNvMug6c!!U#5%*ja*N;(L4}`&21P==JR^m?Z4q?^3|y#3abtc&Y*I( z`~gD=^p zszu+XTzMF_;JUVT1B+zhnU{q&O6*G3>Ng#W+)l-A-C7xST&a?;>|9^k=+*NL8&6qo?qpoz zD<32ScnI3>p{MEUY=0AG181P#%`tsKezy$u&&HzqxrT(1Ujco0f4{(y=8wFn4<#!F z+wR5s!DQP8{lN#_GuOrjf8zC5M4iFjN*H7Lw6i~%!5!B|Cx_-z6Bb|OG}#}L{h23x zO&*o=q78bK75+1)YZ`Sjq+=f?+FiVg3@NQ{8%b=tYeVQLM^WPoYEFDDgo)L7M8wg7 zCSc(ipfP|+K~h?~?fBv&KAlu|6Z3h!by!|Vs*jk zaH04LHS|QGy{feB<11JnSrXwBsx`}Nq#N-}GS;6)t#nv-7JB{kdI3FAi?)w0H7Tub zKlBx7T6K-#0Zfx5(@EdX<1%&Y{66j@O4`a@eDM7VeoWLk(_YW{Crg|0I%~H7y|m0) z$vFzakW2O@wuiDnHQL{<2#^H|(v?i};uN3K;*p9=`1WFJkEM)+>)wcdDYj9cYw`xv z$yc%@e)gx&MsBCHHNy-_9)uZA%;~WGf}t8f{#9*1{Gc~y4F+q#;4mdyK%P;y->Zq% zyv%$ib3;yiYJ|D0yX)WY0tjvT^&uL{il7inog5EH^=Ey|aI@$K*-+_C&a#S=^Lhm) zcXk5CKd{{VS?^de{!nCN&(6bhGuMY~z`M;k1g$DL*$?m_?XLY#xCHpeEYx=`3_0KX z#;23(Ql+RuGa^j74^^;^O{EvT-`SbmsZ)-=I~EyHA@Xvk>Fo)-yaqVm{Lt5K5S{`z zzH*xHVgxW)ty-BIxIK$L*7bypD0BRu2&EC)`qkgVBrz9PPJ^KYifWl1k1j&)J;MR) zmFoH=EQ~q^&<#!GiKphpQNKP#B4U*8izTp*v3|SNqa&2!Xv*B+t{qS{RaW@f?E6#5 zXa31GNT^~h-lhFEFME?n#&e7+i8Y+HdHHGP0>y^HogN+Len&03oU2bZnAW;aIvPR* z!!2MJVLYZ)K$&(gv8g;k2cf!JN(vlEk(1YU@%xIg~R*Qq{tQ{3=|6Y2!Tx(utq=I4%$;EsJ< ze#?eA7!k(uw`e5H!CCYVkmBoSTU$We2}X|&Qv>=%mu3kP*8UPv--#U^)ziB>tbGj8 ztWlE;6?U{wv#}anJefX;PvT@W!d^k401uWCtn(aVkpV^PBk6hZilTUYkwM*WHTvFN} z<^mp9OeQHXv?Tjy(^jiizpUsdZUo#kk@o#)s+2!o!*DkvX-{rDocx%>-7}M551_Ux 
z7}Z-Og4%CrVG`p~C7scfy{7{gv!0>tOMZ9F>j((P*u=a&c%z!3C1CZg7Y(|VMP3;8e!oQyBh{&SJ=)gCI?H^cef0ii zvz8xEBvQ>^>d(tO656(=XWr7Ak03uL3z`HfuL_q_p+}JF^8jz6PfCYu(~=j{Ws&XD zwtvQnLs#4K;5pY=lf{6!bJA4h-dz5r=lZ)P)Ojknc?XqCE}<>{uMdlw-HTu*osq#D z4$aIY8Uay?1r4{ORpo@}F{1&Vwwqhsq1R9zz0yi?^bM3&m&M328YE!g8_btQ8Gz~( z4rN#gkoDR#k3cR0YQda_i%I)TuoxSdrm;%4fve6*RuxYDe*C^OX%pM(ykqCdRts*7 z*%p5@++eyZO92)WM}wb!qW_)*w2LtVG>hg>Q2S;+)>N=avQ8j@*%J71zs((g|z03!d+tDirwE9fUQmbm($ETrrAFz7x20kWVek~?K{aJM@i(RAa(WSgMEEl)7^a*BjC zV8(JY;fb*LfMeUl_EJ#MDf_oVT(00i$jzG#gAU)Q`(aq;1HSc)=oj2%)aP~mMxXt> zsM{{{YgyOutipy1`7uhiD~ve0C#Tok$k@|#Xr>%Nd#iv=$-a&rmcddI$k1=~7WS?i zB%e!Vct?^I8i#>nV6)|@f*SlkcE9<0>Z!`!$rIPLzf`0k&F^>WN;_2Des!gn(1?l$ zcB#$z4lcd5dEmZg`reOz=m|9ogG|TQ1^8B5D`FI@hE0`nZAzNy{*KcKIMhOz4$#rNwh%n3L9;$>wGoh+yD?PV9`*z|Z_3INrPd`}`i7G_*LSANTkk?6xARX}pUbre zqtP4A5!2)2`YQL z@2)EG<$3y?!7!$aQ;V2=_j^6;2duUKui_wGqdaOg6v ztu$M#1QmHv^LkBv?e=LR?^t(nk85FsCSuM&fu5*E1lkEROFhX@wWMoXwIIXw zcbe%HAZxXL<3k{zk0y5<5WBmZlZmvU>#N?-1xMt_XvKf^*!^ja+RlGsWt>}W4cPrV zE8L&UN<^TJW-B1I#R*=hX7D%G$46Piz+^oh1Wt=Fw8D5onTvA*n*y05Ku9MUC^mcMbXR867Z^6}Y<8w5eL! 
z+81Wdwb(<18<;%|P7`4(n4pFa)cx~YoRTBZ4w6HY?Q6fG$`yr@f4_p>1mRtc|JOw5 z_LAu7weDRZx*`&n8|P_4YW#*p%_qu|f#%BT@#?xJI?!-3pyCfBy^#BHCW`=!M}WVB zSYPgbyIv9G4aZdWw0M@@BQW*t6##m}0lV7|B`sG$;jWx+{sGuR);%xHmekE4@(IlZ_!)Ei>jd|8{JuN)8%@z>wVJOzX2Z(i)@$3j?y9tYd; zgwMzm7^W!#?|q2= zb>1`r$_+q{0$6~X?lizM=uB|i^ju?Q{ta|=auI7Tdo?B1enM^ZVl%nb`Fw_4$`$uPzRhbPnELHaz;5%&tHr+RuZ~M#!a2QJGU>AM8YOX5f>X$Ty zAw(N2#hxk5JQ$%htA&<81_ThWx)BMMz|ZB{1g1j<9Da42Q*LpQ^EK2|*{VAej@HO#^F5#W9J znz&3hq&8oR7;57@VxqQXOr04ANuP1U)K_CzVSKL%T%Fzp`}r~5q7&NBw^(o(r%Bht z1R%1O2@d}xdu)%&QS&rMXeZn0wH16jbA?v?ve4m3)>S31_9xK5Miwhr&6@6g=x`Ms z+EQ9@6HLOX5gWe_Yl9GO0YbRGR^F<0Aw-A#*tfjQ8=h}7F5|p3cbWjw+VtN!53T1T z7*D6YZSV^^%%7Id^W~sffY&D5XV=(z^q-O}}ja<&0OR72v>% z%HH_7v(PLsMiwAM531K5>~O3tCak`~bf@&lZM5Xyz*7t*cNPYCvIT5%@^>I}# zSz_eD^MRgkLmGX65b`yBU)jytflGug9w3kl8nVK=p^=By>(HdFCrE7E6o&4Gk?yWv zl4~sVQ;DPF7;Z=!Uf?mj{6)Whz;n7WL)ZIj5?mDg0^@UGO3j=geE>zJ9O!0%6Pxz4 zwH#vCEhk5s#%L#^Q#9gce*$4pYyn?PwW_tRX`lA^xgCbIi)}siyyTE3FXks(3PfZ1N-qvaD@@!zqy2fgv+EBy*87L?_(b+uW7sn8 z14ZPYE}(^aRsjf*SW(Rv?ANC?w2tRqRtng*-27d%_*Lgo*-K7LU1)SjjG|qS&Sj$q zsh}CWUTsM|B|+l?+?zTbl>P1k^biYe!ys*2CT=<}S|k43NKODhvQrs+Bs505e&dBS zB4D-L>}Eu6F+JZ+8T;uQP3dC;nyovD@$Pm+U`IL47z2zJ z$B1CmcDL4XIkowTtcbLg9lx+sarBX&O`yQ{*E)VHVGt|KkoPGy^%y4j2+NqE;mNi5 zP*XdIkp4&2j&4UvI<;mD0a@Bh;)*7GZD;H88vp|>qPkLFlTg|`t)cP*$11Kj$*>RA z+6b+dou+>+2r!6U1;o^orDu47nP{CbdRHt5&Osj}oHwVDzJW{69Q0n+a3B-FPmT`I z`!9dQT&yt5PU92n9b_FTxl`sv;c5-;qs~hM${n^#hp>H%y6;)k`a6)4;UO5@@h__x z5~-k90}LYd$N`Dl9v#1Rwf#&~QcMU&q=9vumji09VVF>xNp&i*+Mp!I$df^**?QCa zlbT=I>qR3+_PnzF?j=O0O8hBe^*XmZtDWtC=V8is^`kX|jcsq5X~K>2fD~$A_>?b7 zJ|d0?qf8=ctFv4J384chJNebVk$W|L251bIDsUGn{waE~`&V3+(+r}6Ix{L>RbDy* z7Yh#nfRvZIL%JB(fPevN5>(%b|2(dVo9fIStJ@Hy92XQUcW4{+-^j<;Iqf)jT(s4= z2mtu}P@=*J;n?pIs24uQYd!(m1s!pa2Ck0ET>*zzBBCQ!$E8IS;IvNgp6WOMNg} zM9&W7+bV-IxBLx;21{J(bMN}zf_onAYtHoALi3G0X!cuRXCKE4f`)$=on1>~y+d86 zqPECc~TG3NmmA}0^Zs-xxoT(_$Q7T=lhG^LGiuYs`u<){< z{RU3>t$g0);1J#V4$YGMV;mGsNuqISB`L~%qS!h$fe|x&EIKGgp^Qtktuy*4N*?b5 
z*b=3I;jA~+T0)2dVd0xl0q`+S=PrQaLSUlrs~G{o`~+pCgQM_xyF#K+Qr0W~EJmqJ zVQ$E$YGU$!<1TrvAgJS3?WS*7fMSxob`zX2%nv2er?i2NoQwlN;|o>qkxo)&-(>&y0*l+mrcjY7SnUHW1Z(k5~ll8fIBQxxH7gbbe9eNlm(7)Gl*N^RrI~_=*X!-Ois6KAq0`a)!p!{pSbZzP>MB z_{}Q=UcYW)JLT&b6za$znj+8L9y28PL&`Hw0}R`|EcO1c zK^3o$siSv(54_*e-Sl%)xc<%NKnu433Z%{xQ0k=&ES1ydN*XXb{982mvX82zd)a>O zkCzb*DQ&a2igmoQdn)|rKFp7`_+>048jnn#Uc8)pKQu|0Eg2pk0a~Ddni*nM-=IVw zk-!rSJC)1lxLxMOrx)xr2JA5E+zS^fSfQM3*;8$7W*tymT#>%u4M&s$;ptDpx4>fP zpzqT7(%=06It_HPolMciTqdpzgVPbl7w%MKc=P#i3*f_w=8l_`s5b~BU!MT&W-y4@ z`1}s#J3==adO!=x)eEu^Fgo+x4eB1at!a)`g$BIm^Ihm176>}?S%!43W8uK(fDsVW zN6eN(L1RY9eN&io%by5X#V^uLzM8`mWLjj<(Y{$xad;Bq6bWcfJ^A!i01>d>W;O;o zm#^L~#bNVF7^vXW5sGWk>!3U!!k7XO8t0^7f(&j33C=^vyI1b;)ykHat?%cuDYCG7 zJGvNOh^V-O>3TW3SUox)2#1C^Kg#>%DRZiZP>pc{x!bE7BxKJ84CEE55R)BM(MnRU z0H=8dzw7E$C%~`Wa{(_RHpIyjcn|h_J2u*CJB<;gq|bUsSx2lgs&BZz1?Oha^?xj0(b6{J7n3>gJ#R zoe`KvtOxPIwMoJVFGc`iOL;%YL1bCtsQ|Zm2D|I#mrlG1m80Cvf=c99@}UEO0gU@? z%D6vI9Tl!xzRVQ{o%N}(H+3+3@C@MUxDZDw6lLxm<`NV7{I&_(6-qn3w7j!d>S$Zf z?t6?(u484oc>MZIN(SpfYW6gqxbs1?jn$0!BKHNoml$C7o+`L!o$an`B#)%QFbmWn z!+#JxIWI04OJPk?pVGFnGTN|V1yWdU*B4N0Ub1tg@2?!pkR1G{QlZFq53%8RlBz&h zWhX&g!*7ty*U8p%clr_-&I@2gEVcs_5K@@=b}@?#BUN}3MeFBuo%yCA>~~O0oU7an zI!NqBM<5jzu*@+JjL4uy28zwkRTcCC-$f%p;(KzRUy$9HRG;hn@CcK$g`9aC$2wdB z9R+A4K|PoQ6fI`>w|U_9NAp+J=+^RjF_?JZzC~~4N1_-zEVB7g(J#OL>r(n2G^RQI zh-Ha{7YYx`pxs4#fhHJi_7DPmg94?fQ7@ZE?2EJ{bUtpS#^S8TZS%*J8FZev4xZ?q z**Aj3=ZcT3SAf5cjOOoa6M|{Y_FZx&XN^Rx>;c z4F1l2*-3R}9DV;0(!?ltUYY_i02{i@078C^YiOdV)pT*;gf0R~MPyuqq(a^-HQw4V zr)Dq)eHqbzqaHIMwoqRD+7U7%s~FAxn=Z7ltfd0t3d6 z$w9w39XcMbM*_HG^kzWA+Q**j=CpbF*rJ1#MAz}W=9-^$=a=K3vMLwL_^@N()O#5U znVF{WamrVyTkU&=L?`77I<9`cFUGht7mRb00Z~(c$-^&`8Yn>4kMJF77gs?17XBW0IiQlyi)Z-Q8PqU6|By{%QNYVLLtCbv@1f#UutuW>m zlj>fHI8)Q3hWkx5PYQZW#VWRLsrtx;CDlm%Yr>$2y9daU_!Xo*D>1?zgzQE@s&4QP zm6e$Ll`TpRw zgU$zcBV|hho-7*KmUT$bxx7L#x3Rw28`)`t+m9h&K5@|*>hdoyfet;%qaVz-2o0rw 
z419oo4mfBHzQ3Z`l*+>tfyl}A^z-?)4@RADnUmBnch`+A9{g#kmDH)ZcSiSpa5dh#mtoHg=v(6mkNqbmx$-(Rqeoc#Dn-$D{OMsPj1Ww)NfIb z^qPe&*G1#Wky1)tzDH+2_nq{Oz3PFab`3^9 zmr`{$`D({zC}ETO@O!o(>);2wOo^p?1Kdc~(N6X&W_#+)EgY}Nv`9TKkB+l*1QH=g zt;_?XGh5U2I1!}m)SRxjor3l0ye}?$K0+5`MIT6g8YkqE6qlc0hImC-@ju|vg@T5j#&mt;?3t~Chvi+F5EpOya(cVFr?d36| zh~>z4Hv(AVi4}H3gV4_qQm@lk=}C6?BH|;4p|_s}3K6%t2&ViIq}(F9W(j;hg{{hT ze%+g3zWcULCu6iNu79Ge%8hI9WxnBlRl|>JV%?QUayoCfKkL?!+bz=aB%FbP?-1_0 zCkIy{-SpELRle2OWmSAw+JE$c=+2naT9-JMpRA^mc1?~0-?#3qvgdl1o7;81-}+o( zGQBYE#oM-!PSlD0?{lM?Z<4YHqDv682-a#F{~INtZqJAjnddjYYVH^xf1E%)%!$9D znYlZrPuC1tW)fCIw`&xXXEma}ark2dClbFVoO9VdR?G*iD-eARtcLs3=L^u! z_Ym=#b@#TOb&XR_@N*$Yie?j)*zCkP4Ia8D1+uO+ky$L<>2_EB-O~1CP4G>(+3`tL zB4T7DW7hBHO-=Bh98iCGiQRsB{YAr69Cb0xW~ak_gS0QI*mZ=GPduu{X;e+}4epce zQ7fCA4x929vf!10HAvtrhCVbD2+}?ybilj1@wYUmf5yJHHo>)t%Kou9CRbuc@l}=G z9+Ms_dpX9Ki-Wod?W`T`X#6QJ)3dIR3YeIh3UC*z9_gdd&lfEpZQpv+fP*BcvS^#X zbP+p^=vcIW;7!`2)P%4|I}y9}d-s&c(_g?Mh~2>J&*m0{(Gep{_{DO0Eku&dpM7G| zEgL4;UsJD{+kWwF;m1pV(#SLTqz#jtr2ZDw`?o@MF>P;7t`BDnn_SOKaL#{)JqmXesaKr8gMPrHnmG&hyg?Xnmj;{>%bY=h!-Y{vctF={n6 z%g&bTdLe-}bfTQq?_ev2keB8&Lg{`xS(*30~#foiD# zN7Z)+Qu&8(=XBIDs)OvZx9pWNj$H~NTPQQKGZW{OWRHxHA|fk$mT|sOHVKh=WR)n{ z3cdH^`~AJ|@BN?W`8=O--`92B*Zn#DJOnnGwm7Tls8C!siq&jdSYL7zS$s|UtQp6v zT)r@u=tqnrb%jDZ_vJrspcL+YNVB)}v z5ASXzUZ;bBh%=BWS~mLLm^Rh@r(BeAd+vvW{P?Gzs{#~#R|&(%CI6``a8>?PCs$nj zXgs6**@GDx(UC#*w|#F~{;5FT`~OeCTNGn2rhVLVKQwOUkT?2SK&3v*n{EADx=~M< zzfE!%8#_95#VIbnF}@o4&|1j$;Vd<>hiOSONnJ1+?1geX+tZSV1&OftKICvb3&`nr zmfFG8Y1;hk7aeX@OZk)S&cjH?yVAaST|dm2|iHaR#NKQrf!55+(0E3AxbV@b@K ziBCk$kq3AUe4$i07{YrFTw5R0%2_ytSPa95#4EZ&zmS{&>EdYZr;EzX1hvclGrrb%BQ0(eH&mybI4Pr)3EV@(gARv5je_JOsP290OT_K`|CNjJlL?%}ZvnphMLO z#?WeFoK+E>!)E#B^{6{YQgt9dOTmZ)LC0NaPe0`qSsJQ+kXm$%ylgLK7pN%j|l;K0%+eg?74~RxS~zI~W`Vf7HDik*LAz`EZq3|Ff7@ zo;(r>8&93`!!Npk6Qu(d=H=ZMd=%HGFYk2#QHPl%7U5smq0Z3w7y!T03-6*UQ$ z93Qt2dQCC)y`4rjycxVab!bu@UM_f2!0@z^Tz*@_1&k@h?i{XY>HH_x>a-wqG*)13 
z&1WDytoKTMK)rlL0DVrdWUGz@Od2iOKJA+qoD_%2(2`c*0l5MG$ODBPk~C*ZsZHS7|qPk-l3lpY;59KcQsNC7CJ*1a;NuLY&tg{ z#{d-HiZ}4487-oNxYmkyeqrU9Cj*RO(DmgiCes=)BSQWIz52!#zbmZZ+=LS2Hq?H zZ>F<|IR|exCjW8uQpX~%{Y8vA-Pa}7B=(=T!fPTF(9v|M%FkliXIK>PlZl;gykM*& z+HDhY@&T;eSx6V`jy0^+uz$`_TsB@avXp%(eJR(lm<|F~4g{HtLY=XsE1^S(G4`E$mmSccwU) z`Pk~MF0m=M$b;`cdDc(pw6dmb5W~GsiQ(2>4i_1;^De;u6PaQ2oy~cg4;jzS%tlwI z$&z?w^-S%(#;6c=k*P^3al4<|2ItX|JS3wj@yB|3GyH1fCv(uBN{eYfT_YJFAuk(n zAl<|ROJp0V+a$6qC$>0cV(BKMF+r2#!eYxVY#k(eR2WsUL0ka+;v_pQikUXm{70hj zLdCXfSe+mMjZ=I?YrBUsOhXzFc&W~0kRE%#l<>5hpL=Tf<*>!~+{}Wz83Q(h(#xpF zz4J%?M-v1&R7rn?3tPL&GBJ#jNZ30UTz3S?g8JxnCwPT1>geK|xxSyZ=B|9~WHV?; zQ>#X7F)}3+%h;7ryh+m?-?Y<(s;7~EwV?R$%eH7fOA^CjBS)Zr$=FFbUr=UIIZ~*! z)o9G$y<}U&_gvD9q0UJYx69Ua!=auzT4cW_H`#!6o%4vT*ddhGAwoWC*C@7Al@CII zhbo@;Mm_BfiiW#&m-crqY877_3B!A$hDB{!A9%*N<=~4V6})D$DOrcY3prrrJ_F|C z|Ko}~1`Cphg2VA>G~$8mtJ6aj_la?X$md|9F<<5Td)chTAN?bYuHf)k`$9=EyC%g? zwSF?<5K5MwMAx5pOsyK0;16VO$y&Wli+`GA{FiOsES(#?IYnt8h5VPT2)9TDQ8(c_ zDtBL@Uc+ZU{vc!MLf~vkhJj$zY3i=@=u7xzD0ebcV9#=T{7nfIfQMeN2`bkTO=re* zPV=HZ)77c?s2ZP~wTIM-5Sft+aEMrCju^o)xP$pB$O*urLd!n?fe88j%)c?L>$Sch zx`MaN%`Dg7EA6VB#eNd2P?pK#fnlBlUV{sbWiLS?Tj(gHf=ls5{MBPVs{v zW?OY)_%P}pRA5zK?Se#scNs-#rI^Pn^U=E*uVpVH2k9FUMT5z)$|y`IX13tT&_V9T zP`az4gSXtGg2qQ!AGNWfFEiHJ2IOw99~Gb=W2KnA<29$dpEs7TkxvN+;1V3BTOEd? 
z@I?ls$ogE#>TyRHc)=3Y`f=(GZUA73dns8*d}&inza$GUNRGDYx~g8&M3-C*8RmZ+ zs3qx-gcK$uBoShC+aVcTBNi#q{50)#%KUn6)|p_v?DnX|bdu{~u)?xk^gkv6s9!QP0PT)yb-Kzw&J4GuD+Z0K+6)=5&J) zwAM{)JHzkch_Ng#>1R%kp7+so(Z2-n>EWT%%HOWcWk8BJO9=9T+Z-f+V9u3HOPWq+ zHhva}xptWOOhPucteN=N(<)|9_~Nk`#`d0$1f8P77{5bsu)6_`wvE^DdjXE1t#i9x zKUlV^6^{|SPoDnf=*R|PO+{Fw3ARUavaUc_QDXh#~- z!LcCjk0f&BY+m)II|F-@&eXz!bB_F()01{J+(CSsbW=qkp#9Tj!AE9BJ1X)G>co9i z(U8v@JKCJVr743!EE(EB1lOils3Fe|(O#P)K2umu#GxuKhef0%`R;A4#`S#rao9oP zTpnw*N^)A#{&PHJntNvps`G_Wm>1fxC9YodVgvQz1yQ7M;ALy(Li=mOZ|hV@aBKcN zma%_NfQ;-YMTXH+F=2j2`TX}+=?OH1cZrm9;{}$@+*}O+1?doC^gKBN5>_^4Qo->| z)~V5O6oqpvkAp5 zQ!`%6juK~sdn*5%R@9l0V&{y$Y^~6Ion&CMuybes@hBvCT&X6}LmDMY2Y9V>XziQ0 zpIIdfYwvnzQ2njrIm6M7hcKteTF~1=wx;*#;An+kruLsL4A}LbHisentUmWW=9UWa z3c+Rmu{K9$$I3C+V3V-B!;wPMZ1KWRQnHYc0E-}wodmVu52SeO^&o{p(ten(9z+ zOaeMS+@|_=LIrRR0fjtc?k}8;ishW&&-lCrZXS_w{#%6jnw#GJw7r$X1Aw_@X(UoF4{!?4Ia5dcIpvZN$9kt9vYt5MyovZIe{^Uqtb^D} zyG|7I#!Q6+FJJ@=!bmXpAHxB(BFM|^c>S|}W9!HTJ6qbXr{D^4aY|yk$GgXH5ya=L~YV1mKZ@6=jll& z4y4ioHjiJeSU;yt!=aqJ(aq=^D24hWmC?W>wm(`l(U(u!xUth&JWZ{}i-~D{DzzIt zVR9J$4U!4FUp0UKL97oLhCU3MiXSe&A?1tSYRXWQq8qJw7w;Q@Zg17dx7C=Zec=ul}L*IskbSRJ7P z7MY;D?0&5Giwp`QKnO9+d|Fw^B-rr|Pb|Z;k=1&j{_=+@ZZrP>wBjULHPgfK>U-xu z={vQbgQJ^r?z#iJ=uxyPxxykVCpWK}j{ieEgU#THVKSI~6_|awDlQ3-Bm@av3uUxz zg49;(m?fog-oM**id>o02owmkO3Sc%FQl1geYBNbuxxjo%S=9vUX|Ov9gqw^q@I}< zpR!9Jj~hZB?{DgJ%1qVFb8pa#*#umV%g0Y8wm3QpQ2fMVXhy0c>w{Hs{t&8TLE~-n z)X{tZih}k*_AY330TS0jUK2FRd0y$Kv?XDMCj21g4rx4VKQ;@a^dykeQO*_(M8R3o}+ko>b<%ib$+-!31bN*Y8q3; zh;sr7%z&g3=RF#k&V|QO6Ce;>(vk2hw#70xek*0K>X^)#&}79b06BBJW-iFP-+-jk zLj(?jRMDPSM;amPloc1k$$;6k$-8Rkz(=TNeR#qRB5~P>cFi}`eKoaJ5(LiF;M3TU z7dRjNXX9DtdLUR~Gm!w9ZE}Jap(DD;EZh{)yi0@QP_=P&eUQWm5egL+J%efvsnt+& zUebWUzC|}RH)vC?7jMIP4ZmdLT(4J>YQZwxv;^OX8M%mW-t=`owg`d%1r>U5_au_L z_1=1fo%(QIoacnQyEu>eTXl|L!T?CE$>wjL!hc0jd_8U`))GvsX6p{D>NSLKGk25_ z49S+J&dlexxbizw^u*nzs5|MiU7wWvj$XWt;o&Uvq*t~|w3)%8?Xt}k*GDVANzWnl z#u3>C#hckR@O?a2^7wcf^%dteR5{XdNc3t->{sf))h=aWV{Fuv{-UN834}J(sPBU> 
zz6PI2+>0i}tHJlTKp3)P$^elR^v&-nM%oE+api*%ufJbb4gpc@+|hCRll>g=C}6fG zmvoOSn;YUf1gYusg;v`~&G|Its&*(?$+Rd}qB;_KG;<+~m1?d{kW*LBYln+|_^8OC z9FQaNThoh1+%(GqXvWsvJ*x9FRl(=<1hOD_3^@_o0rKI_4_zVG!jfN=?Y)|C+#db3gbkk*k==< z!p4Ne@?{{r<#*}yRUj@f)_H3rZ<}h%WIWj7CubY85}Vjl^M`<$N_5ct6B@C?Kq#m9 z8gOJoz3(8J|A1*6X?&0mNG=oudA!r!TU!kZIm9yJ^AxL{jmi1{iRPlu=;J@j1cM+3 zN@ifwKP)nzr4XJ`RtdYo*X{Ho$b~eh?A4&12QKlPkNk|8%$SauW2hDs``}i4 z=4S2j<~fj1&kC+A2iQyIc0W7{F!CZoC%nuHftk8RbvkbB1?+8nDW$>?vNY>C}q-1K9zX>J=AV7<2{6s|%+ zCyac4aJGwlAnM`Nsu+qB3;=!|Q_9XKkdZ;5>57G@%)4lHk~xlJ0`)kLOCYSIkoRT^Nn*YOZL5 z)bzPK|9~f!J%@qI-a1(b1LsC{>4_!Hy=065BmjZTMCn#yqt=`IUj9Ih%@NFM@O93G z4YG0?gQ-;Y1qMB#_S@x24sAyr zIO5WhuFs3E5YBp>yjd`p3js3z^Z_q8C9{e|aLkAXB&j!5=ndEe7{W2(dRqp9>U&SZ zB!M%)|BfOHyo4|7apL}`rAK9!fPQg#_QtW!ne$A`bUk4TmE;W3TV2D0ULWD$l|97oi>k>LD~{eX=deNIX`VG%YaIR< z`F|4oSee7sj^n7lrXikVPY?U%Q~KbD#wcaemYMPzXRMHI12G!dK$4bNO!M(WIO1{S z7(b`a2b7dS`(Z7i1ZwJnC#*s#t8SlY*RyiR^vs?{VcHi3A5<*e=1YVyOmqXYPCo64 z=DMX%X9kagp0mv)S<*qlM0SVDQt9Q@yri9_YZsj#%aBcc8~7xZTD>aBMD~Vqyz%w> z5~Ca|43mh*>)*QGLk*HS$1rTL(V*6+w>gGqfs=TVtn|0~@21yMnn+N;rq#S_i3IpV zge!#7A_}_=^$KJPUJ_4t3BP;!P#OsWpcI1A%X#Qa9sn{u1&62mrVEz_@e^uiQFr9E z{jPtx54U}}*F*C_4Y6t6gAoW1&Xf-brBW+jec*=~(X*MTpcEWbdx9_n90PiLy#sYl z12)DBq7PM9Zr%u8J>e8OiKsTVh)8C241h~M#Knp;2V*GQv1PAt0R*k4+Hig48dnaX zFnkxHJq8@6V?*g;{}tPX5v?jttjZVaN$dhcXq?9_0boC_^v}=WksmL;>A$EdaScf=}&P2kLc8&_soH zB1QOVvNj6z>e}q5+}&m$%NG6XXiF$p#8I&%V6h$*INZd}>x&a)P?yxP*_syJX=_{V zmrZyG)G09A^jvogT%R!c95q~|HC9BqcFO!tVGIBbM5ZVZ(Q2geh#>YL6upm;US4kUO^x$2ujwgI=a)P(ff>Jj%slX=IbxFjA38Q$ck34QghT0Jx# zg@>B!GDf`yS;-z!U1gl>@EeMhMqc0GTXxNt4Gi7VtgGD&+r6Ld4UAeZkX4qVviCI76*#{k1xLg>nAiwEa8NnXH5uSPU^ z>%f^J&YWM&u}?%6(V3H@-l!)^6C{zX1G{^u@$5WWj3Y8anDT9KtVK=yFsr38B6#-u@;vQ;2SL6|Ma`Eq( z{S-dc0dmN@XO*Vnz8!Ck15}5UBNR(AAeX*H%NF$io6n9N>!3T&lI_ZMf^3G|&FZZa z`-~XsP&81M!p>Av3aArG0^QJaXcRm~#!bF{t=Rh|JuCBhl{nFrw!K%YDu;9KYCgVk zVgS8^Moh(ciiJSM|$x6ao#pL znBrOb?ti{>5P}s)9T|lK$P-p*35F;rPbYraZ7}>90BYtuMX`mkVDO2Y{ce{4E=>+a 
zWSg`lS8~}j5MjL~{W7Nfj~>bf+qrJ#Dd+1|dbdw|=`gJ1yF0kJ)DO6?%HWnTPJ~6S zxbb#JGDgUsFF<+EJgMTvg2pn@eAs#^TWrT$N<(8sPPKkayjHL5XekYbq0Ulg@thLr zU&75KC+DKWu3Rj@4=VoO)ZK@_ufJsfeqafkKFDbQ!wD`J!yMYpodp!lg)T9^gw;E9-QXjJ zNivp7;g50RpbvG{!5sWPI^jcQ)&Y0x*8-+B;IJ0i@>`$;OOgsC<5gHm9{08c_zIr* zQogPFyUdA?jwGx|UdSo}$vyyXin@2fKLW&vo}!}G(qT`2gd;PcOnOBU2ej+!9sW_1 zg(?phu{K&CKLSOSUld>849UEsR*Ti^YG@4J1gXncVzjLxh5t)Gx;KBN1C%1Iv6yAR z;dOYE37awE7v?Z)^3b^dH53nne~G(&y}oGpR}@qeS1aY79Jmo$$b;+h%vZgJyuuM` zWpk1ca=u7Cuc-G1vMBNSFsl)fLMRRyu9q}y% zx!3+W&R_N?7)YQ5RxdAK;c4VE=xT>Z2<4zcMdY@~!rnB(iKzq)+|c|JO=c>;Sm}nhoNtVZ)sa| z1k&E@6b!TG<~n|`JjA6%R3*6#MA`p(p&Iqd4uQ9y0EPJbX?Q~M94`JE|BGB8$={>_3pVWoHPZ-BQuvN^&nY-S3DrdiF#EtkUopXCoM za)wW=kY zPtLhc_vCj!cJC3rIJP4%0g9g*qPJFY>vP0Q5MrF^BN{A!h}ct)Unf_5-DnkfWSeZJ z%pvDBGj^ovwf1TnqsZZpy@J^VMb^RN#O1YBOzZs2yA&O@Tx?kk z?4O#OltTCLd*luUa}S#(q8vMxrwl$9OoJh4r>7}p>7;G}c%7%%l}ofJ)HlQkdbcKnnQWNi)~CLN z1wax$$DU;4*(Qy}8lyKaCh)!VOoSq#xWPvq_=QVTq~H~$l1Ta<(7)eh9ktBv7v?qlGs+eW*-5WrU8WVK_K$?|RU{p0~ytvKacV^PcYtvWqQuG^ZTAw)8&RU2DEFg zmxOaabhR%f*?7^SEfL>o7fX@{lqZRgE|OO`YwMrW?UBF6krnb{*Hby?uH}cM(D)mK zQeCkWsG%7K8S5_GCOoVeGV@LPE+4YrXW~EG_02Wo)o+uNfwNPf3l#Zxg4c)@k!5Dy zKX(RJZIfu;0Y0x};(ceg%wVYS{EBza$2ToBrS<8mSxlW<(tBd0wR7jyr}OzDW6D31 z7^fnXxCG`2x72YI<5fUyR(B%W-oep(jtUYA4P1f5@D|;hhDcg7u|7;x?s}9#dBLp> z4a%EwOl4Sc{e$yw_$u^*Du*eFAmT&YcWV3Zp0&^FgFv2W#HvTwRj4;oT-89*=RG*j z6rV_-eIxZ8Gx`LrBcL7q+7D-*%nYFsSHYDQdoyM}<&*YNl?c8v#T_F5fniwUsq@FG zzrSl+JeRw40o#t3m01c=t-Y-b0MvH``F4*Px^t);3d^%Ort>jkOs;Y@tU@anbAHpT zsRSwCpyuVerAGegQ%YATe)z*ziZN}{{wp6zyJ`vT{S-a8OqLK^ao!B*xWLMM=jkpc zxXL6HlpA896%xL%*KmD$<i29{6!mB}?{E=Mu$HLfw87(WJ+J<;WjG>z`yOZ0 zonnjqef>N4v?6w!{{~!MJolSUDfTe7ZvNH;*)kbaYT*ZJh~ey_Xb4b39vx=9l0}{D z2+j%``M_N&KKhV~3{c3}(6vni9Im3&OXq}dslm7nssg-yx<$gEA+5Pr! 
zlx)Lte}p&!fsBHVRu29Wz<=#VEq8x>=|T;wQ4{_QqI0P?Bgk6$xXGQtZPA!+aX%Cc z-xk)s9q_^TDrp1vIv={O4xdO>SLT%VRTA~s$uLMWe{cKJ@;GdJg~fN!wTwzY1hp}E*O>SMCov`jL$0Ud)7^c>}Qb$hnrbMEuB1}Q6C z-1_HV0Sl?87tbG!?c0)?r}IS3pqu1{v)Cs3K(rzl@KrTQd7z9 ztCk%a@vE}fUrU^2H!C|U@)>j1iu6ooDEntgBGhpF{VndUj7c=^PrMOX9aT{ZX$y5C z{UZ+vJAhtpo%&XwswM|wCc++{^W5TuNE1Y zN^X?gAX19B6cQW0B4kPeLtmS;%FUH}@DMyf2m2rQcB(5-G^Fop88d`yf9rIuYc?(k z%~P`|vHwly_SkY(df9!`l}m%e08gWwz&Fcb^8)ugeS86=MWoLC3tZYV8_GAn2&Xk- z%SwqRFB!YOTzs(kibJL=l4yOrs`E`-;75Z^Z+DrFN$!Hv7n4%phcGQD5Z>0MJ5$+d z9HVNFZO?*;&)k|PfL=P|G4RDpP|T((?&+Ao6Q6;cy9N>}H)U&qxUSufTmi`26&B_dSxJZ()UY4J-Lhs`wk+RL| z&nUReHTtPwn?k!2yJ33zSvqXp4Yz`ByXp*dYxl?oXfK^dk2T&rw%PcaExRch2q(WeT^i1kH11 z0g_?jw7Y4Cw)_#mRRyK!TiM8o7BF&O>It~I}>b!PCFGHQaPMES1$;THvs5FOa{yfZ;S#6gml~)vU zVSh|HE>Gcz?pHTLW#Qgxk6E$m*B{1w=g?dThLPqxN|u8LbD&`04MzkzSSgN(mWKSp zlZ{kROgOUG;)b|+FG3>&;Ak3DoYWR9*qa zNf6&Woxg4V>=ug}#~lY*lWk%W4+V!%it(mmgU($AHrDqycE=dEq$8}{6bHWj-T9;c zZJE6r#NP!Q!cF=efF1@%wQxgFqS1%xq@@QdwIz?oLZr~IfNdrZ{lq-!q>!L-QFnHz zI%(_6r_GSxekdQ;cCLxpkPR2N=pS&y(f3^EMvgsFfti!=RQ03!`V&vR?d!)%cN+B% zYWE8j66%joWEd|d3)hX=UWwvoTeQ7t8GH8Es0SLgiTx0FVf&kSt(NUbK0{yQ1kc9P z!)8VOaYio(`Zv3~MUtvH1kdJz@v_0I=X3Y3RD-L!VfYxB&T`M0D>CShf>|LA{zHWa ziqY1$c9+qvb+D!zZcvP8gBA*E;) zZa_Q{l()=tO}t6R^|Iaami|6I{A%?_Ih)~IUS0GT+We=c9M(^JY1=)oGglc}^x=0e z`|OVJ?$w5{RbDd6f2f9b(-%I;u(N&fYy5n6I<%z`|E|9Hb{0$KNQqc)7<)-=QBoFS zd(~_#ERWiJ?HuoYT-VZ~!0?D=Y5j?ZT9d!C$^EL05l0OXMb5vzNlW&6=svF0hhql+MWnZKcPzeQN<6F|U; z?ax-@8Fc#V-*fe~Ph1({me@u6D?VQmvEe49DaPceABVVLxBlmKre%f#3l;zNT*J+G z{}Sx=6A!dZJEEe<2w1YcD@!MUV%>QAgbo$Bv)QZk2jYp_;}a}_v-UDmCnx}OM+OB{ zxH0Dhf7!U9{J(6k@h^yZMhb=TJWpvJ?oBru%a07Ys28Ina1AJvZjof>% zY*Q%^QG1=moTH#2B9Is8mocjOPVEB_XwJ^E3PU?qJv3_}zMCAXpn`Ko!Ii^JId{Wc z_Uh8}n{$SPx=uj`WP}XZIO5@)x#FyCJB;65Kc?xWNsPZPNHWSf zB`4n5|MB55%vm#F5&X4mK3W{QP?1R+|NF)v^-_5zma{kY0Lu;Y*y9A&m2~t8qpGz?^z?YH`Y0N+u4({H+^mb-< zvxrHi;sGu*J?BfrwQ-sn4cjXeW0=8w3SIXraRCX)Xgyq>j`+i97ul?Rmh?B;8Ieuk zAi!cVjaqG3YO^?qyTy9#E|5- 
zkl<#Kvt2Z$)>ji1*qyJx)6|kq(Gdzj>Tw0vWz?QXA05JR@S=i>E>dvws}P@|0_#M( zuw#|3OIL&&2Y(tyZG%$)CV^3P=J5VFV3fHO39hM!(*ff%p%EqJ`=}_WQKb3i1LO}_WiB*ZW$9QAZ$NC z6f|t2A*T&LStHNFtR&w;rQws20#U`i`>TWBV|)u~eN;HUADq0Rfx~8{N9oHliLgiF z1_l~S&nX{ySxXpL?8|3=S`&G}I!VA@U=j|4r_f|d4q#MKfx|Q!^ivKmF6FX9;eI~OJ~~~QHYd%X)x{-EovGS z=*C|Ag1s@YTjV)bD-11L)bRsRt%D#hv({knU&-?XvG&`1*K8l3w5d(x1&1$h0A4SC7g9tzu&Q84Z2!=8& zQW?K_WPTlEXHI%Q>Qx}LYWLc-(>omZ(tL6*O)hP$jk8BXMvBMd-MB%T(YufDn7{a5 zl{n$*J>_f~*py9B;V;pf&%e}Wlsr*ZVV=4=X{4hqegw9@txXPH#T|%W^y8|1x0`)e-~Z~18=HG2_QoJU+alw8VxCpN zP3tI;MOEGmPH{t@=)YrTyuJBns-ZaL;Iw|?72rp{ zs?HnYE75MIro9Kc`Y%I)fBRso6ZY~;Qr$I87dG4J6KW1X~k00*2 zD`;VBwL6b~;`6d2nL%TkmeQ8IPdYB>?BsNTZSoT-ew5F;hu=@;f{R?-KA)J=WWtoS zmEiARrl4@0)r0-IP|XY-0;*Ys;(8?(M5jzpE_Epsay9={ZHSL99E>?%Kan(*#n-~m zi4|RWiD>$2dj2)vCu<9vDU^+$D)XSf&@-{Gbow#EZ-rTjZK@G zCmEmh+(Dlqhq8gHSWfU6!U!I(L%#jqb|CDzIs7h;I43E=W!`&>*Gi0TYoKN4d>~}l z8)J0dw+T1rlsgys>sd)Hzx$Kti4cGN|8F?gOO2vwr8m@mG z74f2QFh25j1<*Vsy>)&uLy2(b)748>qJG5#{7cSql_lR=S2A(9|XzyG|&vrhzloHR*6+#|Tbwf)C1|tifH) zfU>deeTfz+Wn5qKuPov5F#wFp^=+@vqiN|scwhCeb9pN8MB;*&sbF8APRtLM4 zX0D#7QY8;!a|%R$TbBQIqT~0!+ zZ$nDDb?P|Xm{MBm-Dj5dF7`%}551_t3SkNamioToDO3T-eCf&+1s=D*%g0u2bhr-+ ziNFy*%Sr0$qEX~$p&)n=YMznpd}D=q@%?N)%pEd1b#6B$ga)K)zd}91wb<9}=Rx=K z1*qLoqMXYeUxx56c}Hi@w{4zydAK#Tl(F%Q1bAO`F^C$&hu*&ab^|L2w;r(ORy&G& z-Tz1agxidJ_&+_F9$y0!5{^0lCWB=fLF}Y=YEM+Faj4?ug}5%XV`#TkMKpHb_%Scj zslUC{LXHAsA1hrvOEA6qiRO!nlN)Qb>DlhmXSc3g$~32;WxOv&2vuwLH^w;_uL`)n z^*Dvc9YKc4A*;bDZcH#rq_Srg{urIyKh&mN{9~}tC<1n}hXD!cSebY1xGoxgfvhp; z&9jlTlY&_$50<{Z<1+}$a6-vDmnZaGR>Okv1$_%a7q02|B00og? 
zyK`zy0Vo*_w)*m#@djdq|5d`d>f3)xnTBqBd%^$4;Rp96Hd%F=?ZtI`hT3ycNJcn&un8G13xwP1`AQh!`nrgNN_jO(o?3JeX#WfqO7WoILYuWPyYH41 zqj}w>l{*u^21Y)!XS{Pk-=GvfePGy$HzI2~W}lq-mL}ZVhFkb!Yn(ml+0iF~(DPGC z8%5C3YiB?Wbl&xPVD1_g3ZnY;>ecDeEy;6|X8ir&z^=U+Z@f-f_ve`wcbL9?OT$*d8gJ$pUyGQ&Q}Ze=wx_e#w($4GU)v8D6bC{deC-l zi50d@AFp0FcXK=pxjXtd1>>|LkUL2FY|A(i@I`!RwvdIU7{s)zNG%&_ zq>-?*iZZ4BR8Pn3^6yLX_wQbALWL_@B2i=7tU-%ZM^x*6kg}4zvbt{^&|GqUdgd(A zl8k{{qINlAya@^MwXXThsO$xvMi&iUT8t?r(&A~NWLFi(i)D?b+m1}fgmm0|@cx|+ z!1J&D39sqyC#B+>$~}I%_(Ys;oz$f2F8S3ZHz-XGr>iUu#_$PUW#PvVpiHb0zCS_V>`Q0RhRwqzg+XS-@rh>`$W5(1VGZ$cB7!&gpz|h<=Ev#q-@{c#tHPiJ>lpO6})E z5ilK>E}pUOQ{T44FrZH4C!e@zOE+>iw70UHBHeHO+M?9wu2P*fmS^Fvg2WIF3t6gU z$!q`B*s9{b4XYGyZ?S!Q6jJbf>mb^Aw7m67GgWnzA69!o{Lyl4H8XQ1fLSonKelOF zT)f>2715Oz(8{xs7SyvViS%BeZy^GC4@)Tgf&{r+@vHbq)e}sxEaQC>E|*wkQ~4o> zB>QxIJtJ2-HxrfGpYTimUFC3F4AL5$;_qwEj`-jqUGK6f9%hBuR3QjqR(3;>l^}rn zeYCqLN;vx#Cl!GXN%&{+{(lyNte&AR{u8BD zGki(!uMN^=av7U1y4t?stj>o#7cmU@?g^d-z1AY-sy2nP4HuFY>*DNvw8Z`G8}R%3 zp0*;&Leb6GGl$r0M5U?HW-H@rwz~SJoVEASr*38tUln>cwYT-@&agbgyYdqM;w)s2 zCQDoc*(r`)JkJ`P)B5-I>pMMe-?nQ$8}qD&{K)R{u$~`>&&XIC)U32>gC?> za;+e>xMeov3IG{XTwHrYME}weYqC(cqhw8jpAU>?{N!#X-d9gn?2;kk@X^|#wvkRg zbT@6PG4vr0(n2E5rch{Iu$N9rnyucXj{E_N8@80V#gJU2<+byTxs>q_XS_4n{#LGe z(i$gl?z$UsmUm=@*?Bq~Td!LvFUMbVZas0340dfmB$<=EDS)gw=vRp0I)SzQVj4bp z{JZHL>(1V`h;b3{71+Kr_mh+8|7-~f;fWk}NJ@*;OKUOwQBe$8Y)Qn`Hs2#i1AmG5q30~w zOv0g4BFHww4lc8k>2UKeuS%E|I`cQy-_u z;ch#WN_rk?@xykm3O&?42~Hw=N$|fTj){13NCFjj-2~A1y7SjNcpa0`|2C$F1Py}4 zN8CgM8up^;KS=!s>)t3>JQQjUx?Jw-G?x3_aM32W+9|uXdFuK`RZ{lrZ98aKGAniS z2O|SVC7f)#PftDqRDf#}%)Pu*Go-<5ZitFovYAUVWNHw%B1wXk3E%|uK82tBYUtgo z@cL*;3WI4cdWoT147%K*HP2EqBI|hB-~~z z$9Z2}WYR@^^@1WAiq{0e?5b>I`MFGxQoHnW%ea!@+ga%nY@Nz0j8sBBr)7Ui!SU>u zAy?QOZ`&1`(Vd$@`serj;LZpI6k`u{dj^Z9`+M7WN|+a_0DG9z0>*SJ0gGGcO-3WPJ|LyU+~J`zDu%hUXdU z2|mg_qj73l^?f+L$>q1oVc~)3@Rd%x1AB|9g_|TDgsi!gze(1l zVQ-$X8qHW|6IkBO+E->gBNobZ2wE46lx~76kD5!CnMeH9I6pWzWCU)g>Zp^x$+6_- zKGY3U@BY|W%650da7Yv(RyHM>{<#1zW&xvJoQ&nBxnRnnb-J7ou|(@D^$m~Mm2U8R 
z|4AL3Z)LF{9d#+x5Zk+?%3+yRe+T!>2-Kh@{;c!CEa%g1jRlL?_ObMs$f<9{=mVB~ z0rW3({j-+?%;$j>*QorqJ(`;NB|7*Iw7iav`z`T-*KST+WV)Krm1pvle1mc=p?+#;V6qMU|+X^>^Eey-C1g zC*J)TQE_1~l~J0ejrU4vire2gHxNl5BV7{EYum70Ij++bou^7x^@9%nph^aWe z*;4l;>nwsxHFci1TIA~>KFJhO3O%^}XviH-f#fiP-!B>>l z=xJj2E)fcQc`hkeEhb+5i}LM09z`{>>$TGT10H^Yw7_FhF+y}P{ zaguy=9~9htPre?PDmIpik(yaK>2!(EHJ>EOo-ShXo-H}>_MOJ{f?lsjd*ru;C;pf} z-4oV6=E}XYH%g3l#k?$r*QjcHbG$dzZK@0>D0ESIss zg-j2(>>)Q6Ae~w3Jli}An1;YIHQ&>Vw~-+jlfN%w&7Crmy9^*c-W&p`!S{~O5a#O2 zYb*;R6%Z|1{0wqag8V4786P~8^qYK6vOJXL5mfI!^(z-^{>g9hdenmcad#uN#rd*N zr6AIWeKL@FBe6#|>(&-xUTu=)gJ{lt+Hp@~JtZapisAFiLw3 zT!_PDN!Si73EzsRZQNIR+Y7ybj$T+mRPI4~dd+C^&#bq{RMVRYK2fFdD1y`O=4$q( z;sLSelF*C6Ovk9#S19gK?yfHL6?|tZbDYbkM=>+h=_oQ-ymxd68+vLue&6{)zDnMv zr|$g+L+kq@K9ab@VFfFnv`O>t=xN&*UKS>w*un4!5&!0?zZ+pEoP`B?Ts{6O+_0;A z;`N|;t#UwPKa?j~pd-?Ged6Gt>yC%TIwCE}>0oU0 z$UJO&Yz-=uWpK>syM=%u|!pDvUb&ft5a9*Zw@G$2uK2&~K+xbsE5_+M433PBIe z2YO9D#usIaaGzxvo%CDAHW;lSD24~wn6aZpR%TP1KYbeHbA0C_!o=^`*6Yysu0-*bJf3vF?Im zE#Nz0!cnu1uE_WFJ8-W>f*#8qOByPb7uKXNQimlgC6)Kc^-(BM#L1j{?2>63W9l`i zO!e43i|p+cf32;A>zqXFU=g)0U z?85A;Jv1NIvRq9&oY=k170NF>3=5l8VLPl%-ShlTbN0DysMC2WJ(RV6U$iGvjV11S zj*GX$Rcx;TM8LIr+$we!Hq)UfxYf$DrbuN+mceOh>G6V}RBc?$lkh_OD+jSf{}mp* zF6qR-=RQDPEC4N#9>co!<8CrV!^B9*x5%57ulvC&3cm;i8E-tA8;MqTWYqMyue4+S zRn9hwJePUU)Qkd~bDhgw!pBTFg21?|!napl;pwsuE#0ey>FrMtq*IhvCV*4WUAm7$ z`mm?D8nif62+1IEeEz0E!S?P49|cm`qPmkh*7ib{-`IsPmSU`(rrZ~Jw1!D!<>8k3 z90W^4;}Z2(Tcc>#lTAO58n5~@SG`&nB%3Bm0HI0`wm7~y-o>* z3nhj$*v#JKkKaD>r55|uFf%c(u_&D8iApuQ@Xo5hTdy@KHp=1?>_c_ z@c0vQ4C`LA?zOIU#d)5Wm8v_%>wGZ7HeMlEw%FVycjy~f(rhpihBN@Qqa5tQew!ZP za>*KBb+P~m-^NSz3`U$Sxr?M_WjP~=RB>K1lxj?uv*z%h3A+E8APMf=QD(Zv+O8WC{@=%nJ)MLtW~&jQ>cH{Ixdfy0l!$aH1Fq7X;00F%`I zB8xVgvF2el|Ei$-kDwSYolNYVCD4iy$R>QRM(5RlB=j*8Uj1W{#o6l)E>6Mw?hN++ z0o~!x1|)uG&{w3cI62Gvyaca>ey2zevK665O6k_tA%xr>w~}@ zUyGOZ`8PGu8;5Eq2@Qd;xhW1D_ZJZ-Dr%b104^z7^t1G+Obi((aP$N7CxQfGQ;0hW z{zl_k@p@dgAB*OtnyXa!*(kmxhT`pi82-r+nAI?Kf(-o(K}v{PmB!rAis9L!7n|JX 
z$O(NR0`(|dSVY+g6r;g;UFl)4SpSkR)!9CLNqoZ7L2S7MWP{wHwnm!tXCB?O#8HG7 zpTJp5}})XJKfpoO>i(Tb+ES zAKrk*cI)WU-P6MjY3YUuyq3-bm-n|ig@IxS7_dY^X|4lpO@swdk6+(gCJ~1U+it~( zH1wHMizg*>nV`Ak`ZQGC{Y9_f1) zm^7RSj)woW-Z?;c<@3fyiM|*0c>dYjPDMhR$_@aMkys?8un`V;O^=ulS6iooBTO;9 z0U++iA-xAlvlT@r$csFfOc+w>Z8c^G!dJT+*~wG7@7Z0=U${1i^gz94VBB z6)7WBiL4TS2*O570lAh=lW(^;f>cnoeO?1aK&}@^!@<0FaNHEVqWH91+%jY*;RMhv zUp{TVj}-F2!a6WdpHFzMKYszr?{``E+TOG(nxgOZbmyNi0C`m8#yXEeLtbHZQZ-9y z)Z7!u1LOqFin~~vbdu{q|0Io1)SuyV{#$h#DXfU7D$xRAgxrJR;-W?%iNI>&fbwoK ztv>^E&x*GOvl@+p_!G(AuiuAV{h~Bk0Beb??W4W7HZ%ZLTz+Mc;Y(L6SUQ#}f5C?i zdkv-1^t19>dm#2%UzxXRrxzO$*(XdY<7u#uw!sQnPhOB{;Sj%;C0Uh02{22j;4(! zui?kWk=#RvN%ZV>8UZM$Qxc&83?Hx;ckY6sl)%NE<-qAn^GfI|aeau$%s5TK^lc=% z(iG%5v&pp{ZM1LY`LDeLSJ)r4=5q)+@b{>6&>-_~0^rF;&YdHD0!W7*&VvLXbPNt* zC}1YLk8U2Oq{aBWBlEiG_Ktv>+w2%WOI z*AxROy1?+{lmZz@0ahoAC`~93=-^SmDWL%hYQ>9(_r~}6Gko&BfALApmGUDYmv)Nq z7fHqNSwNE&XTMZdB8j~H72^o&kHXP0K4EY-8uA-;0dPMuxG+uvE1FgQbU%_Mosmh% zPXIWWfseiK*42Q`1U#coW^zqOzTZcnI*v&5&2T6)7>g7YgEXi_-SZtBi|v23WnBoL z4wrpWxgT=*5sa11woqpMdnKs8fX_81Y7H(M!eg}g&va7(GGzV5 zxB0k7qh7Krf@{wCG9YYk#vJ9EiDdfH0re&gWE&@gtGy_Ri3F>4=Kv8!)i8Q>g9P2y z8iFi2>{6FUZ9k-l0CEdnM$3}&SX8{I=-t#tG)BpW6d#-AK%x;$5P1$sAfEgS6Cv`~ zMDN}!$og0NRAJ%Z*3@vCG~z-m{Qfh82-n3lOZs!-W( z#?J0oDef3YiqW8e>E*?Kp{*gor{Q)T1&T>PjfTFhO=->)bFZLh>xA(<7Gjh}3aImJ zQr)O8+0z@;0S|j3n0*CJp8d2V?obwKCSviBSgIpz1{bEgw9(P+cLyrzPy5RB#9Nnk-G+I6gJ9R&(G+GN@f2?$UcD?=w94>=7+b7Snr2OoGwk!^i zL}6^0nS50e2hf$Ad+-r~^&vT*7{K~4mQAejc$AP#3|!%HB-*2@SOuN=(H{{-Qo^%8 zMF{k@0pTcXJ1!Q}w-UfB9bde1j3J|yK^phiTCy`-Dq!51iJuM+GIIWG&In`wtOwUDVSQH51A77l~zm7{W=^ zb21kW#z0>Z=@1!y=9D+=PoZOk{&WP=#4AwwOf)7Cs8?Zy6Cou=02**L}ymicFK9K}? zFCeD?pNoF>Jt#63ye8w0Oo$Fo$bc{qNy%s}@^f}O*}}N#s-Kn_`WsJ$RepoM!B1NVMLM}kxeb8^y#bT0F0gvOvvNXe5GwSTelL6R@UVo;EbC{ges39oxEM>qJ7|^N0HP6i z#2gH+jtUs!DoTUyDFP2ii#KPIOJEOf@T`^XEVH4uNDD&eT!I#DWqk zjW1F%;M^wP%JAJ9d2mwLC*YFtF2Fw-+?Ebbn|ONffl_;LwU5{fcYK98kbqm3_VW7! 
zOZ^B3HK>+k5UFO62{jrYn}^QUO@5eD*@_fhfr8dn{8rfsWm`aphihi{h$}|~Xh4Ir zIau6i0}6KitR9alqb4Vw#O)vD3t*^22iLLPhMCEw+VQuoa z!gmA0CE;VHK}d@Ai@p#^ko`2?jp6WEZsyJFS)wy0huwTzIX}yRRr^{G(eM`j8qFf^ zazFkA@&tSsdj-7_1+wzE5Un28xKJ_SUvxlPguefqW~Rr+aS)OO>fZsG<*%Ju+6xi7 z2cx#h4Kv|%eGWt$J%{@;CV>+41^dezvZdvfU*#L<054#RbP16|(n2=4QwD@-AN&kq z*bA#n&ivv;Gr@3x10u?S7D%E-oSk|%H2D+!1c1LM%~Fwe5$&~#Xw>=3z#=wY|97GF zy3=f?zJhn3kZ42z3_@B}?$|&sWEN2r`UBWh`m?J$P8YBJlO9mefJ|uj z_M*ROrzPR8Ntu%774tI(6woJls(YC32p>Y+hw+I_*r67HE~tt5IKHIfRJkxaUD&g? z5V`LZ0BkTgcGmP2Ni@nUeX&J-OPEFIHmTO(erkv3Tl|BmTk{}^F@k*OPdwWRy|Kf{ zpBDDy?EO{Hz6YALS;nMj^bCgIvu!HVs;e^-MPA%UNEbI0Xaunc-#5Ze8PH8ETAu5V zo&wrT-OXyRWRU~nNzy5nnyY@1XFnvvWqqDR2d@-#nwa<{ZD-I~8V+PoAI z99xfq14)mIPt{!8W6JRrK-C3`h-P*`cT*sOztRZ`6lOm%zv&KMNg?X8R`9wTv@~y% zzZ9qAA}d!8##CkBm^m~M&VDg`&h-$gk1TatC4CTW0t}r}3X>}IU2y=~F|+|xT8a78 z5=i*Tql)!KjLSeFg@ZSGet*QaU4hh9RH|OPunW1?R@`s3&@DAu%eQb6Z~{r`Xo4rz z(o4CN>c40nJ^*fDKS%*z>a!z7B~W63E9AY5#o)4{`<^3ROW506N*v&<9?WqeWe@Wk zOSPxN`j&QI5uGs$AXB25Bp-*y#YH)#wz^FGvxIb;KbM2MOF_wvDGW;Zv z&*iuaG|e%&aQ*r=5r3}GbR|DkQ#E#D@2q39J8-4UHKV=+mE;(IGPr;dDtqNodg)Ac zy5MP#)VAC)m->o^_w*+GRjx|kGE$-5-BByU`DDGw2c#954p%!nJG@Jlfke{Y5{IOe zWCHEzI)G*XwO!M!YvP_IhoWw2HQ5?uj;fMtwXX{9)@!P|8lEAT0|z0hhcBPg#nYUQ z9LprsBNM~V`cx4L+&}1){L1R024W(u`B^BVKs>73fm)ZD~+14QG&a2Mgn4Z%t3KGt}mfkx%hU9*Pw{X~s!4V?J{RY%A12CTzmxC4v z_=Qbgd>EH!SG<6q-yZP=S1lA;_58yFX~kFo$Js{0anzWL7mmadwVN_~b9r2bs_#`E zcNCpEo=zy($g-Ib10mvAfhdCkA2at;?Mk32%wXdz(?MAG$!yq^O6 zk)fA^{i4dTGK}@FTqfx8;WJfVu|PYk40{5ySrZ2(&GozGHDiLsgdo|>wbA|tJ88il zAka&dk?OYgNVtrmtQZwqjBnwbe9%V5|o?uum_dqAO|9N z-=)(k%@q{J8GsNY*v5K#yekW1BGPkdz{ zSg!A=DZ`@J%&*V1r{h#~d2O(A;_z|^Z;%AllC(lt^7$#{byU>E(sr}K4rcg!ocH1Y zweLVf`m-w~>87Dh5H`5^{r+v=a?Xm^I`PZcSYx8&V!9R4ANn0tX@nU0Z6=0zvgHQL zWV&RS*aIEW?k4rX0_YX3u>wS;Riz*Cy7SG%G*%Fi>s7GnT-1kWCJuy~&SuUC3YSQg zn%=~p?p)M4IX^q}+4b7J_Pg0kJiDGXuwFNuKQhCuX;LDx$alr*T67Lf^9V$h&6~z- zixRdOvb0Fs_j#sf0Q&E2tX%8IqzU*4=wIa66!g7WEZIOKIaW*6SK=FF)ndc42rYXj zs)KqoQ);SMYlP!IYZV`esyX4;*i6icu@y4;W2WSIDeOKX^cIX5I`ew$9FwRXZN3-r 
z!%-@kz2G>|5)+KJeW^rrWu>J!a-Z}(O+fR!!IA6o%vx9xqLGi{U%-!@BxXFi;;i#YshWE3ik z?2z}}*s$^}hp`&3dGL2`s`Be9kQ1{E?B)VDzuxZFA}(HY8wO5eN_5X030a7b>$sCy#qRT*=6ytWW#C zkqBpTUAf3qEhuaYf=3 ze28GIm#<^mHty`N$R()rxyMAY+FzLn*HIC-<3qptix|4Rd)PlNf>(e2f*cD1?WHvy z?&AlYyZR;MdLHMv6EnFWFhss7rnNQr9=}3^_Tr2==8HTl&bt7sD((+Ygb=f*`mNlq z&&!05QrTe|+|FhptxZRlX8_~87W?hxyYFq6nwN0w{b0oFu;b100|1;TR4QOiIPSPz ze_gT|o3?}nAtc_*ebXu^iglxXYL8fH!>0iq#F72g(d}6f!yiy*1hlxe#{=?DO=CMZ zP=T{?>?o|$lB|2)lq4h+o4Vowi6hpwXi-|efG3Pr23^0Do71YNyY)$V?o34_OI-22 z`WME8Zlw!!L9~ycv3+)m$Iu4TUMOJ&-3V?kK~?XBu-RKgB%B`_-#YLCbMc(rs>b9!4!yI2mf3`~~Ur><4GX-+X$x_OW66(C~9or7X{=sFcUDrJg!{ zjIRRIyd8C_r8$i1(;XjSx_iSm^S8N$BhAplaX(oV_mM4g*%__D;TJbR~(=-@8m4%yg$SNC!nHk0^s*a}S@*|^z} zS%hxRin{#PH7QL!Ef*?`r*|>PW#l{&qA`-qbon}UX6v0AqC>d#>`s;Uz)$aY_~T{J zt)(aI9E@7}#=R8czTYQMd8yVh@9W)a_`Xhj%7&VM%q<7UGMwRl-J*2IBHtPf1G(eQ zZi|R;C~wdd+xjWFUn*0hfxcW!>08_qIao@yFKw)XRM102O`!S5LnNyYX1{rXx&1mD zs0cy*>GXN59mtVA?dv>8?NwY%a!uuX9MSBW%?7QxD_Qzp9A^lBZ1@$*<$4HzGF%Qk z{!lfRMaXe&Nja{G(8?4XyhQpk*?BgDoC{6n+11eFm^vZa?UiNl{x%?~`}lH#vGUWb z3ET~3pnR_H$Q-$XuDzM}T;-H6&ym0I$Q1 zcOiS6)c$51bf@cRQHfNx(>cY}Ik{Y4yV7%hYp!(QVZ5^V~X% zn4+p3QM;JrY#S&8p)w-z<1cgxLi1fp4$GmkyhwD9!@DuZ1#~*bUo3w}r49vdQsszJXqC>`x5xzCdj1h!|~{+)->t zgt+j8FPcqI%|jS)m@47DymY>usf`_a4Xi7O&)bq0@*K-p5N_fsR$r1g8(ZC1od=zb zRX^zQek9Lb zLruX&!EGf*Y#tB)j6afN9`BQ}n@r?<{VrHnY)WPbtZ@X;XN=QrE}Iin9HWn#;G~u14(&~EV~A+1qs~rGk@8ZN_;_g? 
zu=lp1Y!E`V(Q4cCBS#o+ml-P6L?EIkoVgRSL)JU-7_c`w=(K7~W<>WcFV*)=biP4j6`gp_r`C#Q3@ zm%CXCS!_!wCM8d_=6KUy^YWoQsrO3yV2{X*DSl$=!VgiylitMfweXqz*z2Oo53Mh) zFQ>r%Eelb@0h<)^K`oSZuZ30vv0!TE%1%zfPbutjgFA`a7deoJF3(h4WWw}peRVBB z@{Q*~k?)jY)`}%7Yzw**Ub`+dxgg#_ftT!hzuqfZBGu{FHs+9DGcl{;46I3gq)7OD#=VFZx|QUz1yU<$f*w&?wA;HeNxyjV*P0D zTyFIA-YnNdc0IgM(Kr-{!VU_4{z0=xP>ta+Q6JeDEosM(w)7D6UR2{{CF>H}R|un{ zhN)$zr7b*J@-Y{dA(Z)oI(RZWJGrGG`z4k~W^wRi0QFXd^65kQ&kX%su)g+bX|F5V zA+BBB*|i@c&xi(-6z)Ozku5$y_Jp>52&zcliBmdgDBpkKUeoFv=>3@G!(%k>tNjWt zg3;fOAJyilJt4OA_%qLIkY3gZ|$Wm=rr?_bL;Ul)c98=bN#e9i-Qw0MMnT27*%^Yxy?v`KsOz|K5gxwaa* zyn*Pv*@k8>TQNNA=`?GB!yHo)Tp?MYiSEL)liWWZpQ#!S>-}Os${VH=&m6&Lw}I71 zd0JZ}&p$L`iN0k{j^;<2Jtz>$Sh=U$>1Os1y^nUpiJUoX zL(B5-fQ5V_>KyV4Ewg}|{XjolUp|}godfxL)RfOtTSlAEQRk@?xpXQByTkj7gi#%) zDyoX|`W5O{7bbyoDN7%nQ99A|Hi(5&QB=!UFC0 z_%U!hamI+lFh(!amyymo7L2E^OZQ9buRzUVq?=z~UpQk7)2nc9lNeXuBJUPnoV>DE zazjjJcV~ZolunG7^G=i^fcUzdY`+88u9s=R(VKHy-1#;cskfUw?je0DGQfu??7jLW zI%|O+O&yq}Yz&=3?eJNTEV{DeiO`Z?79y0DG}rEI=Vk3iHK6BtDDz%ol|t2%*P(IA zdc1A{f&@#UaRddlT-dPss?YOwv%cnKYg`ct{N-(s=4{2_8`3c0;M$)?Igbju2(`iZ z@Q#79M0Bhc6_K=y`I5xCu{^Q|dlA%?!BRHUGdNa!fJX^a|8%veed$0TPE$~OboYLT z)T7cbgiuos`?aNh^8s2yYKk6Qgfj(y^}Mm2lkH9+Gzr**p@;631x0Y1zEmuWD-x-F z=*ZU2J&WgNz{Io?ON+=+k58&y2AR$@U03Z52JZ^IK5gzkWc#^+mie{$$%v|zY*WM4 zQOMrS!q%<3u`-yV-TFVP) zBSamBjpjPU>*oDhTznQnaqdCl2$T`IuP5^GkKA}&0*>#>tC_pDT`a_-H@HkT+Vc?k zfxrH!G`F*DFrS4u93IeyA#Ifv^)ZU1$?xe1Q4`d`34>E>t*L@+&k33B+xxry*Hu*& z??nI3xAm&yLQjn4*;62M!m=}W+|c0n)LPi|VsPa(+yH%8lE#LvKEoxQm)I#z2DPFk zEs;7*M353pnLSz-m%8qvctI(3@Mo2wPcNUmA_P`VDKxQovL|8|S*Q zHH=a4_aW#w?;>Z05OGLKeFqviL+`aUz0kWnIug=KP17<)eVz>;`5^Ib6AGk^lG?ObVNSFKn$TzJwHV}y&GcWzT}P=k(|UC zeGt6vHrP<>;5RnCxOy-y^pmd5u%;U>RzdLO~> z6is^Y{aD`U z*;d+ArYJ_NInSpkrHer<$PdbF7bN6XgZT^>R=37;{VKk3UV5f5TsVg2JI;#Rwp z15c(C1k!-aE_7sP7n|{}WK&k)iz?Qbz!CX^6MUL5ER2eHINWr3|0niDLCr_|Qyw=K3xkUPZ;UBBG%WtA||P|^22 z#zC`~@W}AX;VY?y940lq-im!86bLuIH0sH$@9OtG>g)Nym(S0|c^6HZ8)x*2g%C^!rBZp>%iHYlPdb1+8ObBUEAU(L!ttU6fg<;Kot+)x*ly?`( 
z3j5k|h2sf-vHw`GCc~T{TQ?Jel`eC@4L6C^q+9lybE<9#T3b z;?3es>#LjUstBS&QDX*3?k8UbimSsPMfq(4mh1-N^HJ2oL_6hsSMoJsqaYn52jeR= zr0_jvCk{%BqT?Wrx5gM7S-hGr@|&XCnetR^G>gB2ccF)a=$kZe-U_mc8 zZ&7k*qO3NU7(}clu(0@E%!Y#%!g-?x3yCUg9S64}%5ZVdooV04m(xR(L=8m_)b{)k zKhE^M!&AI#Ib#T{+5yqWSvUXo>A9JPwvVrapMxS!Q`zT<0Rf!R1Aej*%C*#y%@le6 zTd)r3AV;VL&$8XmtFOfm3vYjHencWA>C0E>;|jci{t>|s;?l2ZL}|ETl}05GFPku-*b5zvevb?4kYC*g9 zn5s@ZyD4IS^$pp4tgo*C8dwZx*x7#jwn&Y)MT)1_%v!^!!OyCf3bvb+_pDA&1KLlH zgMv?;!BrS|5Hw$_vJCtemRZzW-7jdi5yyHC7u|jr_^hScH;e^R`yji}tl%)3$TKza z9IS&RdcO8EzCsCHOi>4cra-wBzKx0>HGPe}jRR>3xoeAR8#*eUk5C{AMACjk*Gu2R znw9ix9{%LNJ}u89Y9HACaijALM@q~#jP6$@#rFN=sN-NW-Sg%XL><`AD`s$ zja&7gP%UOA|FY{TwzY=-&oXN%&%=X+sLEiZTZ(8bN1kv6d~KX;|GLwS!)DI*v|N$n zgN5y!Pgza{A!*9lHsjP0X_|syF{1O}AD%W{brNDOHlV)zJ$Uv~g6&?x`=RE+op6hm z{uF*Eu?Z->!ste*RkBN~g|5MEx(qt%`>yu_kNRwbs!#4Y#AyIOBR_FyM+`mF=vqTx2V(8P6oB(!_LYn%Gxt$VMy%|;{Cpr#O0KAJ+8)t@i^b5kQ z$dLD3#@RD=AUuOpv&FBhQgDiRG!4k(D$?q@= zvx&2A|E;|YjOeZ6P_?aZ1^4wPnpACpgbE@u69z>K$Iz#cQmYp%l-RFGV8^y_7-Z$p z*5*t6^7Qv{Vtjq^yz%bZ*9;}f9qy}g+X)V^lTjd0G#**-g;WN2)MQ{{ouMX&Ec75qgFAqg$zj5Eca4(>T8Caem2w+(@={ z=(m1|#aA)U9|vkBXdkb?84y^OxX+BGPcGV`3r^l?irrV+P~Y<4sE8sLQkK zepz;|-6#E&KwNTOq=?@ANTLH|9h4GUG0B2$^Sl6{~pQzJ|q8Hl7H>? 
z-!t;>b@T7#{(t*ZDF6B-e~-rBW%<_w{Ohm&$36ddll;3${sw~oMmK*A#oy@c-&pcL mhT`8%0@4-#AAfuA2IbkU7S~TVG_tqAKN$(d7lmTRZ~qU)h7JY* literal 0 HcmV?d00001 diff --git a/server/jupyter_server/static/favicons/favicon-busy-1.ico b/server/jupyter_server/static/favicons/favicon-busy-1.ico new file mode 100644 index 0000000000000000000000000000000000000000..00ac191e771f9df4dc853aae6bcb85f89f94f794 GIT binary patch literal 61209 zcmeFa^^VWGy;+miU_ELG*T<5h!PSa z&7w%h(hcA9x)wjreZT*J=coGzU$Sz=oO8|`Gjq(GYnZOKDg_x684(c?g}R#ZH6kLU zKlF$6DEOCG<;$nQU&LNZ!d5S;-(lM)e!+7lt6OTZr{@P~-# zNER{C5%4!L^lw>+zrRKLXC3+feugeAE_!G{M1&?%SH7U{Njx`)bg1QnB z`IAfaQ7{oP0!d2Ejt>0$1G_AycZ!pS`hWjD5POz5n2iPV_qx9~Q0E6W>bwl2XZYV6 z&||7>|9CFpdgU`&cv|~g!I%Hu{`bYYo1Mr0B?olD6Egl?*Z6*7cu}{I8Gv=aT;+$zS0854Zm(O#T;c z{tNE^2F(9*lK)M~{>uRWOI-i@$bb3vfBE)*`8I@*|D{s@|E5ykS?x_5ISIDM$&ftd zKpt^SuJk~*@z?X{V(uL;?{$j+TZgIXFZ&KIs9`;SRvD(o`bVmb^_Nr|GajkN>fYaJp*_|VzZ0YUmQRAP4t5^m;LL;)VFUQ1E*gU=>@B`^Q^)9C9-nFA zUgfQ;9w(>nypfG-uc#0^c}A{K|M>Pm$9hPSQvF6>hl^7`8zpP|YgL9rt)M?3VQ!}W z6P=KKswRw|w-~EDFL-!PdmS0OY`YQarXqJqQ&Us#l42~qAM5^f20mp`fqsANm`-S> zl3!l_{N1~MAC?0+k*6B{F$C5o?+rFQ_{KZiE%fG}m7_7kjn{uhwEZ4ejbWu2-Q23b zeL1Vad%UICx{~v|yfljj0lRx{pvQ9CvD!L>kF6rHSDI)G>n-_f@BdJf4tU|S&f@m| zkFKQGcO~D0e0)pM!f^|KM;C3A{2PS&3H=oaAz*7U2Y<{`M>GvPjYSEDlpd%wDqAq? 
zQ7V>LTWo%8lDlY6r)z)Cp8PP6V5b82d3|{(3DZ*;W#sz-<`HRj`jU&q+%jW(aPy{8##RXGP8EB%kH07L>^Ru;>~s4**P&-t%S%|@ z44>skoju3&6!!oD)d7ZX{!022 z!YzXMSKm|`kC~TqN3NFfIJmyXcAT1(HQ4p)U&iRo-;q1izU2te{tB-8JP4t%6F)qIEE=_7u2F> z|MVEoB-4j$PO4Q)%kPSd8@P5m0uD-D@x@*Nuh&YI2=8^hL(Ju`?PNtoc$g&v=33_@ zSCQhSPFsdd4TFZc-4FP8KcAgA`rcRg&Bt$whbDF(QGZi^(%OY^7w!h8y&@66ZzzIt zNL)Ral8dEJ8RILG6VzHwIc=YjK*>i?rZ%p#}N7e8B@7lkdk`YCfj zP4>`^T}{zrlT|^Phmu}K9B8hu77AV&H+}NM^ue{=l!(;ZgxmnrkEHXriGP zTzS&*;L`@t8D%5k17r!|N^EVsZ6XTm!xsb!%gDLy6siA#~>GqY3U?fygoLaIxXiQSljc@VRCCMh%x55z_dI;y3-g z1K(d02m|-^m#!!Ppw?dpM_r}!(MjL1Oy>Qketxn6i|C15f8sd!Wj3{yp~;vbxN z?V{?iiF=b66sEW${Pbo?7-1WdIkWKMBu$!!Msf-hWxEz)Xl|#SBEKFAjI*xDh`EF* z_Ais}szGdVN?YCP7<_$fBHDKSWRZo>Qe)Xe5u>ss;ThCH4Kuxv!y&Y%a%JJWnLP}7 zV2iA#uR5RBy7hLfeW*gGDdR-dLf4q?cE{iC=I6`8>uEO=94$5(+x4W~b%jZ1>5Rs} zK~n@n&^LyZxoZBcE^YYT-;aa9WRq9J3meCmd~84~ue-?bjI}@rTEKA8%gerGvf`G6 zksxddM3@kA{#}V@LnH*lP$Yqr{#L0}G&8FE^$im3S(lD*C0?Hl`Y%TbOREB~@{0w% zNw5oUU8dhFi$LK5x)$UKZ0=Ubu8XSbk zkc#5p?TPFPgS`lSmc-dehK7N{ovw7rSm8?6o4WJ0lk^cyu9Zce-IlckoJNsv8pOIT z2W!L9c@LO?z~bZUqor_j$x_v``Fcc5+aR5u>8(RlpWp>HASIf2^xmjIk<(0Z91g3u}ep{{S$j|pbr`R(-yG8F3Vjjq#*yLUci z23-@;houk`04h-*BOxHqCCj#78Ll6cJ(xa(-@ArhvW;sbCkUGWguOP+KtL$cv5@N- z<4^A>>F%nZU-<>aG)w{so&UA=`abA=>@t|HT1D2Sbkl)c64NvLyP1X}&nnta!;eC< zn*+6ax(O0sz%>q=CUTmxp#OHgusyly{e4Smm$%|h$WFpKmBBJ-)XT&O%kWs&&vg)G zJ$eCWw6r)9MazA-SR{0nS_w7bDs`M;B300b1<8}hXjJXgGmL8sBHnkFlCU)oz}x%8 zg%n|7@NcmWqGtzGZIeatiy00*9quO<3HIj;40q{|zA}6hjvHgml|HuosgAFplG6#Z zX1gAIKEkI2FZe{6sxh#@SP(BYtz*SYIk{S+j(FlkhDj* zm;>os#>jbvNgejt*R=c!Ewk(8Dm(hQHj6x$(#O`4C?6f>9Go<|n_>JZ7YXj$Oal3< z>$YX{Ql&plo)HejmWzhdiWd$eG<7hrQ964%B)o#nV6=yiCjZ{DGlv)>rh8kikwQ>) z;$Gh0Ak7S#lH+0msmibz6I{C6IessS{1kIt9e*LD3RubOa&}0B;S~j<$4Go%Fe(Nk z2y!)0Pt7_~M99k}9*~y5%2olM!073CNHdlO_{`l(fq(c~KA&RNXJ=yj|Yu zPKdJK<-xv8^w*q&7nzrjw!b)8ubu68Cp>ktWh7RC#ktlnQ=|4YDIA2!L_nm$Bk#a3 zqUJ|wt(FH%6^ae0y5tq0btFHmu*`mT@VRDth+#~it=p~Z%t-jScB0B}-gC-%T8;pFF*(=3n|4Apd+DusC*8G~>FeF2~kS*mm!;c_0#mD8UuO()$W$9R*Q zdM%)^N=L^T(9$E+#RtE04`>k9&k7 
z2zDwGI*@Kyc1aL&^#r7(Z9`C!|EWr9y9De?(pu;@hWk=8_X;l^Mo2V>kVhYDUw}U{ z34cMek6noQaV*|opg2AHCPBSPyx)}=JVyD#({{#f7wn7EvLf|9w5 zTkP0ap0^+b8PcblB`a~Eq=t^`6!OOlxV0jxHCwb^wDsOLjdU$>!dDJ3DL@;$PM%~R8pO7*F3Ifb3 z`Ez9KAu6V;mt&dBwfX(h_ujb7bvqRNN_JNe|2kTDdkMjR2vug&U^-eiQgFUnQ|lLn zv253=!a6JIyCm=i6JdM-&W+0{8LPs21-L%b_`KM$EaE6;2sKpkK*;^%dVEc-9bw=> z8rZ#?X`>FT+gLqJ^j!CL`~BN8PU$u|B2fKk+R&Jn*|US$gx1L30PJC_Eh>?KjJFX* zHSW?vtTQh!uva3aide}N=6QEooE}O~Ar#Z&HLC+5sj~PHb^YUtQblJgb*?Ty^_B~LKe+m{NA6JOe<#n>&b7W!A^4nK z_asKjnyY8EzwxE!C3tlw0AeW92y4Lxl8Dtcbntv5qZj^EqJYA$zreiGHpZj!Fhp#D z%(_KKq$~)II$C52&Py|k=LT#+glHr+O;QMU#(3qE#fwrvUW(ZSPWP+h9VslWCE|CT z({qXZ(qQxnlJ^59P3g1pVTF&YyKSKNJ_dOsUwdtMIP_pLA=lY~rSI`q<{UY6^)ZGP z@^tMdXZY`|l&8V(Rp!B&SnE&I9fu5s&djnC?ZZEO<~E0}6=uCR(X}Haj59Rr5OkQp z;_R*oP{S{|kmVks1(4_X^T!-_F>l)0NGGH3^A-|(gZ;ZN1f7#;azX$;izy9}YkVWZ z^x^_L8KyTe(REQJ+y8~yD|YDjS>J(81Z!u0hT%mE;bB|(Il=kKJL<=mai1oS2SqTt zH`7EDVmX=vLZ1}LC^!-7L$;ruB{LLedn_!pL`{0op2t>4fCdQ=({uB8MF`ny#)-b# zyYgjLm7k4BKk^ojy{#|yzN_PSGb|KV0-XxDSS-Iw7hc(QT-TgIa+mN~2E;p%Rm|}9 z_7GA7GIoHIf!y|-kgh%oLswBtixl(L zki2pij0{H4Av}sUf{Oaw2no@bTmT@7D=)4?022E+uv0ON+c>#7V_{8@SL`P(A%Zu7 ze94}R5KBlUjKF$7;6w=it$}Fp9l^dcv9P07yMhC*`+QjHdry~$ zIrJR67lARuyuWHBpw+w-(0%{oPsZAB`=g>W<0Og4?}yBauxS6yqwJu>HT$I*L8$be}JSIU9|BPG=q41iQtW&5y5p= zEPNCJPX!WZoZufGxVJNKmKjNEh2T<{9|JIO)Hl7yLfhNi$Oa89@F+tUhMHu)yO>Zd8bU@MPMTxq_uZHshwPOY9fR!(jIlNJndp_0&y8le%z+A zB>CHTRY)fC75S8cG?8fs3akC& z7LbGhkqW|A&$AvBgpq-0Qja1ws`>r`L6FS7ruF<#vJa&P_mA}xwvaj*AcU;}(`k6= z*db&)%j|JO3o+@+M=X~E(S5a@d2GW3Ea}kzIy8uDCuAKN^i{S^%64teH{MaRi%DVb zgZhNRcD;Sxl2GdsBf3Ele|6WBaQOtE>(avo*FV)qR^-C6QPiY4h}VZ^lH35I@-9U> zVW+gnJek+HgBg(%JjJ)J>Ug%u?5)U#K7toRgrNq#asscY%Z2&IxDhldvKDbCN;*9R z`%b;GZVZ9XaM^XYg743%h>Zm76W2Z2NWo2{B;Zo?XcOQfUdZo`96ms zcOEI3@q^Jo^z138dFAvltT@8s?dc9)&D=u^ZG}`-crTh@fqKZ+d6?ZfPiG_vWe%Qs zA-3*k;9ZJM1eR~S#UKp9Mb8D?SGS!e9&(=LTaP*9nA#Tm@ncU7Aa>N#6 z2Y*HeFFeThr&&N;gwq8X$d*sE-z3ltQnHh1dLInrVV2VhP>n1->3U4+s$%xCgijx@UmooLnHqI2O$8b;6Vubog;Q}qx} 
zu-ZQ$6h1ZSCm=dApt!|+0I<9bhVEUpuJj%Tg!%!K=wdkR6aOq6v+NPTu~9i;HlUJ{ zP{pnFKd$NCRqA8*g_$x;BPdDfM68*#Afs-hVk5galpU@f6!P)i+%4Lki@72=sTN{8*<033a)PgKdIeXnl#W|uj{ zt$Zntv1(@bgKN8;?J^}1{C2bq2&eaCD+qXJOY+%guyIW=a=nv-bX+~FJO>mfcR6*g zbP-CnSUGU@P>%8==w9|1L}eihl>&>RIUi-G8R5=j@a8NSg|U2sig2YF2C zxLFbuugExDJ=UwHK<0I^>^c|frBJcAx{K$2SA7ENBO#Nq0q~#l7*}Rzh3~upu2Sfs$SrQx%2Gju@ebl&AD23rPYwadB1ySM7Ak z6NGBkG{F!gfxgP`5(@vvXdKitq>u3Ce5_0gLLbuET%5^UE^@mQjuz}rz!u6@M7<%| zlQ~^us{LNsd8%h^{>?BV*dCL@#*38)pU5D;Hx{lo6$ z%<@x157;r>Nqos(rTIL8Fd174x+LlB9t9iRqAU;5k%%K^PGj?ik)KYcCKLSPGLV@5 zY(Gr7-(mp4`4VVi6((PzJB@isQfP6O=Ck);oFuh8glR89vK8;Va2d9t6tvH}y62>9 zHnICr#98uH_p*?|pG-d(-J(CKKzb?#{@Ln$8?HgQ+3^q<#yxLGa?*2R%18j)=4vGI zJwx*06to}g>Od9T-6D37D)>?E73D#Ly-f4jQq*b}vQ`gvh9{OaIDXvXhAfK~=t7%l zDG@UCt`k-MpInW;k3P}Ha`MDhq3soma+>xtbq?8-764Y!i$YE!@2ztyEQs#=`4~Z~H5@TcI#yXWf?qn|x@KSa&ur7Bu(R*NB+=8xp z2CJuLoHZr=F|=xCxM0XQ$ZPa$so@SBHvr3cwHLv#Gj)(2wh?A%6HqZ`(ssML%UNuHuK%v4f)M7S837Om4A64-Q_NfIeNf(*^2-FFM1 z?p-pktF7cr?sYTI)G#Snh!}t$^3`-OFVV{B>LH17NE@B8>3o+;1k=9f83ie4QShZu zdGySSo*oiOUa;7=R4)L-o)At?xVfbh9*Rh=!M-AZ9;v<{U@#d* zD!j`En&t>p^F7Fa2Yc*JLgKex&ywh}U5gv3W?hg*fFz=dqh|PxK2Zr|5k#1iz|C%b zd2temjToIm5YYAZXD#be1_BCPlAV1Cl_Hg+o!3LgyvEf?U{7~ z(SSbnY0q`QU&u|ViAMn3`9SN%KCK0C^&Wd4Y1EO zv!>T4?&n=9B@}gX16mbuoVImWl~$4fXn{xCy#kLHUo(S~`8&L>ZI>@WwMkq1B-BOV zN16J4u#~p#X%O~?Np?Czkiu70cSHV6?P-w3wIfl}WXik^etDnT&dDB?CdG8c=$J9c zQh^GO0`!p}9k7N(%kn>@s3fRV_kz+>&;y8^ToQBN)UBqj1(Fv)Ui4icdw80hFf6-P zmSEbW-@SsZAD)W?<59veWc$yA&bC5o(DeYO^q#f`)_#v3ZkxbQt#7|VQ*n{ z-a>YvNCv z9H8a#KL)C~K$DXKz0$P=Yhn-;^n^9rPdo9Q4KJoiH9Ev&w326NDM<-+@KebJb&QfF z#a?QbwDk;}ZJ0|gHy>^|$P4sdI@$|DR;NXF{6TrhX;CSdp(;TPK42r0+Ut*LQe_>w z>)4Pfe^+dG-_Rpb;j;=t95qxr^6Zv)wGj*XT)Dl`>r7+j$W;Y<~Li$Vt7ZHaqDYO(`@<_zG1pC3W&Fooc4LG=*62Zwxr1~Tq<=Bkx~>|pSG{k}>Jh##avQ;Vbx-YH=dZ_bvA zLZ%{N4xDXiqEHRWCful{eagD6Uc0$!PRHsf)^hC*H^`r#j2%r=7@M8m<5SvmXy|u z9K$~sb;|4zU{1@z+PV06&c5zMaxua^8@kIt5dIXnxz8=+I;Lz04TPb{N$Z|rpJb~x z1#BO%QiYyL`&K=a-^j;0a>{>Md95=1sX~5KC;5=)mfgv@<#O3=W%5`c0|`AIb^ZHC 
zdr7dwYczdjcR9;W&c|Q-`X+o$&tjCmJ|${1|I-P7pSk(rZUvwvfVB5`5OV9xWQ+U0PUPnggwyz@#xfW3$CuYRK(ius z-0N+n({ddnWG`rTQLsZ^EkO?;*u+L6(n>oO84#tY-J}jD3Y~?b4+`ddrnrOsT;Vah z2jnTi=p2LsOTieJA9=dqtORW<`*$&bEFo8GVE$fTu-HZ&?s1fd_A8yhur2hl?Jp1N z^zaP0SXD;*ZP)1Z{u$C{D#G3elQox*%r70~KL%eOzd5^bDQud|LlH4wAbc}Epkd>bJ?utdhi3Rrb;&e1k!?WGFoZ$ld0s{qeM##uyJ4 z>HS_EL@u43eT&MTC+CTmN+%OzPS5Lc7~ZR4X+5JD zkV=1xsXAf8!@pzCXla35}-V&*m-t8bcg%f-k zLA@Mt9bnY3*r{Vo_F46=fYRP@Zlg!Vf?z{@0aBZ~&#|Wi#W@1^MZ`h=kotTf6|xku zZ0vbPieS)>7k4g=13^2cp`z5DdhvZnUgoPY#?{8zjydw#cD#Gz!&f?`PzDqQ^dON> zM+#&K7yh2%$2t7&!P~wIc7A3g8Y^@1n|&ki&9jGEls2Xo@Hv@t0+kU?NNpN?P-DE> zTNvMug6c!!U#5%*ja*N;(L4}`&21P==JR^m?Z4q?^3|y#3abtc&Y*I( z`~gD=^p zszu+XTzMF_;JUVT1B+zhnU{q&O6*G3>Ng#W+)l-A-C7xST&a?;>|9^k=+*NL8&6qo?qpoz zD<32ScnI3>p{MEUY=0AG181P#%`tsKezy$u&&HzqxrT(1Ujco0f4{(y=8wFn4<#!F z+wR5s!DQP8{lN#_GuOrjf8zC5M4iFjN*H7Lw6i~%!5!B|Cx_-z6Bb|OG}#}L{h23x zO&*o=q78bK75+1)YZ`Sjq+=f?+FiVg3@NQ{8%b=tYeVQLM^WPoYEFDDgo)L7M8wg7 zCSc(ipfP|+K~h?~?fBv&KAlu|6Z3h!by!|Vs*jk zaH04LHS|QGy{feB<11JnSrXwBsx`}Nq#N-}GS;6)t#nv-7JB{kdI3FAi?)w0H7Tub zKlBx7T6K-#0Zfx5(@EdX<1%&Y{66j@O4`a@eDM7VeoWLk(_YW{Crg|0I%~H7y|m0) z$vFzakW2O@wuiDnHQL{<2#^H|(v?i};uN3K;*p9=`1WFJkEM)+>)wcdDYj9cYw`xv z$yc%@e)gx&MsBCHHNy-_9)uZA%;~WGf}t8f{#9*1{Gc~y4F+q#;4mdyK%P;y->Zq% zyv%$ib3;yiYJ|D0yX)WY0tjvT^&uL{il7inog5EH^=Ey|aI@$K*-+_C&a#S=^Lhm) zcXk5CKd{{VS?^de{!nCN&(6bhGuMY~z`M;k1g$DL*$?m_?XLY#xCHpeEYx=`3_0KX z#;23(Ql+RuGa^j74^^;^O{EvT-`SbmsZ)-=I~EyHA@Xvk>Fo)-yaqVm{Lt5K5S{`z zzH*xHVgxW)ty-BIxIK$L*7bypD0BRu2&EC)`qkgVBrz9PPJ^KYifWl1k1j&)J;MR) zmFoH=EQ~q^&<#!GiKphpQNKP#B4U*8izTp*v3|SNqa&2!Xv*B+t{qS{RaW@f?E6#5 zXa31GNT^~h-lhFEFME?n#&e7+i8Y+HdHHGP0>y^HogN+Len&03oU2bZnAW;aIvPR* z!!2MJVLYZ)K$&(gv8g;k2cf!JN(vlEk(1YU@%xIg~R*Qq{tQ{3=|6Y2!Tx(utq=I4%$;EsJ< ze#?eA7!k(uw`e5H!CCYVkmBoSTU$We2}X|&Qv>=%mu3kP*8UPv--#U^)ziB>tbGj8 ztWlE;6?U{wv#}anJefX;PvT@W!d^k401uWCtn(aVkpV^PBk6hZilTUYkwM*WHTvFN} z<^mp9OeQHXv?Tjy(^jiizpUsdZUo#kk@o#)s+2!o!*DkvX-{rDocx%>-7}M551_Ux 
z7}Z-Og4%CrVG`p~C7scfy{7{gv!0>tOMZ9F>j((P*u=a&c%z!3C1CZg7Y(|VMP3;8e!oQyBh{&SJ=)gCI?H^cef0ii zvz8xEBvQ>^>d(tO656(=XWr7Ak03uL3z`HfuL_q_p+}JF^8jz6PfCYu(~=j{Ws&XD zwtvQnLs#4K;5pY=lf{6!bJA4h-dz5r=lZ)P)Ojknc?XqCE}<>{uMdlw-HTu*osq#D z4$aIY8Uay?1r4{ORpo@}F{1&Vwwqhsq1R9zz0yi?^bM3&m&M328YE!g8_btQ8Gz~( z4rN#gkoDR#k3cR0YQda_i%I)TuoxSdrm;%4fve6*RuxYDe*C^OX%pM(ykqCdRts*7 z*%p5@++eyZO92)WM}wb!qW_)*w2LtVG>hg>Q2S;+)>N=avQ8j@*%J71zs((g|z03!d+tDirwE9fUQmbm($ETrrAFz7x20kWVek~?K{aJM@i(RAa(WSgMEEl)7^a*BjC zV8(JY;fb*LfMeUl_EJ#MDf_oVT(00i$jzG#gAU)Q`(aq;1HSc)=oj2%)aP~mMxXt> zsM{{{YgyOutipy1`7uhiD~ve0C#Tok$k@|#Xr>%Nd#iv=$-a&rmcddI$k1=~7WS?i zB%e!Vct?^I8i#>nV6)|@f*SlkcE9<0>Z!`!$rIPLzf`0k&F^>WN;_2Des!gn(1?l$ zcB#$z4lcd5dEmZg`reOz=m|9ogG|TQ1^8B5D`FI@hE0`nZAzNy{*KcKIMhOz4$#rNwh%n3L9;$>wGoh+yD?PV9`*z|Z_3INrPd`}`i7G_*LSANTkk?6xARX}pUbre zqtP4A5!2)2`YQL z@2)EG<$3y?!7!$aQ;V2=_j^6;2duUKui_wGqdaOg6v ztu$M#1QmHv^LkBv?e=LR?^t(nk85FsCSuM&fu5*E1lkEROFhX@wWMoXwIIXw zcbe%HAZxXL<3k{zk0y5<5WBmZlZmvU>#N?-1xMt_XvKf^*!^ja+RlGsWt>}W4cPrV zE8L&UN<^TJW-B1I#R*=hX7D%G$46Piz+^oh1Wt=Fw8D5onTvA*n*y05Ku9MUC^mcMbXR867Z^6}Y<8w5eL! 
z+81Wdwb(<18<;%|P7`4(n4pFa)cx~YoRTBZ4w6HY?Q6fG$`yr@f4_p>1mRtc|JOw5 z_LAu7weDRZx*`&n8|P_4YW#*p%_qu|f#%BT@#?xJI?!-3pyCfBy^#BHCW`=!M}WVB zSYPgbyIv9G4aZdWw0M@@BQW*t6##m}0lV7|B`sG$;jWx+{sGuR);%xHmekE4@(IlZ_!)Ei>jd|8{JuN)8%@z>wVJOzX2Z(i)@$3j?y9tYd; zgwMzm7^W!#?|q2= zb>1`r$_+q{0$6~X?lizM=uB|i^ju?Q{ta|=auI7Tdo?B1enM^ZVl%nb`Fw_4$`$uPzRhbPnELHaz;5%&tHr+RuZ~M#!a2QJGU>AM8YOX5f>X$Ty zAw(N2#hxk5JQ$%htA&<81_ThWx)BMMz|ZB{1g1j<9Da42Q*LpQ^EK2|*{VAej@HO#^F5#W9J znz&3hq&8oR7;57@VxqQXOr04ANuP1U)K_CzVSKL%T%Fzp`}r~5q7&NBw^(o(r%Bht z1R%1O2@d}xdu)%&QS&rMXeZn0wH16jbA?v?ve4m3)>S31_9xK5Miwhr&6@6g=x`Ms z+EQ9@6HLOX5gWe_Yl9GO0YbRGR^F<0Aw-A#*tfjQ8=h}7F5|p3cbWjw+VtN!53T1T z7*D6YZSV^^%%7Id^W~sffY&D5XV=(z^q-O}}ja<&0OR72v>% z%HH_7v(PLsMiwAM531K5>~O3tCak`~bf@&lZM5Xyz*7t*cNPYCvIT5%@^>I}# zSz_eD^MRgkLmGX65b`yBU)jytflGug9w3kl8nVK=p^=By>(HdFCrE7E6o&4Gk?yWv zl4~sVQ;DPF7;Z=!Uf?mj{6)Whz;n7WL)ZIj5?mDg0^@UGO3j=geE>zJ9O!0%6Pxz4 zwH#vCEhk5s#%L#^Q#9gce*$4pYyn?PwW_tRX`lA^xgCbIi)}siyyTE3FXks(3PfZ1N-qvaD@@!zqy2fgv+EBy*87L?_(b+uW7sn8 z14ZPYE}(^aRsjf*SW(Rv?ANC?w2tRqRtng*-27d%_*Lgo*-K7LU1)SjjG|qS&Sj$q zsh}CWUTsM|B|+l?+?zTbl>P1k^biYe!ys*2CT=<}S|k43NKODhvQrs+Bs505e&dBS zB4D-L>}Eu6F+JZ+8T;uQP3dC;nyovD@$Pm+U`IL47z2zJ z$B1CmcDL4XIkowTtcbLg9lx+sarBX&O`yQ{*E)VHVGt|KkoPGy^%y4j2+NqE;mNi5 zP*XdIkp4&2j&4UvI<;mD0a@Bh;)*7GZD;H88vp|>qPkLFlTg|`t)cP*$11Kj$*>RA z+6b+dou+>+2r!6U1;o^orDu47nP{CbdRHt5&Osj}oHwVDzJW{69Q0n+a3B-FPmT`I z`!9dQT&yt5PU92n9b_FTxl`sv;c5-;qs~hM${n^#hp>H%y6;)k`a6)4;UO5@@h__x z5~-k90}LYd$N`Dl9v#1Rwf#&~QcMU&q=9vumji09VVF>xNp&i*+Mp!I$df^**?QCa zlbT=I>qR3+_PnzF?j=O0O8hBe^*XmZtDWtC=V8is^`kX|jcsq5X~K>2fD~$A_>?b7 zJ|d0?qf8=ctFv4J384chJNebVk$W|L251bIDsUGn{waE~`&V3+(+r}6Ix{L>RbDy* z7Yh#nfRvZIL%JB(fPevN5>(%b|2(dVo9fIStJ@Hy92XQUcW4{+-^j<;Iqf)jT(s4= z2mtu}P@=*J;n?pIs24uQYd!(m1s!pa2Ck0ET>*zzBBCQ!$E8IS;IvNgp6WOMNg} zM9&W7+bV-IxBLx;21{J(bMN}zf_onAYtHoALi3G0X!cuRXCKE4f`)$=on1>~y+d86 zqPECc~TG3NmmA}0^Zs-xxoT(_$Q7T=lhG^LGiuYs`u<){< z{RU3>t$g0);1J#V4$YGMV;mGsNuqISB`L~%qS!h$fe|x&EIKGgp^Qtktuy*4N*?b5 
z*b=3I;jA~+T0)2dVd0xl0q`+S=PrQaLSUlrs~G{o`~+pCgQM_xyF#K+Qr0W~EJmqJ zVQ$E$YGU$!<1TrvAgJS3?WS*7fMSxob`zX2%nv2er?i2NoQwlN;|o>qkxo)&-(>&y0*l+mrcjY7SnUHW1Z(k5~ll8fIBQxxH7gbbe9eNlm(7)Gl*N^RrI~_=*X!-Ois6KAq0`a)!p!{pSbZzP>MB z_{}Q=UcYW)JLT&b6za$znj+8L9y28PL&`Hw0}R`|EcO1c zK^3o$siSv(54_*e-Sl%)xc<%NKnu433Z%{xQ0k=&ES1ydN*XXb{982mvX82zd)a>O zkCzb*DQ&a2igmoQdn)|rKFp7`_+>048jnn#Uc8)pKQu|0Eg2pk0a~Ddni*nM-=IVw zk-!rSJC)1lxLxMOrx)xr2JA5E+zS^fSfQM3*;8$7W*tymT#>%u4M&s$;ptDpx4>fP zpzqT7(%=06It_HPolMciTqdpzgVPbl7w%MKc=P#i3*f_w=8l_`s5b~BU!MT&W-y4@ z`1}s#J3==adO!=x)eEu^Fgo+x4eB1at!a)`g$BIm^Ihm176>}?S%!43W8uK(fDsVW zN6eN(L1RY9eN&io%by5X#V^uLzM8`mWLjj<(Y{$xad;Bq6bWcfJ^A!i01>d>W;O;o zm#^L~#bNVF7^vXW5sGWk>!3U!!k7XO8t0^7f(&j33C=^vyI1b;)ykHat?%cuDYCG7 zJGvNOh^V-O>3TW3SUox)2#1C^Kg#>%DRZiZP>pc{x!bE7BxKJ84CEE55R)BM(MnRU z0H=8dzw7E$C%~`Wa{(_RHpIyjcn|h_J2u*CJB<;gq|bUsSx2lgs&BZz1?Oha^?xj0(b6{J7n3>gJ#R zoe`KvtOxPIwMoJVFGc`iOL;%YL1bCtsQ|Zm2D|I#mrlG1m80Cvf=c99@}UEO0gU@? z%D6vI9Tl!xzRVQ{o%N}(H+3+3@C@MUxDZDw6lLxm<`NV7{I&_(6-qn3w7j!d>S$Zf z?t6?(u484oc>MZIN(SpfYW6gqxbs1?jn$0!BKHNoml$C7o+`L!o$an`B#)%QFbmWn z!+#JxIWI04OJPk?pVGFnGTN|V1yWdU*B4N0Ub1tg@2?!pkR1G{QlZFq53%8RlBz&h zWhX&g!*7ty*U8p%clr_-&I@2gEVcs_5K@@=b}@?#BUN}3MeFBuo%yCA>~~O0oU7an zI!NqBM<5jzu*@+JjL4uy28zwkRTcCC-$f%p;(KzRUy$9HRG;hn@CcK$g`9aC$2wdB z9R+A4K|PoQ6fI`>w|U_9NAp+J=+^RjF_?JZzC~~4N1_-zEVB7g(J#OL>r(n2G^RQI zh-Ha{7YYx`pxs4#fhHJi_7DPmg94?fQ7@ZE?2EJ{bUtpS#^S8TZS%*J8FZev4xZ?q z**Aj3=ZcT3SAf5cjOOoa6M|{Y_FZx&XN^Rx>;c z4F1l2*-3R}9DV;0(!?ltUYY_i02{i@078C^YiOdV)pT*;gf0R~MPyuqq(a^-HQw4V zr)Dq)eHqbzqaHIMwoqRD+7U7%s~FAxn=Z7ltfd0t3d6 z$w9w39XcMbM*_HG^kzWA+Q**j=CpbF*rJ1#MAz}W=9-^$=a=K3vMLwL_^@N()O#5U znVF{WamrVyTkU&=L?`77I<9`cFUGht7mRb00Z~(c$-^&`8Yn>4kMJF77gs?17XBW0IiQlyi)Z-Q8PqU6|By{%QNYVLLtCbv@1f#UutuW>m zlj>fHI8)Q3hWkx5PYQZW#VWRLsrtx;CDlm%Yr>$2y9daU_!Xo*D>1?zgzQE@s&4QP zm6e$Ll`TpRw zgU$zcBV|hho-7*KmUT$bxx7L#x3Rw28`)`t+m9h&K5@|*>hdoyfet;%qaVz-2o0rw 
z419oo4mfBHzQ3Z`l*+>tfyl}A^z-?)4@RADnUmBnch`+A9{g#kmDH)ZcSiSpa5dh#mtoHg=v(6mkNqbmx$-(Rqeoc#Dn-$D{OMsPj1Ww)NfIb z^qPe&*G1#Wky1)tzDH+2_nq{Oz3PFab`3^9 zmr`{$`D({zC}ETO@O!o(>);2wOo^p?1Kdc~(N6X&W_#+)EgY}Nv`9TKkB+l*1QH=g zt;_?XGh5U2I1!}m)SRxjor3l0ye}?$K0+5`MIT6g8YkqE6qlc0hImC-@ju|vg@T5j#&mt;?3t~Chvi+F5EpOya(cVFr?d36| zh~>z4Hv(AVi4}H3gV4_qQm@lk=}C6?BH|;4p|_s}3K6%t2&ViIq}(F9W(j;hg{{hT ze%+g3zWcULCu6iNu79Ge%8hI9WxnBlRl|>JV%?QUayoCfKkL?!+bz=aB%FbP?-1_0 zCkIy{-SpELRle2OWmSAw+JE$c=+2naT9-JMpRA^mc1?~0-?#3qvgdl1o7;81-}+o( zGQBYE#oM-!PSlD0?{lM?Z<4YHqDv682-a#F{~INtZqJAjnddjYYVH^xf1E%)%!$9D znYlZrPuC1tW)fCIw`&xXXEma}ark2dClbFVoO9VdR?G*iD-eARtcLs3=L^u! z_Ym=#b@#TOb&XR_@N*$Yie?j)*zCkP4Ia8D1+uO+ky$L<>2_EB-O~1CP4G>(+3`tL zB4T7DW7hBHO-=Bh98iCGiQRsB{YAr69Cb0xW~ak_gS0QI*mZ=GPduu{X;e+}4epce zQ7fCA4x929vf!10HAvtrhCVbD2+}?ybilj1@wYUmf5yJHHo>)t%Kou9CRbuc@l}=G z9+Ms_dpX9Ki-Wod?W`T`X#6QJ)3dIR3YeIh3UC*z9_gdd&lfEpZQpv+fP*BcvS^#X zbP+p^=vcIW;7!`2)P%4|I}y9}d-s&c(_g?Mh~2>J&*m0{(Gep{_{DO0Eku&dpM7G| zEgL4;UsJD{+kWwF;m1pV(#SLTqz#jtr2ZDw`?o@MF>P;7t`BDnn_SOKaL#{)JqmXesaKr8gMPrHnmG&hyg?Xnmj;{>%bY=h!-Y{vctF={n6 z%g&bTdLe-}bfTQq?_ev2keB8&Lg{`xS(*30~#foiD# zN7Z)+Qu&8(=XBIDs)OvZx9pWNj$H~NTPQQKGZW{OWRHxHA|fk$mT|sOHVKh=WR)n{ z3cdH^`~AJ|@BN?W`8=O--`92B*Zn#DJOnnGwm7Tls8C!siq&jdSYL7zS$s|UtQp6v zT)r@u=tqnrb%jDZ_vJrspcL+YNVB)}v z5ASXzUZ;bBh%=BWS~mLLm^Rh@r(BeAd+vvW{P?Gzs{#~#R|&(%CI6``a8>?PCs$nj zXgs6**@GDx(UC#*w|#F~{;5FT`~OeCTNGn2rhVLVKQwOUkT?2SK&3v*n{EADx=~M< zzfE!%8#_95#VIbnF}@o4&|1j$;Vd<>hiOSONnJ1+?1geX+tZSV1&OftKICvb3&`nr zmfFG8Y1;hk7aeX@OZk)S&cjH?yVAaST|dm2|iHaR#NKQrf!55+(0E3AxbV@b@K ziBCk$kq3AUe4$i07{YrFTw5R0%2_ytSPa95#4EZ&zmS{&>EdYZr;EzX1hvclGrrb%BQ0(eH&mybI4Pr)3EV@(gARv5je_JOsP290OT_K`|CNjJlL?%}ZvnphMLO z#?WeFoK+E>!)E#B^{6{YQgt9dOTmZ)LC0NaPe0`qSsJQ+kXm$%ylgLK7pN%j|l;K0%+eg?74~RxS~zI~W`Vf7HDik*LAz`EZq3|Ff7@ zo;(r>8&93`!!Npk6Qu(d=H=ZMd=%HGFYk2#QHPl%7U5smq0Z3w7y!T03-6*UQ$ z93Qt2dQCC)y`4rjycxVab!bu@UM_f2!0@z^Tz*@_1&k@h?i{XY>HH_x>a-wqG*)13 
z&1WDytoKTMK)rlL0DVrdWUGz@Od2iOKJA+qoD_%2(2`c*0l5MG$ODBPk~C*ZsZHS7|qPk-l3lpY;59KcQsNC7CJ*1a;NuLY&tg{ z#{d-HiZ}4487-oNxYmkyeqrU9Cj*RO(DmgiCes=)BSQWIz52!#zbmZZ+=LS2Hq?H zZ>F<|IR|exCjW8uQpX~%{Y8vA-Pa}7B=(=T!fPTF(9v|M%FkliXIK>PlZl;gykM*& z+HDhY@&T;eSx6V`jy0^+uz$`_TsB@avXp%(eJR(lm<|F~4g{HtLY=XsE1^S(G4`E$mmSccwU) z`Pk~MF0m=M$b;`cdDc(pw6dmb5W~GsiQ(2>4i_1;^De;u6PaQ2oy~cg4;jzS%tlwI z$&z?w^-S%(#;6c=k*P^3al4<|2ItX|JS3wj@yB|3GyH1fCv(uBN{eYfT_YJFAuk(n zAl<|ROJp0V+a$6qC$>0cV(BKMF+r2#!eYxVY#k(eR2WsUL0ka+;v_pQikUXm{70hj zLdCXfSe+mMjZ=I?YrBUsOhXzFc&W~0kRE%#l<>5hpL=Tf<*>!~+{}Wz83Q(h(#xpF zz4J%?M-v1&R7rn?3tPL&GBJ#jNZ30UTz3S?g8JxnCwPT1>geK|xxSyZ=B|9~WHV?; zQ>#X7F)}3+%h;7ryh+m?-?Y<(s;7~EwV?R$%eH7fOA^CjBS)Zr$=FFbUr=UIIZ~*! z)o9G$y<}U&_gvD9q0UJYx69Ua!=auzT4cW_H`#!6o%4vT*ddhGAwoWC*C@7Al@CII zhbo@;Mm_BfiiW#&m-crqY877_3B!A$hDB{!A9%*N<=~4V6})D$DOrcY3prrrJ_F|C z|Ko}~1`Cphg2VA>G~$8mtJ6aj_la?X$md|9F<<5Td)chTAN?bYuHf)k`$9=EyC%g? zwSF?<5K5MwMAx5pOsyK0;16VO$y&Wli+`GA{FiOsES(#?IYnt8h5VPT2)9TDQ8(c_ zDtBL@Uc+ZU{vc!MLf~vkhJj$zY3i=@=u7xzD0ebcV9#=T{7nfIfQMeN2`bkTO=re* zPV=HZ)77c?s2ZP~wTIM-5Sft+aEMrCju^o)xP$pB$O*urLd!n?fe88j%)c?L>$Sch zx`MaN%`Dg7EA6VB#eNd2P?pK#fnlBlUV{sbWiLS?Tj(gHf=ls5{MBPVs{v zW?OY)_%P}pRA5zK?Se#scNs-#rI^Pn^U=E*uVpVH2k9FUMT5z)$|y`IX13tT&_V9T zP`az4gSXtGg2qQ!AGNWfFEiHJ2IOw99~Gb=W2KnA<29$dpEs7TkxvN+;1V3BTOEd? 
z@I?ls$ogE#>TyRHc)=3Y`f=(GZUA73dns8*d}&inza$GUNRGDYx~g8&M3-C*8RmZ+ zs3qx-gcK$uBoShC+aVcTBNi#q{50)#%KUn6)|p_v?DnX|bdu{~u)?xk^gkv6s9!QP0PT)yb-Kzw&J4GuD+Z0K+6)=5&J) zwAM{)JHzkch_Ng#>1R%kp7+so(Z2-n>EWT%%HOWcWk8BJO9=9T+Z-f+V9u3HOPWq+ zHhva}xptWOOhPucteN=N(<)|9_~Nk`#`d0$1f8P77{5bsu)6_`wvE^DdjXE1t#i9x zKUlV^6^{|SPoDnf=*R|PO+{Fw3ARUavaUc_QDXh#~- z!LcCjk0f&BY+m)II|F-@&eXz!bB_F()01{J+(CSsbW=qkp#9Tj!AE9BJ1X)G>co9i z(U8v@JKCJVr743!EE(EB1lOils3Fe|(O#P)K2umu#GxuKhef0%`R;A4#`S#rao9oP zTpnw*N^)A#{&PHJntNvps`G_Wm>1fxC9YodVgvQz1yQ7M;ALy(Li=mOZ|hV@aBKcN zma%_NfQ;-YMTXH+F=2j2`TX}+=?OH1cZrm9;{}$@+*}O+1?doC^gKBN5>_^4Qo->| z)~V5O6oqpvkAp5 zQ!`%6juK~sdn*5%R@9l0V&{y$Y^~6Ion&CMuybes@hBvCT&X6}LmDMY2Y9V>XziQ0 zpIIdfYwvnzQ2njrIm6M7hcKteTF~1=wx;*#;An+kruLsL4A}LbHisentUmWW=9UWa z3c+Rmu{K9$$I3C+V3V-B!;wPMZ1KWRQnHYc0E-}wodmVu52SeO^&o{p(ten(9z+ zOaeMS+@|_=LIrRR0fjtc?k}8;ishW&&-lCrZXS_w{#%6jnw#GJw7r$X1Aw_@X(UoF4{!?4Ia5dcIpvZN$9kt9vYt5MyovZIe{^Uqtb^D} zyG|7I#!Q6+FJJ@=!bmXpAHxB(BFM|^c>S|}W9!HTJ6qbXr{D^4aY|yk$GgXH5ya=L~YV1mKZ@6=jll& z4y4ioHjiJeSU;yt!=aqJ(aq=^D24hWmC?W>wm(`l(U(u!xUth&JWZ{}i-~D{DzzIt zVR9J$4U!4FUp0UKL97oLhCU3MiXSe&A?1tSYRXWQq8qJw7w;Q@Zg17dx7C=Zec=ul}L*IskbSRJ7P z7MY;D?0&5Giwp`QKnO9+d|Fw^B-rr|Pb|Z;k=1&j{_=+@ZZrP>wBjULHPgfK>U-xu z={vQbgQJ^r?z#iJ=uxyPxxykVCpWK}j{ieEgU#THVKSI~6_|awDlQ3-Bm@av3uUxz zg49;(m?fog-oM**id>o02owmkO3Sc%FQl1geYBNbuxxjo%S=9vUX|Ov9gqw^q@I}< zpR!9Jj~hZB?{DgJ%1qVFb8pa#*#umV%g0Y8wm3QpQ2fMVXhy0c>w{Hs{t&8TLE~-n z)X{tZih}k*_AY330TS0jUK2FRd0y$Kv?XDMCj21g4rx4VKQ;@a^dykeQO*_(M8R3o}+ko>b<%ib$+-!31bN*Y8q3; zh;sr7%z&g3=RF#k&V|QO6Ce;>(vk2hw#70xek*0K>X^)#&}79b06BBJW-iFP-+-jk zLj(?jRMDPSM;amPloc1k$$;6k$-8Rkz(=TNeR#qRB5~P>cFi}`eKoaJ5(LiF;M3TU z7dRjNXX9DtdLUR~Gm!w9ZE}Jap(DD;EZh{)yi0@QP_=P&eUQWm5egL+J%efvsnt+& zUebWUzC|}RH)vC?7jMIP4ZmdLT(4J>YQZwxv;^OX8M%mW-t=`owg`d%1r>U5_au_L z_1=1fo%(QIoacnQyEu>eTXl|L!T?CE$>wjL!hc0jd_8U`))GvsX6p{D>NSLKGk25_ z49S+J&dlexxbizw^u*nzs5|MiU7wWvj$XWt;o&Uvq*t~|w3)%8?Xt}k*GDVANzWnl z#u3>C#hckR@O?a2^7wcf^%dteR5{XdNc3t->{sf))h=aWV{Fuv{-UN834}J(sPBU> 
zz6PI2+>0i}tHJlTKp3)P$^elR^v&-nM%oE+api*%ufJbb4gpc@+|hCRll>g=C}6fG zmvoOSn;YUf1gYusg;v`~&G|Its&*(?$+Rd}qB;_KG;<+~m1?d{kW*LBYln+|_^8OC z9FQaNThoh1+%(GqXvWsvJ*x9FRl(=<1hOD_3^@_o0rKI_4_zVG!jfN=?Y)|C+#db3gbkk*k==< z!p4Ne@?{{r<#*}yRUj@f)_H3rZ<}h%WIWj7CubY85}Vjl^M`<$N_5ct6B@C?Kq#m9 z8gOJoz3(8J|A1*6X?&0mNG=oudA!r!TU!kZIm9yJ^AxL{jmi1{iRPlu=;J@j1cM+3 zN@ifwKP)nzr4XJ`RtdYo*X{Ho$b~eh?A4&12QKlPkNk|8%$SauW2hDs``}i4 z=4S2j<~fj1&kC+A2iQyIc0W7{F!CZoC%nuHftk8RbvkbB1?+8nDW$>?vNY>C}q-1K9zX>J=AV7<2{6s|%+ zCyac4aJGwlAnM`Nsu+qB3;=!|Q_9XKkdZ;5>57G@%)4lHk~xlJ0`)kLOCYSIkoRT^Nn*YOZL5 z)bzPK|9~f!J%@qI-a1(b1LsC{>4_!Hy=065BmjZTMCn#yqt=`IUj9Ih%@NFM@O93G z4YG0?gQ-;Y1qMB#_S@x24sAyr zIO5WhuFs3E5YBp>yjd`p3js3z^Z_q8C9{e|aLkAXB&j!5=ndEe7{W2(dRqp9>U&SZ zB!M%)|BfOHyo4|7apL}`rAK9!fPQg#_QtW!ne$A`bUk4TmE;W3TV2D0ULWD$l|97oi>k>LD~{eX=deNIX`VG%YaIR< z`F|4oSee7sj^n7lrXikVPY?U%Q~KbD#wcaemYMPzXRMHI12G!dK$4bNO!M(WIO1{S z7(b`a2b7dS`(Z7i1ZwJnC#*s#t8SlY*RyiR^vs?{VcHi3A5<*e=1YVyOmqXYPCo64 z=DMX%X9kagp0mv)S<*qlM0SVDQt9Q@yri9_YZsj#%aBcc8~7xZTD>aBMD~Vqyz%w> z5~Ca|43mh*>)*QGLk*HS$1rTL(V*6+w>gGqfs=TVtn|0~@21yMnn+N;rq#S_i3IpV zge!#7A_}_=^$KJPUJ_4t3BP;!P#OsWpcI1A%X#Qa9sn{u1&62mrVEz_@e^uiQFr9E z{jPtx54U}}*F*C_4Y6t6gAoW1&Xf-brBW+jec*=~(X*MTpcEWbdx9_n90PiLy#sYl z12)DBq7PM9Zr%u8J>e8OiKsTVh)8C241h~M#Knp;2V*GQv1PAt0R*k4+Hig48dnaX zFnkxHJq8@6V?*g;{}tPX5v?jttjZVaN$dhcXq?9_0boC_^v}=WksmL;>A$EdaScf=}&P2kLc8&_soH zB1QOVvNj6z>e}q5+}&m$%NG6XXiF$p#8I&%V6h$*INZd}>x&a)P?yxP*_syJX=_{V zmrZyG)G09A^jvogT%R!c95q~|HC9BqcFO!tVGIBbM5ZVZ(Q2geh#>YL6upm;US4kUO^x$2ujwgI=a)P(ff>Jj%slX=IbxFjA38Q$ck34QghT0Jx# zg@>B!GDf`yS;-z!U1gl>@EeMhMqc0GTXxNt4Gi7VtgGD&+r6Ld4UAeZkX4qVviCI76*#{k1xLg>nAiwEa8NnXH5uSPU^ z>%f^J&YWM&u}?%6(V3H@-l!)^6C{zX1G{^u@$5WWj3Y8anDT9KtVK=yFsr38B6#-u@;vQ;2SL6|Ma`Eq( z{S-dc0dmN@XO*Vnz8!Ck15}5UBNR(AAeX*H%NF$io6n9N>!3T&lI_ZMf^3G|&FZZa z`-~XsP&81M!p>Av3aArG0^QJaXcRm~#!bF{t=Rh|JuCBhl{nFrw!K%YDu;9KYCgVk zVgS8^Moh(ciiJSM|$x6ao#pL znBrOb?ti{>5P}s)9T|lK$P-p*35F;rPbYraZ7}>90BYtuMX`mkVDO2Y{ce{4E=>+a 
zWSg`lS8~}j5MjL~{W7Nfj~>bf+qrJ#Dd+1|dbdw|=`gJ1yF0kJ)DO6?%HWnTPJ~6S zxbb#JGDgUsFF<+EJgMTvg2pn@eAs#^TWrT$N<(8sPPKkayjHL5XekYbq0Ulg@thLr zU&75KC+DKWu3Rj@4=VoO)ZK@_ufJsfeqafkKFDbQ!wD`J!yMYpodp!lg)T9^gw;E9-QXjJ zNivp7;g50RpbvG{!5sWPI^jcQ)&Y0x*8-+B;IJ0i@>`$;OOgsC<5gHm9{08c_zIr* zQogPFyUdA?jwGx|UdSo}$vyyXin@2fKLW&vo}!}G(qT`2gd;PcOnOBU2ej+!9sW_1 zg(?phu{K&CKLSOSUld>849UEsR*Ti^YG@4J1gXncVzjLxh5t)Gx;KBN1C%1Iv6yAR z;dOYE37awE7v?Z)^3b^dH53nne~G(&y}oGpR}@qeS1aY79Jmo$$b;+h%vZgJyuuM` zWpk1ca=u7Cuc-G1vMBNSFsl)fLMRRyu9q}y% zx!3+W&R_N?7)YQ5RxdAK;c4VE=xT>Z2<4zcMdY@~!rnB(iKzq)+|c|JO=c>;Sm}nhoNtVZ)sa| z1k&E@6b!TG<~n|`JjA6%R3*6#MA`p(p&Iqd4uQ9y0EPJbX?Q~M94`JE|BGB8$={>_3pVWoHPZ-BQuvN^&nY-S3DrdiF#EtkUopXCoM za)wW=kY zPtLhc_vCj!cJC3rIJP4%0g9g*qPJFY>vP0Q5MrF^BN{A!h}ct)Unf_5-DnkfWSeZJ z%pvDBGj^ovwf1TnqsZZpy@J^VMb^RN#O1YBOzZs2yA&O@Tx?kk z?4O#OltTCLd*luUa}S#(q8vMxrwl$9OoJh4r>7}p>7;G}c%7%%l}ofJ)HlQkdbcKnnQWNi)~CLN z1wax$$DU;4*(Qy}8lyKaCh)!VOoSq#xWPvq_=QVTq~H~$l1Ta<(7)eh9ktBv7v?qlGs+eW*-5WrU8WVK_K$?|RU{p0~ytvKacV^PcYtvWqQuG^ZTAw)8&RU2DEFg zmxOaabhR%f*?7^SEfL>o7fX@{lqZRgE|OO`YwMrW?UBF6krnb{*Hby?uH}cM(D)mK zQeCkWsG%7K8S5_GCOoVeGV@LPE+4YrXW~EG_02Wo)o+uNfwNPf3l#Zxg4c)@k!5Dy zKX(RJZIfu;0Y0x};(ceg%wVYS{EBza$2ToBrS<8mSxlW<(tBd0wR7jyr}OzDW6D31 z7^fnXxCG`2x72YI<5fUyR(B%W-oep(jtUYA4P1f5@D|;hhDcg7u|7;x?s}9#dBLp> z4a%EwOl4Sc{e$yw_$u^*Du*eFAmT&YcWV3Zp0&^FgFv2W#HvTwRj4;oT-89*=RG*j z6rV_-eIxZ8Gx`LrBcL7q+7D-*%nYFsSHYDQdoyM}<&*YNl?c8v#T_F5fniwUsq@FG zzrSl+JeRw40o#t3m01c=t-Y-b0MvH``F4*Px^t);3d^%Ort>jkOs;Y@tU@anbAHpT zsRSwCpyuVerAGegQ%YATe)z*ziZN}{{wp6zyJ`vT{S-a8OqLK^ao!B*xWLMM=jkpc zxXL6HlpA896%xL%*KmD$<i29{6!mB}?{E=Mu$HLfw87(WJ+J<;WjG>z`yOZ0 zonnjqef>N4v?6w!{{~!MJolSUDfTe7ZvNH;*)kbaYT*ZJh~ey_Xb4b39vx=9l0}{D z2+j%``M_N&KKhV~3{c3}(6vni9Im3&OXq}dslm7nssg-yx<$gEA+5Pr! 
zlx)Lte}p&!fsBHVRu29Wz<=#VEq8x>=|T;wQ4{_QqI0P?Bgk6$xXGQtZPA!+aX%Cc z-xk)s9q_^TDrp1vIv={O4xdO>SLT%VRTA~s$uLMWe{cKJ@;GdJg~fN!wTwzY1hp}E*O>SMCov`jL$0Ud)7^c>}Qb$hnrbMEuB1}Q6C z-1_HV0Sl?87tbG!?c0)?r}IS3pqu1{v)Cs3K(rzl@KrTQd7z9 ztCk%a@vE}fUrU^2H!C|U@)>j1iu6ooDEntgBGhpF{VndUj7c=^PrMOX9aT{ZX$y5C z{UZ+vJAhtpo%&XwswM|wCc++{^W5TuNE1Y zN^X?gAX19B6cQW0B4kPeLtmS;%FUH}@DMyf2m2rQcB(5-G^Fop88d`yf9rIuYc?(k z%~P`|vHwly_SkY(df9!`l}m%e08gWwz&Fcb^8)ugeS86=MWoLC3tZYV8_GAn2&Xk- z%SwqRFB!YOTzs(kibJL=l4yOrs`E`-;75Z^Z+DrFN$!Hv7n4%phcGQD5Z>0MJ5$+d z9HVNFZO?*;&)k|PfL=P|G4RDpP|T((?&+Ao6Q6;cy9N>}H)U&qxUSufTmi`26&B_dSxJZ()UY4J-Lhs`wk+RL| z&nUReHTtPwn?k!2yJ33zSvqXp4Yz`ByXp*dYxl?oXfK^dk2T&rw%PcaExRch2q(WeT^i1kH11 z0g_?jw7Y4Cw)_#mRRyK!TiM8o7BF&O>It~I}>b!PCFGHQaPMES1$;THvs5FOa{yfZ;S#6gml~)vU zVSh|HE>Gcz?pHTLW#Qgxk6E$m*B{1w=g?dThLPqxN|u8LbD&`04MzkzSSgN(mWKSp zlZ{kROgOUG;)b|+FG3>&;Ak3DoYWR9*qa zNf6&Woxg4V>=ug}#~lY*lWk%W4+V!%it(mmgU($AHrDqycE=dEq$8}{6bHWj-T9;c zZJE6r#NP!Q!cF=efF1@%wQxgFqS1%xq@@QdwIz?oLZr~IfNdrZ{lq-!q>!L-QFnHz zI%(_6r_GSxekdQ;cCLxpkPR2N=pS&y(f3^EMvgsFfti!=RQ03!`V&vR?d!)%cN+B% zYWE8j66%joWEd|d3)hX=UWwvoTeQ7t8GH8Es0SLgiTx0FVf&kSt(NUbK0{yQ1kc9P z!)8VOaYio(`Zv3~MUtvH1kdJz@v_0I=X3Y3RD-L!VfYxB&T`M0D>CShf>|LA{zHWa ziqY1$c9+qvb+D!zZcvP8gBA*E;) zZa_Q{l()=tO}t6R^|Iaami|6I{A%?_Ih)~IUS0GT+We=c9M(^JY1=)oGglc}^x=0e z`|OVJ?$w5{RbDd6f2f9b(-%I;u(N&fYy5n6I<%z`|E|9Hb{0$KNQqc)7<)-=QBoFS zd(~_#ERWiJ?HuoYT-VZ~!0?D=Y5j?ZT9d!C$^EL05l0OXMb5vzNlW&6=svF0hhql+MWnZKcPzeQN<6F|U; z?ax-@8Fc#V-*fe~Ph1({me@u6D?VQmvEe49DaPceABVVLxBlmKre%f#3l;zNT*J+G z{}Sx=6A!dZJEEe<2w1YcD@!MUV%>QAgbo$Bv)QZk2jYp_;}a}_v-UDmCnx}OM+OB{ zxH0Dhf7!U9{J(6k@h^yZMhb=TJWpvJ?oBru%a07Ys28Ina1AJvZjof>% zY*Q%^QG1=moTH#2B9Is8mocjOPVEB_XwJ^E3PU?qJv3_}zMCAXpn`Ko!Ii^JId{Wc z_Uh8}n{$SPx=uj`WP}XZIO5@)x#FyCJB;65Kc?xWNsPZPNHWSf zB`4n5|MB55%vm#F5&X4mK3W{QP?1R+|NF)v^-_5zma{kY0Lu;Y*y9A&m2~t8qpGz?^z?YH`Y0N+u4({H+^mb-< zvxrHi;sGu*J?BfrwQ-sn4cjXeW0=8w3SIXraRCX)Xgyq>j`+i97ul?Rmh?B;8Ieuk zAi!cVjaqG3YO^?qyTy9#E|5- 
zkl<#Kvt2Z$)>ji1*qyJx)6|kq(Gdzj>Tw0vWz?QXA05JR@S=i>E>dvws}P@|0_#M( zuw#|3OIL&&2Y(tyZG%$)CV^3P=J5VFV3fHO39hM!(*ff%p%EqJ`=}_WQKb3i1LO}_WiB*ZW$9QAZ$NC z6f|t2A*T&LStHNFtR&w;rQws20#U`i`>TWBV|)u~eN;HUADq0Rfx~8{N9oHliLgiF z1_l~S&nX{ySxXpL?8|3=S`&G}I!VA@U=j|4r_f|d4q#MKfx|Q!^ivKmF6FX9;eI~OJ~~~QHYd%X)x{-EovGS z=*C|Ag1s@YTjV)bD-11L)bRsRt%D#hv({knU&-?XvG&`1*K8l3w5d(x1&1$h0A4SC7g9tzu&Q84Z2!=8& zQW?K_WPTlEXHI%Q>Qx}LYWLc-(>omZ(tL6*O)hP$jk8BXMvBMd-MB%T(YufDn7{a5 zl{n$*J>_f~*py9B;V;pf&%e}Wlsr*ZVV=4=X{4hqegw9@txXPH#T|%W^y8|1x0`)e-~Z~18=HG2_QoJU+alw8VxCpN zP3tI;MOEGmPH{t@=)YrTyuJBns-ZaL;Iw|?72rp{ zs?HnYE75MIro9Kc`Y%I)fBRso6ZY~;Qr$I87dG4J6KW1X~k00*2 zD`;VBwL6b~;`6d2nL%TkmeQ8IPdYB>?BsNTZSoT-ew5F;hu=@;f{R?-KA)J=WWtoS zmEiARrl4@0)r0-IP|XY-0;*Ys;(8?(M5jzpE_Epsay9={ZHSL99E>?%Kan(*#n-~m zi4|RWiD>$2dj2)vCu<9vDU^+$D)XSf&@-{Gbow#EZ-rTjZK@G zCmEmh+(Dlqhq8gHSWfU6!U!I(L%#jqb|CDzIs7h;I43E=W!`&>*Gi0TYoKN4d>~}l z8)J0dw+T1rlsgys>sd)Hzx$Kti4cGN|8F?gOO2vwr8m@mG z74f2QFh25j1<*Vsy>)&uLy2(b)748>qJG5#{7cSql_lR=S2A(9|XzyG|&vrhzloHR*6+#|Tbwf)C1|tifH) zfU>deeTfz+Wn5qKuPov5F#wFp^=+@vqiN|scwhCeb9pN8MB;*&sbF8APRtLM4 zX0D#7QY8;!a|%R$TbBQIqT~0!+ zZ$nDDb?P|Xm{MBm-Dj5dF7`%}551_t3SkNamioToDO3T-eCf&+1s=D*%g0u2bhr-+ ziNFy*%Sr0$qEX~$p&)n=YMznpd}D=q@%?N)%pEd1b#6B$ga)K)zd}91wb<9}=Rx=K z1*qLoqMXYeUxx56c}Hi@w{4zydAK#Tl(F%Q1bAO`F^C$&hu*&ab^|L2w;r(ORy&G& z-Tz1agxidJ_&+_F9$y0!5{^0lCWB=fLF}Y=YEM+Faj4?ug}5%XV`#TkMKpHb_%Scj zslUC{LXHAsA1hrvOEA6qiRO!nlN)Qb>DlhmXSc3g$~32;WxOv&2vuwLH^w;_uL`)n z^*Dvc9YKc4A*;bDZcH#rq_Srg{urIyKh&mN{9~}tC<1n}hXD!cSebY1xGoxgfvhp; z&9jlTlY&_$50<{Z<1+}$a6-vDmnZaGR>Okv1$_%a7q02|B00og? 
zyK`zy0Vo*_w)*m#@djdq|5d`d>f3)xnTBqBd%^$4;Rp96Hd%F=?ZtI`hT3ycNJcn&un8G13xwP1`AQh!`nrgNN_jO(o?3JeX#WfqO7WoILYuWPyYH41 zqj}w>l{*u^21Y)!XS{Pk-=GvfePGy$HzI2~W}lq-mL}ZVhFkb!Yn(ml+0iF~(DPGC z8%5C3YiB?Wbl&xPVD1_g3ZnY;>ecDeEy;6|X8ir&z^=U+Z@f-f_ve`wcbL9?OT$*d8gJ$pUyGQ&Q}Ze=wx_e#w($4GU)v8D6bC{deC-l zi50d@AFp0FcXK=pxjXtd1>>|LkUL2FY|A(i@I`!RwvdIU7{s)zNG%&_ zq>-?*iZZ4BR8Pn3^6yLX_wQbALWL_@B2i=7tU-%ZM^x*6kg}4zvbt{^&|GqUdgd(A zl8k{{qINlAya@^MwXXThsO$xvMi&iUT8t?r(&A~NWLFi(i)D?b+m1}fgmm0|@cx|+ z!1J&D39sqyC#B+>$~}I%_(Ys;oz$f2F8S3ZHz-XGr>iUu#_$PUW#PvVpiHb0zCS_V>`Q0RhRwqzg+XS-@rh>`$W5(1VGZ$cB7!&gpz|h<=Ev#q-@{c#tHPiJ>lpO6})E z5ilK>E}pUOQ{T44FrZH4C!e@zOE+>iw70UHBHeHO+M?9wu2P*fmS^Fvg2WIF3t6gU z$!q`B*s9{b4XYGyZ?S!Q6jJbf>mb^Aw7m67GgWnzA69!o{Lyl4H8XQ1fLSonKelOF zT)f>2715Oz(8{xs7SyvViS%BeZy^GC4@)Tgf&{r+@vHbq)e}sxEaQC>E|*wkQ~4o> zB>QxIJtJ2-HxrfGpYTimUFC3F4AL5$;_qwEj`-jqUGK6f9%hBuR3QjqR(3;>l^}rn zeYCqLN;vx#Cl!GXN%&{+{(lyNte&AR{u8BD zGki(!uMN^=av7U1y4t?stj>o#7cmU@?g^d-z1AY-sy2nP4HuFY>*DNvw8Z`G8}R%3 zp0*;&Leb6GGl$r0M5U?HW-H@rwz~SJoVEASr*38tUln>cwYT-@&agbgyYdqM;w)s2 zCQDoc*(r`)JkJ`P)B5-I>pMMe-?nQ$8}qD&{K)R{u$~`>&&XIC)U32>gC?> za;+e>xMeov3IG{XTwHrYME}weYqC(cqhw8jpAU>?{N!#X-d9gn?2;kk@X^|#wvkRg zbT@6PG4vr0(n2E5rch{Iu$N9rnyucXj{E_N8@80V#gJU2<+byTxs>q_XS_4n{#LGe z(i$gl?z$UsmUm=@*?Bq~Td!LvFUMbVZas0340dfmB$<=EDS)gw=vRp0I)SzQVj4bp z{JZHL>(1V`h;b3{71+Kr_mh+8|7-~f;fWk}NJ@*;OKUOwQBe$8Y)Qn`Hs2#i1AmG5q30~w zOv0g4BFHww4lc8k>2UKeuS%E|I`cQy-_u z;ch#WN_rk?@xykm3O&?42~Hw=N$|fTj){13NCFjj-2~A1y7SjNcpa0`|2C$F1Py}4 zN8CgM8up^;KS=!s>)t3>JQQjUx?Jw-G?x3_aM32W+9|uXdFuK`RZ{lrZ98aKGAniS z2O|SVC7f)#PftDqRDf#}%)Pu*Go-<5ZitFovYAUVWNHw%B1wXk3E%|uK82tBYUtgo z@cL*;3WI4cdWoT147%K*HP2EqBI|hB-~~z z$9Z2}WYR@^^@1WAiq{0e?5b>I`MFGxQoHnW%ea!@+ga%nY@Nz0j8sBBr)7Ui!SU>u zAy?QOZ`&1`(Vd$@`serj;LZpI6k`u{dj^Z9`+M7WN|+a_0DG9z0>*SJ0gGGcO-3WPJ|LyU+~J`zDu%hUXdU z2|mg_qj73l^?f+L$>q1oVc~)3@Rd%x1AB|9g_|TDgsi!gze(1l zVQ-$X8qHW|6IkBO+E->gBNobZ2wE46lx~76kD5!CnMeH9I6pWzWCU)g>Zp^x$+6_- zKGY3U@BY|W%650da7Yv(RyHM>{<#1zW&xvJoQ&nBxnRnnb-J7ou|(@D^$m~Mm2U8R 
z|4AL3Z)LF{9d#+x5Zk+?%3+yRe+T!>2-Kh@{;c!CEa%g1jRlL?_ObMs$f<9{=mVB~ z0rW3({j-+?%;$j>*QorqJ(`;NB|7*Iw7iav`z`T-*KST+WV)Krm1pvle1mc=p?+#;V6qMU|+X^>^Eey-C1g zC*J)TQE_1~l~J0ejrU4vire2gHxNl5BV7{EYum70Ij++bou^7x^@9%nph^aWe z*;4l;>nwsxHFci1TIA~>KFJhO3O%^}XviH-f#fiP-!B>>l z=xJj2E)fcQc`hkeEhb+5i}LM09z`{>>$TGT10H^Yw7_FhF+y}P{ zaguy=9~9htPre?PDmIpik(yaK>2!(EHJ>EOo-ShXo-H}>_MOJ{f?lsjd*ru;C;pf} z-4oV6=E}XYH%g3l#k?$r*QjcHbG$dzZK@0>D0ESIss zg-j2(>>)Q6Ae~w3Jli}An1;YIHQ&>Vw~-+jlfN%w&7Crmy9^*c-W&p`!S{~O5a#O2 zYb*;R6%Z|1{0wqag8V4786P~8^qYK6vOJXL5mfI!^(z-^{>g9hdenmcad#uN#rd*N zr6AIWeKL@FBe6#|>(&-xUTu=)gJ{lt+Hp@~JtZapisAFiLw3 zT!_PDN!Si73EzsRZQNIR+Y7ybj$T+mRPI4~dd+C^&#bq{RMVRYK2fFdD1y`O=4$q( z;sLSelF*C6Ovk9#S19gK?yfHL6?|tZbDYbkM=>+h=_oQ-ymxd68+vLue&6{)zDnMv zr|$g+L+kq@K9ab@VFfFnv`O>t=xN&*UKS>w*un4!5&!0?zZ+pEoP`B?Ts{6O+_0;A z;`N|;t#UwPKa?j~pd-?Ged6Gt>yC%TIwCE}>0oU0 z$UJO&Yz-=uWpK>syM=%u|!pDvUb&ft5a9*Zw@G$2uK2&~K+xbsE5_+M433PBIe z2YO9D#usIaaGzxvo%CDAHW;lSD24~wn6aZpR%TP1KYbeHbA0C_!o=^`*6Yysu0-*bJf3vF?Im zE#Nz0!cnu1uE_WFJ8-W>f*#8qOByPb7uKXNQimlgC6)Kc^-(BM#L1j{?2>63W9l`i zO!e43i|p+cf32;A>zqXFU=g)0U z?85A;Jv1NIvRq9&oY=k170NF>3=5l8VLPl%-ShlTbN0DysMC2WJ(RV6U$iGvjV11S zj*GX$Rcx;TM8LIr+$we!Hq)UfxYf$DrbuN+mceOh>G6V}RBc?$lkh_OD+jSf{}mp* zF6qR-=RQDPEC4N#9>co!<8CrV!^B9*x5%57ulvC&3cm;i8E-tA8;MqTWYqMyue4+S zRn9hwJePUU)Qkd~bDhgw!pBTFg21?|!napl;pwsuE#0ey>FrMtq*IhvCV*4WUAm7$ z`mm?D8nif62+1IEeEz0E!S?P49|cm`qPmkh*7ib{-`IsPmSU`(rrZ~Jw1!D!<>8k3 z90W^4;}Z2(Tcc>#lTAO58n5~@SG`&nB%3Bm0HI0`wm7~y-o>* z3nhj$*v#JKkKaD>r55|uFf%c(u_&D8iApuQ@Xo5hTdy@KHp=1?>_c_ z@c0vQ4C`LA?zOIU#d)5Wm8v_%>wGZ7HeMlEw%FVycjy~f(rhpihBN@Qqa5tQew!ZP za>*KBb+P~m-^NSz3`U$Sxr?M_WjP~=RB>K1lxj?uv*z%h3A+E8APMf=QD(Zv+O8WC{@=%nJ)MLtW~&jQ>cH{Ixdfy0l!$aH1Fq7X;00F%`I zB8xVgvF2el|Ei$-kDwSYolNYVCD4iy$R>QRM(5RlB=j*8Uj1W{#o6l)E>6Mw?hN++ z0o~!x1|)uG&{w3cI62Gvyaca>ey2zevK665O6k_tA%xr>w~}@ zUyGOZ`8PGu8;5Eq2@Qd;xhW1D_ZJZ-Dr%b104^z7^t1G+Obi((aP$N7CxQfGQ;0hW z{zl_k@p@dgAB*OtnyXa!*(kmxhT`pi82-r+nAI?Kf(-o(K}v{PmB!rAis9L!7n|JX 
z$O(NR0`(|dSVY+g6r;g;UFl)4SpSkR)!9CLNqoZ7L2S7MWP{wHwnm!tXCB?O#8HG7 zpTJp5}})XJKfpoO>i(Tb+ES zAKrk*cI)WU-P6MjY3YUuyq3-bm-n|ig@IxS7_dY^X|4lpO@swdk6+(gCJ~1U+it~( zH1wHMizg*>nV`Ak`ZQGC{Y9_f1) zm^7RSj)woW-Z?;c<@3fyiM|*0c>dYjPDMhR$_@aMkys?8un`V;O^=ulS6iooBTO;9 z0U++iA-xAlvlT@r$csFfOc+w>Z8c^G!dJT+*~wG7@7Z0=U${1i^gz94VBB z6)7WBiL4TS2*O570lAh=lW(^;f>cnoeO?1aK&}@^!@<0FaNHEVqWH91+%jY*;RMhv zUp{TVj}-F2!a6WdpHFzMKYszr?{``E+TOG(nxgOZbmyNi0C`m8#yXEeLtbHZQZ-9y z)Z7!u1LOqFin~~vbdu{q|0Io1)SuyV{#$h#DXfU7D$xRAgxrJR;-W?%iNI>&fbwoK ztv>^E&x*GOvl@+p_!G(AuiuAV{h~Bk0Beb??W4W7HZ%ZLTz+Mc;Y(L6SUQ#}f5C?i zdkv-1^t19>dm#2%UzxXRrxzO$*(XdY<7u#uw!sQnPhOB{;Sj%;C0Uh02{22j;4(! zui?kWk=#RvN%ZV>8UZM$Qxc&83?Hx;ckY6sl)%NE<-qAn^GfI|aeau$%s5TK^lc=% z(iG%5v&pp{ZM1LY`LDeLSJ)r4=5q)+@b{>6&>-_~0^rF;&YdHD0!W7*&VvLXbPNt* zC}1YLk8U2Oq{aBWBlEiG_Ktv>+w2%WOI z*AxROy1?+{lmZz@0ahoAC`~93=-^SmDWL%hYQ>9(_r~}6Gko&BfALApmGUDYmv)Nq z7fHqNSwNE&XTMZdB8j~H72^o&kHXP0K4EY-8uA-;0dPMuxG+uvE1FgQbU%_Mosmh% zPXIWWfseiK*42Q`1U#coW^zqOzTZcnI*v&5&2T6)7>g7YgEXi_-SZtBi|v23WnBoL z4wrpWxgT=*5sa11woqpMdnKs8fX_81Y7H(M!eg}g&va7(GGzV5 zxB0k7qh7Krf@{wCG9YYk#vJ9EiDdfH0re&gWE&@gtGy_Ri3F>4=Kv8!)i8Q>g9P2y z8iFi2>{6FUZ9k-l0CEdnM$3}&SX8{I=-t#tG)BpW6d#-AK%x;$5P1$sAfEgS6Cv`~ zMDN}!$og0NRAJ%Z*3@vCG~z-m{Qfh82-n3lOZs!-W( z#?J0oDef3YiqW8e>E*?Kp{*gor{Q)T1&T>PjfTFhO=->)bFZLh>xA(<7Gjh}3aImJ zQr)O8+0z@;0S|j3n0*CJp8d2V?obwKCSviBSgIpz1{bEgw9(P+cLyrzPy5RB#9Nnk-G+I6gJ9R&(G+GN@f2?$UcD?=w94>=7+b7Snr2OoGwk!^i zL}6^0nS50e2hf$Ad+-r~^&vT*7{K~4mQAejc$AP#3|!%HB-*2@SOuN=(H{{-Qo^%8 zMF{k@0pTcXJ1!Q}w-UfB9bde1j3J|yK^phiTCy`-Dq!51iJuM+GIIWG&In`wtOwUDVSQH51A77l~zm7{W=^ zb21kW#z0>Z=@1!y=9D+=PoZOk{&WP=#4AwwOf)7Cs8?Zy6Cou=02**L}ymicFK9K}? zFCeD?pNoF>Jt#63ye8w0Oo$Fo$bc{qNy%s}@^f}O*}}N#s-Kn_`WsJ$RepoM!B1NVMLM}kxeb8^y#bT0F0gvOvvNXe5GwSTelL6R@UVo;EbC{ges39oxEM>qJ7|^N0HP6i z#2gH+jtUs!DoTUyDFP2ii#KPIOJEOf@T`^XEVH4uNDD&eT!I#DWqk zjW1F%;M^wP%JAJ9d2mwLC*YFtF2Fw-+?Ebbn|ONffl_;LwU5{fcYK98kbqm3_VW7! 
zOZ^B3HK>+k5UFO62{jrYn}^QUO@5eD*@_fhfr8dn{8rfsWm`aphihi{h$}|~Xh4Ir zIau6i0}6KitR9alqb4Vw#O)vD3t*^22iLLPhMCEw+VQuoa z!gmA0CE;VHK}d@Ai@p#^ko`2?jp6WEZsyJFS)wy0huwTzIX}yRRr^{G(eM`j8qFf^ zazFkA@&tSsdj-7_1+wzE5Un28xKJ_SUvxlPguefqW~Rr+aS)OO>fZsG<*%Ju+6xi7 z2cx#h4Kv|%eGWt$J%{@;CV>+41^dezvZdvfU*#L<054#RbP16|(n2=4QwD@-AN&kq z*bA#n&ivv;Gr@3x10u?S7D%E-oSk|%H2D+!1c1LM%~Fwe5$&~#Xw>=3z#=wY|97GF zy3=f?zJhn3kZ42z3_@B}?$|&sWEN2r`UBWh`m?J$P8YBJlO9mefJ|uj z_M*ROrzPR8Ntu%774tI(6woJls(YC32p>Y+hw+I_*r67HE~tt5IKHIfRJkxaUD&g? z5V`LZ0BkTgcGmP2Ni@nUeX&J-OPEFIHmTO(erkv3Tl|BmTk{}^F@k*OPdwWRy|Kf{ zpBDDy?EO{Hz6YALS;nMj^bCgIvu!HVs;e^-MPA%UNEbI0Xaunc-#5Ze8PH8ETAu5V zo&wrT-OXyRWRU~nNzy5nnyY@1XFnvvWqqDR2d@-#nwa<{ZD-I~8V+PoAI z99xfq14)mIPt{!8W6JRrK-C3`h-P*`cT*sOztRZ`6lOm%zv&KMNg?X8R`9wTv@~y% zzZ9qAA}d!8##CkBm^m~M&VDg`&h-$gk1TatC4CTW0t}r}3X>}IU2y=~F|+|xT8a78 z5=i*Tql)!KjLSeFg@ZSGet*QaU4hh9RH|OPunW1?R@`s3&@DAu%eQb6Z~{r`Xo4rz z(o4CN>c40nJ^*fDKS%*z>a!z7B~W63E9AY5#o)4{`<^3ROW506N*v&<9?WqeWe@Wk zOSPxN`j&QI5uGs$AXB25Bp-*y#YH)#wz^FGvxIb;KbM2MOF_wvDGW;Zv z&*iuaG|e%&aQ*r=5r3}GbR|DkQ#E#D@2q39J8-4UHKV=+mE;(IGPr;dDtqNodg)Ac zy5MP#)VAC)m->o^_w*+GRjx|kGE$-5-BByU`DDGw2c#954p%!nJG@Jlfke{Y5{IOe zWCHEzI)G*XwO!M!YvP_IhoWw2HQ5?uj;fMtwXX{9)@!P|8lEAT0|z0hhcBPg#nYUQ z9LprsBNM~V`cx4L+&}1){L1R024W(u`B^BVKs>73fm)ZD~+14QG&a2Mgn4Z%t3KGt}mfkx%hU9*Pw{X~s!4V?J{RY%A12CTzmxC4v z_=Qbgd>EH!SG<6q-yZP=S1lA;_58yFX~kFo$Js{0anzWL7mmadwVN_~b9r2bs_#`E zcNCpEo=zy($g-Ib10mvAfhdCkA2at;?Mk32%wXdz(?MAG$!yq^O6 zk)fA^{i4dTGK}@FTqfx8;WJfVu|PYk40{5ySrZ2(&GozGHDiLsgdo|>wbA|tJ88il zAka&dk?OYgNVtrmtQZwqjBnwbe9%V5|o?uum_dqAO|9N z-=)(k%@q{J8GsNY*v5K#yekW1BGPkdz{ zSg!A=DZ`@J%&*V1r{h#~d2O(A;_z|^Z;%AllC(lt^7$#{byU>E(sr}K4rcg!ocH1Y zweLVf`m-w~>87Dh5H`5^{r+v=a?Xm^I`PZcSYx8&V!9R4ANn0tX@nU0Z6=0zvgHQL zWV&RS*aIEW?k4rX0_YX3u>wS;Riz*Cy7SG%G*%Fi>s7GnT-1kWCJuy~&SuUC3YSQg zn%=~p?p)M4IX^q}+4b7J_Pg0kJiDGXuwFNuKQhCuX;LDx$alr*T67Lf^9V$h&6~z- zixRdOvb0Fs_j#sf0Q&E2tX%8IqzU*4=wIa66!g7WEZIOKIaW*6SK=FF)ndc42rYXj zs)KqoQ);SMYlP!IYZV`esyX4;*i6icu@y4;W2WSIDeOKX^cIX5I`ew$9FwRXZN3-r 
z!%-@kz2G>|5)+KJeW^rrWu>J!a-Z}(O+fR!!IA6o%vx9xqLGi{U%-!@BxXFi;;i#YshWE3ik z?2z}}*s$^}hp`&3dGL2`s`Be9kQ1{E?B)VDzuxZFA}(HY8wO5eN_5X030a7b>$sCy#qRT*=6ytWW#C zkqBpTUAf3qEhuaYf=3 ze28GIm#<^mHty`N$R()rxyMAY+FzLn*HIC-<3qptix|4Rd)PlNf>(e2f*cD1?WHvy z?&AlYyZR;MdLHMv6EnFWFhss7rnNQr9=}3^_Tr2==8HTl&bt7sD((+Ygb=f*`mNlq z&&!05QrTe|+|FhptxZRlX8_~87W?hxyYFq6nwN0w{b0oFu;b100|1;TR4QOiIPSPz ze_gT|o3?}nAtc_*ebXu^iglxXYL8fH!>0iq#F72g(d}6f!yiy*1hlxe#{=?DO=CMZ zP=T{?>?o|$lB|2)lq4h+o4Vowi6hpwXi-|efG3Pr23^0Do71YNyY)$V?o34_OI-22 z`WME8Zlw!!L9~ycv3+)m$Iu4TUMOJ&-3V?kK~?XBu-RKgB%B`_-#YLCbMc(rs>b9!4!yI2mf3`~~Ur><4GX-+X$x_OW66(C~9or7X{=sFcUDrJg!{ zjIRRIyd8C_r8$i1(;XjSx_iSm^S8N$BhAplaX(oV_mM4g*%__D;TJbR~(=-@8m4%yg$SNC!nHk0^s*a}S@*|^z} zS%hxRin{#PH7QL!Ef*?`r*|>PW#l{&qA`-qbon}UX6v0AqC>d#>`s;Uz)$aY_~T{J zt)(aI9E@7}#=R8czTYQMd8yVh@9W)a_`Xhj%7&VM%q<7UGMwRl-J*2IBHtPf1G(eQ zZi|R;C~wdd+xjWFUn*0hfxcW!>08_qIao@yFKw)XRM102O`!S5LnNyYX1{rXx&1mD zs0cy*>GXN59mtVA?dv>8?NwY%a!uuX9MSBW%?7QxD_Qzp9A^lBZ1@$*<$4HzGF%Qk z{!lfRMaXe&Nja{G(8?4XyhQpk*?BgDoC{6n+11eFm^vZa?UiNl{x%?~`}lH#vGUWb z3ET~3pnR_H$Q-$XuDzM}T;-H6&ym0I$Q1 zcOiS6)c$51bf@cRQHfNx(>cY}Ik{Y4yV7%hYp!(QVZ5^V~X% zn4+p3QM;JrY#S&8p)w-z<1cgxLi1fp4$GmkyhwD9!@DuZ1#~*bUo3w}r49vdQsszJXqC>`x5xzCdj1h!|~{+)->t zgt+j8FPcqI%|jS)m@47DymY>usf`_a4Xi7O&)bq0@*K-p5N_fsR$r1g8(ZC1od=zb zRX^zQek9Lb zLruX&!EGf*Y#tB)j6afN9`BQ}n@r?<{VrHnY)WPbtZ@X;XN=QrE}Iin9HWn#;G~u14(&~EV~A+1qs~rGk@8ZN_;_g? 
zu=lp1Y!E`V(Q4cCBS#o+ml-P6L?EIkoVgRSL)JU-7_c`w=(K7~W<>WcFV*)=biP4j6`gp_r`C#Q3@ zm%CXCS!_!wCM8d_=6KUy^YWoQsrO3yV2{X*DSl$=!VgiylitMfweXqz*z2Oo53Mh) zFQ>r%Eelb@0h<)^K`oSZuZ30vv0!TE%1%zfPbutjgFA`a7deoJF3(h4WWw}peRVBB z@{Q*~k?)jY)`}%7Yzw**Ub`+dxgg#_ftT!hzuqfZBGu{FHs+9DGcl{;46I3gq)7OD#=VFZx|QUz1yU<$f*w&?wA;HeNxyjV*P0D zTyFIA-YnNdc0IgM(Kr-{!VU_4{z0=xP>ta+Q6JeDEosM(w)7D6UR2{{CF>H}R|un{ zhN)$zr7b*J@-Y{dA(Z)oI(RZWJGrGG`z4k~W^wRi0QFXd^65kQ&kX%su)g+bX|F5V zA+BBB*|i@c&xi(-6z)Ozku5$y_Jp>52&zcliBmdgDBpkKUeoFv=>3@G!(%k>tNjWt zg3;fOAJyilJt4OA_%qLIkY3gZ|$Wm=rr?_bL;Ul)c98=bN#e9i-Qw0MMnT27*%^Yxy?v`KsOz|K5gxwaa* zyn*Pv*@k8>TQNNA=`?GB!yHo)Tp?MYiSEL)liWWZpQ#!S>-}Os${VH=&m6&Lw}I71 zd0JZ}&p$L`iN0k{j^;<2Jtz>$Sh=U$>1Os1y^nUpiJUoX zL(B5-fQ5V_>KyV4Ewg}|{XjolUp|}godfxL)RfOtTSlAEQRk@?xpXQByTkj7gi#%) zDyoX|`W5O{7bbyoDN7%nQ99A|Hi(5&QB=!UFC0 z_%U!hamI+lFh(!amyymo7L2E^OZQ9buRzUVq?=z~UpQk7)2nc9lNeXuBJUPnoV>DE zazjjJcV~ZolunG7^G=i^fcUzdY`+88u9s=R(VKHy-1#;cskfUw?je0DGQfu??7jLW zI%|O+O&yq}Yz&=3?eJNTEV{DeiO`Z?79y0DG}rEI=Vk3iHK6BtDDz%ol|t2%*P(IA zdc1A{f&@#UaRddlT-dPss?YOwv%cnKYg`ct{N-(s=4{2_8`3c0;M$)?Igbju2(`iZ z@Q#79M0Bhc6_K=y`I5xCu{^Q|dlA%?!BRHUGdNa!fJX^a|8%veed$0TPE$~OboYLT z)T7cbgiuos`?aNh^8s2yYKk6Qgfj(y^}Mm2lkH9+Gzr**p@;631x0Y1zEmuWD-x-F z=*ZU2J&WgNz{Io?ON+=+k58&y2AR$@U03Z52JZ^IK5gzkWc#^+mie{$$%v|zY*WM4 zQOMrS!q%<3u`-yV-TFVP) zBSamBjpjPU>*oDhTznQnaqdCl2$T`IuP5^GkKA}&0*>#>tC_pDT`a_-H@HkT+Vc?k zfxrH!G`F*DFrS4u93IeyA#Ifv^)ZU1$?xe1Q4`d`34>E>t*L@+&k33B+xxry*Hu*& z??nI3xAm&yLQjn4*;62M!m=}W+|c0n)LPi|VsPa(+yH%8lE#LvKEoxQm)I#z2DPFk zEs;7*M353pnLSz-m%8qvctI(3@Mo2wPcNUmA_P`VDKxQovL|8|S*Q zHH=a4_aW#w?;>Z05OGLKeFqviL+`aUz0kWnIug=KP17<)eVz>;`5^Ib6AGk^lG?ObVNSFKn$TzJwHV}y&GcWzT}P=k(|UC zeGt6vHrP<>;5RnCxOy-y^pmd5u%;U>RzdLO~> z6is^Y{aD`U z*;d+ArYJ_NInSpkrHer<$PdbF7bN6XgZT^>R=37;{VKk3UV5f5TsVg2JI;#Rwp z15c(C1k!-aE_7sP7n|{}WK&k)iz?Qbz!CX^6MUL5ER2eHINWr3|0niDLCr_|Qyw=K3xkUPZ;UBBG%WtA||P|^22 z#zC`~@W}AX;VY?y940lq-im!86bLuIH0sH$@9OtG>g)Nym(S0|c^6HZ8)x*2g%C^!rBZp>%iHYlPdb1+8ObBUEAU(L!ttU6fg<;Kot+)x*ly?`( 
z3j5k|h2sf-vHw`GCc~T{TQ?Jel`eC@4L6C^q+9lybE<9#T3b z;?3es>#LjUstBS&QDX*3?k8UbimSsPMfq(4mh1-N^HJ2oL_6hsSMoJsqaYn52jeR= zr0_jvCk{%BqT?Wrx5gM7S-hGr@|&XCnetR^G>gB2ccF)a=$kZe-U_mc8 zZ&7k*qO3NU7(}clu(0@E%!Y#%!g-?x3yCUg9S64}%5ZVdooV04m(xR(L=8m_)b{)k zKhE^M!&AI#Ib#T{+5yqWSvUXo>A9JPwvVrapMxS!Q`zT<0Rf!R1Aej*%C*#y%@le6 zTd)r3AV;VL&$8XmtFOfm3vYjHencWA>C0E>;|jci{t>|s;?l2ZL}|ETl}05GFPku-*b5zvevb?4kYC*g9 zn5s@ZyD4IS^$pp4tgo*C8dwZx*x7#jwn&Y)MT)1_%v!^!!OyCf3bvb+_pDA&1KLlH zgMv?;!BrS|5Hw$_vJCtemRZzW-7jdi5yyHC7u|jr_^hScH;e^R`yji}tl%)3$TKza z9IS&RdcO8EzCsCHOi>4cra-wBzKx0>HGPe}jRR>3xoeAR8#*eUk5C{AMACjk*Gu2R znw9ix9{%LNJ}u89Y9HACaijALM@q~#jP6$@#rFN=sN-NW-Sg%XL><`AD`s$ zja&7gP%UOA|FY{TwzY=-&oXN%&%=X+sLEiZTZ(8bN1kv6d~KX;|GLwS!)DI*v|N$n zgN5y!Pgza{A!*9lHsjP0X_|syF{1O}AD%W{brNDOHlV)zJ$Uv~g6&?x`=RE+op6hm z{uF*Eu?Z->!ste*RkBN~g|5MEx(qt%`>yu_kNRwbs!#4Y#AyIOBR_FyM+`mF=vqTx2V(8P6oB(!_LYn%Gxt$VMy%|;{Cpr#O0KAJ+8)t@i^b5kQ z$dLD3#@RD=AUuOpv&FBhQgDiRG!4k(D$?q@= zvx&2A|E;|YjOeZ6P_?aZ1^4wPnpACpgbE@u69z>K$Iz#cQmYp%l-RFGV8^y_7-Z$p z*5*t6^7Qv{Vtjq^yz%bZ*9;}f9qy}g+X)V^lTjd0G#**-g;WN2)MQ{{ouMX&Ec75qgFAqg$zj5Eca4(>T8Caem2w+(@={ z=(m1|#aA)U9|vkBXdkb?84y^OxX+BGPcGV`3r^l?irrV+P~Y<4sE8sLQkK zepz;|-6#E&KwNTOq=?@ANTLH|9h4GUG0B2$^Sl6{~pQzJ|q8Hl7H>? 
z-!t;>b@T7#{(t*ZDF6B-e~-rBW%<_w{Ohm&$36ddll;3${sw~oMmK*A#oy@c-&pcL mhT`8%0@4-#AAfuA2IbkU7S~TVG_tqAKN$(d7lmTRZ~qU)h7JY* literal 0 HcmV?d00001 diff --git a/server/jupyter_server/static/favicons/favicon-busy-2.ico b/server/jupyter_server/static/favicons/favicon-busy-2.ico new file mode 100644 index 0000000000000000000000000000000000000000..00ac191e771f9df4dc853aae6bcb85f89f94f794 GIT binary patch literal 61209 zcmeFa^^VWGy;+miU_ELG*T<5h!PSa z&7w%h(hcA9x)wjreZT*J=coGzU$Sz=oO8|`Gjq(GYnZOKDg_x684(c?g}R#ZH6kLU zKlF$6DEOCG<;$nQU&LNZ!d5S;-(lM)e!+7lt6OTZr{@P~-# zNER{C5%4!L^lw>+zrRKLXC3+feugeAE_!G{M1&?%SH7U{Njx`)bg1QnB z`IAfaQ7{oP0!d2Ejt>0$1G_AycZ!pS`hWjD5POz5n2iPV_qx9~Q0E6W>bwl2XZYV6 z&||7>|9CFpdgU`&cv|~g!I%Hu{`bYYo1Mr0B?olD6Egl?*Z6*7cu}{I8Gv=aT;+$zS0854Zm(O#T;c z{tNE^2F(9*lK)M~{>uRWOI-i@$bb3vfBE)*`8I@*|D{s@|E5ykS?x_5ISIDM$&ftd zKpt^SuJk~*@z?X{V(uL;?{$j+TZgIXFZ&KIs9`;SRvD(o`bVmb^_Nr|GajkN>fYaJp*_|VzZ0YUmQRAP4t5^m;LL;)VFUQ1E*gU=>@B`^Q^)9C9-nFA zUgfQ;9w(>nypfG-uc#0^c}A{K|M>Pm$9hPSQvF6>hl^7`8zpP|YgL9rt)M?3VQ!}W z6P=KKswRw|w-~EDFL-!PdmS0OY`YQarXqJqQ&Us#l42~qAM5^f20mp`fqsANm`-S> zl3!l_{N1~MAC?0+k*6B{F$C5o?+rFQ_{KZiE%fG}m7_7kjn{uhwEZ4ejbWu2-Q23b zeL1Vad%UICx{~v|yfljj0lRx{pvQ9CvD!L>kF6rHSDI)G>n-_f@BdJf4tU|S&f@m| zkFKQGcO~D0e0)pM!f^|KM;C3A{2PS&3H=oaAz*7U2Y<{`M>GvPjYSEDlpd%wDqAq? 
zQ7V>LTWo%8lDlY6r)z)Cp8PP6V5b82d3|{(3DZ*;W#sz-<`HRj`jU&q+%jW(aPy{8##RXGP8EB%kH07L>^Ru;>~s4**P&-t%S%|@ z44>skoju3&6!!oD)d7ZX{!022 z!YzXMSKm|`kC~TqN3NFfIJmyXcAT1(HQ4p)U&iRo-;q1izU2te{tB-8JP4t%6F)qIEE=_7u2F> z|MVEoB-4j$PO4Q)%kPSd8@P5m0uD-D@x@*Nuh&YI2=8^hL(Ju`?PNtoc$g&v=33_@ zSCQhSPFsdd4TFZc-4FP8KcAgA`rcRg&Bt$whbDF(QGZi^(%OY^7w!h8y&@66ZzzIt zNL)Ral8dEJ8RILG6VzHwIc=YjK*>i?rZ%p#}N7e8B@7lkdk`YCfj zP4>`^T}{zrlT|^Phmu}K9B8hu77AV&H+}NM^ue{=l!(;ZgxmnrkEHXriGP zTzS&*;L`@t8D%5k17r!|N^EVsZ6XTm!xsb!%gDLy6siA#~>GqY3U?fygoLaIxXiQSljc@VRCCMh%x55z_dI;y3-g z1K(d02m|-^m#!!Ppw?dpM_r}!(MjL1Oy>Qketxn6i|C15f8sd!Wj3{yp~;vbxN z?V{?iiF=b66sEW${Pbo?7-1WdIkWKMBu$!!Msf-hWxEz)Xl|#SBEKFAjI*xDh`EF* z_Ais}szGdVN?YCP7<_$fBHDKSWRZo>Qe)Xe5u>ss;ThCH4Kuxv!y&Y%a%JJWnLP}7 zV2iA#uR5RBy7hLfeW*gGDdR-dLf4q?cE{iC=I6`8>uEO=94$5(+x4W~b%jZ1>5Rs} zK~n@n&^LyZxoZBcE^YYT-;aa9WRq9J3meCmd~84~ue-?bjI}@rTEKA8%gerGvf`G6 zksxddM3@kA{#}V@LnH*lP$Yqr{#L0}G&8FE^$im3S(lD*C0?Hl`Y%TbOREB~@{0w% zNw5oUU8dhFi$LK5x)$UKZ0=Ubu8XSbk zkc#5p?TPFPgS`lSmc-dehK7N{ovw7rSm8?6o4WJ0lk^cyu9Zce-IlckoJNsv8pOIT z2W!L9c@LO?z~bZUqor_j$x_v``Fcc5+aR5u>8(RlpWp>HASIf2^xmjIk<(0Z91g3u}ep{{S$j|pbr`R(-yG8F3Vjjq#*yLUci z23-@;houk`04h-*BOxHqCCj#78Ll6cJ(xa(-@ArhvW;sbCkUGWguOP+KtL$cv5@N- z<4^A>>F%nZU-<>aG)w{so&UA=`abA=>@t|HT1D2Sbkl)c64NvLyP1X}&nnta!;eC< zn*+6ax(O0sz%>q=CUTmxp#OHgusyly{e4Smm$%|h$WFpKmBBJ-)XT&O%kWs&&vg)G zJ$eCWw6r)9MazA-SR{0nS_w7bDs`M;B300b1<8}hXjJXgGmL8sBHnkFlCU)oz}x%8 zg%n|7@NcmWqGtzGZIeatiy00*9quO<3HIj;40q{|zA}6hjvHgml|HuosgAFplG6#Z zX1gAIKEkI2FZe{6sxh#@SP(BYtz*SYIk{S+j(FlkhDj* zm;>os#>jbvNgejt*R=c!Ewk(8Dm(hQHj6x$(#O`4C?6f>9Go<|n_>JZ7YXj$Oal3< z>$YX{Ql&plo)HejmWzhdiWd$eG<7hrQ964%B)o#nV6=yiCjZ{DGlv)>rh8kikwQ>) z;$Gh0Ak7S#lH+0msmibz6I{C6IessS{1kIt9e*LD3RubOa&}0B;S~j<$4Go%Fe(Nk z2y!)0Pt7_~M99k}9*~y5%2olM!073CNHdlO_{`l(fq(c~KA&RNXJ=yj|Yu zPKdJK<-xv8^w*q&7nzrjw!b)8ubu68Cp>ktWh7RC#ktlnQ=|4YDIA2!L_nm$Bk#a3 zqUJ|wt(FH%6^ae0y5tq0btFHmu*`mT@VRDth+#~it=p~Z%t-jScB0B}-gC-%T8;pFF*(=3n|4Apd+DusC*8G~>FeF2~kS*mm!;c_0#mD8UuO()$W$9R*Q zdM%)^N=L^T(9$E+#RtE04`>k9&k7 
z2zDwGI*@Kyc1aL&^#r7(Z9`C!|EWr9y9De?(pu;@hWk=8_X;l^Mo2V>kVhYDUw}U{ z34cMek6noQaV*|opg2AHCPBSPyx)}=JVyD#({{#f7wn7EvLf|9w5 zTkP0ap0^+b8PcblB`a~Eq=t^`6!OOlxV0jxHCwb^wDsOLjdU$>!dDJ3DL@;$PM%~R8pO7*F3Ifb3 z`Ez9KAu6V;mt&dBwfX(h_ujb7bvqRNN_JNe|2kTDdkMjR2vug&U^-eiQgFUnQ|lLn zv253=!a6JIyCm=i6JdM-&W+0{8LPs21-L%b_`KM$EaE6;2sKpkK*;^%dVEc-9bw=> z8rZ#?X`>FT+gLqJ^j!CL`~BN8PU$u|B2fKk+R&Jn*|US$gx1L30PJC_Eh>?KjJFX* zHSW?vtTQh!uva3aide}N=6QEooE}O~Ar#Z&HLC+5sj~PHb^YUtQblJgb*?Ty^_B~LKe+m{NA6JOe<#n>&b7W!A^4nK z_asKjnyY8EzwxE!C3tlw0AeW92y4Lxl8Dtcbntv5qZj^EqJYA$zreiGHpZj!Fhp#D z%(_KKq$~)II$C52&Py|k=LT#+glHr+O;QMU#(3qE#fwrvUW(ZSPWP+h9VslWCE|CT z({qXZ(qQxnlJ^59P3g1pVTF&YyKSKNJ_dOsUwdtMIP_pLA=lY~rSI`q<{UY6^)ZGP z@^tMdXZY`|l&8V(Rp!B&SnE&I9fu5s&djnC?ZZEO<~E0}6=uCR(X}Haj59Rr5OkQp z;_R*oP{S{|kmVks1(4_X^T!-_F>l)0NGGH3^A-|(gZ;ZN1f7#;azX$;izy9}YkVWZ z^x^_L8KyTe(REQJ+y8~yD|YDjS>J(81Z!u0hT%mE;bB|(Il=kKJL<=mai1oS2SqTt zH`7EDVmX=vLZ1}LC^!-7L$;ruB{LLedn_!pL`{0op2t>4fCdQ=({uB8MF`ny#)-b# zyYgjLm7k4BKk^ojy{#|yzN_PSGb|KV0-XxDSS-Iw7hc(QT-TgIa+mN~2E;p%Rm|}9 z_7GA7GIoHIf!y|-kgh%oLswBtixl(L zki2pij0{H4Av}sUf{Oaw2no@bTmT@7D=)4?022E+uv0ON+c>#7V_{8@SL`P(A%Zu7 ze94}R5KBlUjKF$7;6w=it$}Fp9l^dcv9P07yMhC*`+QjHdry~$ zIrJR67lARuyuWHBpw+w-(0%{oPsZAB`=g>W<0Og4?}yBauxS6yqwJu>HT$I*L8$be}JSIU9|BPG=q41iQtW&5y5p= zEPNCJPX!WZoZufGxVJNKmKjNEh2T<{9|JIO)Hl7yLfhNi$Oa89@F+tUhMHu)yO>Zd8bU@MPMTxq_uZHshwPOY9fR!(jIlNJndp_0&y8le%z+A zB>CHTRY)fC75S8cG?8fs3akC& z7LbGhkqW|A&$AvBgpq-0Qja1ws`>r`L6FS7ruF<#vJa&P_mA}xwvaj*AcU;}(`k6= z*db&)%j|JO3o+@+M=X~E(S5a@d2GW3Ea}kzIy8uDCuAKN^i{S^%64teH{MaRi%DVb zgZhNRcD;Sxl2GdsBf3Ele|6WBaQOtE>(avo*FV)qR^-C6QPiY4h}VZ^lH35I@-9U> zVW+gnJek+HgBg(%JjJ)J>Ug%u?5)U#K7toRgrNq#asscY%Z2&IxDhldvKDbCN;*9R z`%b;GZVZ9XaM^XYg743%h>Zm76W2Z2NWo2{B;Zo?XcOQfUdZo`96ms zcOEI3@q^Jo^z138dFAvltT@8s?dc9)&D=u^ZG}`-crTh@fqKZ+d6?ZfPiG_vWe%Qs zA-3*k;9ZJM1eR~S#UKp9Mb8D?SGS!e9&(=LTaP*9nA#Tm@ncU7Aa>N#6 z2Y*HeFFeThr&&N;gwq8X$d*sE-z3ltQnHh1dLInrVV2VhP>n1->3U4+s$%xCgijx@UmooLnHqI2O$8b;6Vubog;Q}qx} 
zu-ZQ$6h1ZSCm=dApt!|+0I<9bhVEUpuJj%Tg!%!K=wdkR6aOq6v+NPTu~9i;HlUJ{ zP{pnFKd$NCRqA8*g_$x;BPdDfM68*#Afs-hVk5galpU@f6!P)i+%4Lki@72=sTN{8*<033a)PgKdIeXnl#W|uj{ zt$Zntv1(@bgKN8;?J^}1{C2bq2&eaCD+qXJOY+%guyIW=a=nv-bX+~FJO>mfcR6*g zbP-CnSUGU@P>%8==w9|1L}eihl>&>RIUi-G8R5=j@a8NSg|U2sig2YF2C zxLFbuugExDJ=UwHK<0I^>^c|frBJcAx{K$2SA7ENBO#Nq0q~#l7*}Rzh3~upu2Sfs$SrQx%2Gju@ebl&AD23rPYwadB1ySM7Ak z6NGBkG{F!gfxgP`5(@vvXdKitq>u3Ce5_0gLLbuET%5^UE^@mQjuz}rz!u6@M7<%| zlQ~^us{LNsd8%h^{>?BV*dCL@#*38)pU5D;Hx{lo6$ z%<@x157;r>Nqos(rTIL8Fd174x+LlB9t9iRqAU;5k%%K^PGj?ik)KYcCKLSPGLV@5 zY(Gr7-(mp4`4VVi6((PzJB@isQfP6O=Ck);oFuh8glR89vK8;Va2d9t6tvH}y62>9 zHnICr#98uH_p*?|pG-d(-J(CKKzb?#{@Ln$8?HgQ+3^q<#yxLGa?*2R%18j)=4vGI zJwx*06to}g>Od9T-6D37D)>?E73D#Ly-f4jQq*b}vQ`gvh9{OaIDXvXhAfK~=t7%l zDG@UCt`k-MpInW;k3P}Ha`MDhq3soma+>xtbq?8-764Y!i$YE!@2ztyEQs#=`4~Z~H5@TcI#yXWf?qn|x@KSa&ur7Bu(R*NB+=8xp z2CJuLoHZr=F|=xCxM0XQ$ZPa$so@SBHvr3cwHLv#Gj)(2wh?A%6HqZ`(ssML%UNuHuK%v4f)M7S837Om4A64-Q_NfIeNf(*^2-FFM1 z?p-pktF7cr?sYTI)G#Snh!}t$^3`-OFVV{B>LH17NE@B8>3o+;1k=9f83ie4QShZu zdGySSo*oiOUa;7=R4)L-o)At?xVfbh9*Rh=!M-AZ9;v<{U@#d* zD!j`En&t>p^F7Fa2Yc*JLgKex&ywh}U5gv3W?hg*fFz=dqh|PxK2Zr|5k#1iz|C%b zd2temjToIm5YYAZXD#be1_BCPlAV1Cl_Hg+o!3LgyvEf?U{7~ z(SSbnY0q`QU&u|ViAMn3`9SN%KCK0C^&Wd4Y1EO zv!>T4?&n=9B@}gX16mbuoVImWl~$4fXn{xCy#kLHUo(S~`8&L>ZI>@WwMkq1B-BOV zN16J4u#~p#X%O~?Np?Czkiu70cSHV6?P-w3wIfl}WXik^etDnT&dDB?CdG8c=$J9c zQh^GO0`!p}9k7N(%kn>@s3fRV_kz+>&;y8^ToQBN)UBqj1(Fv)Ui4icdw80hFf6-P zmSEbW-@SsZAD)W?<59veWc$yA&bC5o(DeYO^q#f`)_#v3ZkxbQt#7|VQ*n{ z-a>YvNCv z9H8a#KL)C~K$DXKz0$P=Yhn-;^n^9rPdo9Q4KJoiH9Ev&w326NDM<-+@KebJb&QfF z#a?QbwDk;}ZJ0|gHy>^|$P4sdI@$|DR;NXF{6TrhX;CSdp(;TPK42r0+Ut*LQe_>w z>)4Pfe^+dG-_Rpb;j;=t95qxr^6Zv)wGj*XT)Dl`>r7+j$W;Y<~Li$Vt7ZHaqDYO(`@<_zG1pC3W&Fooc4LG=*62Zwxr1~Tq<=Bkx~>|pSG{k}>Jh##avQ;Vbx-YH=dZ_bvA zLZ%{N4xDXiqEHRWCful{eagD6Uc0$!PRHsf)^hC*H^`r#j2%r=7@M8m<5SvmXy|u z9K$~sb;|4zU{1@z+PV06&c5zMaxua^8@kIt5dIXnxz8=+I;Lz04TPb{N$Z|rpJb~x z1#BO%QiYyL`&K=a-^j;0a>{>Md95=1sX~5KC;5=)mfgv@<#O3=W%5`c0|`AIb^ZHC 
zdr7dwYczdjcR9;W&c|Q-`X+o$&tjCmJ|${1|I-P7pSk(rZUvwvfVB5`5OV9xWQ+U0PUPnggwyz@#xfW3$CuYRK(ius z-0N+n({ddnWG`rTQLsZ^EkO?;*u+L6(n>oO84#tY-J}jD3Y~?b4+`ddrnrOsT;Vah z2jnTi=p2LsOTieJA9=dqtORW<`*$&bEFo8GVE$fTu-HZ&?s1fd_A8yhur2hl?Jp1N z^zaP0SXD;*ZP)1Z{u$C{D#G3elQox*%r70~KL%eOzd5^bDQud|LlH4wAbc}Epkd>bJ?utdhi3Rrb;&e1k!?WGFoZ$ld0s{qeM##uyJ4 z>HS_EL@u43eT&MTC+CTmN+%OzPS5Lc7~ZR4X+5JD zkV=1xsXAf8!@pzCXla35}-V&*m-t8bcg%f-k zLA@Mt9bnY3*r{Vo_F46=fYRP@Zlg!Vf?z{@0aBZ~&#|Wi#W@1^MZ`h=kotTf6|xku zZ0vbPieS)>7k4g=13^2cp`z5DdhvZnUgoPY#?{8zjydw#cD#Gz!&f?`PzDqQ^dON> zM+#&K7yh2%$2t7&!P~wIc7A3g8Y^@1n|&ki&9jGEls2Xo@Hv@t0+kU?NNpN?P-DE> zTNvMug6c!!U#5%*ja*N;(L4}`&21P==JR^m?Z4q?^3|y#3abtc&Y*I( z`~gD=^p zszu+XTzMF_;JUVT1B+zhnU{q&O6*G3>Ng#W+)l-A-C7xST&a?;>|9^k=+*NL8&6qo?qpoz zD<32ScnI3>p{MEUY=0AG181P#%`tsKezy$u&&HzqxrT(1Ujco0f4{(y=8wFn4<#!F z+wR5s!DQP8{lN#_GuOrjf8zC5M4iFjN*H7Lw6i~%!5!B|Cx_-z6Bb|OG}#}L{h23x zO&*o=q78bK75+1)YZ`Sjq+=f?+FiVg3@NQ{8%b=tYeVQLM^WPoYEFDDgo)L7M8wg7 zCSc(ipfP|+K~h?~?fBv&KAlu|6Z3h!by!|Vs*jk zaH04LHS|QGy{feB<11JnSrXwBsx`}Nq#N-}GS;6)t#nv-7JB{kdI3FAi?)w0H7Tub zKlBx7T6K-#0Zfx5(@EdX<1%&Y{66j@O4`a@eDM7VeoWLk(_YW{Crg|0I%~H7y|m0) z$vFzakW2O@wuiDnHQL{<2#^H|(v?i};uN3K;*p9=`1WFJkEM)+>)wcdDYj9cYw`xv z$yc%@e)gx&MsBCHHNy-_9)uZA%;~WGf}t8f{#9*1{Gc~y4F+q#;4mdyK%P;y->Zq% zyv%$ib3;yiYJ|D0yX)WY0tjvT^&uL{il7inog5EH^=Ey|aI@$K*-+_C&a#S=^Lhm) zcXk5CKd{{VS?^de{!nCN&(6bhGuMY~z`M;k1g$DL*$?m_?XLY#xCHpeEYx=`3_0KX z#;23(Ql+RuGa^j74^^;^O{EvT-`SbmsZ)-=I~EyHA@Xvk>Fo)-yaqVm{Lt5K5S{`z zzH*xHVgxW)ty-BIxIK$L*7bypD0BRu2&EC)`qkgVBrz9PPJ^KYifWl1k1j&)J;MR) zmFoH=EQ~q^&<#!GiKphpQNKP#B4U*8izTp*v3|SNqa&2!Xv*B+t{qS{RaW@f?E6#5 zXa31GNT^~h-lhFEFME?n#&e7+i8Y+HdHHGP0>y^HogN+Len&03oU2bZnAW;aIvPR* z!!2MJVLYZ)K$&(gv8g;k2cf!JN(vlEk(1YU@%xIg~R*Qq{tQ{3=|6Y2!Tx(utq=I4%$;EsJ< ze#?eA7!k(uw`e5H!CCYVkmBoSTU$We2}X|&Qv>=%mu3kP*8UPv--#U^)ziB>tbGj8 ztWlE;6?U{wv#}anJefX;PvT@W!d^k401uWCtn(aVkpV^PBk6hZilTUYkwM*WHTvFN} z<^mp9OeQHXv?Tjy(^jiizpUsdZUo#kk@o#)s+2!o!*DkvX-{rDocx%>-7}M551_Ux 
z7}Z-Og4%CrVG`p~C7scfy{7{gv!0>tOMZ9F>j((P*u=a&c%z!3C1CZg7Y(|VMP3;8e!oQyBh{&SJ=)gCI?H^cef0ii zvz8xEBvQ>^>d(tO656(=XWr7Ak03uL3z`HfuL_q_p+}JF^8jz6PfCYu(~=j{Ws&XD zwtvQnLs#4K;5pY=lf{6!bJA4h-dz5r=lZ)P)Ojknc?XqCE}<>{uMdlw-HTu*osq#D z4$aIY8Uay?1r4{ORpo@}F{1&Vwwqhsq1R9zz0yi?^bM3&m&M328YE!g8_btQ8Gz~( z4rN#gkoDR#k3cR0YQda_i%I)TuoxSdrm;%4fve6*RuxYDe*C^OX%pM(ykqCdRts*7 z*%p5@++eyZO92)WM}wb!qW_)*w2LtVG>hg>Q2S;+)>N=avQ8j@*%J71zs((g|z03!d+tDirwE9fUQmbm($ETrrAFz7x20kWVek~?K{aJM@i(RAa(WSgMEEl)7^a*BjC zV8(JY;fb*LfMeUl_EJ#MDf_oVT(00i$jzG#gAU)Q`(aq;1HSc)=oj2%)aP~mMxXt> zsM{{{YgyOutipy1`7uhiD~ve0C#Tok$k@|#Xr>%Nd#iv=$-a&rmcddI$k1=~7WS?i zB%e!Vct?^I8i#>nV6)|@f*SlkcE9<0>Z!`!$rIPLzf`0k&F^>WN;_2Des!gn(1?l$ zcB#$z4lcd5dEmZg`reOz=m|9ogG|TQ1^8B5D`FI@hE0`nZAzNy{*KcKIMhOz4$#rNwh%n3L9;$>wGoh+yD?PV9`*z|Z_3INrPd`}`i7G_*LSANTkk?6xARX}pUbre zqtP4A5!2)2`YQL z@2)EG<$3y?!7!$aQ;V2=_j^6;2duUKui_wGqdaOg6v ztu$M#1QmHv^LkBv?e=LR?^t(nk85FsCSuM&fu5*E1lkEROFhX@wWMoXwIIXw zcbe%HAZxXL<3k{zk0y5<5WBmZlZmvU>#N?-1xMt_XvKf^*!^ja+RlGsWt>}W4cPrV zE8L&UN<^TJW-B1I#R*=hX7D%G$46Piz+^oh1Wt=Fw8D5onTvA*n*y05Ku9MUC^mcMbXR867Z^6}Y<8w5eL! 
z+81Wdwb(<18<;%|P7`4(n4pFa)cx~YoRTBZ4w6HY?Q6fG$`yr@f4_p>1mRtc|JOw5 z_LAu7weDRZx*`&n8|P_4YW#*p%_qu|f#%BT@#?xJI?!-3pyCfBy^#BHCW`=!M}WVB zSYPgbyIv9G4aZdWw0M@@BQW*t6##m}0lV7|B`sG$;jWx+{sGuR);%xHmekE4@(IlZ_!)Ei>jd|8{JuN)8%@z>wVJOzX2Z(i)@$3j?y9tYd; zgwMzm7^W!#?|q2= zb>1`r$_+q{0$6~X?lizM=uB|i^ju?Q{ta|=auI7Tdo?B1enM^ZVl%nb`Fw_4$`$uPzRhbPnELHaz;5%&tHr+RuZ~M#!a2QJGU>AM8YOX5f>X$Ty zAw(N2#hxk5JQ$%htA&<81_ThWx)BMMz|ZB{1g1j<9Da42Q*LpQ^EK2|*{VAej@HO#^F5#W9J znz&3hq&8oR7;57@VxqQXOr04ANuP1U)K_CzVSKL%T%Fzp`}r~5q7&NBw^(o(r%Bht z1R%1O2@d}xdu)%&QS&rMXeZn0wH16jbA?v?ve4m3)>S31_9xK5Miwhr&6@6g=x`Ms z+EQ9@6HLOX5gWe_Yl9GO0YbRGR^F<0Aw-A#*tfjQ8=h}7F5|p3cbWjw+VtN!53T1T z7*D6YZSV^^%%7Id^W~sffY&D5XV=(z^q-O}}ja<&0OR72v>% z%HH_7v(PLsMiwAM531K5>~O3tCak`~bf@&lZM5Xyz*7t*cNPYCvIT5%@^>I}# zSz_eD^MRgkLmGX65b`yBU)jytflGug9w3kl8nVK=p^=By>(HdFCrE7E6o&4Gk?yWv zl4~sVQ;DPF7;Z=!Uf?mj{6)Whz;n7WL)ZIj5?mDg0^@UGO3j=geE>zJ9O!0%6Pxz4 zwH#vCEhk5s#%L#^Q#9gce*$4pYyn?PwW_tRX`lA^xgCbIi)}siyyTE3FXks(3PfZ1N-qvaD@@!zqy2fgv+EBy*87L?_(b+uW7sn8 z14ZPYE}(^aRsjf*SW(Rv?ANC?w2tRqRtng*-27d%_*Lgo*-K7LU1)SjjG|qS&Sj$q zsh}CWUTsM|B|+l?+?zTbl>P1k^biYe!ys*2CT=<}S|k43NKODhvQrs+Bs505e&dBS zB4D-L>}Eu6F+JZ+8T;uQP3dC;nyovD@$Pm+U`IL47z2zJ z$B1CmcDL4XIkowTtcbLg9lx+sarBX&O`yQ{*E)VHVGt|KkoPGy^%y4j2+NqE;mNi5 zP*XdIkp4&2j&4UvI<;mD0a@Bh;)*7GZD;H88vp|>qPkLFlTg|`t)cP*$11Kj$*>RA z+6b+dou+>+2r!6U1;o^orDu47nP{CbdRHt5&Osj}oHwVDzJW{69Q0n+a3B-FPmT`I z`!9dQT&yt5PU92n9b_FTxl`sv;c5-;qs~hM${n^#hp>H%y6;)k`a6)4;UO5@@h__x z5~-k90}LYd$N`Dl9v#1Rwf#&~QcMU&q=9vumji09VVF>xNp&i*+Mp!I$df^**?QCa zlbT=I>qR3+_PnzF?j=O0O8hBe^*XmZtDWtC=V8is^`kX|jcsq5X~K>2fD~$A_>?b7 zJ|d0?qf8=ctFv4J384chJNebVk$W|L251bIDsUGn{waE~`&V3+(+r}6Ix{L>RbDy* z7Yh#nfRvZIL%JB(fPevN5>(%b|2(dVo9fIStJ@Hy92XQUcW4{+-^j<;Iqf)jT(s4= z2mtu}P@=*J;n?pIs24uQYd!(m1s!pa2Ck0ET>*zzBBCQ!$E8IS;IvNgp6WOMNg} zM9&W7+bV-IxBLx;21{J(bMN}zf_onAYtHoALi3G0X!cuRXCKE4f`)$=on1>~y+d86 zqPECc~TG3NmmA}0^Zs-xxoT(_$Q7T=lhG^LGiuYs`u<){< z{RU3>t$g0);1J#V4$YGMV;mGsNuqISB`L~%qS!h$fe|x&EIKGgp^Qtktuy*4N*?b5 
z*b=3I;jA~+T0)2dVd0xl0q`+S=PrQaLSUlrs~G{o`~+pCgQM_xyF#K+Qr0W~EJmqJ zVQ$E$YGU$!<1TrvAgJS3?WS*7fMSxob`zX2%nv2er?i2NoQwlN;|o>qkxo)&-(>&y0*l+mrcjY7SnUHW1Z(k5~ll8fIBQxxH7gbbe9eNlm(7)Gl*N^RrI~_=*X!-Ois6KAq0`a)!p!{pSbZzP>MB z_{}Q=UcYW)JLT&b6za$znj+8L9y28PL&`Hw0}R`|EcO1c zK^3o$siSv(54_*e-Sl%)xc<%NKnu433Z%{xQ0k=&ES1ydN*XXb{982mvX82zd)a>O zkCzb*DQ&a2igmoQdn)|rKFp7`_+>048jnn#Uc8)pKQu|0Eg2pk0a~Ddni*nM-=IVw zk-!rSJC)1lxLxMOrx)xr2JA5E+zS^fSfQM3*;8$7W*tymT#>%u4M&s$;ptDpx4>fP zpzqT7(%=06It_HPolMciTqdpzgVPbl7w%MKc=P#i3*f_w=8l_`s5b~BU!MT&W-y4@ z`1}s#J3==adO!=x)eEu^Fgo+x4eB1at!a)`g$BIm^Ihm176>}?S%!43W8uK(fDsVW zN6eN(L1RY9eN&io%by5X#V^uLzM8`mWLjj<(Y{$xad;Bq6bWcfJ^A!i01>d>W;O;o zm#^L~#bNVF7^vXW5sGWk>!3U!!k7XO8t0^7f(&j33C=^vyI1b;)ykHat?%cuDYCG7 zJGvNOh^V-O>3TW3SUox)2#1C^Kg#>%DRZiZP>pc{x!bE7BxKJ84CEE55R)BM(MnRU z0H=8dzw7E$C%~`Wa{(_RHpIyjcn|h_J2u*CJB<;gq|bUsSx2lgs&BZz1?Oha^?xj0(b6{J7n3>gJ#R zoe`KvtOxPIwMoJVFGc`iOL;%YL1bCtsQ|Zm2D|I#mrlG1m80Cvf=c99@}UEO0gU@? z%D6vI9Tl!xzRVQ{o%N}(H+3+3@C@MUxDZDw6lLxm<`NV7{I&_(6-qn3w7j!d>S$Zf z?t6?(u484oc>MZIN(SpfYW6gqxbs1?jn$0!BKHNoml$C7o+`L!o$an`B#)%QFbmWn z!+#JxIWI04OJPk?pVGFnGTN|V1yWdU*B4N0Ub1tg@2?!pkR1G{QlZFq53%8RlBz&h zWhX&g!*7ty*U8p%clr_-&I@2gEVcs_5K@@=b}@?#BUN}3MeFBuo%yCA>~~O0oU7an zI!NqBM<5jzu*@+JjL4uy28zwkRTcCC-$f%p;(KzRUy$9HRG;hn@CcK$g`9aC$2wdB z9R+A4K|PoQ6fI`>w|U_9NAp+J=+^RjF_?JZzC~~4N1_-zEVB7g(J#OL>r(n2G^RQI zh-Ha{7YYx`pxs4#fhHJi_7DPmg94?fQ7@ZE?2EJ{bUtpS#^S8TZS%*J8FZev4xZ?q z**Aj3=ZcT3SAf5cjOOoa6M|{Y_FZx&XN^Rx>;c z4F1l2*-3R}9DV;0(!?ltUYY_i02{i@078C^YiOdV)pT*;gf0R~MPyuqq(a^-HQw4V zr)Dq)eHqbzqaHIMwoqRD+7U7%s~FAxn=Z7ltfd0t3d6 z$w9w39XcMbM*_HG^kzWA+Q**j=CpbF*rJ1#MAz}W=9-^$=a=K3vMLwL_^@N()O#5U znVF{WamrVyTkU&=L?`77I<9`cFUGht7mRb00Z~(c$-^&`8Yn>4kMJF77gs?17XBW0IiQlyi)Z-Q8PqU6|By{%QNYVLLtCbv@1f#UutuW>m zlj>fHI8)Q3hWkx5PYQZW#VWRLsrtx;CDlm%Yr>$2y9daU_!Xo*D>1?zgzQE@s&4QP zm6e$Ll`TpRw zgU$zcBV|hho-7*KmUT$bxx7L#x3Rw28`)`t+m9h&K5@|*>hdoyfet;%qaVz-2o0rw 
z419oo4mfBHzQ3Z`l*+>tfyl}A^z-?)4@RADnUmBnch`+A9{g#kmDH)ZcSiSpa5dh#mtoHg=v(6mkNqbmx$-(Rqeoc#Dn-$D{OMsPj1Ww)NfIb z^qPe&*G1#Wky1)tzDH+2_nq{Oz3PFab`3^9 zmr`{$`D({zC}ETO@O!o(>);2wOo^p?1Kdc~(N6X&W_#+)EgY}Nv`9TKkB+l*1QH=g zt;_?XGh5U2I1!}m)SRxjor3l0ye}?$K0+5`MIT6g8YkqE6qlc0hImC-@ju|vg@T5j#&mt;?3t~Chvi+F5EpOya(cVFr?d36| zh~>z4Hv(AVi4}H3gV4_qQm@lk=}C6?BH|;4p|_s}3K6%t2&ViIq}(F9W(j;hg{{hT ze%+g3zWcULCu6iNu79Ge%8hI9WxnBlRl|>JV%?QUayoCfKkL?!+bz=aB%FbP?-1_0 zCkIy{-SpELRle2OWmSAw+JE$c=+2naT9-JMpRA^mc1?~0-?#3qvgdl1o7;81-}+o( zGQBYE#oM-!PSlD0?{lM?Z<4YHqDv682-a#F{~INtZqJAjnddjYYVH^xf1E%)%!$9D znYlZrPuC1tW)fCIw`&xXXEma}ark2dClbFVoO9VdR?G*iD-eARtcLs3=L^u! z_Ym=#b@#TOb&XR_@N*$Yie?j)*zCkP4Ia8D1+uO+ky$L<>2_EB-O~1CP4G>(+3`tL zB4T7DW7hBHO-=Bh98iCGiQRsB{YAr69Cb0xW~ak_gS0QI*mZ=GPduu{X;e+}4epce zQ7fCA4x929vf!10HAvtrhCVbD2+}?ybilj1@wYUmf5yJHHo>)t%Kou9CRbuc@l}=G z9+Ms_dpX9Ki-Wod?W`T`X#6QJ)3dIR3YeIh3UC*z9_gdd&lfEpZQpv+fP*BcvS^#X zbP+p^=vcIW;7!`2)P%4|I}y9}d-s&c(_g?Mh~2>J&*m0{(Gep{_{DO0Eku&dpM7G| zEgL4;UsJD{+kWwF;m1pV(#SLTqz#jtr2ZDw`?o@MF>P;7t`BDnn_SOKaL#{)JqmXesaKr8gMPrHnmG&hyg?Xnmj;{>%bY=h!-Y{vctF={n6 z%g&bTdLe-}bfTQq?_ev2keB8&Lg{`xS(*30~#foiD# zN7Z)+Qu&8(=XBIDs)OvZx9pWNj$H~NTPQQKGZW{OWRHxHA|fk$mT|sOHVKh=WR)n{ z3cdH^`~AJ|@BN?W`8=O--`92B*Zn#DJOnnGwm7Tls8C!siq&jdSYL7zS$s|UtQp6v zT)r@u=tqnrb%jDZ_vJrspcL+YNVB)}v z5ASXzUZ;bBh%=BWS~mLLm^Rh@r(BeAd+vvW{P?Gzs{#~#R|&(%CI6``a8>?PCs$nj zXgs6**@GDx(UC#*w|#F~{;5FT`~OeCTNGn2rhVLVKQwOUkT?2SK&3v*n{EADx=~M< zzfE!%8#_95#VIbnF}@o4&|1j$;Vd<>hiOSONnJ1+?1geX+tZSV1&OftKICvb3&`nr zmfFG8Y1;hk7aeX@OZk)S&cjH?yVAaST|dm2|iHaR#NKQrf!55+(0E3AxbV@b@K ziBCk$kq3AUe4$i07{YrFTw5R0%2_ytSPa95#4EZ&zmS{&>EdYZr;EzX1hvclGrrb%BQ0(eH&mybI4Pr)3EV@(gARv5je_JOsP290OT_K`|CNjJlL?%}ZvnphMLO z#?WeFoK+E>!)E#B^{6{YQgt9dOTmZ)LC0NaPe0`qSsJQ+kXm$%ylgLK7pN%j|l;K0%+eg?74~RxS~zI~W`Vf7HDik*LAz`EZq3|Ff7@ zo;(r>8&93`!!Npk6Qu(d=H=ZMd=%HGFYk2#QHPl%7U5smq0Z3w7y!T03-6*UQ$ z93Qt2dQCC)y`4rjycxVab!bu@UM_f2!0@z^Tz*@_1&k@h?i{XY>HH_x>a-wqG*)13 
z&1WDytoKTMK)rlL0DVrdWUGz@Od2iOKJA+qoD_%2(2`c*0l5MG$ODBPk~C*ZsZHS7|qPk-l3lpY;59KcQsNC7CJ*1a;NuLY&tg{ z#{d-HiZ}4487-oNxYmkyeqrU9Cj*RO(DmgiCes=)BSQWIz52!#zbmZZ+=LS2Hq?H zZ>F<|IR|exCjW8uQpX~%{Y8vA-Pa}7B=(=T!fPTF(9v|M%FkliXIK>PlZl;gykM*& z+HDhY@&T;eSx6V`jy0^+uz$`_TsB@avXp%(eJR(lm<|F~4g{HtLY=XsE1^S(G4`E$mmSccwU) z`Pk~MF0m=M$b;`cdDc(pw6dmb5W~GsiQ(2>4i_1;^De;u6PaQ2oy~cg4;jzS%tlwI z$&z?w^-S%(#;6c=k*P^3al4<|2ItX|JS3wj@yB|3GyH1fCv(uBN{eYfT_YJFAuk(n zAl<|ROJp0V+a$6qC$>0cV(BKMF+r2#!eYxVY#k(eR2WsUL0ka+;v_pQikUXm{70hj zLdCXfSe+mMjZ=I?YrBUsOhXzFc&W~0kRE%#l<>5hpL=Tf<*>!~+{}Wz83Q(h(#xpF zz4J%?M-v1&R7rn?3tPL&GBJ#jNZ30UTz3S?g8JxnCwPT1>geK|xxSyZ=B|9~WHV?; zQ>#X7F)}3+%h;7ryh+m?-?Y<(s;7~EwV?R$%eH7fOA^CjBS)Zr$=FFbUr=UIIZ~*! z)o9G$y<}U&_gvD9q0UJYx69Ua!=auzT4cW_H`#!6o%4vT*ddhGAwoWC*C@7Al@CII zhbo@;Mm_BfiiW#&m-crqY877_3B!A$hDB{!A9%*N<=~4V6})D$DOrcY3prrrJ_F|C z|Ko}~1`Cphg2VA>G~$8mtJ6aj_la?X$md|9F<<5Td)chTAN?bYuHf)k`$9=EyC%g? zwSF?<5K5MwMAx5pOsyK0;16VO$y&Wli+`GA{FiOsES(#?IYnt8h5VPT2)9TDQ8(c_ zDtBL@Uc+ZU{vc!MLf~vkhJj$zY3i=@=u7xzD0ebcV9#=T{7nfIfQMeN2`bkTO=re* zPV=HZ)77c?s2ZP~wTIM-5Sft+aEMrCju^o)xP$pB$O*urLd!n?fe88j%)c?L>$Sch zx`MaN%`Dg7EA6VB#eNd2P?pK#fnlBlUV{sbWiLS?Tj(gHf=ls5{MBPVs{v zW?OY)_%P}pRA5zK?Se#scNs-#rI^Pn^U=E*uVpVH2k9FUMT5z)$|y`IX13tT&_V9T zP`az4gSXtGg2qQ!AGNWfFEiHJ2IOw99~Gb=W2KnA<29$dpEs7TkxvN+;1V3BTOEd? 
z@I?ls$ogE#>TyRHc)=3Y`f=(GZUA73dns8*d}&inza$GUNRGDYx~g8&M3-C*8RmZ+ zs3qx-gcK$uBoShC+aVcTBNi#q{50)#%KUn6)|p_v?DnX|bdu{~u)?xk^gkv6s9!QP0PT)yb-Kzw&J4GuD+Z0K+6)=5&J) zwAM{)JHzkch_Ng#>1R%kp7+so(Z2-n>EWT%%HOWcWk8BJO9=9T+Z-f+V9u3HOPWq+ zHhva}xptWOOhPucteN=N(<)|9_~Nk`#`d0$1f8P77{5bsu)6_`wvE^DdjXE1t#i9x zKUlV^6^{|SPoDnf=*R|PO+{Fw3ARUavaUc_QDXh#~- z!LcCjk0f&BY+m)II|F-@&eXz!bB_F()01{J+(CSsbW=qkp#9Tj!AE9BJ1X)G>co9i z(U8v@JKCJVr743!EE(EB1lOils3Fe|(O#P)K2umu#GxuKhef0%`R;A4#`S#rao9oP zTpnw*N^)A#{&PHJntNvps`G_Wm>1fxC9YodVgvQz1yQ7M;ALy(Li=mOZ|hV@aBKcN zma%_NfQ;-YMTXH+F=2j2`TX}+=?OH1cZrm9;{}$@+*}O+1?doC^gKBN5>_^4Qo->| z)~V5O6oqpvkAp5 zQ!`%6juK~sdn*5%R@9l0V&{y$Y^~6Ion&CMuybes@hBvCT&X6}LmDMY2Y9V>XziQ0 zpIIdfYwvnzQ2njrIm6M7hcKteTF~1=wx;*#;An+kruLsL4A}LbHisentUmWW=9UWa z3c+Rmu{K9$$I3C+V3V-B!;wPMZ1KWRQnHYc0E-}wodmVu52SeO^&o{p(ten(9z+ zOaeMS+@|_=LIrRR0fjtc?k}8;ishW&&-lCrZXS_w{#%6jnw#GJw7r$X1Aw_@X(UoF4{!?4Ia5dcIpvZN$9kt9vYt5MyovZIe{^Uqtb^D} zyG|7I#!Q6+FJJ@=!bmXpAHxB(BFM|^c>S|}W9!HTJ6qbXr{D^4aY|yk$GgXH5ya=L~YV1mKZ@6=jll& z4y4ioHjiJeSU;yt!=aqJ(aq=^D24hWmC?W>wm(`l(U(u!xUth&JWZ{}i-~D{DzzIt zVR9J$4U!4FUp0UKL97oLhCU3MiXSe&A?1tSYRXWQq8qJw7w;Q@Zg17dx7C=Zec=ul}L*IskbSRJ7P z7MY;D?0&5Giwp`QKnO9+d|Fw^B-rr|Pb|Z;k=1&j{_=+@ZZrP>wBjULHPgfK>U-xu z={vQbgQJ^r?z#iJ=uxyPxxykVCpWK}j{ieEgU#THVKSI~6_|awDlQ3-Bm@av3uUxz zg49;(m?fog-oM**id>o02owmkO3Sc%FQl1geYBNbuxxjo%S=9vUX|Ov9gqw^q@I}< zpR!9Jj~hZB?{DgJ%1qVFb8pa#*#umV%g0Y8wm3QpQ2fMVXhy0c>w{Hs{t&8TLE~-n z)X{tZih}k*_AY330TS0jUK2FRd0y$Kv?XDMCj21g4rx4VKQ;@a^dykeQO*_(M8R3o}+ko>b<%ib$+-!31bN*Y8q3; zh;sr7%z&g3=RF#k&V|QO6Ce;>(vk2hw#70xek*0K>X^)#&}79b06BBJW-iFP-+-jk zLj(?jRMDPSM;amPloc1k$$;6k$-8Rkz(=TNeR#qRB5~P>cFi}`eKoaJ5(LiF;M3TU z7dRjNXX9DtdLUR~Gm!w9ZE}Jap(DD;EZh{)yi0@QP_=P&eUQWm5egL+J%efvsnt+& zUebWUzC|}RH)vC?7jMIP4ZmdLT(4J>YQZwxv;^OX8M%mW-t=`owg`d%1r>U5_au_L z_1=1fo%(QIoacnQyEu>eTXl|L!T?CE$>wjL!hc0jd_8U`))GvsX6p{D>NSLKGk25_ z49S+J&dlexxbizw^u*nzs5|MiU7wWvj$XWt;o&Uvq*t~|w3)%8?Xt}k*GDVANzWnl z#u3>C#hckR@O?a2^7wcf^%dteR5{XdNc3t->{sf))h=aWV{Fuv{-UN834}J(sPBU> 
zz6PI2+>0i}tHJlTKp3)P$^elR^v&-nM%oE+api*%ufJbb4gpc@+|hCRll>g=C}6fG zmvoOSn;YUf1gYusg;v`~&G|Its&*(?$+Rd}qB;_KG;<+~m1?d{kW*LBYln+|_^8OC z9FQaNThoh1+%(GqXvWsvJ*x9FRl(=<1hOD_3^@_o0rKI_4_zVG!jfN=?Y)|C+#db3gbkk*k==< z!p4Ne@?{{r<#*}yRUj@f)_H3rZ<}h%WIWj7CubY85}Vjl^M`<$N_5ct6B@C?Kq#m9 z8gOJoz3(8J|A1*6X?&0mNG=oudA!r!TU!kZIm9yJ^AxL{jmi1{iRPlu=;J@j1cM+3 zN@ifwKP)nzr4XJ`RtdYo*X{Ho$b~eh?A4&12QKlPkNk|8%$SauW2hDs``}i4 z=4S2j<~fj1&kC+A2iQyIc0W7{F!CZoC%nuHftk8RbvkbB1?+8nDW$>?vNY>C}q-1K9zX>J=AV7<2{6s|%+ zCyac4aJGwlAnM`Nsu+qB3;=!|Q_9XKkdZ;5>57G@%)4lHk~xlJ0`)kLOCYSIkoRT^Nn*YOZL5 z)bzPK|9~f!J%@qI-a1(b1LsC{>4_!Hy=065BmjZTMCn#yqt=`IUj9Ih%@NFM@O93G z4YG0?gQ-;Y1qMB#_S@x24sAyr zIO5WhuFs3E5YBp>yjd`p3js3z^Z_q8C9{e|aLkAXB&j!5=ndEe7{W2(dRqp9>U&SZ zB!M%)|BfOHyo4|7apL}`rAK9!fPQg#_QtW!ne$A`bUk4TmE;W3TV2D0ULWD$l|97oi>k>LD~{eX=deNIX`VG%YaIR< z`F|4oSee7sj^n7lrXikVPY?U%Q~KbD#wcaemYMPzXRMHI12G!dK$4bNO!M(WIO1{S z7(b`a2b7dS`(Z7i1ZwJnC#*s#t8SlY*RyiR^vs?{VcHi3A5<*e=1YVyOmqXYPCo64 z=DMX%X9kagp0mv)S<*qlM0SVDQt9Q@yri9_YZsj#%aBcc8~7xZTD>aBMD~Vqyz%w> z5~Ca|43mh*>)*QGLk*HS$1rTL(V*6+w>gGqfs=TVtn|0~@21yMnn+N;rq#S_i3IpV zge!#7A_}_=^$KJPUJ_4t3BP;!P#OsWpcI1A%X#Qa9sn{u1&62mrVEz_@e^uiQFr9E z{jPtx54U}}*F*C_4Y6t6gAoW1&Xf-brBW+jec*=~(X*MTpcEWbdx9_n90PiLy#sYl z12)DBq7PM9Zr%u8J>e8OiKsTVh)8C241h~M#Knp;2V*GQv1PAt0R*k4+Hig48dnaX zFnkxHJq8@6V?*g;{}tPX5v?jttjZVaN$dhcXq?9_0boC_^v}=WksmL;>A$EdaScf=}&P2kLc8&_soH zB1QOVvNj6z>e}q5+}&m$%NG6XXiF$p#8I&%V6h$*INZd}>x&a)P?yxP*_syJX=_{V zmrZyG)G09A^jvogT%R!c95q~|HC9BqcFO!tVGIBbM5ZVZ(Q2geh#>YL6upm;US4kUO^x$2ujwgI=a)P(ff>Jj%slX=IbxFjA38Q$ck34QghT0Jx# zg@>B!GDf`yS;-z!U1gl>@EeMhMqc0GTXxNt4Gi7VtgGD&+r6Ld4UAeZkX4qVviCI76*#{k1xLg>nAiwEa8NnXH5uSPU^ z>%f^J&YWM&u}?%6(V3H@-l!)^6C{zX1G{^u@$5WWj3Y8anDT9KtVK=yFsr38B6#-u@;vQ;2SL6|Ma`Eq( z{S-dc0dmN@XO*Vnz8!Ck15}5UBNR(AAeX*H%NF$io6n9N>!3T&lI_ZMf^3G|&FZZa z`-~XsP&81M!p>Av3aArG0^QJaXcRm~#!bF{t=Rh|JuCBhl{nFrw!K%YDu;9KYCgVk zVgS8^Moh(ciiJSM|$x6ao#pL znBrOb?ti{>5P}s)9T|lK$P-p*35F;rPbYraZ7}>90BYtuMX`mkVDO2Y{ce{4E=>+a 
zWSg`lS8~}j5MjL~{W7Nfj~>bf+qrJ#Dd+1|dbdw|=`gJ1yF0kJ)DO6?%HWnTPJ~6S zxbb#JGDgUsFF<+EJgMTvg2pn@eAs#^TWrT$N<(8sPPKkayjHL5XekYbq0Ulg@thLr zU&75KC+DKWu3Rj@4=VoO)ZK@_ufJsfeqafkKFDbQ!wD`J!yMYpodp!lg)T9^gw;E9-QXjJ zNivp7;g50RpbvG{!5sWPI^jcQ)&Y0x*8-+B;IJ0i@>`$;OOgsC<5gHm9{08c_zIr* zQogPFyUdA?jwGx|UdSo}$vyyXin@2fKLW&vo}!}G(qT`2gd;PcOnOBU2ej+!9sW_1 zg(?phu{K&CKLSOSUld>849UEsR*Ti^YG@4J1gXncVzjLxh5t)Gx;KBN1C%1Iv6yAR z;dOYE37awE7v?Z)^3b^dH53nne~G(&y}oGpR}@qeS1aY79Jmo$$b;+h%vZgJyuuM` zWpk1ca=u7Cuc-G1vMBNSFsl)fLMRRyu9q}y% zx!3+W&R_N?7)YQ5RxdAK;c4VE=xT>Z2<4zcMdY@~!rnB(iKzq)+|c|JO=c>;Sm}nhoNtVZ)sa| z1k&E@6b!TG<~n|`JjA6%R3*6#MA`p(p&Iqd4uQ9y0EPJbX?Q~M94`JE|BGB8$={>_3pVWoHPZ-BQuvN^&nY-S3DrdiF#EtkUopXCoM za)wW=kY zPtLhc_vCj!cJC3rIJP4%0g9g*qPJFY>vP0Q5MrF^BN{A!h}ct)Unf_5-DnkfWSeZJ z%pvDBGj^ovwf1TnqsZZpy@J^VMb^RN#O1YBOzZs2yA&O@Tx?kk z?4O#OltTCLd*luUa}S#(q8vMxrwl$9OoJh4r>7}p>7;G}c%7%%l}ofJ)HlQkdbcKnnQWNi)~CLN z1wax$$DU;4*(Qy}8lyKaCh)!VOoSq#xWPvq_=QVTq~H~$l1Ta<(7)eh9ktBv7v?qlGs+eW*-5WrU8WVK_K$?|RU{p0~ytvKacV^PcYtvWqQuG^ZTAw)8&RU2DEFg zmxOaabhR%f*?7^SEfL>o7fX@{lqZRgE|OO`YwMrW?UBF6krnb{*Hby?uH}cM(D)mK zQeCkWsG%7K8S5_GCOoVeGV@LPE+4YrXW~EG_02Wo)o+uNfwNPf3l#Zxg4c)@k!5Dy zKX(RJZIfu;0Y0x};(ceg%wVYS{EBza$2ToBrS<8mSxlW<(tBd0wR7jyr}OzDW6D31 z7^fnXxCG`2x72YI<5fUyR(B%W-oep(jtUYA4P1f5@D|;hhDcg7u|7;x?s}9#dBLp> z4a%EwOl4Sc{e$yw_$u^*Du*eFAmT&YcWV3Zp0&^FgFv2W#HvTwRj4;oT-89*=RG*j z6rV_-eIxZ8Gx`LrBcL7q+7D-*%nYFsSHYDQdoyM}<&*YNl?c8v#T_F5fniwUsq@FG zzrSl+JeRw40o#t3m01c=t-Y-b0MvH``F4*Px^t);3d^%Ort>jkOs;Y@tU@anbAHpT zsRSwCpyuVerAGegQ%YATe)z*ziZN}{{wp6zyJ`vT{S-a8OqLK^ao!B*xWLMM=jkpc zxXL6HlpA896%xL%*KmD$<i29{6!mB}?{E=Mu$HLfw87(WJ+J<;WjG>z`yOZ0 zonnjqef>N4v?6w!{{~!MJolSUDfTe7ZvNH;*)kbaYT*ZJh~ey_Xb4b39vx=9l0}{D z2+j%``M_N&KKhV~3{c3}(6vni9Im3&OXq}dslm7nssg-yx<$gEA+5Pr! 
zlx)Lte}p&!fsBHVRu29Wz<=#VEq8x>=|T;wQ4{_QqI0P?Bgk6$xXGQtZPA!+aX%Cc z-xk)s9q_^TDrp1vIv={O4xdO>SLT%VRTA~s$uLMWe{cKJ@;GdJg~fN!wTwzY1hp}E*O>SMCov`jL$0Ud)7^c>}Qb$hnrbMEuB1}Q6C z-1_HV0Sl?87tbG!?c0)?r}IS3pqu1{v)Cs3K(rzl@KrTQd7z9 ztCk%a@vE}fUrU^2H!C|U@)>j1iu6ooDEntgBGhpF{VndUj7c=^PrMOX9aT{ZX$y5C z{UZ+vJAhtpo%&XwswM|wCc++{^W5TuNE1Y zN^X?gAX19B6cQW0B4kPeLtmS;%FUH}@DMyf2m2rQcB(5-G^Fop88d`yf9rIuYc?(k z%~P`|vHwly_SkY(df9!`l}m%e08gWwz&Fcb^8)ugeS86=MWoLC3tZYV8_GAn2&Xk- z%SwqRFB!YOTzs(kibJL=l4yOrs`E`-;75Z^Z+DrFN$!Hv7n4%phcGQD5Z>0MJ5$+d z9HVNFZO?*;&)k|PfL=P|G4RDpP|T((?&+Ao6Q6;cy9N>}H)U&qxUSufTmi`26&B_dSxJZ()UY4J-Lhs`wk+RL| z&nUReHTtPwn?k!2yJ33zSvqXp4Yz`ByXp*dYxl?oXfK^dk2T&rw%PcaExRch2q(WeT^i1kH11 z0g_?jw7Y4Cw)_#mRRyK!TiM8o7BF&O>It~I}>b!PCFGHQaPMES1$;THvs5FOa{yfZ;S#6gml~)vU zVSh|HE>Gcz?pHTLW#Qgxk6E$m*B{1w=g?dThLPqxN|u8LbD&`04MzkzSSgN(mWKSp zlZ{kROgOUG;)b|+FG3>&;Ak3DoYWR9*qa zNf6&Woxg4V>=ug}#~lY*lWk%W4+V!%it(mmgU($AHrDqycE=dEq$8}{6bHWj-T9;c zZJE6r#NP!Q!cF=efF1@%wQxgFqS1%xq@@QdwIz?oLZr~IfNdrZ{lq-!q>!L-QFnHz zI%(_6r_GSxekdQ;cCLxpkPR2N=pS&y(f3^EMvgsFfti!=RQ03!`V&vR?d!)%cN+B% zYWE8j66%joWEd|d3)hX=UWwvoTeQ7t8GH8Es0SLgiTx0FVf&kSt(NUbK0{yQ1kc9P z!)8VOaYio(`Zv3~MUtvH1kdJz@v_0I=X3Y3RD-L!VfYxB&T`M0D>CShf>|LA{zHWa ziqY1$c9+qvb+D!zZcvP8gBA*E;) zZa_Q{l()=tO}t6R^|Iaami|6I{A%?_Ih)~IUS0GT+We=c9M(^JY1=)oGglc}^x=0e z`|OVJ?$w5{RbDd6f2f9b(-%I;u(N&fYy5n6I<%z`|E|9Hb{0$KNQqc)7<)-=QBoFS zd(~_#ERWiJ?HuoYT-VZ~!0?D=Y5j?ZT9d!C$^EL05l0OXMb5vzNlW&6=svF0hhql+MWnZKcPzeQN<6F|U; z?ax-@8Fc#V-*fe~Ph1({me@u6D?VQmvEe49DaPceABVVLxBlmKre%f#3l;zNT*J+G z{}Sx=6A!dZJEEe<2w1YcD@!MUV%>QAgbo$Bv)QZk2jYp_;}a}_v-UDmCnx}OM+OB{ zxH0Dhf7!U9{J(6k@h^yZMhb=TJWpvJ?oBru%a07Ys28Ina1AJvZjof>% zY*Q%^QG1=moTH#2B9Is8mocjOPVEB_XwJ^E3PU?qJv3_}zMCAXpn`Ko!Ii^JId{Wc z_Uh8}n{$SPx=uj`WP}XZIO5@)x#FyCJB;65Kc?xWNsPZPNHWSf zB`4n5|MB55%vm#F5&X4mK3W{QP?1R+|NF)v^-_5zma{kY0Lu;Y*y9A&m2~t8qpGz?^z?YH`Y0N+u4({H+^mb-< zvxrHi;sGu*J?BfrwQ-sn4cjXeW0=8w3SIXraRCX)Xgyq>j`+i97ul?Rmh?B;8Ieuk zAi!cVjaqG3YO^?qyTy9#E|5- 
zkl<#Kvt2Z$)>ji1*qyJx)6|kq(Gdzj>Tw0vWz?QXA05JR@S=i>E>dvws}P@|0_#M( zuw#|3OIL&&2Y(tyZG%$)CV^3P=J5VFV3fHO39hM!(*ff%p%EqJ`=}_WQKb3i1LO}_WiB*ZW$9QAZ$NC z6f|t2A*T&LStHNFtR&w;rQws20#U`i`>TWBV|)u~eN;HUADq0Rfx~8{N9oHliLgiF z1_l~S&nX{ySxXpL?8|3=S`&G}I!VA@U=j|4r_f|d4q#MKfx|Q!^ivKmF6FX9;eI~OJ~~~QHYd%X)x{-EovGS z=*C|Ag1s@YTjV)bD-11L)bRsRt%D#hv({knU&-?XvG&`1*K8l3w5d(x1&1$h0A4SC7g9tzu&Q84Z2!=8& zQW?K_WPTlEXHI%Q>Qx}LYWLc-(>omZ(tL6*O)hP$jk8BXMvBMd-MB%T(YufDn7{a5 zl{n$*J>_f~*py9B;V;pf&%e}Wlsr*ZVV=4=X{4hqegw9@txXPH#T|%W^y8|1x0`)e-~Z~18=HG2_QoJU+alw8VxCpN zP3tI;MOEGmPH{t@=)YrTyuJBns-ZaL;Iw|?72rp{ zs?HnYE75MIro9Kc`Y%I)fBRso6ZY~;Qr$I87dG4J6KW1X~k00*2 zD`;VBwL6b~;`6d2nL%TkmeQ8IPdYB>?BsNTZSoT-ew5F;hu=@;f{R?-KA)J=WWtoS zmEiARrl4@0)r0-IP|XY-0;*Ys;(8?(M5jzpE_Epsay9={ZHSL99E>?%Kan(*#n-~m zi4|RWiD>$2dj2)vCu<9vDU^+$D)XSf&@-{Gbow#EZ-rTjZK@G zCmEmh+(Dlqhq8gHSWfU6!U!I(L%#jqb|CDzIs7h;I43E=W!`&>*Gi0TYoKN4d>~}l z8)J0dw+T1rlsgys>sd)Hzx$Kti4cGN|8F?gOO2vwr8m@mG z74f2QFh25j1<*Vsy>)&uLy2(b)748>qJG5#{7cSql_lR=S2A(9|XzyG|&vrhzloHR*6+#|Tbwf)C1|tifH) zfU>deeTfz+Wn5qKuPov5F#wFp^=+@vqiN|scwhCeb9pN8MB;*&sbF8APRtLM4 zX0D#7QY8;!a|%R$TbBQIqT~0!+ zZ$nDDb?P|Xm{MBm-Dj5dF7`%}551_t3SkNamioToDO3T-eCf&+1s=D*%g0u2bhr-+ ziNFy*%Sr0$qEX~$p&)n=YMznpd}D=q@%?N)%pEd1b#6B$ga)K)zd}91wb<9}=Rx=K z1*qLoqMXYeUxx56c}Hi@w{4zydAK#Tl(F%Q1bAO`F^C$&hu*&ab^|L2w;r(ORy&G& z-Tz1agxidJ_&+_F9$y0!5{^0lCWB=fLF}Y=YEM+Faj4?ug}5%XV`#TkMKpHb_%Scj zslUC{LXHAsA1hrvOEA6qiRO!nlN)Qb>DlhmXSc3g$~32;WxOv&2vuwLH^w;_uL`)n z^*Dvc9YKc4A*;bDZcH#rq_Srg{urIyKh&mN{9~}tC<1n}hXD!cSebY1xGoxgfvhp; z&9jlTlY&_$50<{Z<1+}$a6-vDmnZaGR>Okv1$_%a7q02|B00og? 
zyK`zy0Vo*_w)*m#@djdq|5d`d>f3)xnTBqBd%^$4;Rp96Hd%F=?ZtI`hT3ycNJcn&un8G13xwP1`AQh!`nrgNN_jO(o?3JeX#WfqO7WoILYuWPyYH41 zqj}w>l{*u^21Y)!XS{Pk-=GvfePGy$HzI2~W}lq-mL}ZVhFkb!Yn(ml+0iF~(DPGC z8%5C3YiB?Wbl&xPVD1_g3ZnY;>ecDeEy;6|X8ir&z^=U+Z@f-f_ve`wcbL9?OT$*d8gJ$pUyGQ&Q}Ze=wx_e#w($4GU)v8D6bC{deC-l zi50d@AFp0FcXK=pxjXtd1>>|LkUL2FY|A(i@I`!RwvdIU7{s)zNG%&_ zq>-?*iZZ4BR8Pn3^6yLX_wQbALWL_@B2i=7tU-%ZM^x*6kg}4zvbt{^&|GqUdgd(A zl8k{{qINlAya@^MwXXThsO$xvMi&iUT8t?r(&A~NWLFi(i)D?b+m1}fgmm0|@cx|+ z!1J&D39sqyC#B+>$~}I%_(Ys;oz$f2F8S3ZHz-XGr>iUu#_$PUW#PvVpiHb0zCS_V>`Q0RhRwqzg+XS-@rh>`$W5(1VGZ$cB7!&gpz|h<=Ev#q-@{c#tHPiJ>lpO6})E z5ilK>E}pUOQ{T44FrZH4C!e@zOE+>iw70UHBHeHO+M?9wu2P*fmS^Fvg2WIF3t6gU z$!q`B*s9{b4XYGyZ?S!Q6jJbf>mb^Aw7m67GgWnzA69!o{Lyl4H8XQ1fLSonKelOF zT)f>2715Oz(8{xs7SyvViS%BeZy^GC4@)Tgf&{r+@vHbq)e}sxEaQC>E|*wkQ~4o> zB>QxIJtJ2-HxrfGpYTimUFC3F4AL5$;_qwEj`-jqUGK6f9%hBuR3QjqR(3;>l^}rn zeYCqLN;vx#Cl!GXN%&{+{(lyNte&AR{u8BD zGki(!uMN^=av7U1y4t?stj>o#7cmU@?g^d-z1AY-sy2nP4HuFY>*DNvw8Z`G8}R%3 zp0*;&Leb6GGl$r0M5U?HW-H@rwz~SJoVEASr*38tUln>cwYT-@&agbgyYdqM;w)s2 zCQDoc*(r`)JkJ`P)B5-I>pMMe-?nQ$8}qD&{K)R{u$~`>&&XIC)U32>gC?> za;+e>xMeov3IG{XTwHrYME}weYqC(cqhw8jpAU>?{N!#X-d9gn?2;kk@X^|#wvkRg zbT@6PG4vr0(n2E5rch{Iu$N9rnyucXj{E_N8@80V#gJU2<+byTxs>q_XS_4n{#LGe z(i$gl?z$UsmUm=@*?Bq~Td!LvFUMbVZas0340dfmB$<=EDS)gw=vRp0I)SzQVj4bp z{JZHL>(1V`h;b3{71+Kr_mh+8|7-~f;fWk}NJ@*;OKUOwQBe$8Y)Qn`Hs2#i1AmG5q30~w zOv0g4BFHww4lc8k>2UKeuS%E|I`cQy-_u z;ch#WN_rk?@xykm3O&?42~Hw=N$|fTj){13NCFjj-2~A1y7SjNcpa0`|2C$F1Py}4 zN8CgM8up^;KS=!s>)t3>JQQjUx?Jw-G?x3_aM32W+9|uXdFuK`RZ{lrZ98aKGAniS z2O|SVC7f)#PftDqRDf#}%)Pu*Go-<5ZitFovYAUVWNHw%B1wXk3E%|uK82tBYUtgo z@cL*;3WI4cdWoT147%K*HP2EqBI|hB-~~z z$9Z2}WYR@^^@1WAiq{0e?5b>I`MFGxQoHnW%ea!@+ga%nY@Nz0j8sBBr)7Ui!SU>u zAy?QOZ`&1`(Vd$@`serj;LZpI6k`u{dj^Z9`+M7WN|+a_0DG9z0>*SJ0gGGcO-3WPJ|LyU+~J`zDu%hUXdU z2|mg_qj73l^?f+L$>q1oVc~)3@Rd%x1AB|9g_|TDgsi!gze(1l zVQ-$X8qHW|6IkBO+E->gBNobZ2wE46lx~76kD5!CnMeH9I6pWzWCU)g>Zp^x$+6_- zKGY3U@BY|W%650da7Yv(RyHM>{<#1zW&xvJoQ&nBxnRnnb-J7ou|(@D^$m~Mm2U8R 
z|4AL3Z)LF{9d#+x5Zk+?%3+yRe+T!>2-Kh@{;c!CEa%g1jRlL?_ObMs$f<9{=mVB~ z0rW3({j-+?%;$j>*QorqJ(`;NB|7*Iw7iav`z`T-*KST+WV)Krm1pvle1mc=p?+#;V6qMU|+X^>^Eey-C1g zC*J)TQE_1~l~J0ejrU4vire2gHxNl5BV7{EYum70Ij++bou^7x^@9%nph^aWe z*;4l;>nwsxHFci1TIA~>KFJhO3O%^}XviH-f#fiP-!B>>l z=xJj2E)fcQc`hkeEhb+5i}LM09z`{>>$TGT10H^Yw7_FhF+y}P{ zaguy=9~9htPre?PDmIpik(yaK>2!(EHJ>EOo-ShXo-H}>_MOJ{f?lsjd*ru;C;pf} z-4oV6=E}XYH%g3l#k?$r*QjcHbG$dzZK@0>D0ESIss zg-j2(>>)Q6Ae~w3Jli}An1;YIHQ&>Vw~-+jlfN%w&7Crmy9^*c-W&p`!S{~O5a#O2 zYb*;R6%Z|1{0wqag8V4786P~8^qYK6vOJXL5mfI!^(z-^{>g9hdenmcad#uN#rd*N zr6AIWeKL@FBe6#|>(&-xUTu=)gJ{lt+Hp@~JtZapisAFiLw3 zT!_PDN!Si73EzsRZQNIR+Y7ybj$T+mRPI4~dd+C^&#bq{RMVRYK2fFdD1y`O=4$q( z;sLSelF*C6Ovk9#S19gK?yfHL6?|tZbDYbkM=>+h=_oQ-ymxd68+vLue&6{)zDnMv zr|$g+L+kq@K9ab@VFfFnv`O>t=xN&*UKS>w*un4!5&!0?zZ+pEoP`B?Ts{6O+_0;A z;`N|;t#UwPKa?j~pd-?Ged6Gt>yC%TIwCE}>0oU0 z$UJO&Yz-=uWpK>syM=%u|!pDvUb&ft5a9*Zw@G$2uK2&~K+xbsE5_+M433PBIe z2YO9D#usIaaGzxvo%CDAHW;lSD24~wn6aZpR%TP1KYbeHbA0C_!o=^`*6Yysu0-*bJf3vF?Im zE#Nz0!cnu1uE_WFJ8-W>f*#8qOByPb7uKXNQimlgC6)Kc^-(BM#L1j{?2>63W9l`i zO!e43i|p+cf32;A>zqXFU=g)0U z?85A;Jv1NIvRq9&oY=k170NF>3=5l8VLPl%-ShlTbN0DysMC2WJ(RV6U$iGvjV11S zj*GX$Rcx;TM8LIr+$we!Hq)UfxYf$DrbuN+mceOh>G6V}RBc?$lkh_OD+jSf{}mp* zF6qR-=RQDPEC4N#9>co!<8CrV!^B9*x5%57ulvC&3cm;i8E-tA8;MqTWYqMyue4+S zRn9hwJePUU)Qkd~bDhgw!pBTFg21?|!napl;pwsuE#0ey>FrMtq*IhvCV*4WUAm7$ z`mm?D8nif62+1IEeEz0E!S?P49|cm`qPmkh*7ib{-`IsPmSU`(rrZ~Jw1!D!<>8k3 z90W^4;}Z2(Tcc>#lTAO58n5~@SG`&nB%3Bm0HI0`wm7~y-o>* z3nhj$*v#JKkKaD>r55|uFf%c(u_&D8iApuQ@Xo5hTdy@KHp=1?>_c_ z@c0vQ4C`LA?zOIU#d)5Wm8v_%>wGZ7HeMlEw%FVycjy~f(rhpihBN@Qqa5tQew!ZP za>*KBb+P~m-^NSz3`U$Sxr?M_WjP~=RB>K1lxj?uv*z%h3A+E8APMf=QD(Zv+O8WC{@=%nJ)MLtW~&jQ>cH{Ixdfy0l!$aH1Fq7X;00F%`I zB8xVgvF2el|Ei$-kDwSYolNYVCD4iy$R>QRM(5RlB=j*8Uj1W{#o6l)E>6Mw?hN++ z0o~!x1|)uG&{w3cI62Gvyaca>ey2zevK665O6k_tA%xr>w~}@ zUyGOZ`8PGu8;5Eq2@Qd;xhW1D_ZJZ-Dr%b104^z7^t1G+Obi((aP$N7CxQfGQ;0hW z{zl_k@p@dgAB*OtnyXa!*(kmxhT`pi82-r+nAI?Kf(-o(K}v{PmB!rAis9L!7n|JX 
z$O(NR0`(|dSVY+g6r;g;UFl)4SpSkR)!9CLNqoZ7L2S7MWP{wHwnm!tXCB?O#8HG7 zpTJp5}})XJKfpoO>i(Tb+ES zAKrk*cI)WU-P6MjY3YUuyq3-bm-n|ig@IxS7_dY^X|4lpO@swdk6+(gCJ~1U+it~( zH1wHMizg*>nV`Ak`ZQGC{Y9_f1) zm^7RSj)woW-Z?;c<@3fyiM|*0c>dYjPDMhR$_@aMkys?8un`V;O^=ulS6iooBTO;9 z0U++iA-xAlvlT@r$csFfOc+w>Z8c^G!dJT+*~wG7@7Z0=U${1i^gz94VBB z6)7WBiL4TS2*O570lAh=lW(^;f>cnoeO?1aK&}@^!@<0FaNHEVqWH91+%jY*;RMhv zUp{TVj}-F2!a6WdpHFzMKYszr?{``E+TOG(nxgOZbmyNi0C`m8#yXEeLtbHZQZ-9y z)Z7!u1LOqFin~~vbdu{q|0Io1)SuyV{#$h#DXfU7D$xRAgxrJR;-W?%iNI>&fbwoK ztv>^E&x*GOvl@+p_!G(AuiuAV{h~Bk0Beb??W4W7HZ%ZLTz+Mc;Y(L6SUQ#}f5C?i zdkv-1^t19>dm#2%UzxXRrxzO$*(XdY<7u#uw!sQnPhOB{;Sj%;C0Uh02{22j;4(! zui?kWk=#RvN%ZV>8UZM$Qxc&83?Hx;ckY6sl)%NE<-qAn^GfI|aeau$%s5TK^lc=% z(iG%5v&pp{ZM1LY`LDeLSJ)r4=5q)+@b{>6&>-_~0^rF;&YdHD0!W7*&VvLXbPNt* zC}1YLk8U2Oq{aBWBlEiG_Ktv>+w2%WOI z*AxROy1?+{lmZz@0ahoAC`~93=-^SmDWL%hYQ>9(_r~}6Gko&BfALApmGUDYmv)Nq z7fHqNSwNE&XTMZdB8j~H72^o&kHXP0K4EY-8uA-;0dPMuxG+uvE1FgQbU%_Mosmh% zPXIWWfseiK*42Q`1U#coW^zqOzTZcnI*v&5&2T6)7>g7YgEXi_-SZtBi|v23WnBoL z4wrpWxgT=*5sa11woqpMdnKs8fX_81Y7H(M!eg}g&va7(GGzV5 zxB0k7qh7Krf@{wCG9YYk#vJ9EiDdfH0re&gWE&@gtGy_Ri3F>4=Kv8!)i8Q>g9P2y z8iFi2>{6FUZ9k-l0CEdnM$3}&SX8{I=-t#tG)BpW6d#-AK%x;$5P1$sAfEgS6Cv`~ zMDN}!$og0NRAJ%Z*3@vCG~z-m{Qfh82-n3lOZs!-W( z#?J0oDef3YiqW8e>E*?Kp{*gor{Q)T1&T>PjfTFhO=->)bFZLh>xA(<7Gjh}3aImJ zQr)O8+0z@;0S|j3n0*CJp8d2V?obwKCSviBSgIpz1{bEgw9(P+cLyrzPy5RB#9Nnk-G+I6gJ9R&(G+GN@f2?$UcD?=w94>=7+b7Snr2OoGwk!^i zL}6^0nS50e2hf$Ad+-r~^&vT*7{K~4mQAejc$AP#3|!%HB-*2@SOuN=(H{{-Qo^%8 zMF{k@0pTcXJ1!Q}w-UfB9bde1j3J|yK^phiTCy`-Dq!51iJuM+GIIWG&In`wtOwUDVSQH51A77l~zm7{W=^ zb21kW#z0>Z=@1!y=9D+=PoZOk{&WP=#4AwwOf)7Cs8?Zy6Cou=02**L}ymicFK9K}? zFCeD?pNoF>Jt#63ye8w0Oo$Fo$bc{qNy%s}@^f}O*}}N#s-Kn_`WsJ$RepoM!B1NVMLM}kxeb8^y#bT0F0gvOvvNXe5GwSTelL6R@UVo;EbC{ges39oxEM>qJ7|^N0HP6i z#2gH+jtUs!DoTUyDFP2ii#KPIOJEOf@T`^XEVH4uNDD&eT!I#DWqk zjW1F%;M^wP%JAJ9d2mwLC*YFtF2Fw-+?Ebbn|ONffl_;LwU5{fcYK98kbqm3_VW7! 
zOZ^B3HK>+k5UFO62{jrYn}^QUO@5eD*@_fhfr8dn{8rfsWm`aphihi{h$}|~Xh4Ir zIau6i0}6KitR9alqb4Vw#O)vD3t*^22iLLPhMCEw+VQuoa z!gmA0CE;VHK}d@Ai@p#^ko`2?jp6WEZsyJFS)wy0huwTzIX}yRRr^{G(eM`j8qFf^ zazFkA@&tSsdj-7_1+wzE5Un28xKJ_SUvxlPguefqW~Rr+aS)OO>fZsG<*%Ju+6xi7 z2cx#h4Kv|%eGWt$J%{@;CV>+41^dezvZdvfU*#L<054#RbP16|(n2=4QwD@-AN&kq z*bA#n&ivv;Gr@3x10u?S7D%E-oSk|%H2D+!1c1LM%~Fwe5$&~#Xw>=3z#=wY|97GF zy3=f?zJhn3kZ42z3_@B}?$|&sWEN2r`UBWh`m?J$P8YBJlO9mefJ|uj z_M*ROrzPR8Ntu%774tI(6woJls(YC32p>Y+hw+I_*r67HE~tt5IKHIfRJkxaUD&g? z5V`LZ0BkTgcGmP2Ni@nUeX&J-OPEFIHmTO(erkv3Tl|BmTk{}^F@k*OPdwWRy|Kf{ zpBDDy?EO{Hz6YALS;nMj^bCgIvu!HVs;e^-MPA%UNEbI0Xaunc-#5Ze8PH8ETAu5V zo&wrT-OXyRWRU~nNzy5nnyY@1XFnvvWqqDR2d@-#nwa<{ZD-I~8V+PoAI z99xfq14)mIPt{!8W6JRrK-C3`h-P*`cT*sOztRZ`6lOm%zv&KMNg?X8R`9wTv@~y% zzZ9qAA}d!8##CkBm^m~M&VDg`&h-$gk1TatC4CTW0t}r}3X>}IU2y=~F|+|xT8a78 z5=i*Tql)!KjLSeFg@ZSGet*QaU4hh9RH|OPunW1?R@`s3&@DAu%eQb6Z~{r`Xo4rz z(o4CN>c40nJ^*fDKS%*z>a!z7B~W63E9AY5#o)4{`<^3ROW506N*v&<9?WqeWe@Wk zOSPxN`j&QI5uGs$AXB25Bp-*y#YH)#wz^FGvxIb;KbM2MOF_wvDGW;Zv z&*iuaG|e%&aQ*r=5r3}GbR|DkQ#E#D@2q39J8-4UHKV=+mE;(IGPr;dDtqNodg)Ac zy5MP#)VAC)m->o^_w*+GRjx|kGE$-5-BByU`DDGw2c#954p%!nJG@Jlfke{Y5{IOe zWCHEzI)G*XwO!M!YvP_IhoWw2HQ5?uj;fMtwXX{9)@!P|8lEAT0|z0hhcBPg#nYUQ z9LprsBNM~V`cx4L+&}1){L1R024W(u`B^BVKs>73fm)ZD~+14QG&a2Mgn4Z%t3KGt}mfkx%hU9*Pw{X~s!4V?J{RY%A12CTzmxC4v z_=Qbgd>EH!SG<6q-yZP=S1lA;_58yFX~kFo$Js{0anzWL7mmadwVN_~b9r2bs_#`E zcNCpEo=zy($g-Ib10mvAfhdCkA2at;?Mk32%wXdz(?MAG$!yq^O6 zk)fA^{i4dTGK}@FTqfx8;WJfVu|PYk40{5ySrZ2(&GozGHDiLsgdo|>wbA|tJ88il zAka&dk?OYgNVtrmtQZwqjBnwbe9%V5|o?uum_dqAO|9N z-=)(k%@q{J8GsNY*v5K#yekW1BGPkdz{ zSg!A=DZ`@J%&*V1r{h#~d2O(A;_z|^Z;%AllC(lt^7$#{byU>E(sr}K4rcg!ocH1Y zweLVf`m-w~>87Dh5H`5^{r+v=a?Xm^I`PZcSYx8&V!9R4ANn0tX@nU0Z6=0zvgHQL zWV&RS*aIEW?k4rX0_YX3u>wS;Riz*Cy7SG%G*%Fi>s7GnT-1kWCJuy~&SuUC3YSQg zn%=~p?p)M4IX^q}+4b7J_Pg0kJiDGXuwFNuKQhCuX;LDx$alr*T67Lf^9V$h&6~z- zixRdOvb0Fs_j#sf0Q&E2tX%8IqzU*4=wIa66!g7WEZIOKIaW*6SK=FF)ndc42rYXj zs)KqoQ);SMYlP!IYZV`esyX4;*i6icu@y4;W2WSIDeOKX^cIX5I`ew$9FwRXZN3-r 
z!%-@kz2G>|5)+KJeW^rrWu>J!a-Z}(O+fR!!IA6o%vx9xqLGi{U%-!@BxXFi;;i#YshWE3ik z?2z}}*s$^}hp`&3dGL2`s`Be9kQ1{E?B)VDzuxZFA}(HY8wO5eN_5X030a7b>$sCy#qRT*=6ytWW#C zkqBpTUAf3qEhuaYf=3 ze28GIm#<^mHty`N$R()rxyMAY+FzLn*HIC-<3qptix|4Rd)PlNf>(e2f*cD1?WHvy z?&AlYyZR;MdLHMv6EnFWFhss7rnNQr9=}3^_Tr2==8HTl&bt7sD((+Ygb=f*`mNlq z&&!05QrTe|+|FhptxZRlX8_~87W?hxyYFq6nwN0w{b0oFu;b100|1;TR4QOiIPSPz ze_gT|o3?}nAtc_*ebXu^iglxXYL8fH!>0iq#F72g(d}6f!yiy*1hlxe#{=?DO=CMZ zP=T{?>?o|$lB|2)lq4h+o4Vowi6hpwXi-|efG3Pr23^0Do71YNyY)$V?o34_OI-22 z`WME8Zlw!!L9~ycv3+)m$Iu4TUMOJ&-3V?kK~?XBu-RKgB%B`_-#YLCbMc(rs>b9!4!yI2mf3`~~Ur><4GX-+X$x_OW66(C~9or7X{=sFcUDrJg!{ zjIRRIyd8C_r8$i1(;XjSx_iSm^S8N$BhAplaX(oV_mM4g*%__D;TJbR~(=-@8m4%yg$SNC!nHk0^s*a}S@*|^z} zS%hxRin{#PH7QL!Ef*?`r*|>PW#l{&qA`-qbon}UX6v0AqC>d#>`s;Uz)$aY_~T{J zt)(aI9E@7}#=R8czTYQMd8yVh@9W)a_`Xhj%7&VM%q<7UGMwRl-J*2IBHtPf1G(eQ zZi|R;C~wdd+xjWFUn*0hfxcW!>08_qIao@yFKw)XRM102O`!S5LnNyYX1{rXx&1mD zs0cy*>GXN59mtVA?dv>8?NwY%a!uuX9MSBW%?7QxD_Qzp9A^lBZ1@$*<$4HzGF%Qk z{!lfRMaXe&Nja{G(8?4XyhQpk*?BgDoC{6n+11eFm^vZa?UiNl{x%?~`}lH#vGUWb z3ET~3pnR_H$Q-$XuDzM}T;-H6&ym0I$Q1 zcOiS6)c$51bf@cRQHfNx(>cY}Ik{Y4yV7%hYp!(QVZ5^V~X% zn4+p3QM;JrY#S&8p)w-z<1cgxLi1fp4$GmkyhwD9!@DuZ1#~*bUo3w}r49vdQsszJXqC>`x5xzCdj1h!|~{+)->t zgt+j8FPcqI%|jS)m@47DymY>usf`_a4Xi7O&)bq0@*K-p5N_fsR$r1g8(ZC1od=zb zRX^zQek9Lb zLruX&!EGf*Y#tB)j6afN9`BQ}n@r?<{VrHnY)WPbtZ@X;XN=QrE}Iin9HWn#;G~u14(&~EV~A+1qs~rGk@8ZN_;_g? 
zu=lp1Y!E`V(Q4cCBS#o+ml-P6L?EIkoVgRSL)JU-7_c`w=(K7~W<>WcFV*)=biP4j6`gp_r`C#Q3@ zm%CXCS!_!wCM8d_=6KUy^YWoQsrO3yV2{X*DSl$=!VgiylitMfweXqz*z2Oo53Mh) zFQ>r%Eelb@0h<)^K`oSZuZ30vv0!TE%1%zfPbutjgFA`a7deoJF3(h4WWw}peRVBB z@{Q*~k?)jY)`}%7Yzw**Ub`+dxgg#_ftT!hzuqfZBGu{FHs+9DGcl{;46I3gq)7OD#=VFZx|QUz1yU<$f*w&?wA;HeNxyjV*P0D zTyFIA-YnNdc0IgM(Kr-{!VU_4{z0=xP>ta+Q6JeDEosM(w)7D6UR2{{CF>H}R|un{ zhN)$zr7b*J@-Y{dA(Z)oI(RZWJGrGG`z4k~W^wRi0QFXd^65kQ&kX%su)g+bX|F5V zA+BBB*|i@c&xi(-6z)Ozku5$y_Jp>52&zcliBmdgDBpkKUeoFv=>3@G!(%k>tNjWt zg3;fOAJyilJt4OA_%qLIkY3gZ|$Wm=rr?_bL;Ul)c98=bN#e9i-Qw0MMnT27*%^Yxy?v`KsOz|K5gxwaa* zyn*Pv*@k8>TQNNA=`?GB!yHo)Tp?MYiSEL)liWWZpQ#!S>-}Os${VH=&m6&Lw}I71 zd0JZ}&p$L`iN0k{j^;<2Jtz>$Sh=U$>1Os1y^nUpiJUoX zL(B5-fQ5V_>KyV4Ewg}|{XjolUp|}godfxL)RfOtTSlAEQRk@?xpXQByTkj7gi#%) zDyoX|`W5O{7bbyoDN7%nQ99A|Hi(5&QB=!UFC0 z_%U!hamI+lFh(!amyymo7L2E^OZQ9buRzUVq?=z~UpQk7)2nc9lNeXuBJUPnoV>DE zazjjJcV~ZolunG7^G=i^fcUzdY`+88u9s=R(VKHy-1#;cskfUw?je0DGQfu??7jLW zI%|O+O&yq}Yz&=3?eJNTEV{DeiO`Z?79y0DG}rEI=Vk3iHK6BtDDz%ol|t2%*P(IA zdc1A{f&@#UaRddlT-dPss?YOwv%cnKYg`ct{N-(s=4{2_8`3c0;M$)?Igbju2(`iZ z@Q#79M0Bhc6_K=y`I5xCu{^Q|dlA%?!BRHUGdNa!fJX^a|8%veed$0TPE$~OboYLT z)T7cbgiuos`?aNh^8s2yYKk6Qgfj(y^}Mm2lkH9+Gzr**p@;631x0Y1zEmuWD-x-F z=*ZU2J&WgNz{Io?ON+=+k58&y2AR$@U03Z52JZ^IK5gzkWc#^+mie{$$%v|zY*WM4 zQOMrS!q%<3u`-yV-TFVP) zBSamBjpjPU>*oDhTznQnaqdCl2$T`IuP5^GkKA}&0*>#>tC_pDT`a_-H@HkT+Vc?k zfxrH!G`F*DFrS4u93IeyA#Ifv^)ZU1$?xe1Q4`d`34>E>t*L@+&k33B+xxry*Hu*& z??nI3xAm&yLQjn4*;62M!m=}W+|c0n)LPi|VsPa(+yH%8lE#LvKEoxQm)I#z2DPFk zEs;7*M353pnLSz-m%8qvctI(3@Mo2wPcNUmA_P`VDKxQovL|8|S*Q zHH=a4_aW#w?;>Z05OGLKeFqviL+`aUz0kWnIug=KP17<)eVz>;`5^Ib6AGk^lG?ObVNSFKn$TzJwHV}y&GcWzT}P=k(|UC zeGt6vHrP<>;5RnCxOy-y^pmd5u%;U>RzdLO~> z6is^Y{aD`U z*;d+ArYJ_NInSpkrHer<$PdbF7bN6XgZT^>R=37;{VKk3UV5f5TsVg2JI;#Rwp z15c(C1k!-aE_7sP7n|{}WK&k)iz?Qbz!CX^6MUL5ER2eHINWr3|0niDLCr_|Qyw=K3xkUPZ;UBBG%WtA||P|^22 z#zC`~@W}AX;VY?y940lq-im!86bLuIH0sH$@9OtG>g)Nym(S0|c^6HZ8)x*2g%C^!rBZp>%iHYlPdb1+8ObBUEAU(L!ttU6fg<;Kot+)x*ly?`( 
z3j5k|h2sf-vHw`GCc~T{TQ?Jel`eC@4L6C^q+9lybE<9#T3b z;?3es>#LjUstBS&QDX*3?k8UbimSsPMfq(4mh1-N^HJ2oL_6hsSMoJsqaYn52jeR= zr0_jvCk{%BqT?Wrx5gM7S-hGr@|&XCnetR^G>gB2ccF)a=$kZe-U_mc8 zZ&7k*qO3NU7(}clu(0@E%!Y#%!g-?x3yCUg9S64}%5ZVdooV04m(xR(L=8m_)b{)k zKhE^M!&AI#Ib#T{+5yqWSvUXo>A9JPwvVrapMxS!Q`zT<0Rf!R1Aej*%C*#y%@le6 zTd)r3AV;VL&$8XmtFOfm3vYjHencWA>C0E>;|jci{t>|s;?l2ZL}|ETl}05GFPku-*b5zvevb?4kYC*g9 zn5s@ZyD4IS^$pp4tgo*C8dwZx*x7#jwn&Y)MT)1_%v!^!!OyCf3bvb+_pDA&1KLlH zgMv?;!BrS|5Hw$_vJCtemRZzW-7jdi5yyHC7u|jr_^hScH;e^R`yji}tl%)3$TKza z9IS&RdcO8EzCsCHOi>4cra-wBzKx0>HGPe}jRR>3xoeAR8#*eUk5C{AMACjk*Gu2R znw9ix9{%LNJ}u89Y9HACaijALM@q~#jP6$@#rFN=sN-NW-Sg%XL><`AD`s$ zja&7gP%UOA|FY{TwzY=-&oXN%&%=X+sLEiZTZ(8bN1kv6d~KX;|GLwS!)DI*v|N$n zgN5y!Pgza{A!*9lHsjP0X_|syF{1O}AD%W{brNDOHlV)zJ$Uv~g6&?x`=RE+op6hm z{uF*Eu?Z->!ste*RkBN~g|5MEx(qt%`>yu_kNRwbs!#4Y#AyIOBR_FyM+`mF=vqTx2V(8P6oB(!_LYn%Gxt$VMy%|;{Cpr#O0KAJ+8)t@i^b5kQ z$dLD3#@RD=AUuOpv&FBhQgDiRG!4k(D$?q@= zvx&2A|E;|YjOeZ6P_?aZ1^4wPnpACpgbE@u69z>K$Iz#cQmYp%l-RFGV8^y_7-Z$p z*5*t6^7Qv{Vtjq^yz%bZ*9;}f9qy}g+X)V^lTjd0G#**-g;WN2)MQ{{ouMX&Ec75qgFAqg$zj5Eca4(>T8Caem2w+(@={ z=(m1|#aA)U9|vkBXdkb?84y^OxX+BGPcGV`3r^l?irrV+P~Y<4sE8sLQkK zepz;|-6#E&KwNTOq=?@ANTLH|9h4GUG0B2$^Sl6{~pQzJ|q8Hl7H>? 
z-!t;>b@T7#{(t*ZDF6B-e~-rBW%<_w{Ohm&$36ddll;3${sw~oMmK*A#oy@c-&pcL mhT`8%0@4-#AAfuA2IbkU7S~TVG_tqAKN$(d7lmTRZ~qU)h7JY* literal 0 HcmV?d00001 diff --git a/server/jupyter_server/static/favicons/favicon-busy-3.ico b/server/jupyter_server/static/favicons/favicon-busy-3.ico new file mode 100644 index 0000000000000000000000000000000000000000..00ac191e771f9df4dc853aae6bcb85f89f94f794 GIT binary patch literal 61209 zcmeFa^^VWGy;+miU_ELG*T<5h!PSa z&7w%h(hcA9x)wjreZT*J=coGzU$Sz=oO8|`Gjq(GYnZOKDg_x684(c?g}R#ZH6kLU zKlF$6DEOCG<;$nQU&LNZ!d5S;-(lM)e!+7lt6OTZr{@P~-# zNER{C5%4!L^lw>+zrRKLXC3+feugeAE_!G{M1&?%SH7U{Njx`)bg1QnB z`IAfaQ7{oP0!d2Ejt>0$1G_AycZ!pS`hWjD5POz5n2iPV_qx9~Q0E6W>bwl2XZYV6 z&||7>|9CFpdgU`&cv|~g!I%Hu{`bYYo1Mr0B?olD6Egl?*Z6*7cu}{I8Gv=aT;+$zS0854Zm(O#T;c z{tNE^2F(9*lK)M~{>uRWOI-i@$bb3vfBE)*`8I@*|D{s@|E5ykS?x_5ISIDM$&ftd zKpt^SuJk~*@z?X{V(uL;?{$j+TZgIXFZ&KIs9`;SRvD(o`bVmb^_Nr|GajkN>fYaJp*_|VzZ0YUmQRAP4t5^m;LL;)VFUQ1E*gU=>@B`^Q^)9C9-nFA zUgfQ;9w(>nypfG-uc#0^c}A{K|M>Pm$9hPSQvF6>hl^7`8zpP|YgL9rt)M?3VQ!}W z6P=KKswRw|w-~EDFL-!PdmS0OY`YQarXqJqQ&Us#l42~qAM5^f20mp`fqsANm`-S> zl3!l_{N1~MAC?0+k*6B{F$C5o?+rFQ_{KZiE%fG}m7_7kjn{uhwEZ4ejbWu2-Q23b zeL1Vad%UICx{~v|yfljj0lRx{pvQ9CvD!L>kF6rHSDI)G>n-_f@BdJf4tU|S&f@m| zkFKQGcO~D0e0)pM!f^|KM;C3A{2PS&3H=oaAz*7U2Y<{`M>GvPjYSEDlpd%wDqAq? 
zQ7V>LTWo%8lDlY6r)z)Cp8PP6V5b82d3|{(3DZ*;W#sz-<`HRj`jU&q+%jW(aPy{8##RXGP8EB%kH07L>^Ru;>~s4**P&-t%S%|@ z44>skoju3&6!!oD)d7ZX{!022 z!YzXMSKm|`kC~TqN3NFfIJmyXcAT1(HQ4p)U&iRo-;q1izU2te{tB-8JP4t%6F)qIEE=_7u2F> z|MVEoB-4j$PO4Q)%kPSd8@P5m0uD-D@x@*Nuh&YI2=8^hL(Ju`?PNtoc$g&v=33_@ zSCQhSPFsdd4TFZc-4FP8KcAgA`rcRg&Bt$whbDF(QGZi^(%OY^7w!h8y&@66ZzzIt zNL)Ral8dEJ8RILG6VzHwIc=YjK*>i?rZ%p#}N7e8B@7lkdk`YCfj zP4>`^T}{zrlT|^Phmu}K9B8hu77AV&H+}NM^ue{=l!(;ZgxmnrkEHXriGP zTzS&*;L`@t8D%5k17r!|N^EVsZ6XTm!xsb!%gDLy6siA#~>GqY3U?fygoLaIxXiQSljc@VRCCMh%x55z_dI;y3-g z1K(d02m|-^m#!!Ppw?dpM_r}!(MjL1Oy>Qketxn6i|C15f8sd!Wj3{yp~;vbxN z?V{?iiF=b66sEW${Pbo?7-1WdIkWKMBu$!!Msf-hWxEz)Xl|#SBEKFAjI*xDh`EF* z_Ais}szGdVN?YCP7<_$fBHDKSWRZo>Qe)Xe5u>ss;ThCH4Kuxv!y&Y%a%JJWnLP}7 zV2iA#uR5RBy7hLfeW*gGDdR-dLf4q?cE{iC=I6`8>uEO=94$5(+x4W~b%jZ1>5Rs} zK~n@n&^LyZxoZBcE^YYT-;aa9WRq9J3meCmd~84~ue-?bjI}@rTEKA8%gerGvf`G6 zksxddM3@kA{#}V@LnH*lP$Yqr{#L0}G&8FE^$im3S(lD*C0?Hl`Y%TbOREB~@{0w% zNw5oUU8dhFi$LK5x)$UKZ0=Ubu8XSbk zkc#5p?TPFPgS`lSmc-dehK7N{ovw7rSm8?6o4WJ0lk^cyu9Zce-IlckoJNsv8pOIT z2W!L9c@LO?z~bZUqor_j$x_v``Fcc5+aR5u>8(RlpWp>HASIf2^xmjIk<(0Z91g3u}ep{{S$j|pbr`R(-yG8F3Vjjq#*yLUci z23-@;houk`04h-*BOxHqCCj#78Ll6cJ(xa(-@ArhvW;sbCkUGWguOP+KtL$cv5@N- z<4^A>>F%nZU-<>aG)w{so&UA=`abA=>@t|HT1D2Sbkl)c64NvLyP1X}&nnta!;eC< zn*+6ax(O0sz%>q=CUTmxp#OHgusyly{e4Smm$%|h$WFpKmBBJ-)XT&O%kWs&&vg)G zJ$eCWw6r)9MazA-SR{0nS_w7bDs`M;B300b1<8}hXjJXgGmL8sBHnkFlCU)oz}x%8 zg%n|7@NcmWqGtzGZIeatiy00*9quO<3HIj;40q{|zA}6hjvHgml|HuosgAFplG6#Z zX1gAIKEkI2FZe{6sxh#@SP(BYtz*SYIk{S+j(FlkhDj* zm;>os#>jbvNgejt*R=c!Ewk(8Dm(hQHj6x$(#O`4C?6f>9Go<|n_>JZ7YXj$Oal3< z>$YX{Ql&plo)HejmWzhdiWd$eG<7hrQ964%B)o#nV6=yiCjZ{DGlv)>rh8kikwQ>) z;$Gh0Ak7S#lH+0msmibz6I{C6IessS{1kIt9e*LD3RubOa&}0B;S~j<$4Go%Fe(Nk z2y!)0Pt7_~M99k}9*~y5%2olM!073CNHdlO_{`l(fq(c~KA&RNXJ=yj|Yu zPKdJK<-xv8^w*q&7nzrjw!b)8ubu68Cp>ktWh7RC#ktlnQ=|4YDIA2!L_nm$Bk#a3 zqUJ|wt(FH%6^ae0y5tq0btFHmu*`mT@VRDth+#~it=p~Z%t-jScB0B}-gC-%T8;pFF*(=3n|4Apd+DusC*8G~>FeF2~kS*mm!;c_0#mD8UuO()$W$9R*Q zdM%)^N=L^T(9$E+#RtE04`>k9&k7 
z2zDwGI*@Kyc1aL&^#r7(Z9`C!|EWr9y9De?(pu;@hWk=8_X;l^Mo2V>kVhYDUw}U{ z34cMek6noQaV*|opg2AHCPBSPyx)}=JVyD#({{#f7wn7EvLf|9w5 zTkP0ap0^+b8PcblB`a~Eq=t^`6!OOlxV0jxHCwb^wDsOLjdU$>!dDJ3DL@;$PM%~R8pO7*F3Ifb3 z`Ez9KAu6V;mt&dBwfX(h_ujb7bvqRNN_JNe|2kTDdkMjR2vug&U^-eiQgFUnQ|lLn zv253=!a6JIyCm=i6JdM-&W+0{8LPs21-L%b_`KM$EaE6;2sKpkK*;^%dVEc-9bw=> z8rZ#?X`>FT+gLqJ^j!CL`~BN8PU$u|B2fKk+R&Jn*|US$gx1L30PJC_Eh>?KjJFX* zHSW?vtTQh!uva3aide}N=6QEooE}O~Ar#Z&HLC+5sj~PHb^YUtQblJgb*?Ty^_B~LKe+m{NA6JOe<#n>&b7W!A^4nK z_asKjnyY8EzwxE!C3tlw0AeW92y4Lxl8Dtcbntv5qZj^EqJYA$zreiGHpZj!Fhp#D z%(_KKq$~)II$C52&Py|k=LT#+glHr+O;QMU#(3qE#fwrvUW(ZSPWP+h9VslWCE|CT z({qXZ(qQxnlJ^59P3g1pVTF&YyKSKNJ_dOsUwdtMIP_pLA=lY~rSI`q<{UY6^)ZGP z@^tMdXZY`|l&8V(Rp!B&SnE&I9fu5s&djnC?ZZEO<~E0}6=uCR(X}Haj59Rr5OkQp z;_R*oP{S{|kmVks1(4_X^T!-_F>l)0NGGH3^A-|(gZ;ZN1f7#;azX$;izy9}YkVWZ z^x^_L8KyTe(REQJ+y8~yD|YDjS>J(81Z!u0hT%mE;bB|(Il=kKJL<=mai1oS2SqTt zH`7EDVmX=vLZ1}LC^!-7L$;ruB{LLedn_!pL`{0op2t>4fCdQ=({uB8MF`ny#)-b# zyYgjLm7k4BKk^ojy{#|yzN_PSGb|KV0-XxDSS-Iw7hc(QT-TgIa+mN~2E;p%Rm|}9 z_7GA7GIoHIf!y|-kgh%oLswBtixl(L zki2pij0{H4Av}sUf{Oaw2no@bTmT@7D=)4?022E+uv0ON+c>#7V_{8@SL`P(A%Zu7 ze94}R5KBlUjKF$7;6w=it$}Fp9l^dcv9P07yMhC*`+QjHdry~$ zIrJR67lARuyuWHBpw+w-(0%{oPsZAB`=g>W<0Og4?}yBauxS6yqwJu>HT$I*L8$be}JSIU9|BPG=q41iQtW&5y5p= zEPNCJPX!WZoZufGxVJNKmKjNEh2T<{9|JIO)Hl7yLfhNi$Oa89@F+tUhMHu)yO>Zd8bU@MPMTxq_uZHshwPOY9fR!(jIlNJndp_0&y8le%z+A zB>CHTRY)fC75S8cG?8fs3akC& z7LbGhkqW|A&$AvBgpq-0Qja1ws`>r`L6FS7ruF<#vJa&P_mA}xwvaj*AcU;}(`k6= z*db&)%j|JO3o+@+M=X~E(S5a@d2GW3Ea}kzIy8uDCuAKN^i{S^%64teH{MaRi%DVb zgZhNRcD;Sxl2GdsBf3Ele|6WBaQOtE>(avo*FV)qR^-C6QPiY4h}VZ^lH35I@-9U> zVW+gnJek+HgBg(%JjJ)J>Ug%u?5)U#K7toRgrNq#asscY%Z2&IxDhldvKDbCN;*9R z`%b;GZVZ9XaM^XYg743%h>Zm76W2Z2NWo2{B;Zo?XcOQfUdZo`96ms zcOEI3@q^Jo^z138dFAvltT@8s?dc9)&D=u^ZG}`-crTh@fqKZ+d6?ZfPiG_vWe%Qs zA-3*k;9ZJM1eR~S#UKp9Mb8D?SGS!e9&(=LTaP*9nA#Tm@ncU7Aa>N#6 z2Y*HeFFeThr&&N;gwq8X$d*sE-z3ltQnHh1dLInrVV2VhP>n1->3U4+s$%xCgijx@UmooLnHqI2O$8b;6Vubog;Q}qx} 
zu-ZQ$6h1ZSCm=dApt!|+0I<9bhVEUpuJj%Tg!%!K=wdkR6aOq6v+NPTu~9i;HlUJ{ zP{pnFKd$NCRqA8*g_$x;BPdDfM68*#Afs-hVk5galpU@f6!P)i+%4Lki@72=sTN{8*<033a)PgKdIeXnl#W|uj{ zt$Zntv1(@bgKN8;?J^}1{C2bq2&eaCD+qXJOY+%guyIW=a=nv-bX+~FJO>mfcR6*g zbP-CnSUGU@P>%8==w9|1L}eihl>&>RIUi-G8R5=j@a8NSg|U2sig2YF2C zxLFbuugExDJ=UwHK<0I^>^c|frBJcAx{K$2SA7ENBO#Nq0q~#l7*}Rzh3~upu2Sfs$SrQx%2Gju@ebl&AD23rPYwadB1ySM7Ak z6NGBkG{F!gfxgP`5(@vvXdKitq>u3Ce5_0gLLbuET%5^UE^@mQjuz}rz!u6@M7<%| zlQ~^us{LNsd8%h^{>?BV*dCL@#*38)pU5D;Hx{lo6$ z%<@x157;r>Nqos(rTIL8Fd174x+LlB9t9iRqAU;5k%%K^PGj?ik)KYcCKLSPGLV@5 zY(Gr7-(mp4`4VVi6((PzJB@isQfP6O=Ck);oFuh8glR89vK8;Va2d9t6tvH}y62>9 zHnICr#98uH_p*?|pG-d(-J(CKKzb?#{@Ln$8?HgQ+3^q<#yxLGa?*2R%18j)=4vGI zJwx*06to}g>Od9T-6D37D)>?E73D#Ly-f4jQq*b}vQ`gvh9{OaIDXvXhAfK~=t7%l zDG@UCt`k-MpInW;k3P}Ha`MDhq3soma+>xtbq?8-764Y!i$YE!@2ztyEQs#=`4~Z~H5@TcI#yXWf?qn|x@KSa&ur7Bu(R*NB+=8xp z2CJuLoHZr=F|=xCxM0XQ$ZPa$so@SBHvr3cwHLv#Gj)(2wh?A%6HqZ`(ssML%UNuHuK%v4f)M7S837Om4A64-Q_NfIeNf(*^2-FFM1 z?p-pktF7cr?sYTI)G#Snh!}t$^3`-OFVV{B>LH17NE@B8>3o+;1k=9f83ie4QShZu zdGySSo*oiOUa;7=R4)L-o)At?xVfbh9*Rh=!M-AZ9;v<{U@#d* zD!j`En&t>p^F7Fa2Yc*JLgKex&ywh}U5gv3W?hg*fFz=dqh|PxK2Zr|5k#1iz|C%b zd2temjToIm5YYAZXD#be1_BCPlAV1Cl_Hg+o!3LgyvEf?U{7~ z(SSbnY0q`QU&u|ViAMn3`9SN%KCK0C^&Wd4Y1EO zv!>T4?&n=9B@}gX16mbuoVImWl~$4fXn{xCy#kLHUo(S~`8&L>ZI>@WwMkq1B-BOV zN16J4u#~p#X%O~?Np?Czkiu70cSHV6?P-w3wIfl}WXik^etDnT&dDB?CdG8c=$J9c zQh^GO0`!p}9k7N(%kn>@s3fRV_kz+>&;y8^ToQBN)UBqj1(Fv)Ui4icdw80hFf6-P zmSEbW-@SsZAD)W?<59veWc$yA&bC5o(DeYO^q#f`)_#v3ZkxbQt#7|VQ*n{ z-a>YvNCv z9H8a#KL)C~K$DXKz0$P=Yhn-;^n^9rPdo9Q4KJoiH9Ev&w326NDM<-+@KebJb&QfF z#a?QbwDk;}ZJ0|gHy>^|$P4sdI@$|DR;NXF{6TrhX;CSdp(;TPK42r0+Ut*LQe_>w z>)4Pfe^+dG-_Rpb;j;=t95qxr^6Zv)wGj*XT)Dl`>r7+j$W;Y<~Li$Vt7ZHaqDYO(`@<_zG1pC3W&Fooc4LG=*62Zwxr1~Tq<=Bkx~>|pSG{k}>Jh##avQ;Vbx-YH=dZ_bvA zLZ%{N4xDXiqEHRWCful{eagD6Uc0$!PRHsf)^hC*H^`r#j2%r=7@M8m<5SvmXy|u z9K$~sb;|4zU{1@z+PV06&c5zMaxua^8@kIt5dIXnxz8=+I;Lz04TPb{N$Z|rpJb~x z1#BO%QiYyL`&K=a-^j;0a>{>Md95=1sX~5KC;5=)mfgv@<#O3=W%5`c0|`AIb^ZHC 
zdr7dwYczdjcR9;W&c|Q-`X+o$&tjCmJ|${1|I-P7pSk(rZUvwvfVB5`5OV9xWQ+U0PUPnggwyz@#xfW3$CuYRK(ius z-0N+n({ddnWG`rTQLsZ^EkO?;*u+L6(n>oO84#tY-J}jD3Y~?b4+`ddrnrOsT;Vah z2jnTi=p2LsOTieJA9=dqtORW<`*$&bEFo8GVE$fTu-HZ&?s1fd_A8yhur2hl?Jp1N z^zaP0SXD;*ZP)1Z{u$C{D#G3elQox*%r70~KL%eOzd5^bDQud|LlH4wAbc}Epkd>bJ?utdhi3Rrb;&e1k!?WGFoZ$ld0s{qeM##uyJ4 z>HS_EL@u43eT&MTC+CTmN+%OzPS5Lc7~ZR4X+5JD zkV=1xsXAf8!@pzCXla35}-V&*m-t8bcg%f-k zLA@Mt9bnY3*r{Vo_F46=fYRP@Zlg!Vf?z{@0aBZ~&#|Wi#W@1^MZ`h=kotTf6|xku zZ0vbPieS)>7k4g=13^2cp`z5DdhvZnUgoPY#?{8zjydw#cD#Gz!&f?`PzDqQ^dON> zM+#&K7yh2%$2t7&!P~wIc7A3g8Y^@1n|&ki&9jGEls2Xo@Hv@t0+kU?NNpN?P-DE> zTNvMug6c!!U#5%*ja*N;(L4}`&21P==JR^m?Z4q?^3|y#3abtc&Y*I( z`~gD=^p zszu+XTzMF_;JUVT1B+zhnU{q&O6*G3>Ng#W+)l-A-C7xST&a?;>|9^k=+*NL8&6qo?qpoz zD<32ScnI3>p{MEUY=0AG181P#%`tsKezy$u&&HzqxrT(1Ujco0f4{(y=8wFn4<#!F z+wR5s!DQP8{lN#_GuOrjf8zC5M4iFjN*H7Lw6i~%!5!B|Cx_-z6Bb|OG}#}L{h23x zO&*o=q78bK75+1)YZ`Sjq+=f?+FiVg3@NQ{8%b=tYeVQLM^WPoYEFDDgo)L7M8wg7 zCSc(ipfP|+K~h?~?fBv&KAlu|6Z3h!by!|Vs*jk zaH04LHS|QGy{feB<11JnSrXwBsx`}Nq#N-}GS;6)t#nv-7JB{kdI3FAi?)w0H7Tub zKlBx7T6K-#0Zfx5(@EdX<1%&Y{66j@O4`a@eDM7VeoWLk(_YW{Crg|0I%~H7y|m0) z$vFzakW2O@wuiDnHQL{<2#^H|(v?i};uN3K;*p9=`1WFJkEM)+>)wcdDYj9cYw`xv z$yc%@e)gx&MsBCHHNy-_9)uZA%;~WGf}t8f{#9*1{Gc~y4F+q#;4mdyK%P;y->Zq% zyv%$ib3;yiYJ|D0yX)WY0tjvT^&uL{il7inog5EH^=Ey|aI@$K*-+_C&a#S=^Lhm) zcXk5CKd{{VS?^de{!nCN&(6bhGuMY~z`M;k1g$DL*$?m_?XLY#xCHpeEYx=`3_0KX z#;23(Ql+RuGa^j74^^;^O{EvT-`SbmsZ)-=I~EyHA@Xvk>Fo)-yaqVm{Lt5K5S{`z zzH*xHVgxW)ty-BIxIK$L*7bypD0BRu2&EC)`qkgVBrz9PPJ^KYifWl1k1j&)J;MR) zmFoH=EQ~q^&<#!GiKphpQNKP#B4U*8izTp*v3|SNqa&2!Xv*B+t{qS{RaW@f?E6#5 zXa31GNT^~h-lhFEFME?n#&e7+i8Y+HdHHGP0>y^HogN+Len&03oU2bZnAW;aIvPR* z!!2MJVLYZ)K$&(gv8g;k2cf!JN(vlEk(1YU@%xIg~R*Qq{tQ{3=|6Y2!Tx(utq=I4%$;EsJ< ze#?eA7!k(uw`e5H!CCYVkmBoSTU$We2}X|&Qv>=%mu3kP*8UPv--#U^)ziB>tbGj8 ztWlE;6?U{wv#}anJefX;PvT@W!d^k401uWCtn(aVkpV^PBk6hZilTUYkwM*WHTvFN} z<^mp9OeQHXv?Tjy(^jiizpUsdZUo#kk@o#)s+2!o!*DkvX-{rDocx%>-7}M551_Ux 
z7}Z-Og4%CrVG`p~C7scfy{7{gv!0>tOMZ9F>j((P*u=a&c%z!3C1CZg7Y(|VMP3;8e!oQyBh{&SJ=)gCI?H^cef0ii zvz8xEBvQ>^>d(tO656(=XWr7Ak03uL3z`HfuL_q_p+}JF^8jz6PfCYu(~=j{Ws&XD zwtvQnLs#4K;5pY=lf{6!bJA4h-dz5r=lZ)P)Ojknc?XqCE}<>{uMdlw-HTu*osq#D z4$aIY8Uay?1r4{ORpo@}F{1&Vwwqhsq1R9zz0yi?^bM3&m&M328YE!g8_btQ8Gz~( z4rN#gkoDR#k3cR0YQda_i%I)TuoxSdrm;%4fve6*RuxYDe*C^OX%pM(ykqCdRts*7 z*%p5@++eyZO92)WM}wb!qW_)*w2LtVG>hg>Q2S;+)>N=avQ8j@*%J71zs((g|z03!d+tDirwE9fUQmbm($ETrrAFz7x20kWVek~?K{aJM@i(RAa(WSgMEEl)7^a*BjC zV8(JY;fb*LfMeUl_EJ#MDf_oVT(00i$jzG#gAU)Q`(aq;1HSc)=oj2%)aP~mMxXt> zsM{{{YgyOutipy1`7uhiD~ve0C#Tok$k@|#Xr>%Nd#iv=$-a&rmcddI$k1=~7WS?i zB%e!Vct?^I8i#>nV6)|@f*SlkcE9<0>Z!`!$rIPLzf`0k&F^>WN;_2Des!gn(1?l$ zcB#$z4lcd5dEmZg`reOz=m|9ogG|TQ1^8B5D`FI@hE0`nZAzNy{*KcKIMhOz4$#rNwh%n3L9;$>wGoh+yD?PV9`*z|Z_3INrPd`}`i7G_*LSANTkk?6xARX}pUbre zqtP4A5!2)2`YQL z@2)EG<$3y?!7!$aQ;V2=_j^6;2duUKui_wGqdaOg6v ztu$M#1QmHv^LkBv?e=LR?^t(nk85FsCSuM&fu5*E1lkEROFhX@wWMoXwIIXw zcbe%HAZxXL<3k{zk0y5<5WBmZlZmvU>#N?-1xMt_XvKf^*!^ja+RlGsWt>}W4cPrV zE8L&UN<^TJW-B1I#R*=hX7D%G$46Piz+^oh1Wt=Fw8D5onTvA*n*y05Ku9MUC^mcMbXR867Z^6}Y<8w5eL! 
z+81Wdwb(<18<;%|P7`4(n4pFa)cx~YoRTBZ4w6HY?Q6fG$`yr@f4_p>1mRtc|JOw5 z_LAu7weDRZx*`&n8|P_4YW#*p%_qu|f#%BT@#?xJI?!-3pyCfBy^#BHCW`=!M}WVB zSYPgbyIv9G4aZdWw0M@@BQW*t6##m}0lV7|B`sG$;jWx+{sGuR);%xHmekE4@(IlZ_!)Ei>jd|8{JuN)8%@z>wVJOzX2Z(i)@$3j?y9tYd; zgwMzm7^W!#?|q2= zb>1`r$_+q{0$6~X?lizM=uB|i^ju?Q{ta|=auI7Tdo?B1enM^ZVl%nb`Fw_4$`$uPzRhbPnELHaz;5%&tHr+RuZ~M#!a2QJGU>AM8YOX5f>X$Ty zAw(N2#hxk5JQ$%htA&<81_ThWx)BMMz|ZB{1g1j<9Da42Q*LpQ^EK2|*{VAej@HO#^F5#W9J znz&3hq&8oR7;57@VxqQXOr04ANuP1U)K_CzVSKL%T%Fzp`}r~5q7&NBw^(o(r%Bht z1R%1O2@d}xdu)%&QS&rMXeZn0wH16jbA?v?ve4m3)>S31_9xK5Miwhr&6@6g=x`Ms z+EQ9@6HLOX5gWe_Yl9GO0YbRGR^F<0Aw-A#*tfjQ8=h}7F5|p3cbWjw+VtN!53T1T z7*D6YZSV^^%%7Id^W~sffY&D5XV=(z^q-O}}ja<&0OR72v>% z%HH_7v(PLsMiwAM531K5>~O3tCak`~bf@&lZM5Xyz*7t*cNPYCvIT5%@^>I}# zSz_eD^MRgkLmGX65b`yBU)jytflGug9w3kl8nVK=p^=By>(HdFCrE7E6o&4Gk?yWv zl4~sVQ;DPF7;Z=!Uf?mj{6)Whz;n7WL)ZIj5?mDg0^@UGO3j=geE>zJ9O!0%6Pxz4 zwH#vCEhk5s#%L#^Q#9gce*$4pYyn?PwW_tRX`lA^xgCbIi)}siyyTE3FXks(3PfZ1N-qvaD@@!zqy2fgv+EBy*87L?_(b+uW7sn8 z14ZPYE}(^aRsjf*SW(Rv?ANC?w2tRqRtng*-27d%_*Lgo*-K7LU1)SjjG|qS&Sj$q zsh}CWUTsM|B|+l?+?zTbl>P1k^biYe!ys*2CT=<}S|k43NKODhvQrs+Bs505e&dBS zB4D-L>}Eu6F+JZ+8T;uQP3dC;nyovD@$Pm+U`IL47z2zJ z$B1CmcDL4XIkowTtcbLg9lx+sarBX&O`yQ{*E)VHVGt|KkoPGy^%y4j2+NqE;mNi5 zP*XdIkp4&2j&4UvI<;mD0a@Bh;)*7GZD;H88vp|>qPkLFlTg|`t)cP*$11Kj$*>RA z+6b+dou+>+2r!6U1;o^orDu47nP{CbdRHt5&Osj}oHwVDzJW{69Q0n+a3B-FPmT`I z`!9dQT&yt5PU92n9b_FTxl`sv;c5-;qs~hM${n^#hp>H%y6;)k`a6)4;UO5@@h__x z5~-k90}LYd$N`Dl9v#1Rwf#&~QcMU&q=9vumji09VVF>xNp&i*+Mp!I$df^**?QCa zlbT=I>qR3+_PnzF?j=O0O8hBe^*XmZtDWtC=V8is^`kX|jcsq5X~K>2fD~$A_>?b7 zJ|d0?qf8=ctFv4J384chJNebVk$W|L251bIDsUGn{waE~`&V3+(+r}6Ix{L>RbDy* z7Yh#nfRvZIL%JB(fPevN5>(%b|2(dVo9fIStJ@Hy92XQUcW4{+-^j<;Iqf)jT(s4= z2mtu}P@=*J;n?pIs24uQYd!(m1s!pa2Ck0ET>*zzBBCQ!$E8IS;IvNgp6WOMNg} zM9&W7+bV-IxBLx;21{J(bMN}zf_onAYtHoALi3G0X!cuRXCKE4f`)$=on1>~y+d86 zqPECc~TG3NmmA}0^Zs-xxoT(_$Q7T=lhG^LGiuYs`u<){< z{RU3>t$g0);1J#V4$YGMV;mGsNuqISB`L~%qS!h$fe|x&EIKGgp^Qtktuy*4N*?b5 
z*b=3I;jA~+T0)2dVd0xl0q`+S=PrQaLSUlrs~G{o`~+pCgQM_xyF#K+Qr0W~EJmqJ zVQ$E$YGU$!<1TrvAgJS3?WS*7fMSxob`zX2%nv2er?i2NoQwlN;|o>qkxo)&-(>&y0*l+mrcjY7SnUHW1Z(k5~ll8fIBQxxH7gbbe9eNlm(7)Gl*N^RrI~_=*X!-Ois6KAq0`a)!p!{pSbZzP>MB z_{}Q=UcYW)JLT&b6za$znj+8L9y28PL&`Hw0}R`|EcO1c zK^3o$siSv(54_*e-Sl%)xc<%NKnu433Z%{xQ0k=&ES1ydN*XXb{982mvX82zd)a>O zkCzb*DQ&a2igmoQdn)|rKFp7`_+>048jnn#Uc8)pKQu|0Eg2pk0a~Ddni*nM-=IVw zk-!rSJC)1lxLxMOrx)xr2JA5E+zS^fSfQM3*;8$7W*tymT#>%u4M&s$;ptDpx4>fP zpzqT7(%=06It_HPolMciTqdpzgVPbl7w%MKc=P#i3*f_w=8l_`s5b~BU!MT&W-y4@ z`1}s#J3==adO!=x)eEu^Fgo+x4eB1at!a)`g$BIm^Ihm176>}?S%!43W8uK(fDsVW zN6eN(L1RY9eN&io%by5X#V^uLzM8`mWLjj<(Y{$xad;Bq6bWcfJ^A!i01>d>W;O;o zm#^L~#bNVF7^vXW5sGWk>!3U!!k7XO8t0^7f(&j33C=^vyI1b;)ykHat?%cuDYCG7 zJGvNOh^V-O>3TW3SUox)2#1C^Kg#>%DRZiZP>pc{x!bE7BxKJ84CEE55R)BM(MnRU z0H=8dzw7E$C%~`Wa{(_RHpIyjcn|h_J2u*CJB<;gq|bUsSx2lgs&BZz1?Oha^?xj0(b6{J7n3>gJ#R zoe`KvtOxPIwMoJVFGc`iOL;%YL1bCtsQ|Zm2D|I#mrlG1m80Cvf=c99@}UEO0gU@? z%D6vI9Tl!xzRVQ{o%N}(H+3+3@C@MUxDZDw6lLxm<`NV7{I&_(6-qn3w7j!d>S$Zf z?t6?(u484oc>MZIN(SpfYW6gqxbs1?jn$0!BKHNoml$C7o+`L!o$an`B#)%QFbmWn z!+#JxIWI04OJPk?pVGFnGTN|V1yWdU*B4N0Ub1tg@2?!pkR1G{QlZFq53%8RlBz&h zWhX&g!*7ty*U8p%clr_-&I@2gEVcs_5K@@=b}@?#BUN}3MeFBuo%yCA>~~O0oU7an zI!NqBM<5jzu*@+JjL4uy28zwkRTcCC-$f%p;(KzRUy$9HRG;hn@CcK$g`9aC$2wdB z9R+A4K|PoQ6fI`>w|U_9NAp+J=+^RjF_?JZzC~~4N1_-zEVB7g(J#OL>r(n2G^RQI zh-Ha{7YYx`pxs4#fhHJi_7DPmg94?fQ7@ZE?2EJ{bUtpS#^S8TZS%*J8FZev4xZ?q z**Aj3=ZcT3SAf5cjOOoa6M|{Y_FZx&XN^Rx>;c z4F1l2*-3R}9DV;0(!?ltUYY_i02{i@078C^YiOdV)pT*;gf0R~MPyuqq(a^-HQw4V zr)Dq)eHqbzqaHIMwoqRD+7U7%s~FAxn=Z7ltfd0t3d6 z$w9w39XcMbM*_HG^kzWA+Q**j=CpbF*rJ1#MAz}W=9-^$=a=K3vMLwL_^@N()O#5U znVF{WamrVyTkU&=L?`77I<9`cFUGht7mRb00Z~(c$-^&`8Yn>4kMJF77gs?17XBW0IiQlyi)Z-Q8PqU6|By{%QNYVLLtCbv@1f#UutuW>m zlj>fHI8)Q3hWkx5PYQZW#VWRLsrtx;CDlm%Yr>$2y9daU_!Xo*D>1?zgzQE@s&4QP zm6e$Ll`TpRw zgU$zcBV|hho-7*KmUT$bxx7L#x3Rw28`)`t+m9h&K5@|*>hdoyfet;%qaVz-2o0rw 
z419oo4mfBHzQ3Z`l*+>tfyl}A^z-?)4@RADnUmBnch`+A9{g#kmDH)ZcSiSpa5dh#mtoHg=v(6mkNqbmx$-(Rqeoc#Dn-$D{OMsPj1Ww)NfIb z^qPe&*G1#Wky1)tzDH+2_nq{Oz3PFab`3^9 zmr`{$`D({zC}ETO@O!o(>);2wOo^p?1Kdc~(N6X&W_#+)EgY}Nv`9TKkB+l*1QH=g zt;_?XGh5U2I1!}m)SRxjor3l0ye}?$K0+5`MIT6g8YkqE6qlc0hImC-@ju|vg@T5j#&mt;?3t~Chvi+F5EpOya(cVFr?d36| zh~>z4Hv(AVi4}H3gV4_qQm@lk=}C6?BH|;4p|_s}3K6%t2&ViIq}(F9W(j;hg{{hT ze%+g3zWcULCu6iNu79Ge%8hI9WxnBlRl|>JV%?QUayoCfKkL?!+bz=aB%FbP?-1_0 zCkIy{-SpELRle2OWmSAw+JE$c=+2naT9-JMpRA^mc1?~0-?#3qvgdl1o7;81-}+o( zGQBYE#oM-!PSlD0?{lM?Z<4YHqDv682-a#F{~INtZqJAjnddjYYVH^xf1E%)%!$9D znYlZrPuC1tW)fCIw`&xXXEma}ark2dClbFVoO9VdR?G*iD-eARtcLs3=L^u! z_Ym=#b@#TOb&XR_@N*$Yie?j)*zCkP4Ia8D1+uO+ky$L<>2_EB-O~1CP4G>(+3`tL zB4T7DW7hBHO-=Bh98iCGiQRsB{YAr69Cb0xW~ak_gS0QI*mZ=GPduu{X;e+}4epce zQ7fCA4x929vf!10HAvtrhCVbD2+}?ybilj1@wYUmf5yJHHo>)t%Kou9CRbuc@l}=G z9+Ms_dpX9Ki-Wod?W`T`X#6QJ)3dIR3YeIh3UC*z9_gdd&lfEpZQpv+fP*BcvS^#X zbP+p^=vcIW;7!`2)P%4|I}y9}d-s&c(_g?Mh~2>J&*m0{(Gep{_{DO0Eku&dpM7G| zEgL4;UsJD{+kWwF;m1pV(#SLTqz#jtr2ZDw`?o@MF>P;7t`BDnn_SOKaL#{)JqmXesaKr8gMPrHnmG&hyg?Xnmj;{>%bY=h!-Y{vctF={n6 z%g&bTdLe-}bfTQq?_ev2keB8&Lg{`xS(*30~#foiD# zN7Z)+Qu&8(=XBIDs)OvZx9pWNj$H~NTPQQKGZW{OWRHxHA|fk$mT|sOHVKh=WR)n{ z3cdH^`~AJ|@BN?W`8=O--`92B*Zn#DJOnnGwm7Tls8C!siq&jdSYL7zS$s|UtQp6v zT)r@u=tqnrb%jDZ_vJrspcL+YNVB)}v z5ASXzUZ;bBh%=BWS~mLLm^Rh@r(BeAd+vvW{P?Gzs{#~#R|&(%CI6``a8>?PCs$nj zXgs6**@GDx(UC#*w|#F~{;5FT`~OeCTNGn2rhVLVKQwOUkT?2SK&3v*n{EADx=~M< zzfE!%8#_95#VIbnF}@o4&|1j$;Vd<>hiOSONnJ1+?1geX+tZSV1&OftKICvb3&`nr zmfFG8Y1;hk7aeX@OZk)S&cjH?yVAaST|dm2|iHaR#NKQrf!55+(0E3AxbV@b@K ziBCk$kq3AUe4$i07{YrFTw5R0%2_ytSPa95#4EZ&zmS{&>EdYZr;EzX1hvclGrrb%BQ0(eH&mybI4Pr)3EV@(gARv5je_JOsP290OT_K`|CNjJlL?%}ZvnphMLO z#?WeFoK+E>!)E#B^{6{YQgt9dOTmZ)LC0NaPe0`qSsJQ+kXm$%ylgLK7pN%j|l;K0%+eg?74~RxS~zI~W`Vf7HDik*LAz`EZq3|Ff7@ zo;(r>8&93`!!Npk6Qu(d=H=ZMd=%HGFYk2#QHPl%7U5smq0Z3w7y!T03-6*UQ$ z93Qt2dQCC)y`4rjycxVab!bu@UM_f2!0@z^Tz*@_1&k@h?i{XY>HH_x>a-wqG*)13 
z&1WDytoKTMK)rlL0DVrdWUGz@Od2iOKJA+qoD_%2(2`c*0l5MG$ODBPk~C*ZsZHS7|qPk-l3lpY;59KcQsNC7CJ*1a;NuLY&tg{ z#{d-HiZ}4487-oNxYmkyeqrU9Cj*RO(DmgiCes=)BSQWIz52!#zbmZZ+=LS2Hq?H zZ>F<|IR|exCjW8uQpX~%{Y8vA-Pa}7B=(=T!fPTF(9v|M%FkliXIK>PlZl;gykM*& z+HDhY@&T;eSx6V`jy0^+uz$`_TsB@avXp%(eJR(lm<|F~4g{HtLY=XsE1^S(G4`E$mmSccwU) z`Pk~MF0m=M$b;`cdDc(pw6dmb5W~GsiQ(2>4i_1;^De;u6PaQ2oy~cg4;jzS%tlwI z$&z?w^-S%(#;6c=k*P^3al4<|2ItX|JS3wj@yB|3GyH1fCv(uBN{eYfT_YJFAuk(n zAl<|ROJp0V+a$6qC$>0cV(BKMF+r2#!eYxVY#k(eR2WsUL0ka+;v_pQikUXm{70hj zLdCXfSe+mMjZ=I?YrBUsOhXzFc&W~0kRE%#l<>5hpL=Tf<*>!~+{}Wz83Q(h(#xpF zz4J%?M-v1&R7rn?3tPL&GBJ#jNZ30UTz3S?g8JxnCwPT1>geK|xxSyZ=B|9~WHV?; zQ>#X7F)}3+%h;7ryh+m?-?Y<(s;7~EwV?R$%eH7fOA^CjBS)Zr$=FFbUr=UIIZ~*! z)o9G$y<}U&_gvD9q0UJYx69Ua!=auzT4cW_H`#!6o%4vT*ddhGAwoWC*C@7Al@CII zhbo@;Mm_BfiiW#&m-crqY877_3B!A$hDB{!A9%*N<=~4V6})D$DOrcY3prrrJ_F|C z|Ko}~1`Cphg2VA>G~$8mtJ6aj_la?X$md|9F<<5Td)chTAN?bYuHf)k`$9=EyC%g? zwSF?<5K5MwMAx5pOsyK0;16VO$y&Wli+`GA{FiOsES(#?IYnt8h5VPT2)9TDQ8(c_ zDtBL@Uc+ZU{vc!MLf~vkhJj$zY3i=@=u7xzD0ebcV9#=T{7nfIfQMeN2`bkTO=re* zPV=HZ)77c?s2ZP~wTIM-5Sft+aEMrCju^o)xP$pB$O*urLd!n?fe88j%)c?L>$Sch zx`MaN%`Dg7EA6VB#eNd2P?pK#fnlBlUV{sbWiLS?Tj(gHf=ls5{MBPVs{v zW?OY)_%P}pRA5zK?Se#scNs-#rI^Pn^U=E*uVpVH2k9FUMT5z)$|y`IX13tT&_V9T zP`az4gSXtGg2qQ!AGNWfFEiHJ2IOw99~Gb=W2KnA<29$dpEs7TkxvN+;1V3BTOEd? 
z@I?ls$ogE#>TyRHc)=3Y`f=(GZUA73dns8*d}&inza$GUNRGDYx~g8&M3-C*8RmZ+ zs3qx-gcK$uBoShC+aVcTBNi#q{50)#%KUn6)|p_v?DnX|bdu{~u)?xk^gkv6s9!QP0PT)yb-Kzw&J4GuD+Z0K+6)=5&J) zwAM{)JHzkch_Ng#>1R%kp7+so(Z2-n>EWT%%HOWcWk8BJO9=9T+Z-f+V9u3HOPWq+ zHhva}xptWOOhPucteN=N(<)|9_~Nk`#`d0$1f8P77{5bsu)6_`wvE^DdjXE1t#i9x zKUlV^6^{|SPoDnf=*R|PO+{Fw3ARUavaUc_QDXh#~- z!LcCjk0f&BY+m)II|F-@&eXz!bB_F()01{J+(CSsbW=qkp#9Tj!AE9BJ1X)G>co9i z(U8v@JKCJVr743!EE(EB1lOils3Fe|(O#P)K2umu#GxuKhef0%`R;A4#`S#rao9oP zTpnw*N^)A#{&PHJntNvps`G_Wm>1fxC9YodVgvQz1yQ7M;ALy(Li=mOZ|hV@aBKcN zma%_NfQ;-YMTXH+F=2j2`TX}+=?OH1cZrm9;{}$@+*}O+1?doC^gKBN5>_^4Qo->| z)~V5O6oqpvkAp5 zQ!`%6juK~sdn*5%R@9l0V&{y$Y^~6Ion&CMuybes@hBvCT&X6}LmDMY2Y9V>XziQ0 zpIIdfYwvnzQ2njrIm6M7hcKteTF~1=wx;*#;An+kruLsL4A}LbHisentUmWW=9UWa z3c+Rmu{K9$$I3C+V3V-B!;wPMZ1KWRQnHYc0E-}wodmVu52SeO^&o{p(ten(9z+ zOaeMS+@|_=LIrRR0fjtc?k}8;ishW&&-lCrZXS_w{#%6jnw#GJw7r$X1Aw_@X(UoF4{!?4Ia5dcIpvZN$9kt9vYt5MyovZIe{^Uqtb^D} zyG|7I#!Q6+FJJ@=!bmXpAHxB(BFM|^c>S|}W9!HTJ6qbXr{D^4aY|yk$GgXH5ya=L~YV1mKZ@6=jll& z4y4ioHjiJeSU;yt!=aqJ(aq=^D24hWmC?W>wm(`l(U(u!xUth&JWZ{}i-~D{DzzIt zVR9J$4U!4FUp0UKL97oLhCU3MiXSe&A?1tSYRXWQq8qJw7w;Q@Zg17dx7C=Zec=ul}L*IskbSRJ7P z7MY;D?0&5Giwp`QKnO9+d|Fw^B-rr|Pb|Z;k=1&j{_=+@ZZrP>wBjULHPgfK>U-xu z={vQbgQJ^r?z#iJ=uxyPxxykVCpWK}j{ieEgU#THVKSI~6_|awDlQ3-Bm@av3uUxz zg49;(m?fog-oM**id>o02owmkO3Sc%FQl1geYBNbuxxjo%S=9vUX|Ov9gqw^q@I}< zpR!9Jj~hZB?{DgJ%1qVFb8pa#*#umV%g0Y8wm3QpQ2fMVXhy0c>w{Hs{t&8TLE~-n z)X{tZih}k*_AY330TS0jUK2FRd0y$Kv?XDMCj21g4rx4VKQ;@a^dykeQO*_(M8R3o}+ko>b<%ib$+-!31bN*Y8q3; zh;sr7%z&g3=RF#k&V|QO6Ce;>(vk2hw#70xek*0K>X^)#&}79b06BBJW-iFP-+-jk zLj(?jRMDPSM;amPloc1k$$;6k$-8Rkz(=TNeR#qRB5~P>cFi}`eKoaJ5(LiF;M3TU z7dRjNXX9DtdLUR~Gm!w9ZE}Jap(DD;EZh{)yi0@QP_=P&eUQWm5egL+J%efvsnt+& zUebWUzC|}RH)vC?7jMIP4ZmdLT(4J>YQZwxv;^OX8M%mW-t=`owg`d%1r>U5_au_L z_1=1fo%(QIoacnQyEu>eTXl|L!T?CE$>wjL!hc0jd_8U`))GvsX6p{D>NSLKGk25_ z49S+J&dlexxbizw^u*nzs5|MiU7wWvj$XWt;o&Uvq*t~|w3)%8?Xt}k*GDVANzWnl z#u3>C#hckR@O?a2^7wcf^%dteR5{XdNc3t->{sf))h=aWV{Fuv{-UN834}J(sPBU> 
zz6PI2+>0i}tHJlTKp3)P$^elR^v&-nM%oE+api*%ufJbb4gpc@+|hCRll>g=C}6fG zmvoOSn;YUf1gYusg;v`~&G|Its&*(?$+Rd}qB;_KG;<+~m1?d{kW*LBYln+|_^8OC z9FQaNThoh1+%(GqXvWsvJ*x9FRl(=<1hOD_3^@_o0rKI_4_zVG!jfN=?Y)|C+#db3gbkk*k==< z!p4Ne@?{{r<#*}yRUj@f)_H3rZ<}h%WIWj7CubY85}Vjl^M`<$N_5ct6B@C?Kq#m9 z8gOJoz3(8J|A1*6X?&0mNG=oudA!r!TU!kZIm9yJ^AxL{jmi1{iRPlu=;J@j1cM+3 zN@ifwKP)nzr4XJ`RtdYo*X{Ho$b~eh?A4&12QKlPkNk|8%$SauW2hDs``}i4 z=4S2j<~fj1&kC+A2iQyIc0W7{F!CZoC%nuHftk8RbvkbB1?+8nDW$>?vNY>C}q-1K9zX>J=AV7<2{6s|%+ zCyac4aJGwlAnM`Nsu+qB3;=!|Q_9XKkdZ;5>57G@%)4lHk~xlJ0`)kLOCYSIkoRT^Nn*YOZL5 z)bzPK|9~f!J%@qI-a1(b1LsC{>4_!Hy=065BmjZTMCn#yqt=`IUj9Ih%@NFM@O93G z4YG0?gQ-;Y1qMB#_S@x24sAyr zIO5WhuFs3E5YBp>yjd`p3js3z^Z_q8C9{e|aLkAXB&j!5=ndEe7{W2(dRqp9>U&SZ zB!M%)|BfOHyo4|7apL}`rAK9!fPQg#_QtW!ne$A`bUk4TmE;W3TV2D0ULWD$l|97oi>k>LD~{eX=deNIX`VG%YaIR< z`F|4oSee7sj^n7lrXikVPY?U%Q~KbD#wcaemYMPzXRMHI12G!dK$4bNO!M(WIO1{S z7(b`a2b7dS`(Z7i1ZwJnC#*s#t8SlY*RyiR^vs?{VcHi3A5<*e=1YVyOmqXYPCo64 z=DMX%X9kagp0mv)S<*qlM0SVDQt9Q@yri9_YZsj#%aBcc8~7xZTD>aBMD~Vqyz%w> z5~Ca|43mh*>)*QGLk*HS$1rTL(V*6+w>gGqfs=TVtn|0~@21yMnn+N;rq#S_i3IpV zge!#7A_}_=^$KJPUJ_4t3BP;!P#OsWpcI1A%X#Qa9sn{u1&62mrVEz_@e^uiQFr9E z{jPtx54U}}*F*C_4Y6t6gAoW1&Xf-brBW+jec*=~(X*MTpcEWbdx9_n90PiLy#sYl z12)DBq7PM9Zr%u8J>e8OiKsTVh)8C241h~M#Knp;2V*GQv1PAt0R*k4+Hig48dnaX zFnkxHJq8@6V?*g;{}tPX5v?jttjZVaN$dhcXq?9_0boC_^v}=WksmL;>A$EdaScf=}&P2kLc8&_soH zB1QOVvNj6z>e}q5+}&m$%NG6XXiF$p#8I&%V6h$*INZd}>x&a)P?yxP*_syJX=_{V zmrZyG)G09A^jvogT%R!c95q~|HC9BqcFO!tVGIBbM5ZVZ(Q2geh#>YL6upm;US4kUO^x$2ujwgI=a)P(ff>Jj%slX=IbxFjA38Q$ck34QghT0Jx# zg@>B!GDf`yS;-z!U1gl>@EeMhMqc0GTXxNt4Gi7VtgGD&+r6Ld4UAeZkX4qVviCI76*#{k1xLg>nAiwEa8NnXH5uSPU^ z>%f^J&YWM&u}?%6(V3H@-l!)^6C{zX1G{^u@$5WWj3Y8anDT9KtVK=yFsr38B6#-u@;vQ;2SL6|Ma`Eq( z{S-dc0dmN@XO*Vnz8!Ck15}5UBNR(AAeX*H%NF$io6n9N>!3T&lI_ZMf^3G|&FZZa z`-~XsP&81M!p>Av3aArG0^QJaXcRm~#!bF{t=Rh|JuCBhl{nFrw!K%YDu;9KYCgVk zVgS8^Moh(ciiJSM|$x6ao#pL znBrOb?ti{>5P}s)9T|lK$P-p*35F;rPbYraZ7}>90BYtuMX`mkVDO2Y{ce{4E=>+a 
zWSg`lS8~}j5MjL~{W7Nfj~>bf+qrJ#Dd+1|dbdw|=`gJ1yF0kJ)DO6?%HWnTPJ~6S zxbb#JGDgUsFF<+EJgMTvg2pn@eAs#^TWrT$N<(8sPPKkayjHL5XekYbq0Ulg@thLr zU&75KC+DKWu3Rj@4=VoO)ZK@_ufJsfeqafkKFDbQ!wD`J!yMYpodp!lg)T9^gw;E9-QXjJ zNivp7;g50RpbvG{!5sWPI^jcQ)&Y0x*8-+B;IJ0i@>`$;OOgsC<5gHm9{08c_zIr* zQogPFyUdA?jwGx|UdSo}$vyyXin@2fKLW&vo}!}G(qT`2gd;PcOnOBU2ej+!9sW_1 zg(?phu{K&CKLSOSUld>849UEsR*Ti^YG@4J1gXncVzjLxh5t)Gx;KBN1C%1Iv6yAR z;dOYE37awE7v?Z)^3b^dH53nne~G(&y}oGpR}@qeS1aY79Jmo$$b;+h%vZgJyuuM` zWpk1ca=u7Cuc-G1vMBNSFsl)fLMRRyu9q}y% zx!3+W&R_N?7)YQ5RxdAK;c4VE=xT>Z2<4zcMdY@~!rnB(iKzq)+|c|JO=c>;Sm}nhoNtVZ)sa| z1k&E@6b!TG<~n|`JjA6%R3*6#MA`p(p&Iqd4uQ9y0EPJbX?Q~M94`JE|BGB8$={>_3pVWoHPZ-BQuvN^&nY-S3DrdiF#EtkUopXCoM za)wW=kY zPtLhc_vCj!cJC3rIJP4%0g9g*qPJFY>vP0Q5MrF^BN{A!h}ct)Unf_5-DnkfWSeZJ z%pvDBGj^ovwf1TnqsZZpy@J^VMb^RN#O1YBOzZs2yA&O@Tx?kk z?4O#OltTCLd*luUa}S#(q8vMxrwl$9OoJh4r>7}p>7;G}c%7%%l}ofJ)HlQkdbcKnnQWNi)~CLN z1wax$$DU;4*(Qy}8lyKaCh)!VOoSq#xWPvq_=QVTq~H~$l1Ta<(7)eh9ktBv7v?qlGs+eW*-5WrU8WVK_K$?|RU{p0~ytvKacV^PcYtvWqQuG^ZTAw)8&RU2DEFg zmxOaabhR%f*?7^SEfL>o7fX@{lqZRgE|OO`YwMrW?UBF6krnb{*Hby?uH}cM(D)mK zQeCkWsG%7K8S5_GCOoVeGV@LPE+4YrXW~EG_02Wo)o+uNfwNPf3l#Zxg4c)@k!5Dy zKX(RJZIfu;0Y0x};(ceg%wVYS{EBza$2ToBrS<8mSxlW<(tBd0wR7jyr}OzDW6D31 z7^fnXxCG`2x72YI<5fUyR(B%W-oep(jtUYA4P1f5@D|;hhDcg7u|7;x?s}9#dBLp> z4a%EwOl4Sc{e$yw_$u^*Du*eFAmT&YcWV3Zp0&^FgFv2W#HvTwRj4;oT-89*=RG*j z6rV_-eIxZ8Gx`LrBcL7q+7D-*%nYFsSHYDQdoyM}<&*YNl?c8v#T_F5fniwUsq@FG zzrSl+JeRw40o#t3m01c=t-Y-b0MvH``F4*Px^t);3d^%Ort>jkOs;Y@tU@anbAHpT zsRSwCpyuVerAGegQ%YATe)z*ziZN}{{wp6zyJ`vT{S-a8OqLK^ao!B*xWLMM=jkpc zxXL6HlpA896%xL%*KmD$<i29{6!mB}?{E=Mu$HLfw87(WJ+J<;WjG>z`yOZ0 zonnjqef>N4v?6w!{{~!MJolSUDfTe7ZvNH;*)kbaYT*ZJh~ey_Xb4b39vx=9l0}{D z2+j%``M_N&KKhV~3{c3}(6vni9Im3&OXq}dslm7nssg-yx<$gEA+5Pr! 
zlx)Lte}p&!fsBHVRu29Wz<=#VEq8x>=|T;wQ4{_QqI0P?Bgk6$xXGQtZPA!+aX%Cc z-xk)s9q_^TDrp1vIv={O4xdO>SLT%VRTA~s$uLMWe{cKJ@;GdJg~fN!wTwzY1hp}E*O>SMCov`jL$0Ud)7^c>}Qb$hnrbMEuB1}Q6C z-1_HV0Sl?87tbG!?c0)?r}IS3pqu1{v)Cs3K(rzl@KrTQd7z9 ztCk%a@vE}fUrU^2H!C|U@)>j1iu6ooDEntgBGhpF{VndUj7c=^PrMOX9aT{ZX$y5C z{UZ+vJAhtpo%&XwswM|wCc++{^W5TuNE1Y zN^X?gAX19B6cQW0B4kPeLtmS;%FUH}@DMyf2m2rQcB(5-G^Fop88d`yf9rIuYc?(k z%~P`|vHwly_SkY(df9!`l}m%e08gWwz&Fcb^8)ugeS86=MWoLC3tZYV8_GAn2&Xk- z%SwqRFB!YOTzs(kibJL=l4yOrs`E`-;75Z^Z+DrFN$!Hv7n4%phcGQD5Z>0MJ5$+d z9HVNFZO?*;&)k|PfL=P|G4RDpP|T((?&+Ao6Q6;cy9N>}H)U&qxUSufTmi`26&B_dSxJZ()UY4J-Lhs`wk+RL| z&nUReHTtPwn?k!2yJ33zSvqXp4Yz`ByXp*dYxl?oXfK^dk2T&rw%PcaExRch2q(WeT^i1kH11 z0g_?jw7Y4Cw)_#mRRyK!TiM8o7BF&O>It~I}>b!PCFGHQaPMES1$;THvs5FOa{yfZ;S#6gml~)vU zVSh|HE>Gcz?pHTLW#Qgxk6E$m*B{1w=g?dThLPqxN|u8LbD&`04MzkzSSgN(mWKSp zlZ{kROgOUG;)b|+FG3>&;Ak3DoYWR9*qa zNf6&Woxg4V>=ug}#~lY*lWk%W4+V!%it(mmgU($AHrDqycE=dEq$8}{6bHWj-T9;c zZJE6r#NP!Q!cF=efF1@%wQxgFqS1%xq@@QdwIz?oLZr~IfNdrZ{lq-!q>!L-QFnHz zI%(_6r_GSxekdQ;cCLxpkPR2N=pS&y(f3^EMvgsFfti!=RQ03!`V&vR?d!)%cN+B% zYWE8j66%joWEd|d3)hX=UWwvoTeQ7t8GH8Es0SLgiTx0FVf&kSt(NUbK0{yQ1kc9P z!)8VOaYio(`Zv3~MUtvH1kdJz@v_0I=X3Y3RD-L!VfYxB&T`M0D>CShf>|LA{zHWa ziqY1$c9+qvb+D!zZcvP8gBA*E;) zZa_Q{l()=tO}t6R^|Iaami|6I{A%?_Ih)~IUS0GT+We=c9M(^JY1=)oGglc}^x=0e z`|OVJ?$w5{RbDd6f2f9b(-%I;u(N&fYy5n6I<%z`|E|9Hb{0$KNQqc)7<)-=QBoFS zd(~_#ERWiJ?HuoYT-VZ~!0?D=Y5j?ZT9d!C$^EL05l0OXMb5vzNlW&6=svF0hhql+MWnZKcPzeQN<6F|U; z?ax-@8Fc#V-*fe~Ph1({me@u6D?VQmvEe49DaPceABVVLxBlmKre%f#3l;zNT*J+G z{}Sx=6A!dZJEEe<2w1YcD@!MUV%>QAgbo$Bv)QZk2jYp_;}a}_v-UDmCnx}OM+OB{ zxH0Dhf7!U9{J(6k@h^yZMhb=TJWpvJ?oBru%a07Ys28Ina1AJvZjof>% zY*Q%^QG1=moTH#2B9Is8mocjOPVEB_XwJ^E3PU?qJv3_}zMCAXpn`Ko!Ii^JId{Wc z_Uh8}n{$SPx=uj`WP}XZIO5@)x#FyCJB;65Kc?xWNsPZPNHWSf zB`4n5|MB55%vm#F5&X4mK3W{QP?1R+|NF)v^-_5zma{kY0Lu;Y*y9A&m2~t8qpGz?^z?YH`Y0N+u4({H+^mb-< zvxrHi;sGu*J?BfrwQ-sn4cjXeW0=8w3SIXraRCX)Xgyq>j`+i97ul?Rmh?B;8Ieuk zAi!cVjaqG3YO^?qyTy9#E|5- 
zkl<#Kvt2Z$)>ji1*qyJx)6|kq(Gdzj>Tw0vWz?QXA05JR@S=i>E>dvws}P@|0_#M( zuw#|3OIL&&2Y(tyZG%$)CV^3P=J5VFV3fHO39hM!(*ff%p%EqJ`=}_WQKb3i1LO}_WiB*ZW$9QAZ$NC z6f|t2A*T&LStHNFtR&w;rQws20#U`i`>TWBV|)u~eN;HUADq0Rfx~8{N9oHliLgiF z1_l~S&nX{ySxXpL?8|3=S`&G}I!VA@U=j|4r_f|d4q#MKfx|Q!^ivKmF6FX9;eI~OJ~~~QHYd%X)x{-EovGS z=*C|Ag1s@YTjV)bD-11L)bRsRt%D#hv({knU&-?XvG&`1*K8l3w5d(x1&1$h0A4SC7g9tzu&Q84Z2!=8& zQW?K_WPTlEXHI%Q>Qx}LYWLc-(>omZ(tL6*O)hP$jk8BXMvBMd-MB%T(YufDn7{a5 zl{n$*J>_f~*py9B;V;pf&%e}Wlsr*ZVV=4=X{4hqegw9@txXPH#T|%W^y8|1x0`)e-~Z~18=HG2_QoJU+alw8VxCpN zP3tI;MOEGmPH{t@=)YrTyuJBns-ZaL;Iw|?72rp{ zs?HnYE75MIro9Kc`Y%I)fBRso6ZY~;Qr$I87dG4J6KW1X~k00*2 zD`;VBwL6b~;`6d2nL%TkmeQ8IPdYB>?BsNTZSoT-ew5F;hu=@;f{R?-KA)J=WWtoS zmEiARrl4@0)r0-IP|XY-0;*Ys;(8?(M5jzpE_Epsay9={ZHSL99E>?%Kan(*#n-~m zi4|RWiD>$2dj2)vCu<9vDU^+$D)XSf&@-{Gbow#EZ-rTjZK@G zCmEmh+(Dlqhq8gHSWfU6!U!I(L%#jqb|CDzIs7h;I43E=W!`&>*Gi0TYoKN4d>~}l z8)J0dw+T1rlsgys>sd)Hzx$Kti4cGN|8F?gOO2vwr8m@mG z74f2QFh25j1<*Vsy>)&uLy2(b)748>qJG5#{7cSql_lR=S2A(9|XzyG|&vrhzloHR*6+#|Tbwf)C1|tifH) zfU>deeTfz+Wn5qKuPov5F#wFp^=+@vqiN|scwhCeb9pN8MB;*&sbF8APRtLM4 zX0D#7QY8;!a|%R$TbBQIqT~0!+ zZ$nDDb?P|Xm{MBm-Dj5dF7`%}551_t3SkNamioToDO3T-eCf&+1s=D*%g0u2bhr-+ ziNFy*%Sr0$qEX~$p&)n=YMznpd}D=q@%?N)%pEd1b#6B$ga)K)zd}91wb<9}=Rx=K z1*qLoqMXYeUxx56c}Hi@w{4zydAK#Tl(F%Q1bAO`F^C$&hu*&ab^|L2w;r(ORy&G& z-Tz1agxidJ_&+_F9$y0!5{^0lCWB=fLF}Y=YEM+Faj4?ug}5%XV`#TkMKpHb_%Scj zslUC{LXHAsA1hrvOEA6qiRO!nlN)Qb>DlhmXSc3g$~32;WxOv&2vuwLH^w;_uL`)n z^*Dvc9YKc4A*;bDZcH#rq_Srg{urIyKh&mN{9~}tC<1n}hXD!cSebY1xGoxgfvhp; z&9jlTlY&_$50<{Z<1+}$a6-vDmnZaGR>Okv1$_%a7q02|B00og? 
zyK`zy0Vo*_w)*m#@djdq|5d`d>f3)xnTBqBd%^$4;Rp96Hd%F=?ZtI`hT3ycNJcn&un8G13xwP1`AQh!`nrgNN_jO(o?3JeX#WfqO7WoILYuWPyYH41 zqj}w>l{*u^21Y)!XS{Pk-=GvfePGy$HzI2~W}lq-mL}ZVhFkb!Yn(ml+0iF~(DPGC z8%5C3YiB?Wbl&xPVD1_g3ZnY;>ecDeEy;6|X8ir&z^=U+Z@f-f_ve`wcbL9?OT$*d8gJ$pUyGQ&Q}Ze=wx_e#w($4GU)v8D6bC{deC-l zi50d@AFp0FcXK=pxjXtd1>>|LkUL2FY|A(i@I`!RwvdIU7{s)zNG%&_ zq>-?*iZZ4BR8Pn3^6yLX_wQbALWL_@B2i=7tU-%ZM^x*6kg}4zvbt{^&|GqUdgd(A zl8k{{qINlAya@^MwXXThsO$xvMi&iUT8t?r(&A~NWLFi(i)D?b+m1}fgmm0|@cx|+ z!1J&D39sqyC#B+>$~}I%_(Ys;oz$f2F8S3ZHz-XGr>iUu#_$PUW#PvVpiHb0zCS_V>`Q0RhRwqzg+XS-@rh>`$W5(1VGZ$cB7!&gpz|h<=Ev#q-@{c#tHPiJ>lpO6})E z5ilK>E}pUOQ{T44FrZH4C!e@zOE+>iw70UHBHeHO+M?9wu2P*fmS^Fvg2WIF3t6gU z$!q`B*s9{b4XYGyZ?S!Q6jJbf>mb^Aw7m67GgWnzA69!o{Lyl4H8XQ1fLSonKelOF zT)f>2715Oz(8{xs7SyvViS%BeZy^GC4@)Tgf&{r+@vHbq)e}sxEaQC>E|*wkQ~4o> zB>QxIJtJ2-HxrfGpYTimUFC3F4AL5$;_qwEj`-jqUGK6f9%hBuR3QjqR(3;>l^}rn zeYCqLN;vx#Cl!GXN%&{+{(lyNte&AR{u8BD zGki(!uMN^=av7U1y4t?stj>o#7cmU@?g^d-z1AY-sy2nP4HuFY>*DNvw8Z`G8}R%3 zp0*;&Leb6GGl$r0M5U?HW-H@rwz~SJoVEASr*38tUln>cwYT-@&agbgyYdqM;w)s2 zCQDoc*(r`)JkJ`P)B5-I>pMMe-?nQ$8}qD&{K)R{u$~`>&&XIC)U32>gC?> za;+e>xMeov3IG{XTwHrYME}weYqC(cqhw8jpAU>?{N!#X-d9gn?2;kk@X^|#wvkRg zbT@6PG4vr0(n2E5rch{Iu$N9rnyucXj{E_N8@80V#gJU2<+byTxs>q_XS_4n{#LGe z(i$gl?z$UsmUm=@*?Bq~Td!LvFUMbVZas0340dfmB$<=EDS)gw=vRp0I)SzQVj4bp z{JZHL>(1V`h;b3{71+Kr_mh+8|7-~f;fWk}NJ@*;OKUOwQBe$8Y)Qn`Hs2#i1AmG5q30~w zOv0g4BFHww4lc8k>2UKeuS%E|I`cQy-_u z;ch#WN_rk?@xykm3O&?42~Hw=N$|fTj){13NCFjj-2~A1y7SjNcpa0`|2C$F1Py}4 zN8CgM8up^;KS=!s>)t3>JQQjUx?Jw-G?x3_aM32W+9|uXdFuK`RZ{lrZ98aKGAniS z2O|SVC7f)#PftDqRDf#}%)Pu*Go-<5ZitFovYAUVWNHw%B1wXk3E%|uK82tBYUtgo z@cL*;3WI4cdWoT147%K*HP2EqBI|hB-~~z z$9Z2}WYR@^^@1WAiq{0e?5b>I`MFGxQoHnW%ea!@+ga%nY@Nz0j8sBBr)7Ui!SU>u zAy?QOZ`&1`(Vd$@`serj;LZpI6k`u{dj^Z9`+M7WN|+a_0DG9z0>*SJ0gGGcO-3WPJ|LyU+~J`zDu%hUXdU z2|mg_qj73l^?f+L$>q1oVc~)3@Rd%x1AB|9g_|TDgsi!gze(1l zVQ-$X8qHW|6IkBO+E->gBNobZ2wE46lx~76kD5!CnMeH9I6pWzWCU)g>Zp^x$+6_- zKGY3U@BY|W%650da7Yv(RyHM>{<#1zW&xvJoQ&nBxnRnnb-J7ou|(@D^$m~Mm2U8R 
z|4AL3Z)LF{9d#+x5Zk+?%3+yRe+T!>2-Kh@{;c!CEa%g1jRlL?_ObMs$f<9{=mVB~ z0rW3({j-+?%;$j>*QorqJ(`;NB|7*Iw7iav`z`T-*KST+WV)Krm1pvle1mc=p?+#;V6qMU|+X^>^Eey-C1g zC*J)TQE_1~l~J0ejrU4vire2gHxNl5BV7{EYum70Ij++bou^7x^@9%nph^aWe z*;4l;>nwsxHFci1TIA~>KFJhO3O%^}XviH-f#fiP-!B>>l z=xJj2E)fcQc`hkeEhb+5i}LM09z`{>>$TGT10H^Yw7_FhF+y}P{ zaguy=9~9htPre?PDmIpik(yaK>2!(EHJ>EOo-ShXo-H}>_MOJ{f?lsjd*ru;C;pf} z-4oV6=E}XYH%g3l#k?$r*QjcHbG$dzZK@0>D0ESIss zg-j2(>>)Q6Ae~w3Jli}An1;YIHQ&>Vw~-+jlfN%w&7Crmy9^*c-W&p`!S{~O5a#O2 zYb*;R6%Z|1{0wqag8V4786P~8^qYK6vOJXL5mfI!^(z-^{>g9hdenmcad#uN#rd*N zr6AIWeKL@FBe6#|>(&-xUTu=)gJ{lt+Hp@~JtZapisAFiLw3 zT!_PDN!Si73EzsRZQNIR+Y7ybj$T+mRPI4~dd+C^&#bq{RMVRYK2fFdD1y`O=4$q( z;sLSelF*C6Ovk9#S19gK?yfHL6?|tZbDYbkM=>+h=_oQ-ymxd68+vLue&6{)zDnMv zr|$g+L+kq@K9ab@VFfFnv`O>t=xN&*UKS>w*un4!5&!0?zZ+pEoP`B?Ts{6O+_0;A z;`N|;t#UwPKa?j~pd-?Ged6Gt>yC%TIwCE}>0oU0 z$UJO&Yz-=uWpK>syM=%u|!pDvUb&ft5a9*Zw@G$2uK2&~K+xbsE5_+M433PBIe z2YO9D#usIaaGzxvo%CDAHW;lSD24~wn6aZpR%TP1KYbeHbA0C_!o=^`*6Yysu0-*bJf3vF?Im zE#Nz0!cnu1uE_WFJ8-W>f*#8qOByPb7uKXNQimlgC6)Kc^-(BM#L1j{?2>63W9l`i zO!e43i|p+cf32;A>zqXFU=g)0U z?85A;Jv1NIvRq9&oY=k170NF>3=5l8VLPl%-ShlTbN0DysMC2WJ(RV6U$iGvjV11S zj*GX$Rcx;TM8LIr+$we!Hq)UfxYf$DrbuN+mceOh>G6V}RBc?$lkh_OD+jSf{}mp* zF6qR-=RQDPEC4N#9>co!<8CrV!^B9*x5%57ulvC&3cm;i8E-tA8;MqTWYqMyue4+S zRn9hwJePUU)Qkd~bDhgw!pBTFg21?|!napl;pwsuE#0ey>FrMtq*IhvCV*4WUAm7$ z`mm?D8nif62+1IEeEz0E!S?P49|cm`qPmkh*7ib{-`IsPmSU`(rrZ~Jw1!D!<>8k3 z90W^4;}Z2(Tcc>#lTAO58n5~@SG`&nB%3Bm0HI0`wm7~y-o>* z3nhj$*v#JKkKaD>r55|uFf%c(u_&D8iApuQ@Xo5hTdy@KHp=1?>_c_ z@c0vQ4C`LA?zOIU#d)5Wm8v_%>wGZ7HeMlEw%FVycjy~f(rhpihBN@Qqa5tQew!ZP za>*KBb+P~m-^NSz3`U$Sxr?M_WjP~=RB>K1lxj?uv*z%h3A+E8APMf=QD(Zv+O8WC{@=%nJ)MLtW~&jQ>cH{Ixdfy0l!$aH1Fq7X;00F%`I zB8xVgvF2el|Ei$-kDwSYolNYVCD4iy$R>QRM(5RlB=j*8Uj1W{#o6l)E>6Mw?hN++ z0o~!x1|)uG&{w3cI62Gvyaca>ey2zevK665O6k_tA%xr>w~}@ zUyGOZ`8PGu8;5Eq2@Qd;xhW1D_ZJZ-Dr%b104^z7^t1G+Obi((aP$N7CxQfGQ;0hW z{zl_k@p@dgAB*OtnyXa!*(kmxhT`pi82-r+nAI?Kf(-o(K}v{PmB!rAis9L!7n|JX 
z$O(NR0`(|dSVY+g6r;g;UFl)4SpSkR)!9CLNqoZ7L2S7MWP{wHwnm!tXCB?O#8HG7 zpTJp5}})XJKfpoO>i(Tb+ES zAKrk*cI)WU-P6MjY3YUuyq3-bm-n|ig@IxS7_dY^X|4lpO@swdk6+(gCJ~1U+it~( zH1wHMizg*>nV`Ak`ZQGC{Y9_f1) zm^7RSj)woW-Z?;c<@3fyiM|*0c>dYjPDMhR$_@aMkys?8un`V;O^=ulS6iooBTO;9 z0U++iA-xAlvlT@r$csFfOc+w>Z8c^G!dJT+*~wG7@7Z0=U${1i^gz94VBB z6)7WBiL4TS2*O570lAh=lW(^;f>cnoeO?1aK&}@^!@<0FaNHEVqWH91+%jY*;RMhv zUp{TVj}-F2!a6WdpHFzMKYszr?{``E+TOG(nxgOZbmyNi0C`m8#yXEeLtbHZQZ-9y z)Z7!u1LOqFin~~vbdu{q|0Io1)SuyV{#$h#DXfU7D$xRAgxrJR;-W?%iNI>&fbwoK ztv>^E&x*GOvl@+p_!G(AuiuAV{h~Bk0Beb??W4W7HZ%ZLTz+Mc;Y(L6SUQ#}f5C?i zdkv-1^t19>dm#2%UzxXRrxzO$*(XdY<7u#uw!sQnPhOB{;Sj%;C0Uh02{22j;4(! zui?kWk=#RvN%ZV>8UZM$Qxc&83?Hx;ckY6sl)%NE<-qAn^GfI|aeau$%s5TK^lc=% z(iG%5v&pp{ZM1LY`LDeLSJ)r4=5q)+@b{>6&>-_~0^rF;&YdHD0!W7*&VvLXbPNt* zC}1YLk8U2Oq{aBWBlEiG_Ktv>+w2%WOI z*AxROy1?+{lmZz@0ahoAC`~93=-^SmDWL%hYQ>9(_r~}6Gko&BfALApmGUDYmv)Nq z7fHqNSwNE&XTMZdB8j~H72^o&kHXP0K4EY-8uA-;0dPMuxG+uvE1FgQbU%_Mosmh% zPXIWWfseiK*42Q`1U#coW^zqOzTZcnI*v&5&2T6)7>g7YgEXi_-SZtBi|v23WnBoL z4wrpWxgT=*5sa11woqpMdnKs8fX_81Y7H(M!eg}g&va7(GGzV5 zxB0k7qh7Krf@{wCG9YYk#vJ9EiDdfH0re&gWE&@gtGy_Ri3F>4=Kv8!)i8Q>g9P2y z8iFi2>{6FUZ9k-l0CEdnM$3}&SX8{I=-t#tG)BpW6d#-AK%x;$5P1$sAfEgS6Cv`~ zMDN}!$og0NRAJ%Z*3@vCG~z-m{Qfh82-n3lOZs!-W( z#?J0oDef3YiqW8e>E*?Kp{*gor{Q)T1&T>PjfTFhO=->)bFZLh>xA(<7Gjh}3aImJ zQr)O8+0z@;0S|j3n0*CJp8d2V?obwKCSviBSgIpz1{bEgw9(P+cLyrzPy5RB#9Nnk-G+I6gJ9R&(G+GN@f2?$UcD?=w94>=7+b7Snr2OoGwk!^i zL}6^0nS50e2hf$Ad+-r~^&vT*7{K~4mQAejc$AP#3|!%HB-*2@SOuN=(H{{-Qo^%8 zMF{k@0pTcXJ1!Q}w-UfB9bde1j3J|yK^phiTCy`-Dq!51iJuM+GIIWG&In`wtOwUDVSQH51A77l~zm7{W=^ zb21kW#z0>Z=@1!y=9D+=PoZOk{&WP=#4AwwOf)7Cs8?Zy6Cou=02**L}ymicFK9K}? zFCeD?pNoF>Jt#63ye8w0Oo$Fo$bc{qNy%s}@^f}O*}}N#s-Kn_`WsJ$RepoM!B1NVMLM}kxeb8^y#bT0F0gvOvvNXe5GwSTelL6R@UVo;EbC{ges39oxEM>qJ7|^N0HP6i z#2gH+jtUs!DoTUyDFP2ii#KPIOJEOf@T`^XEVH4uNDD&eT!I#DWqk zjW1F%;M^wP%JAJ9d2mwLC*YFtF2Fw-+?Ebbn|ONffl_;LwU5{fcYK98kbqm3_VW7! 
zOZ^B3HK>+k5UFO62{jrYn}^QUO@5eD*@_fhfr8dn{8rfsWm`aphihi{h$}|~Xh4Ir zIau6i0}6KitR9alqb4Vw#O)vD3t*^22iLLPhMCEw+VQuoa z!gmA0CE;VHK}d@Ai@p#^ko`2?jp6WEZsyJFS)wy0huwTzIX}yRRr^{G(eM`j8qFf^ zazFkA@&tSsdj-7_1+wzE5Un28xKJ_SUvxlPguefqW~Rr+aS)OO>fZsG<*%Ju+6xi7 z2cx#h4Kv|%eGWt$J%{@;CV>+41^dezvZdvfU*#L<054#RbP16|(n2=4QwD@-AN&kq z*bA#n&ivv;Gr@3x10u?S7D%E-oSk|%H2D+!1c1LM%~Fwe5$&~#Xw>=3z#=wY|97GF zy3=f?zJhn3kZ42z3_@B}?$|&sWEN2r`UBWh`m?J$P8YBJlO9mefJ|uj z_M*ROrzPR8Ntu%774tI(6woJls(YC32p>Y+hw+I_*r67HE~tt5IKHIfRJkxaUD&g? z5V`LZ0BkTgcGmP2Ni@nUeX&J-OPEFIHmTO(erkv3Tl|BmTk{}^F@k*OPdwWRy|Kf{ zpBDDy?EO{Hz6YALS;nMj^bCgIvu!HVs;e^-MPA%UNEbI0Xaunc-#5Ze8PH8ETAu5V zo&wrT-OXyRWRU~nNzy5nnyY@1XFnvvWqqDR2d@-#nwa<{ZD-I~8V+PoAI z99xfq14)mIPt{!8W6JRrK-C3`h-P*`cT*sOztRZ`6lOm%zv&KMNg?X8R`9wTv@~y% zzZ9qAA}d!8##CkBm^m~M&VDg`&h-$gk1TatC4CTW0t}r}3X>}IU2y=~F|+|xT8a78 z5=i*Tql)!KjLSeFg@ZSGet*QaU4hh9RH|OPunW1?R@`s3&@DAu%eQb6Z~{r`Xo4rz z(o4CN>c40nJ^*fDKS%*z>a!z7B~W63E9AY5#o)4{`<^3ROW506N*v&<9?WqeWe@Wk zOSPxN`j&QI5uGs$AXB25Bp-*y#YH)#wz^FGvxIb;KbM2MOF_wvDGW;Zv z&*iuaG|e%&aQ*r=5r3}GbR|DkQ#E#D@2q39J8-4UHKV=+mE;(IGPr;dDtqNodg)Ac zy5MP#)VAC)m->o^_w*+GRjx|kGE$-5-BByU`DDGw2c#954p%!nJG@Jlfke{Y5{IOe zWCHEzI)G*XwO!M!YvP_IhoWw2HQ5?uj;fMtwXX{9)@!P|8lEAT0|z0hhcBPg#nYUQ z9LprsBNM~V`cx4L+&}1){L1R024W(u`B^BVKs>73fm)ZD~+14QG&a2Mgn4Z%t3KGt}mfkx%hU9*Pw{X~s!4V?J{RY%A12CTzmxC4v z_=Qbgd>EH!SG<6q-yZP=S1lA;_58yFX~kFo$Js{0anzWL7mmadwVN_~b9r2bs_#`E zcNCpEo=zy($g-Ib10mvAfhdCkA2at;?Mk32%wXdz(?MAG$!yq^O6 zk)fA^{i4dTGK}@FTqfx8;WJfVu|PYk40{5ySrZ2(&GozGHDiLsgdo|>wbA|tJ88il zAka&dk?OYgNVtrmtQZwqjBnwbe9%V5|o?uum_dqAO|9N z-=)(k%@q{J8GsNY*v5K#yekW1BGPkdz{ zSg!A=DZ`@J%&*V1r{h#~d2O(A;_z|^Z;%AllC(lt^7$#{byU>E(sr}K4rcg!ocH1Y zweLVf`m-w~>87Dh5H`5^{r+v=a?Xm^I`PZcSYx8&V!9R4ANn0tX@nU0Z6=0zvgHQL zWV&RS*aIEW?k4rX0_YX3u>wS;Riz*Cy7SG%G*%Fi>s7GnT-1kWCJuy~&SuUC3YSQg zn%=~p?p)M4IX^q}+4b7J_Pg0kJiDGXuwFNuKQhCuX;LDx$alr*T67Lf^9V$h&6~z- zixRdOvb0Fs_j#sf0Q&E2tX%8IqzU*4=wIa66!g7WEZIOKIaW*6SK=FF)ndc42rYXj zs)KqoQ);SMYlP!IYZV`esyX4;*i6icu@y4;W2WSIDeOKX^cIX5I`ew$9FwRXZN3-r 
z!%-@kz2G>|5)+KJeW^rrWu>J!a-Z}(O+fR!!IA6o%vx9xqLGi{U%-!@BxXFi;;i#YshWE3ik z?2z}}*s$^}hp`&3dGL2`s`Be9kQ1{E?B)VDzuxZFA}(HY8wO5eN_5X030a7b>$sCy#qRT*=6ytWW#C zkqBpTUAf3qEhuaYf=3 ze28GIm#<^mHty`N$R()rxyMAY+FzLn*HIC-<3qptix|4Rd)PlNf>(e2f*cD1?WHvy z?&AlYyZR;MdLHMv6EnFWFhss7rnNQr9=}3^_Tr2==8HTl&bt7sD((+Ygb=f*`mNlq z&&!05QrTe|+|FhptxZRlX8_~87W?hxyYFq6nwN0w{b0oFu;b100|1;TR4QOiIPSPz ze_gT|o3?}nAtc_*ebXu^iglxXYL8fH!>0iq#F72g(d}6f!yiy*1hlxe#{=?DO=CMZ zP=T{?>?o|$lB|2)lq4h+o4Vowi6hpwXi-|efG3Pr23^0Do71YNyY)$V?o34_OI-22 z`WME8Zlw!!L9~ycv3+)m$Iu4TUMOJ&-3V?kK~?XBu-RKgB%B`_-#YLCbMc(rs>b9!4!yI2mf3`~~Ur><4GX-+X$x_OW66(C~9or7X{=sFcUDrJg!{ zjIRRIyd8C_r8$i1(;XjSx_iSm^S8N$BhAplaX(oV_mM4g*%__D;TJbR~(=-@8m4%yg$SNC!nHk0^s*a}S@*|^z} zS%hxRin{#PH7QL!Ef*?`r*|>PW#l{&qA`-qbon}UX6v0AqC>d#>`s;Uz)$aY_~T{J zt)(aI9E@7}#=R8czTYQMd8yVh@9W)a_`Xhj%7&VM%q<7UGMwRl-J*2IBHtPf1G(eQ zZi|R;C~wdd+xjWFUn*0hfxcW!>08_qIao@yFKw)XRM102O`!S5LnNyYX1{rXx&1mD zs0cy*>GXN59mtVA?dv>8?NwY%a!uuX9MSBW%?7QxD_Qzp9A^lBZ1@$*<$4HzGF%Qk z{!lfRMaXe&Nja{G(8?4XyhQpk*?BgDoC{6n+11eFm^vZa?UiNl{x%?~`}lH#vGUWb z3ET~3pnR_H$Q-$XuDzM}T;-H6&ym0I$Q1 zcOiS6)c$51bf@cRQHfNx(>cY}Ik{Y4yV7%hYp!(QVZ5^V~X% zn4+p3QM;JrY#S&8p)w-z<1cgxLi1fp4$GmkyhwD9!@DuZ1#~*bUo3w}r49vdQsszJXqC>`x5xzCdj1h!|~{+)->t zgt+j8FPcqI%|jS)m@47DymY>usf`_a4Xi7O&)bq0@*K-p5N_fsR$r1g8(ZC1od=zb zRX^zQek9Lb zLruX&!EGf*Y#tB)j6afN9`BQ}n@r?<{VrHnY)WPbtZ@X;XN=QrE}Iin9HWn#;G~u14(&~EV~A+1qs~rGk@8ZN_;_g? 
zu=lp1Y!E`V(Q4cCBS#o+ml-P6L?EIkoVgRSL)JU-7_c`w=(K7~W<>WcFV*)=biP4j6`gp_r`C#Q3@ zm%CXCS!_!wCM8d_=6KUy^YWoQsrO3yV2{X*DSl$=!VgiylitMfweXqz*z2Oo53Mh) zFQ>r%Eelb@0h<)^K`oSZuZ30vv0!TE%1%zfPbutjgFA`a7deoJF3(h4WWw}peRVBB z@{Q*~k?)jY)`}%7Yzw**Ub`+dxgg#_ftT!hzuqfZBGu{FHs+9DGcl{;46I3gq)7OD#=VFZx|QUz1yU<$f*w&?wA;HeNxyjV*P0D zTyFIA-YnNdc0IgM(Kr-{!VU_4{z0=xP>ta+Q6JeDEosM(w)7D6UR2{{CF>H}R|un{ zhN)$zr7b*J@-Y{dA(Z)oI(RZWJGrGG`z4k~W^wRi0QFXd^65kQ&kX%su)g+bX|F5V zA+BBB*|i@c&xi(-6z)Ozku5$y_Jp>52&zcliBmdgDBpkKUeoFv=>3@G!(%k>tNjWt zg3;fOAJyilJt4OA_%qLIkY3gZ|$Wm=rr?_bL;Ul)c98=bN#e9i-Qw0MMnT27*%^Yxy?v`KsOz|K5gxwaa* zyn*Pv*@k8>TQNNA=`?GB!yHo)Tp?MYiSEL)liWWZpQ#!S>-}Os${VH=&m6&Lw}I71 zd0JZ}&p$L`iN0k{j^;<2Jtz>$Sh=U$>1Os1y^nUpiJUoX zL(B5-fQ5V_>KyV4Ewg}|{XjolUp|}godfxL)RfOtTSlAEQRk@?xpXQByTkj7gi#%) zDyoX|`W5O{7bbyoDN7%nQ99A|Hi(5&QB=!UFC0 z_%U!hamI+lFh(!amyymo7L2E^OZQ9buRzUVq?=z~UpQk7)2nc9lNeXuBJUPnoV>DE zazjjJcV~ZolunG7^G=i^fcUzdY`+88u9s=R(VKHy-1#;cskfUw?je0DGQfu??7jLW zI%|O+O&yq}Yz&=3?eJNTEV{DeiO`Z?79y0DG}rEI=Vk3iHK6BtDDz%ol|t2%*P(IA zdc1A{f&@#UaRddlT-dPss?YOwv%cnKYg`ct{N-(s=4{2_8`3c0;M$)?Igbju2(`iZ z@Q#79M0Bhc6_K=y`I5xCu{^Q|dlA%?!BRHUGdNa!fJX^a|8%veed$0TPE$~OboYLT z)T7cbgiuos`?aNh^8s2yYKk6Qgfj(y^}Mm2lkH9+Gzr**p@;631x0Y1zEmuWD-x-F z=*ZU2J&WgNz{Io?ON+=+k58&y2AR$@U03Z52JZ^IK5gzkWc#^+mie{$$%v|zY*WM4 zQOMrS!q%<3u`-yV-TFVP) zBSamBjpjPU>*oDhTznQnaqdCl2$T`IuP5^GkKA}&0*>#>tC_pDT`a_-H@HkT+Vc?k zfxrH!G`F*DFrS4u93IeyA#Ifv^)ZU1$?xe1Q4`d`34>E>t*L@+&k33B+xxry*Hu*& z??nI3xAm&yLQjn4*;62M!m=}W+|c0n)LPi|VsPa(+yH%8lE#LvKEoxQm)I#z2DPFk zEs;7*M353pnLSz-m%8qvctI(3@Mo2wPcNUmA_P`VDKxQovL|8|S*Q zHH=a4_aW#w?;>Z05OGLKeFqviL+`aUz0kWnIug=KP17<)eVz>;`5^Ib6AGk^lG?ObVNSFKn$TzJwHV}y&GcWzT}P=k(|UC zeGt6vHrP<>;5RnCxOy-y^pmd5u%;U>RzdLO~> z6is^Y{aD`U z*;d+ArYJ_NInSpkrHer<$PdbF7bN6XgZT^>R=37;{VKk3UV5f5TsVg2JI;#Rwp z15c(C1k!-aE_7sP7n|{}WK&k)iz?Qbz!CX^6MUL5ER2eHINWr3|0niDLCr_|Qyw=K3xkUPZ;UBBG%WtA||P|^22 z#zC`~@W}AX;VY?y940lq-im!86bLuIH0sH$@9OtG>g)Nym(S0|c^6HZ8)x*2g%C^!rBZp>%iHYlPdb1+8ObBUEAU(L!ttU6fg<;Kot+)x*ly?`( 
z3j5k|h2sf-vHw`GCc~T{TQ?Jel`eC@4L6C^q+9lybE<9#T3b z;?3es>#LjUstBS&QDX*3?k8UbimSsPMfq(4mh1-N^HJ2oL_6hsSMoJsqaYn52jeR= zr0_jvCk{%BqT?Wrx5gM7S-hGr@|&XCnetR^G>gB2ccF)a=$kZe-U_mc8 zZ&7k*qO3NU7(}clu(0@E%!Y#%!g-?x3yCUg9S64}%5ZVdooV04m(xR(L=8m_)b{)k zKhE^M!&AI#Ib#T{+5yqWSvUXo>A9JPwvVrapMxS!Q`zT<0Rf!R1Aej*%C*#y%@le6 zTd)r3AV;VL&$8XmtFOfm3vYjHencWA>C0E>;|jci{t>|s;?l2ZL}|ETl}05GFPku-*b5zvevb?4kYC*g9 zn5s@ZyD4IS^$pp4tgo*C8dwZx*x7#jwn&Y)MT)1_%v!^!!OyCf3bvb+_pDA&1KLlH zgMv?;!BrS|5Hw$_vJCtemRZzW-7jdi5yyHC7u|jr_^hScH;e^R`yji}tl%)3$TKza z9IS&RdcO8EzCsCHOi>4cra-wBzKx0>HGPe}jRR>3xoeAR8#*eUk5C{AMACjk*Gu2R znw9ix9{%LNJ}u89Y9HACaijALM@q~#jP6$@#rFN=sN-NW-Sg%XL><`AD`s$ zja&7gP%UOA|FY{TwzY=-&oXN%&%=X+sLEiZTZ(8bN1kv6d~KX;|GLwS!)DI*v|N$n zgN5y!Pgza{A!*9lHsjP0X_|syF{1O}AD%W{brNDOHlV)zJ$Uv~g6&?x`=RE+op6hm z{uF*Eu?Z->!ste*RkBN~g|5MEx(qt%`>yu_kNRwbs!#4Y#AyIOBR_FyM+`mF=vqTx2V(8P6oB(!_LYn%Gxt$VMy%|;{Cpr#O0KAJ+8)t@i^b5kQ z$dLD3#@RD=AUuOpv&FBhQgDiRG!4k(D$?q@= zvx&2A|E;|YjOeZ6P_?aZ1^4wPnpACpgbE@u69z>K$Iz#cQmYp%l-RFGV8^y_7-Z$p z*5*t6^7Qv{Vtjq^yz%bZ*9;}f9qy}g+X)V^lTjd0G#**-g;WN2)MQ{{ouMX&Ec75qgFAqg$zj5Eca4(>T8Caem2w+(@={ z=(m1|#aA)U9|vkBXdkb?84y^OxX+BGPcGV`3r^l?irrV+P~Y<4sE8sLQkK zepz;|-6#E&KwNTOq=?@ANTLH|9h4GUG0B2$^Sl6{~pQzJ|q8Hl7H>? 
z-!t;>b@T7#{(t*ZDF6B-e~-rBW%<_w{Ohm&$36ddll;3${sw~oMmK*A#oy@c-&pcL mhT`8%0@4-#AAfuA2IbkU7S~TVG_tqAKN$(d7lmTRZ~qU)h7JY* literal 0 HcmV?d00001 diff --git a/server/jupyter_server/static/favicons/favicon-file.ico b/server/jupyter_server/static/favicons/favicon-file.ico new file mode 100644 index 0000000000000000000000000000000000000000..8167018cd005ff4a24d8287c620539e23c69aac9 GIT binary patch literal 1150 zcmeHGu?@m75Ih7usc1=q#Ar}3g9{1u5G9Vze%<}~9qSED z_*E4+*VxQ}1%Nd{icIFY0Mu3#9X}t*S|qcPXdmjkckEEPihH)7%2nLouWvxzmEyD; z_Z}H)ul%aFKJ%B}O15MDW@wjZiljJOvMuwx&zB)}Yvgo9FK{i_b#L8MN~r!Z`{GSC cvToVotH@v7c?w*vZ?LC+B96p?o;l#&8_znXx&QzG literal 0 HcmV?d00001 diff --git a/server/jupyter_server/static/favicons/favicon-notebook.ico b/server/jupyter_server/static/favicons/favicon-notebook.ico new file mode 100644 index 0000000000000000000000000000000000000000..4537e2d989843ae1a96a0548aa0a7066a22e2698 GIT binary patch literal 1150 zcmd6kJxT;Y5QX2WpqU$)xPggaBK}}1>McBhsCWizK|@m`1#e)g;lhS0Vx(Z8XsF1B zf@t|QGc63$HWTg)-xU2`y`G|5YVo$);ya`5F=+Oz;Y!U7Un(3%^tjC*nw^$zt$lCLQe;7 z68RSTXM;T!>A8mbS(zU-$nVo*u$!1+qs)I$$1A@12wJ$KD_DZdiXRr{6L1H)bNs;% z_xMoD7kliL`IDM&zEXNy)YY0_vYhz#zuJz@P!dKp~(AL>x#lFaYILfs!CRtU&#LehmNA|NqHQ z|NlP@`~yXO{r??6b?qo(q;Ts0O91s>q0kHwp#F0dnjr+#{|<-?NpU`0ZTkWVxAGzYSw`z#PH=!+V$+80G`{{Xl#ih#x~Shz$}0 J$-&gZXaKG5Ui|<7 literal 0 HcmV?d00001 diff --git a/server/jupyter_server/static/favicons/favicon.ico b/server/jupyter_server/static/favicons/favicon.ico new file mode 100644 index 0000000000000000000000000000000000000000..00ac191e771f9df4dc853aae6bcb85f89f94f794 GIT binary patch literal 61209 zcmeFa^^VWGy;+miU_ELG*T<5h!PSa z&7w%h(hcA9x)wjreZT*J=coGzU$Sz=oO8|`Gjq(GYnZOKDg_x684(c?g}R#ZH6kLU zKlF$6DEOCG<;$nQU&LNZ!d5S;-(lM)e!+7lt6OTZr{@P~-# zNER{C5%4!L^lw>+zrRKLXC3+feugeAE_!G{M1&?%SH7U{Njx`)bg1QnB z`IAfaQ7{oP0!d2Ejt>0$1G_AycZ!pS`hWjD5POz5n2iPV_qx9~Q0E6W>bwl2XZYV6 z&||7>|9CFpdgU`&cv|~g!I%Hu{`bYYo1Mr0B?olD6Egl?*Z6*7cu}{I8Gv=aT;+$zS0854Zm(O#T;c 
z{tNE^2F(9*lK)M~{>uRWOI-i@$bb3vfBE)*`8I@*|D{s@|E5ykS?x_5ISIDM$&ftd zKpt^SuJk~*@z?X{V(uL;?{$j+TZgIXFZ&KIs9`;SRvD(o`bVmb^_Nr|GajkN>fYaJp*_|VzZ0YUmQRAP4t5^m;LL;)VFUQ1E*gU=>@B`^Q^)9C9-nFA zUgfQ;9w(>nypfG-uc#0^c}A{K|M>Pm$9hPSQvF6>hl^7`8zpP|YgL9rt)M?3VQ!}W z6P=KKswRw|w-~EDFL-!PdmS0OY`YQarXqJqQ&Us#l42~qAM5^f20mp`fqsANm`-S> zl3!l_{N1~MAC?0+k*6B{F$C5o?+rFQ_{KZiE%fG}m7_7kjn{uhwEZ4ejbWu2-Q23b zeL1Vad%UICx{~v|yfljj0lRx{pvQ9CvD!L>kF6rHSDI)G>n-_f@BdJf4tU|S&f@m| zkFKQGcO~D0e0)pM!f^|KM;C3A{2PS&3H=oaAz*7U2Y<{`M>GvPjYSEDlpd%wDqAq? zQ7V>LTWo%8lDlY6r)z)Cp8PP6V5b82d3|{(3DZ*;W#sz-<`HRj`jU&q+%jW(aPy{8##RXGP8EB%kH07L>^Ru;>~s4**P&-t%S%|@ z44>skoju3&6!!oD)d7ZX{!022 z!YzXMSKm|`kC~TqN3NFfIJmyXcAT1(HQ4p)U&iRo-;q1izU2te{tB-8JP4t%6F)qIEE=_7u2F> z|MVEoB-4j$PO4Q)%kPSd8@P5m0uD-D@x@*Nuh&YI2=8^hL(Ju`?PNtoc$g&v=33_@ zSCQhSPFsdd4TFZc-4FP8KcAgA`rcRg&Bt$whbDF(QGZi^(%OY^7w!h8y&@66ZzzIt zNL)Ral8dEJ8RILG6VzHwIc=YjK*>i?rZ%p#}N7e8B@7lkdk`YCfj zP4>`^T}{zrlT|^Phmu}K9B8hu77AV&H+}NM^ue{=l!(;ZgxmnrkEHXriGP zTzS&*;L`@t8D%5k17r!|N^EVsZ6XTm!xsb!%gDLy6siA#~>GqY3U?fygoLaIxXiQSljc@VRCCMh%x55z_dI;y3-g z1K(d02m|-^m#!!Ppw?dpM_r}!(MjL1Oy>Qketxn6i|C15f8sd!Wj3{yp~;vbxN z?V{?iiF=b66sEW${Pbo?7-1WdIkWKMBu$!!Msf-hWxEz)Xl|#SBEKFAjI*xDh`EF* z_Ais}szGdVN?YCP7<_$fBHDKSWRZo>Qe)Xe5u>ss;ThCH4Kuxv!y&Y%a%JJWnLP}7 zV2iA#uR5RBy7hLfeW*gGDdR-dLf4q?cE{iC=I6`8>uEO=94$5(+x4W~b%jZ1>5Rs} zK~n@n&^LyZxoZBcE^YYT-;aa9WRq9J3meCmd~84~ue-?bjI}@rTEKA8%gerGvf`G6 zksxddM3@kA{#}V@LnH*lP$Yqr{#L0}G&8FE^$im3S(lD*C0?Hl`Y%TbOREB~@{0w% zNw5oUU8dhFi$LK5x)$UKZ0=Ubu8XSbk zkc#5p?TPFPgS`lSmc-dehK7N{ovw7rSm8?6o4WJ0lk^cyu9Zce-IlckoJNsv8pOIT z2W!L9c@LO?z~bZUqor_j$x_v``Fcc5+aR5u>8(RlpWp>HASIf2^xmjIk<(0Z91g3u}ep{{S$j|pbr`R(-yG8F3Vjjq#*yLUci z23-@;houk`04h-*BOxHqCCj#78Ll6cJ(xa(-@ArhvW;sbCkUGWguOP+KtL$cv5@N- z<4^A>>F%nZU-<>aG)w{so&UA=`abA=>@t|HT1D2Sbkl)c64NvLyP1X}&nnta!;eC< zn*+6ax(O0sz%>q=CUTmxp#OHgusyly{e4Smm$%|h$WFpKmBBJ-)XT&O%kWs&&vg)G zJ$eCWw6r)9MazA-SR{0nS_w7bDs`M;B300b1<8}hXjJXgGmL8sBHnkFlCU)oz}x%8 zg%n|7@NcmWqGtzGZIeatiy00*9quO<3HIj;40q{|zA}6hjvHgml|HuosgAFplG6#Z 
zX1gAIKEkI2FZe{6sxh#@SP(BYtz*SYIk{S+j(FlkhDj* zm;>os#>jbvNgejt*R=c!Ewk(8Dm(hQHj6x$(#O`4C?6f>9Go<|n_>JZ7YXj$Oal3< z>$YX{Ql&plo)HejmWzhdiWd$eG<7hrQ964%B)o#nV6=yiCjZ{DGlv)>rh8kikwQ>) z;$Gh0Ak7S#lH+0msmibz6I{C6IessS{1kIt9e*LD3RubOa&}0B;S~j<$4Go%Fe(Nk z2y!)0Pt7_~M99k}9*~y5%2olM!073CNHdlO_{`l(fq(c~KA&RNXJ=yj|Yu zPKdJK<-xv8^w*q&7nzrjw!b)8ubu68Cp>ktWh7RC#ktlnQ=|4YDIA2!L_nm$Bk#a3 zqUJ|wt(FH%6^ae0y5tq0btFHmu*`mT@VRDth+#~it=p~Z%t-jScB0B}-gC-%T8;pFF*(=3n|4Apd+DusC*8G~>FeF2~kS*mm!;c_0#mD8UuO()$W$9R*Q zdM%)^N=L^T(9$E+#RtE04`>k9&k7 z2zDwGI*@Kyc1aL&^#r7(Z9`C!|EWr9y9De?(pu;@hWk=8_X;l^Mo2V>kVhYDUw}U{ z34cMek6noQaV*|opg2AHCPBSPyx)}=JVyD#({{#f7wn7EvLf|9w5 zTkP0ap0^+b8PcblB`a~Eq=t^`6!OOlxV0jxHCwb^wDsOLjdU$>!dDJ3DL@;$PM%~R8pO7*F3Ifb3 z`Ez9KAu6V;mt&dBwfX(h_ujb7bvqRNN_JNe|2kTDdkMjR2vug&U^-eiQgFUnQ|lLn zv253=!a6JIyCm=i6JdM-&W+0{8LPs21-L%b_`KM$EaE6;2sKpkK*;^%dVEc-9bw=> z8rZ#?X`>FT+gLqJ^j!CL`~BN8PU$u|B2fKk+R&Jn*|US$gx1L30PJC_Eh>?KjJFX* zHSW?vtTQh!uva3aide}N=6QEooE}O~Ar#Z&HLC+5sj~PHb^YUtQblJgb*?Ty^_B~LKe+m{NA6JOe<#n>&b7W!A^4nK z_asKjnyY8EzwxE!C3tlw0AeW92y4Lxl8Dtcbntv5qZj^EqJYA$zreiGHpZj!Fhp#D z%(_KKq$~)II$C52&Py|k=LT#+glHr+O;QMU#(3qE#fwrvUW(ZSPWP+h9VslWCE|CT z({qXZ(qQxnlJ^59P3g1pVTF&YyKSKNJ_dOsUwdtMIP_pLA=lY~rSI`q<{UY6^)ZGP z@^tMdXZY`|l&8V(Rp!B&SnE&I9fu5s&djnC?ZZEO<~E0}6=uCR(X}Haj59Rr5OkQp z;_R*oP{S{|kmVks1(4_X^T!-_F>l)0NGGH3^A-|(gZ;ZN1f7#;azX$;izy9}YkVWZ z^x^_L8KyTe(REQJ+y8~yD|YDjS>J(81Z!u0hT%mE;bB|(Il=kKJL<=mai1oS2SqTt zH`7EDVmX=vLZ1}LC^!-7L$;ruB{LLedn_!pL`{0op2t>4fCdQ=({uB8MF`ny#)-b# zyYgjLm7k4BKk^ojy{#|yzN_PSGb|KV0-XxDSS-Iw7hc(QT-TgIa+mN~2E;p%Rm|}9 z_7GA7GIoHIf!y|-kgh%oLswBtixl(L zki2pij0{H4Av}sUf{Oaw2no@bTmT@7D=)4?022E+uv0ON+c>#7V_{8@SL`P(A%Zu7 ze94}R5KBlUjKF$7;6w=it$}Fp9l^dcv9P07yMhC*`+QjHdry~$ zIrJR67lARuyuWHBpw+w-(0%{oPsZAB`=g>W<0Og4?}yBauxS6yqwJu>HT$I*L8$be}JSIU9|BPG=q41iQtW&5y5p= zEPNCJPX!WZoZufGxVJNKmKjNEh2T<{9|JIO)Hl7yLfhNi$Oa89@F+tUhMHu)yO>Zd8bU@MPMTxq_uZHshwPOY9fR!(jIlNJndp_0&y8le%z+A zB>CHTRY)fC75S8cG?8fs3akC& z7LbGhkqW|A&$AvBgpq-0Qja1ws`>r`L6FS7ruF<#vJa&P_mA}xwvaj*AcU;}(`k6= 
z*db&)%j|JO3o+@+M=X~E(S5a@d2GW3Ea}kzIy8uDCuAKN^i{S^%64teH{MaRi%DVb zgZhNRcD;Sxl2GdsBf3Ele|6WBaQOtE>(avo*FV)qR^-C6QPiY4h}VZ^lH35I@-9U> zVW+gnJek+HgBg(%JjJ)J>Ug%u?5)U#K7toRgrNq#asscY%Z2&IxDhldvKDbCN;*9R z`%b;GZVZ9XaM^XYg743%h>Zm76W2Z2NWo2{B;Zo?XcOQfUdZo`96ms zcOEI3@q^Jo^z138dFAvltT@8s?dc9)&D=u^ZG}`-crTh@fqKZ+d6?ZfPiG_vWe%Qs zA-3*k;9ZJM1eR~S#UKp9Mb8D?SGS!e9&(=LTaP*9nA#Tm@ncU7Aa>N#6 z2Y*HeFFeThr&&N;gwq8X$d*sE-z3ltQnHh1dLInrVV2VhP>n1->3U4+s$%xCgijx@UmooLnHqI2O$8b;6Vubog;Q}qx} zu-ZQ$6h1ZSCm=dApt!|+0I<9bhVEUpuJj%Tg!%!K=wdkR6aOq6v+NPTu~9i;HlUJ{ zP{pnFKd$NCRqA8*g_$x;BPdDfM68*#Afs-hVk5galpU@f6!P)i+%4Lki@72=sTN{8*<033a)PgKdIeXnl#W|uj{ zt$Zntv1(@bgKN8;?J^}1{C2bq2&eaCD+qXJOY+%guyIW=a=nv-bX+~FJO>mfcR6*g zbP-CnSUGU@P>%8==w9|1L}eihl>&>RIUi-G8R5=j@a8NSg|U2sig2YF2C zxLFbuugExDJ=UwHK<0I^>^c|frBJcAx{K$2SA7ENBO#Nq0q~#l7*}Rzh3~upu2Sfs$SrQx%2Gju@ebl&AD23rPYwadB1ySM7Ak z6NGBkG{F!gfxgP`5(@vvXdKitq>u3Ce5_0gLLbuET%5^UE^@mQjuz}rz!u6@M7<%| zlQ~^us{LNsd8%h^{>?BV*dCL@#*38)pU5D;Hx{lo6$ z%<@x157;r>Nqos(rTIL8Fd174x+LlB9t9iRqAU;5k%%K^PGj?ik)KYcCKLSPGLV@5 zY(Gr7-(mp4`4VVi6((PzJB@isQfP6O=Ck);oFuh8glR89vK8;Va2d9t6tvH}y62>9 zHnICr#98uH_p*?|pG-d(-J(CKKzb?#{@Ln$8?HgQ+3^q<#yxLGa?*2R%18j)=4vGI zJwx*06to}g>Od9T-6D37D)>?E73D#Ly-f4jQq*b}vQ`gvh9{OaIDXvXhAfK~=t7%l zDG@UCt`k-MpInW;k3P}Ha`MDhq3soma+>xtbq?8-764Y!i$YE!@2ztyEQs#=`4~Z~H5@TcI#yXWf?qn|x@KSa&ur7Bu(R*NB+=8xp z2CJuLoHZr=F|=xCxM0XQ$ZPa$so@SBHvr3cwHLv#Gj)(2wh?A%6HqZ`(ssML%UNuHuK%v4f)M7S837Om4A64-Q_NfIeNf(*^2-FFM1 z?p-pktF7cr?sYTI)G#Snh!}t$^3`-OFVV{B>LH17NE@B8>3o+;1k=9f83ie4QShZu zdGySSo*oiOUa;7=R4)L-o)At?xVfbh9*Rh=!M-AZ9;v<{U@#d* zD!j`En&t>p^F7Fa2Yc*JLgKex&ywh}U5gv3W?hg*fFz=dqh|PxK2Zr|5k#1iz|C%b zd2temjToIm5YYAZXD#be1_BCPlAV1Cl_Hg+o!3LgyvEf?U{7~ z(SSbnY0q`QU&u|ViAMn3`9SN%KCK0C^&Wd4Y1EO zv!>T4?&n=9B@}gX16mbuoVImWl~$4fXn{xCy#kLHUo(S~`8&L>ZI>@WwMkq1B-BOV zN16J4u#~p#X%O~?Np?Czkiu70cSHV6?P-w3wIfl}WXik^etDnT&dDB?CdG8c=$J9c zQh^GO0`!p}9k7N(%kn>@s3fRV_kz+>&;y8^ToQBN)UBqj1(Fv)Ui4icdw80hFf6-P zmSEbW-@SsZAD)W?<59veWc$yA&bC5o(DeYO^q#f`)_#v3ZkxbQt#7|VQ*n{ z-a>YvNCv 
z9H8a#KL)C~K$DXKz0$P=Yhn-;^n^9rPdo9Q4KJoiH9Ev&w326NDM<-+@KebJb&QfF z#a?QbwDk;}ZJ0|gHy>^|$P4sdI@$|DR;NXF{6TrhX;CSdp(;TPK42r0+Ut*LQe_>w z>)4Pfe^+dG-_Rpb;j;=t95qxr^6Zv)wGj*XT)Dl`>r7+j$W;Y<~Li$Vt7ZHaqDYO(`@<_zG1pC3W&Fooc4LG=*62Zwxr1~Tq<=Bkx~>|pSG{k}>Jh##avQ;Vbx-YH=dZ_bvA zLZ%{N4xDXiqEHRWCful{eagD6Uc0$!PRHsf)^hC*H^`r#j2%r=7@M8m<5SvmXy|u z9K$~sb;|4zU{1@z+PV06&c5zMaxua^8@kIt5dIXnxz8=+I;Lz04TPb{N$Z|rpJb~x z1#BO%QiYyL`&K=a-^j;0a>{>Md95=1sX~5KC;5=)mfgv@<#O3=W%5`c0|`AIb^ZHC zdr7dwYczdjcR9;W&c|Q-`X+o$&tjCmJ|${1|I-P7pSk(rZUvwvfVB5`5OV9xWQ+U0PUPnggwyz@#xfW3$CuYRK(ius z-0N+n({ddnWG`rTQLsZ^EkO?;*u+L6(n>oO84#tY-J}jD3Y~?b4+`ddrnrOsT;Vah z2jnTi=p2LsOTieJA9=dqtORW<`*$&bEFo8GVE$fTu-HZ&?s1fd_A8yhur2hl?Jp1N z^zaP0SXD;*ZP)1Z{u$C{D#G3elQox*%r70~KL%eOzd5^bDQud|LlH4wAbc}Epkd>bJ?utdhi3Rrb;&e1k!?WGFoZ$ld0s{qeM##uyJ4 z>HS_EL@u43eT&MTC+CTmN+%OzPS5Lc7~ZR4X+5JD zkV=1xsXAf8!@pzCXla35}-V&*m-t8bcg%f-k zLA@Mt9bnY3*r{Vo_F46=fYRP@Zlg!Vf?z{@0aBZ~&#|Wi#W@1^MZ`h=kotTf6|xku zZ0vbPieS)>7k4g=13^2cp`z5DdhvZnUgoPY#?{8zjydw#cD#Gz!&f?`PzDqQ^dON> zM+#&K7yh2%$2t7&!P~wIc7A3g8Y^@1n|&ki&9jGEls2Xo@Hv@t0+kU?NNpN?P-DE> zTNvMug6c!!U#5%*ja*N;(L4}`&21P==JR^m?Z4q?^3|y#3abtc&Y*I( z`~gD=^p zszu+XTzMF_;JUVT1B+zhnU{q&O6*G3>Ng#W+)l-A-C7xST&a?;>|9^k=+*NL8&6qo?qpoz zD<32ScnI3>p{MEUY=0AG181P#%`tsKezy$u&&HzqxrT(1Ujco0f4{(y=8wFn4<#!F z+wR5s!DQP8{lN#_GuOrjf8zC5M4iFjN*H7Lw6i~%!5!B|Cx_-z6Bb|OG}#}L{h23x zO&*o=q78bK75+1)YZ`Sjq+=f?+FiVg3@NQ{8%b=tYeVQLM^WPoYEFDDgo)L7M8wg7 zCSc(ipfP|+K~h?~?fBv&KAlu|6Z3h!by!|Vs*jk zaH04LHS|QGy{feB<11JnSrXwBsx`}Nq#N-}GS;6)t#nv-7JB{kdI3FAi?)w0H7Tub zKlBx7T6K-#0Zfx5(@EdX<1%&Y{66j@O4`a@eDM7VeoWLk(_YW{Crg|0I%~H7y|m0) z$vFzakW2O@wuiDnHQL{<2#^H|(v?i};uN3K;*p9=`1WFJkEM)+>)wcdDYj9cYw`xv z$yc%@e)gx&MsBCHHNy-_9)uZA%;~WGf}t8f{#9*1{Gc~y4F+q#;4mdyK%P;y->Zq% zyv%$ib3;yiYJ|D0yX)WY0tjvT^&uL{il7inog5EH^=Ey|aI@$K*-+_C&a#S=^Lhm) zcXk5CKd{{VS?^de{!nCN&(6bhGuMY~z`M;k1g$DL*$?m_?XLY#xCHpeEYx=`3_0KX z#;23(Ql+RuGa^j74^^;^O{EvT-`SbmsZ)-=I~EyHA@Xvk>Fo)-yaqVm{Lt5K5S{`z zzH*xHVgxW)ty-BIxIK$L*7bypD0BRu2&EC)`qkgVBrz9PPJ^KYifWl1k1j&)J;MR) 
zmFoH=EQ~q^&<#!GiKphpQNKP#B4U*8izTp*v3|SNqa&2!Xv*B+t{qS{RaW@f?E6#5 zXa31GNT^~h-lhFEFME?n#&e7+i8Y+HdHHGP0>y^HogN+Len&03oU2bZnAW;aIvPR* z!!2MJVLYZ)K$&(gv8g;k2cf!JN(vlEk(1YU@%xIg~R*Qq{tQ{3=|6Y2!Tx(utq=I4%$;EsJ< ze#?eA7!k(uw`e5H!CCYVkmBoSTU$We2}X|&Qv>=%mu3kP*8UPv--#U^)ziB>tbGj8 ztWlE;6?U{wv#}anJefX;PvT@W!d^k401uWCtn(aVkpV^PBk6hZilTUYkwM*WHTvFN} z<^mp9OeQHXv?Tjy(^jiizpUsdZUo#kk@o#)s+2!o!*DkvX-{rDocx%>-7}M551_Ux z7}Z-Og4%CrVG`p~C7scfy{7{gv!0>tOMZ9F>j((P*u=a&c%z!3C1CZg7Y(|VMP3;8e!oQyBh{&SJ=)gCI?H^cef0ii zvz8xEBvQ>^>d(tO656(=XWr7Ak03uL3z`HfuL_q_p+}JF^8jz6PfCYu(~=j{Ws&XD zwtvQnLs#4K;5pY=lf{6!bJA4h-dz5r=lZ)P)Ojknc?XqCE}<>{uMdlw-HTu*osq#D z4$aIY8Uay?1r4{ORpo@}F{1&Vwwqhsq1R9zz0yi?^bM3&m&M328YE!g8_btQ8Gz~( z4rN#gkoDR#k3cR0YQda_i%I)TuoxSdrm;%4fve6*RuxYDe*C^OX%pM(ykqCdRts*7 z*%p5@++eyZO92)WM}wb!qW_)*w2LtVG>hg>Q2S;+)>N=avQ8j@*%J71zs((g|z03!d+tDirwE9fUQmbm($ETrrAFz7x20kWVek~?K{aJM@i(RAa(WSgMEEl)7^a*BjC zV8(JY;fb*LfMeUl_EJ#MDf_oVT(00i$jzG#gAU)Q`(aq;1HSc)=oj2%)aP~mMxXt> zsM{{{YgyOutipy1`7uhiD~ve0C#Tok$k@|#Xr>%Nd#iv=$-a&rmcddI$k1=~7WS?i zB%e!Vct?^I8i#>nV6)|@f*SlkcE9<0>Z!`!$rIPLzf`0k&F^>WN;_2Des!gn(1?l$ zcB#$z4lcd5dEmZg`reOz=m|9ogG|TQ1^8B5D`FI@hE0`nZAzNy{*KcKIMhOz4$#rNwh%n3L9;$>wGoh+yD?PV9`*z|Z_3INrPd`}`i7G_*LSANTkk?6xARX}pUbre zqtP4A5!2)2`YQL z@2)EG<$3y?!7!$aQ;V2=_j^6;2duUKui_wGqdaOg6v ztu$M#1QmHv^LkBv?e=LR?^t(nk85FsCSuM&fu5*E1lkEROFhX@wWMoXwIIXw zcbe%HAZxXL<3k{zk0y5<5WBmZlZmvU>#N?-1xMt_XvKf^*!^ja+RlGsWt>}W4cPrV zE8L&UN<^TJW-B1I#R*=hX7D%G$46Piz+^oh1Wt=Fw8D5onTvA*n*y05Ku9MUC^mcMbXR867Z^6}Y<8w5eL! 
z+81Wdwb(<18<;%|P7`4(n4pFa)cx~YoRTBZ4w6HY?Q6fG$`yr@f4_p>1mRtc|JOw5 z_LAu7weDRZx*`&n8|P_4YW#*p%_qu|f#%BT@#?xJI?!-3pyCfBy^#BHCW`=!M}WVB zSYPgbyIv9G4aZdWw0M@@BQW*t6##m}0lV7|B`sG$;jWx+{sGuR);%xHmekE4@(IlZ_!)Ei>jd|8{JuN)8%@z>wVJOzX2Z(i)@$3j?y9tYd; zgwMzm7^W!#?|q2= zb>1`r$_+q{0$6~X?lizM=uB|i^ju?Q{ta|=auI7Tdo?B1enM^ZVl%nb`Fw_4$`$uPzRhbPnELHaz;5%&tHr+RuZ~M#!a2QJGU>AM8YOX5f>X$Ty zAw(N2#hxk5JQ$%htA&<81_ThWx)BMMz|ZB{1g1j<9Da42Q*LpQ^EK2|*{VAej@HO#^F5#W9J znz&3hq&8oR7;57@VxqQXOr04ANuP1U)K_CzVSKL%T%Fzp`}r~5q7&NBw^(o(r%Bht z1R%1O2@d}xdu)%&QS&rMXeZn0wH16jbA?v?ve4m3)>S31_9xK5Miwhr&6@6g=x`Ms z+EQ9@6HLOX5gWe_Yl9GO0YbRGR^F<0Aw-A#*tfjQ8=h}7F5|p3cbWjw+VtN!53T1T z7*D6YZSV^^%%7Id^W~sffY&D5XV=(z^q-O}}ja<&0OR72v>% z%HH_7v(PLsMiwAM531K5>~O3tCak`~bf@&lZM5Xyz*7t*cNPYCvIT5%@^>I}# zSz_eD^MRgkLmGX65b`yBU)jytflGug9w3kl8nVK=p^=By>(HdFCrE7E6o&4Gk?yWv zl4~sVQ;DPF7;Z=!Uf?mj{6)Whz;n7WL)ZIj5?mDg0^@UGO3j=geE>zJ9O!0%6Pxz4 zwH#vCEhk5s#%L#^Q#9gce*$4pYyn?PwW_tRX`lA^xgCbIi)}siyyTE3FXks(3PfZ1N-qvaD@@!zqy2fgv+EBy*87L?_(b+uW7sn8 z14ZPYE}(^aRsjf*SW(Rv?ANC?w2tRqRtng*-27d%_*Lgo*-K7LU1)SjjG|qS&Sj$q zsh}CWUTsM|B|+l?+?zTbl>P1k^biYe!ys*2CT=<}S|k43NKODhvQrs+Bs505e&dBS zB4D-L>}Eu6F+JZ+8T;uQP3dC;nyovD@$Pm+U`IL47z2zJ z$B1CmcDL4XIkowTtcbLg9lx+sarBX&O`yQ{*E)VHVGt|KkoPGy^%y4j2+NqE;mNi5 zP*XdIkp4&2j&4UvI<;mD0a@Bh;)*7GZD;H88vp|>qPkLFlTg|`t)cP*$11Kj$*>RA z+6b+dou+>+2r!6U1;o^orDu47nP{CbdRHt5&Osj}oHwVDzJW{69Q0n+a3B-FPmT`I z`!9dQT&yt5PU92n9b_FTxl`sv;c5-;qs~hM${n^#hp>H%y6;)k`a6)4;UO5@@h__x z5~-k90}LYd$N`Dl9v#1Rwf#&~QcMU&q=9vumji09VVF>xNp&i*+Mp!I$df^**?QCa zlbT=I>qR3+_PnzF?j=O0O8hBe^*XmZtDWtC=V8is^`kX|jcsq5X~K>2fD~$A_>?b7 zJ|d0?qf8=ctFv4J384chJNebVk$W|L251bIDsUGn{waE~`&V3+(+r}6Ix{L>RbDy* z7Yh#nfRvZIL%JB(fPevN5>(%b|2(dVo9fIStJ@Hy92XQUcW4{+-^j<;Iqf)jT(s4= z2mtu}P@=*J;n?pIs24uQYd!(m1s!pa2Ck0ET>*zzBBCQ!$E8IS;IvNgp6WOMNg} zM9&W7+bV-IxBLx;21{J(bMN}zf_onAYtHoALi3G0X!cuRXCKE4f`)$=on1>~y+d86 zqPECc~TG3NmmA}0^Zs-xxoT(_$Q7T=lhG^LGiuYs`u<){< z{RU3>t$g0);1J#V4$YGMV;mGsNuqISB`L~%qS!h$fe|x&EIKGgp^Qtktuy*4N*?b5 
z*b=3I;jA~+T0)2dVd0xl0q`+S=PrQaLSUlrs~G{o`~+pCgQM_xyF#K+Qr0W~EJmqJ zVQ$E$YGU$!<1TrvAgJS3?WS*7fMSxob`zX2%nv2er?i2NoQwlN;|o>qkxo)&-(>&y0*l+mrcjY7SnUHW1Z(k5~ll8fIBQxxH7gbbe9eNlm(7)Gl*N^RrI~_=*X!-Ois6KAq0`a)!p!{pSbZzP>MB z_{}Q=UcYW)JLT&b6za$znj+8L9y28PL&`Hw0}R`|EcO1c zK^3o$siSv(54_*e-Sl%)xc<%NKnu433Z%{xQ0k=&ES1ydN*XXb{982mvX82zd)a>O zkCzb*DQ&a2igmoQdn)|rKFp7`_+>048jnn#Uc8)pKQu|0Eg2pk0a~Ddni*nM-=IVw zk-!rSJC)1lxLxMOrx)xr2JA5E+zS^fSfQM3*;8$7W*tymT#>%u4M&s$;ptDpx4>fP zpzqT7(%=06It_HPolMciTqdpzgVPbl7w%MKc=P#i3*f_w=8l_`s5b~BU!MT&W-y4@ z`1}s#J3==adO!=x)eEu^Fgo+x4eB1at!a)`g$BIm^Ihm176>}?S%!43W8uK(fDsVW zN6eN(L1RY9eN&io%by5X#V^uLzM8`mWLjj<(Y{$xad;Bq6bWcfJ^A!i01>d>W;O;o zm#^L~#bNVF7^vXW5sGWk>!3U!!k7XO8t0^7f(&j33C=^vyI1b;)ykHat?%cuDYCG7 zJGvNOh^V-O>3TW3SUox)2#1C^Kg#>%DRZiZP>pc{x!bE7BxKJ84CEE55R)BM(MnRU z0H=8dzw7E$C%~`Wa{(_RHpIyjcn|h_J2u*CJB<;gq|bUsSx2lgs&BZz1?Oha^?xj0(b6{J7n3>gJ#R zoe`KvtOxPIwMoJVFGc`iOL;%YL1bCtsQ|Zm2D|I#mrlG1m80Cvf=c99@}UEO0gU@? z%D6vI9Tl!xzRVQ{o%N}(H+3+3@C@MUxDZDw6lLxm<`NV7{I&_(6-qn3w7j!d>S$Zf z?t6?(u484oc>MZIN(SpfYW6gqxbs1?jn$0!BKHNoml$C7o+`L!o$an`B#)%QFbmWn z!+#JxIWI04OJPk?pVGFnGTN|V1yWdU*B4N0Ub1tg@2?!pkR1G{QlZFq53%8RlBz&h zWhX&g!*7ty*U8p%clr_-&I@2gEVcs_5K@@=b}@?#BUN}3MeFBuo%yCA>~~O0oU7an zI!NqBM<5jzu*@+JjL4uy28zwkRTcCC-$f%p;(KzRUy$9HRG;hn@CcK$g`9aC$2wdB z9R+A4K|PoQ6fI`>w|U_9NAp+J=+^RjF_?JZzC~~4N1_-zEVB7g(J#OL>r(n2G^RQI zh-Ha{7YYx`pxs4#fhHJi_7DPmg94?fQ7@ZE?2EJ{bUtpS#^S8TZS%*J8FZev4xZ?q z**Aj3=ZcT3SAf5cjOOoa6M|{Y_FZx&XN^Rx>;c z4F1l2*-3R}9DV;0(!?ltUYY_i02{i@078C^YiOdV)pT*;gf0R~MPyuqq(a^-HQw4V zr)Dq)eHqbzqaHIMwoqRD+7U7%s~FAxn=Z7ltfd0t3d6 z$w9w39XcMbM*_HG^kzWA+Q**j=CpbF*rJ1#MAz}W=9-^$=a=K3vMLwL_^@N()O#5U znVF{WamrVyTkU&=L?`77I<9`cFUGht7mRb00Z~(c$-^&`8Yn>4kMJF77gs?17XBW0IiQlyi)Z-Q8PqU6|By{%QNYVLLtCbv@1f#UutuW>m zlj>fHI8)Q3hWkx5PYQZW#VWRLsrtx;CDlm%Yr>$2y9daU_!Xo*D>1?zgzQE@s&4QP zm6e$Ll`TpRw zgU$zcBV|hho-7*KmUT$bxx7L#x3Rw28`)`t+m9h&K5@|*>hdoyfet;%qaVz-2o0rw 
z419oo4mfBHzQ3Z`l*+>tfyl}A^z-?)4@RADnUmBnch`+A9{g#kmDH)ZcSiSpa5dh#mtoHg=v(6mkNqbmx$-(Rqeoc#Dn-$D{OMsPj1Ww)NfIb z^qPe&*G1#Wky1)tzDH+2_nq{Oz3PFab`3^9 zmr`{$`D({zC}ETO@O!o(>);2wOo^p?1Kdc~(N6X&W_#+)EgY}Nv`9TKkB+l*1QH=g zt;_?XGh5U2I1!}m)SRxjor3l0ye}?$K0+5`MIT6g8YkqE6qlc0hImC-@ju|vg@T5j#&mt;?3t~Chvi+F5EpOya(cVFr?d36| zh~>z4Hv(AVi4}H3gV4_qQm@lk=}C6?BH|;4p|_s}3K6%t2&ViIq}(F9W(j;hg{{hT ze%+g3zWcULCu6iNu79Ge%8hI9WxnBlRl|>JV%?QUayoCfKkL?!+bz=aB%FbP?-1_0 zCkIy{-SpELRle2OWmSAw+JE$c=+2naT9-JMpRA^mc1?~0-?#3qvgdl1o7;81-}+o( zGQBYE#oM-!PSlD0?{lM?Z<4YHqDv682-a#F{~INtZqJAjnddjYYVH^xf1E%)%!$9D znYlZrPuC1tW)fCIw`&xXXEma}ark2dClbFVoO9VdR?G*iD-eARtcLs3=L^u! z_Ym=#b@#TOb&XR_@N*$Yie?j)*zCkP4Ia8D1+uO+ky$L<>2_EB-O~1CP4G>(+3`tL zB4T7DW7hBHO-=Bh98iCGiQRsB{YAr69Cb0xW~ak_gS0QI*mZ=GPduu{X;e+}4epce zQ7fCA4x929vf!10HAvtrhCVbD2+}?ybilj1@wYUmf5yJHHo>)t%Kou9CRbuc@l}=G z9+Ms_dpX9Ki-Wod?W`T`X#6QJ)3dIR3YeIh3UC*z9_gdd&lfEpZQpv+fP*BcvS^#X zbP+p^=vcIW;7!`2)P%4|I}y9}d-s&c(_g?Mh~2>J&*m0{(Gep{_{DO0Eku&dpM7G| zEgL4;UsJD{+kWwF;m1pV(#SLTqz#jtr2ZDw`?o@MF>P;7t`BDnn_SOKaL#{)JqmXesaKr8gMPrHnmG&hyg?Xnmj;{>%bY=h!-Y{vctF={n6 z%g&bTdLe-}bfTQq?_ev2keB8&Lg{`xS(*30~#foiD# zN7Z)+Qu&8(=XBIDs)OvZx9pWNj$H~NTPQQKGZW{OWRHxHA|fk$mT|sOHVKh=WR)n{ z3cdH^`~AJ|@BN?W`8=O--`92B*Zn#DJOnnGwm7Tls8C!siq&jdSYL7zS$s|UtQp6v zT)r@u=tqnrb%jDZ_vJrspcL+YNVB)}v z5ASXzUZ;bBh%=BWS~mLLm^Rh@r(BeAd+vvW{P?Gzs{#~#R|&(%CI6``a8>?PCs$nj zXgs6**@GDx(UC#*w|#F~{;5FT`~OeCTNGn2rhVLVKQwOUkT?2SK&3v*n{EADx=~M< zzfE!%8#_95#VIbnF}@o4&|1j$;Vd<>hiOSONnJ1+?1geX+tZSV1&OftKICvb3&`nr zmfFG8Y1;hk7aeX@OZk)S&cjH?yVAaST|dm2|iHaR#NKQrf!55+(0E3AxbV@b@K ziBCk$kq3AUe4$i07{YrFTw5R0%2_ytSPa95#4EZ&zmS{&>EdYZr;EzX1hvclGrrb%BQ0(eH&mybI4Pr)3EV@(gARv5je_JOsP290OT_K`|CNjJlL?%}ZvnphMLO z#?WeFoK+E>!)E#B^{6{YQgt9dOTmZ)LC0NaPe0`qSsJQ+kXm$%ylgLK7pN%j|l;K0%+eg?74~RxS~zI~W`Vf7HDik*LAz`EZq3|Ff7@ zo;(r>8&93`!!Npk6Qu(d=H=ZMd=%HGFYk2#QHPl%7U5smq0Z3w7y!T03-6*UQ$ z93Qt2dQCC)y`4rjycxVab!bu@UM_f2!0@z^Tz*@_1&k@h?i{XY>HH_x>a-wqG*)13 
z&1WDytoKTMK)rlL0DVrdWUGz@Od2iOKJA+qoD_%2(2`c*0l5MG$ODBPk~C*ZsZHS7|qPk-l3lpY;59KcQsNC7CJ*1a;NuLY&tg{ z#{d-HiZ}4487-oNxYmkyeqrU9Cj*RO(DmgiCes=)BSQWIz52!#zbmZZ+=LS2Hq?H zZ>F<|IR|exCjW8uQpX~%{Y8vA-Pa}7B=(=T!fPTF(9v|M%FkliXIK>PlZl;gykM*& z+HDhY@&T;eSx6V`jy0^+uz$`_TsB@avXp%(eJR(lm<|F~4g{HtLY=XsE1^S(G4`E$mmSccwU) z`Pk~MF0m=M$b;`cdDc(pw6dmb5W~GsiQ(2>4i_1;^De;u6PaQ2oy~cg4;jzS%tlwI z$&z?w^-S%(#;6c=k*P^3al4<|2ItX|JS3wj@yB|3GyH1fCv(uBN{eYfT_YJFAuk(n zAl<|ROJp0V+a$6qC$>0cV(BKMF+r2#!eYxVY#k(eR2WsUL0ka+;v_pQikUXm{70hj zLdCXfSe+mMjZ=I?YrBUsOhXzFc&W~0kRE%#l<>5hpL=Tf<*>!~+{}Wz83Q(h(#xpF zz4J%?M-v1&R7rn?3tPL&GBJ#jNZ30UTz3S?g8JxnCwPT1>geK|xxSyZ=B|9~WHV?; zQ>#X7F)}3+%h;7ryh+m?-?Y<(s;7~EwV?R$%eH7fOA^CjBS)Zr$=FFbUr=UIIZ~*! z)o9G$y<}U&_gvD9q0UJYx69Ua!=auzT4cW_H`#!6o%4vT*ddhGAwoWC*C@7Al@CII zhbo@;Mm_BfiiW#&m-crqY877_3B!A$hDB{!A9%*N<=~4V6})D$DOrcY3prrrJ_F|C z|Ko}~1`Cphg2VA>G~$8mtJ6aj_la?X$md|9F<<5Td)chTAN?bYuHf)k`$9=EyC%g? zwSF?<5K5MwMAx5pOsyK0;16VO$y&Wli+`GA{FiOsES(#?IYnt8h5VPT2)9TDQ8(c_ zDtBL@Uc+ZU{vc!MLf~vkhJj$zY3i=@=u7xzD0ebcV9#=T{7nfIfQMeN2`bkTO=re* zPV=HZ)77c?s2ZP~wTIM-5Sft+aEMrCju^o)xP$pB$O*urLd!n?fe88j%)c?L>$Sch zx`MaN%`Dg7EA6VB#eNd2P?pK#fnlBlUV{sbWiLS?Tj(gHf=ls5{MBPVs{v zW?OY)_%P}pRA5zK?Se#scNs-#rI^Pn^U=E*uVpVH2k9FUMT5z)$|y`IX13tT&_V9T zP`az4gSXtGg2qQ!AGNWfFEiHJ2IOw99~Gb=W2KnA<29$dpEs7TkxvN+;1V3BTOEd? 
z@I?ls$ogE#>TyRHc)=3Y`f=(GZUA73dns8*d}&inza$GUNRGDYx~g8&M3-C*8RmZ+ zs3qx-gcK$uBoShC+aVcTBNi#q{50)#%KUn6)|p_v?DnX|bdu{~u)?xk^gkv6s9!QP0PT)yb-Kzw&J4GuD+Z0K+6)=5&J) zwAM{)JHzkch_Ng#>1R%kp7+so(Z2-n>EWT%%HOWcWk8BJO9=9T+Z-f+V9u3HOPWq+ zHhva}xptWOOhPucteN=N(<)|9_~Nk`#`d0$1f8P77{5bsu)6_`wvE^DdjXE1t#i9x zKUlV^6^{|SPoDnf=*R|PO+{Fw3ARUavaUc_QDXh#~- z!LcCjk0f&BY+m)II|F-@&eXz!bB_F()01{J+(CSsbW=qkp#9Tj!AE9BJ1X)G>co9i z(U8v@JKCJVr743!EE(EB1lOils3Fe|(O#P)K2umu#GxuKhef0%`R;A4#`S#rao9oP zTpnw*N^)A#{&PHJntNvps`G_Wm>1fxC9YodVgvQz1yQ7M;ALy(Li=mOZ|hV@aBKcN zma%_NfQ;-YMTXH+F=2j2`TX}+=?OH1cZrm9;{}$@+*}O+1?doC^gKBN5>_^4Qo->| z)~V5O6oqpvkAp5 zQ!`%6juK~sdn*5%R@9l0V&{y$Y^~6Ion&CMuybes@hBvCT&X6}LmDMY2Y9V>XziQ0 zpIIdfYwvnzQ2njrIm6M7hcKteTF~1=wx;*#;An+kruLsL4A}LbHisentUmWW=9UWa z3c+Rmu{K9$$I3C+V3V-B!;wPMZ1KWRQnHYc0E-}wodmVu52SeO^&o{p(ten(9z+ zOaeMS+@|_=LIrRR0fjtc?k}8;ishW&&-lCrZXS_w{#%6jnw#GJw7r$X1Aw_@X(UoF4{!?4Ia5dcIpvZN$9kt9vYt5MyovZIe{^Uqtb^D} zyG|7I#!Q6+FJJ@=!bmXpAHxB(BFM|^c>S|}W9!HTJ6qbXr{D^4aY|yk$GgXH5ya=L~YV1mKZ@6=jll& z4y4ioHjiJeSU;yt!=aqJ(aq=^D24hWmC?W>wm(`l(U(u!xUth&JWZ{}i-~D{DzzIt zVR9J$4U!4FUp0UKL97oLhCU3MiXSe&A?1tSYRXWQq8qJw7w;Q@Zg17dx7C=Zec=ul}L*IskbSRJ7P z7MY;D?0&5Giwp`QKnO9+d|Fw^B-rr|Pb|Z;k=1&j{_=+@ZZrP>wBjULHPgfK>U-xu z={vQbgQJ^r?z#iJ=uxyPxxykVCpWK}j{ieEgU#THVKSI~6_|awDlQ3-Bm@av3uUxz zg49;(m?fog-oM**id>o02owmkO3Sc%FQl1geYBNbuxxjo%S=9vUX|Ov9gqw^q@I}< zpR!9Jj~hZB?{DgJ%1qVFb8pa#*#umV%g0Y8wm3QpQ2fMVXhy0c>w{Hs{t&8TLE~-n z)X{tZih}k*_AY330TS0jUK2FRd0y$Kv?XDMCj21g4rx4VKQ;@a^dykeQO*_(M8R3o}+ko>b<%ib$+-!31bN*Y8q3; zh;sr7%z&g3=RF#k&V|QO6Ce;>(vk2hw#70xek*0K>X^)#&}79b06BBJW-iFP-+-jk zLj(?jRMDPSM;amPloc1k$$;6k$-8Rkz(=TNeR#qRB5~P>cFi}`eKoaJ5(LiF;M3TU z7dRjNXX9DtdLUR~Gm!w9ZE}Jap(DD;EZh{)yi0@QP_=P&eUQWm5egL+J%efvsnt+& zUebWUzC|}RH)vC?7jMIP4ZmdLT(4J>YQZwxv;^OX8M%mW-t=`owg`d%1r>U5_au_L z_1=1fo%(QIoacnQyEu>eTXl|L!T?CE$>wjL!hc0jd_8U`))GvsX6p{D>NSLKGk25_ z49S+J&dlexxbizw^u*nzs5|MiU7wWvj$XWt;o&Uvq*t~|w3)%8?Xt}k*GDVANzWnl z#u3>C#hckR@O?a2^7wcf^%dteR5{XdNc3t->{sf))h=aWV{Fuv{-UN834}J(sPBU> 
zz6PI2+>0i}tHJlTKp3)P$^elR^v&-nM%oE+api*%ufJbb4gpc@+|hCRll>g=C}6fG zmvoOSn;YUf1gYusg;v`~&G|Its&*(?$+Rd}qB;_KG;<+~m1?d{kW*LBYln+|_^8OC z9FQaNThoh1+%(GqXvWsvJ*x9FRl(=<1hOD_3^@_o0rKI_4_zVG!jfN=?Y)|C+#db3gbkk*k==< z!p4Ne@?{{r<#*}yRUj@f)_H3rZ<}h%WIWj7CubY85}Vjl^M`<$N_5ct6B@C?Kq#m9 z8gOJoz3(8J|A1*6X?&0mNG=oudA!r!TU!kZIm9yJ^AxL{jmi1{iRPlu=;J@j1cM+3 zN@ifwKP)nzr4XJ`RtdYo*X{Ho$b~eh?A4&12QKlPkNk|8%$SauW2hDs``}i4 z=4S2j<~fj1&kC+A2iQyIc0W7{F!CZoC%nuHftk8RbvkbB1?+8nDW$>?vNY>C}q-1K9zX>J=AV7<2{6s|%+ zCyac4aJGwlAnM`Nsu+qB3;=!|Q_9XKkdZ;5>57G@%)4lHk~xlJ0`)kLOCYSIkoRT^Nn*YOZL5 z)bzPK|9~f!J%@qI-a1(b1LsC{>4_!Hy=065BmjZTMCn#yqt=`IUj9Ih%@NFM@O93G z4YG0?gQ-;Y1qMB#_S@x24sAyr zIO5WhuFs3E5YBp>yjd`p3js3z^Z_q8C9{e|aLkAXB&j!5=ndEe7{W2(dRqp9>U&SZ zB!M%)|BfOHyo4|7apL}`rAK9!fPQg#_QtW!ne$A`bUk4TmE;W3TV2D0ULWD$l|97oi>k>LD~{eX=deNIX`VG%YaIR< z`F|4oSee7sj^n7lrXikVPY?U%Q~KbD#wcaemYMPzXRMHI12G!dK$4bNO!M(WIO1{S z7(b`a2b7dS`(Z7i1ZwJnC#*s#t8SlY*RyiR^vs?{VcHi3A5<*e=1YVyOmqXYPCo64 z=DMX%X9kagp0mv)S<*qlM0SVDQt9Q@yri9_YZsj#%aBcc8~7xZTD>aBMD~Vqyz%w> z5~Ca|43mh*>)*QGLk*HS$1rTL(V*6+w>gGqfs=TVtn|0~@21yMnn+N;rq#S_i3IpV zge!#7A_}_=^$KJPUJ_4t3BP;!P#OsWpcI1A%X#Qa9sn{u1&62mrVEz_@e^uiQFr9E z{jPtx54U}}*F*C_4Y6t6gAoW1&Xf-brBW+jec*=~(X*MTpcEWbdx9_n90PiLy#sYl z12)DBq7PM9Zr%u8J>e8OiKsTVh)8C241h~M#Knp;2V*GQv1PAt0R*k4+Hig48dnaX zFnkxHJq8@6V?*g;{}tPX5v?jttjZVaN$dhcXq?9_0boC_^v}=WksmL;>A$EdaScf=}&P2kLc8&_soH zB1QOVvNj6z>e}q5+}&m$%NG6XXiF$p#8I&%V6h$*INZd}>x&a)P?yxP*_syJX=_{V zmrZyG)G09A^jvogT%R!c95q~|HC9BqcFO!tVGIBbM5ZVZ(Q2geh#>YL6upm;US4kUO^x$2ujwgI=a)P(ff>Jj%slX=IbxFjA38Q$ck34QghT0Jx# zg@>B!GDf`yS;-z!U1gl>@EeMhMqc0GTXxNt4Gi7VtgGD&+r6Ld4UAeZkX4qVviCI76*#{k1xLg>nAiwEa8NnXH5uSPU^ z>%f^J&YWM&u}?%6(V3H@-l!)^6C{zX1G{^u@$5WWj3Y8anDT9KtVK=yFsr38B6#-u@;vQ;2SL6|Ma`Eq( z{S-dc0dmN@XO*Vnz8!Ck15}5UBNR(AAeX*H%NF$io6n9N>!3T&lI_ZMf^3G|&FZZa z`-~XsP&81M!p>Av3aArG0^QJaXcRm~#!bF{t=Rh|JuCBhl{nFrw!K%YDu;9KYCgVk zVgS8^Moh(ciiJSM|$x6ao#pL znBrOb?ti{>5P}s)9T|lK$P-p*35F;rPbYraZ7}>90BYtuMX`mkVDO2Y{ce{4E=>+a 
zWSg`lS8~}j5MjL~{W7Nfj~>bf+qrJ#Dd+1|dbdw|=`gJ1yF0kJ)DO6?%HWnTPJ~6S zxbb#JGDgUsFF<+EJgMTvg2pn@eAs#^TWrT$N<(8sPPKkayjHL5XekYbq0Ulg@thLr zU&75KC+DKWu3Rj@4=VoO)ZK@_ufJsfeqafkKFDbQ!wD`J!yMYpodp!lg)T9^gw;E9-QXjJ zNivp7;g50RpbvG{!5sWPI^jcQ)&Y0x*8-+B;IJ0i@>`$;OOgsC<5gHm9{08c_zIr* zQogPFyUdA?jwGx|UdSo}$vyyXin@2fKLW&vo}!}G(qT`2gd;PcOnOBU2ej+!9sW_1 zg(?phu{K&CKLSOSUld>849UEsR*Ti^YG@4J1gXncVzjLxh5t)Gx;KBN1C%1Iv6yAR z;dOYE37awE7v?Z)^3b^dH53nne~G(&y}oGpR}@qeS1aY79Jmo$$b;+h%vZgJyuuM` zWpk1ca=u7Cuc-G1vMBNSFsl)fLMRRyu9q}y% zx!3+W&R_N?7)YQ5RxdAK;c4VE=xT>Z2<4zcMdY@~!rnB(iKzq)+|c|JO=c>;Sm}nhoNtVZ)sa| z1k&E@6b!TG<~n|`JjA6%R3*6#MA`p(p&Iqd4uQ9y0EPJbX?Q~M94`JE|BGB8$={>_3pVWoHPZ-BQuvN^&nY-S3DrdiF#EtkUopXCoM za)wW=kY zPtLhc_vCj!cJC3rIJP4%0g9g*qPJFY>vP0Q5MrF^BN{A!h}ct)Unf_5-DnkfWSeZJ z%pvDBGj^ovwf1TnqsZZpy@J^VMb^RN#O1YBOzZs2yA&O@Tx?kk z?4O#OltTCLd*luUa}S#(q8vMxrwl$9OoJh4r>7}p>7;G}c%7%%l}ofJ)HlQkdbcKnnQWNi)~CLN z1wax$$DU;4*(Qy}8lyKaCh)!VOoSq#xWPvq_=QVTq~H~$l1Ta<(7)eh9ktBv7v?qlGs+eW*-5WrU8WVK_K$?|RU{p0~ytvKacV^PcYtvWqQuG^ZTAw)8&RU2DEFg zmxOaabhR%f*?7^SEfL>o7fX@{lqZRgE|OO`YwMrW?UBF6krnb{*Hby?uH}cM(D)mK zQeCkWsG%7K8S5_GCOoVeGV@LPE+4YrXW~EG_02Wo)o+uNfwNPf3l#Zxg4c)@k!5Dy zKX(RJZIfu;0Y0x};(ceg%wVYS{EBza$2ToBrS<8mSxlW<(tBd0wR7jyr}OzDW6D31 z7^fnXxCG`2x72YI<5fUyR(B%W-oep(jtUYA4P1f5@D|;hhDcg7u|7;x?s}9#dBLp> z4a%EwOl4Sc{e$yw_$u^*Du*eFAmT&YcWV3Zp0&^FgFv2W#HvTwRj4;oT-89*=RG*j z6rV_-eIxZ8Gx`LrBcL7q+7D-*%nYFsSHYDQdoyM}<&*YNl?c8v#T_F5fniwUsq@FG zzrSl+JeRw40o#t3m01c=t-Y-b0MvH``F4*Px^t);3d^%Ort>jkOs;Y@tU@anbAHpT zsRSwCpyuVerAGegQ%YATe)z*ziZN}{{wp6zyJ`vT{S-a8OqLK^ao!B*xWLMM=jkpc zxXL6HlpA896%xL%*KmD$<i29{6!mB}?{E=Mu$HLfw87(WJ+J<;WjG>z`yOZ0 zonnjqef>N4v?6w!{{~!MJolSUDfTe7ZvNH;*)kbaYT*ZJh~ey_Xb4b39vx=9l0}{D z2+j%``M_N&KKhV~3{c3}(6vni9Im3&OXq}dslm7nssg-yx<$gEA+5Pr! 
zlx)Lte}p&!fsBHVRu29Wz<=#VEq8x>=|T;wQ4{_QqI0P?Bgk6$xXGQtZPA!+aX%Cc z-xk)s9q_^TDrp1vIv={O4xdO>SLT%VRTA~s$uLMWe{cKJ@;GdJg~fN!wTwzY1hp}E*O>SMCov`jL$0Ud)7^c>}Qb$hnrbMEuB1}Q6C z-1_HV0Sl?87tbG!?c0)?r}IS3pqu1{v)Cs3K(rzl@KrTQd7z9 ztCk%a@vE}fUrU^2H!C|U@)>j1iu6ooDEntgBGhpF{VndUj7c=^PrMOX9aT{ZX$y5C z{UZ+vJAhtpo%&XwswM|wCc++{^W5TuNE1Y zN^X?gAX19B6cQW0B4kPeLtmS;%FUH}@DMyf2m2rQcB(5-G^Fop88d`yf9rIuYc?(k z%~P`|vHwly_SkY(df9!`l}m%e08gWwz&Fcb^8)ugeS86=MWoLC3tZYV8_GAn2&Xk- z%SwqRFB!YOTzs(kibJL=l4yOrs`E`-;75Z^Z+DrFN$!Hv7n4%phcGQD5Z>0MJ5$+d z9HVNFZO?*;&)k|PfL=P|G4RDpP|T((?&+Ao6Q6;cy9N>}H)U&qxUSufTmi`26&B_dSxJZ()UY4J-Lhs`wk+RL| z&nUReHTtPwn?k!2yJ33zSvqXp4Yz`ByXp*dYxl?oXfK^dk2T&rw%PcaExRch2q(WeT^i1kH11 z0g_?jw7Y4Cw)_#mRRyK!TiM8o7BF&O>It~I}>b!PCFGHQaPMES1$;THvs5FOa{yfZ;S#6gml~)vU zVSh|HE>Gcz?pHTLW#Qgxk6E$m*B{1w=g?dThLPqxN|u8LbD&`04MzkzSSgN(mWKSp zlZ{kROgOUG;)b|+FG3>&;Ak3DoYWR9*qa zNf6&Woxg4V>=ug}#~lY*lWk%W4+V!%it(mmgU($AHrDqycE=dEq$8}{6bHWj-T9;c zZJE6r#NP!Q!cF=efF1@%wQxgFqS1%xq@@QdwIz?oLZr~IfNdrZ{lq-!q>!L-QFnHz zI%(_6r_GSxekdQ;cCLxpkPR2N=pS&y(f3^EMvgsFfti!=RQ03!`V&vR?d!)%cN+B% zYWE8j66%joWEd|d3)hX=UWwvoTeQ7t8GH8Es0SLgiTx0FVf&kSt(NUbK0{yQ1kc9P z!)8VOaYio(`Zv3~MUtvH1kdJz@v_0I=X3Y3RD-L!VfYxB&T`M0D>CShf>|LA{zHWa ziqY1$c9+qvb+D!zZcvP8gBA*E;) zZa_Q{l()=tO}t6R^|Iaami|6I{A%?_Ih)~IUS0GT+We=c9M(^JY1=)oGglc}^x=0e z`|OVJ?$w5{RbDd6f2f9b(-%I;u(N&fYy5n6I<%z`|E|9Hb{0$KNQqc)7<)-=QBoFS zd(~_#ERWiJ?HuoYT-VZ~!0?D=Y5j?ZT9d!C$^EL05l0OXMb5vzNlW&6=svF0hhql+MWnZKcPzeQN<6F|U; z?ax-@8Fc#V-*fe~Ph1({me@u6D?VQmvEe49DaPceABVVLxBlmKre%f#3l;zNT*J+G z{}Sx=6A!dZJEEe<2w1YcD@!MUV%>QAgbo$Bv)QZk2jYp_;}a}_v-UDmCnx}OM+OB{ zxH0Dhf7!U9{J(6k@h^yZMhb=TJWpvJ?oBru%a07Ys28Ina1AJvZjof>% zY*Q%^QG1=moTH#2B9Is8mocjOPVEB_XwJ^E3PU?qJv3_}zMCAXpn`Ko!Ii^JId{Wc z_Uh8}n{$SPx=uj`WP}XZIO5@)x#FyCJB;65Kc?xWNsPZPNHWSf zB`4n5|MB55%vm#F5&X4mK3W{QP?1R+|NF)v^-_5zma{kY0Lu;Y*y9A&m2~t8qpGz?^z?YH`Y0N+u4({H+^mb-< zvxrHi;sGu*J?BfrwQ-sn4cjXeW0=8w3SIXraRCX)Xgyq>j`+i97ul?Rmh?B;8Ieuk zAi!cVjaqG3YO^?qyTy9#E|5- 
zkl<#Kvt2Z$)>ji1*qyJx)6|kq(Gdzj>Tw0vWz?QXA05JR@S=i>E>dvws}P@|0_#M( zuw#|3OIL&&2Y(tyZG%$)CV^3P=J5VFV3fHO39hM!(*ff%p%EqJ`=}_WQKb3i1LO}_WiB*ZW$9QAZ$NC z6f|t2A*T&LStHNFtR&w;rQws20#U`i`>TWBV|)u~eN;HUADq0Rfx~8{N9oHliLgiF z1_l~S&nX{ySxXpL?8|3=S`&G}I!VA@U=j|4r_f|d4q#MKfx|Q!^ivKmF6FX9;eI~OJ~~~QHYd%X)x{-EovGS z=*C|Ag1s@YTjV)bD-11L)bRsRt%D#hv({knU&-?XvG&`1*K8l3w5d(x1&1$h0A4SC7g9tzu&Q84Z2!=8& zQW?K_WPTlEXHI%Q>Qx}LYWLc-(>omZ(tL6*O)hP$jk8BXMvBMd-MB%T(YufDn7{a5 zl{n$*J>_f~*py9B;V;pf&%e}Wlsr*ZVV=4=X{4hqegw9@txXPH#T|%W^y8|1x0`)e-~Z~18=HG2_QoJU+alw8VxCpN zP3tI;MOEGmPH{t@=)YrTyuJBns-ZaL;Iw|?72rp{ zs?HnYE75MIro9Kc`Y%I)fBRso6ZY~;Qr$I87dG4J6KW1X~k00*2 zD`;VBwL6b~;`6d2nL%TkmeQ8IPdYB>?BsNTZSoT-ew5F;hu=@;f{R?-KA)J=WWtoS zmEiARrl4@0)r0-IP|XY-0;*Ys;(8?(M5jzpE_Epsay9={ZHSL99E>?%Kan(*#n-~m zi4|RWiD>$2dj2)vCu<9vDU^+$D)XSf&@-{Gbow#EZ-rTjZK@G zCmEmh+(Dlqhq8gHSWfU6!U!I(L%#jqb|CDzIs7h;I43E=W!`&>*Gi0TYoKN4d>~}l z8)J0dw+T1rlsgys>sd)Hzx$Kti4cGN|8F?gOO2vwr8m@mG z74f2QFh25j1<*Vsy>)&uLy2(b)748>qJG5#{7cSql_lR=S2A(9|XzyG|&vrhzloHR*6+#|Tbwf)C1|tifH) zfU>deeTfz+Wn5qKuPov5F#wFp^=+@vqiN|scwhCeb9pN8MB;*&sbF8APRtLM4 zX0D#7QY8;!a|%R$TbBQIqT~0!+ zZ$nDDb?P|Xm{MBm-Dj5dF7`%}551_t3SkNamioToDO3T-eCf&+1s=D*%g0u2bhr-+ ziNFy*%Sr0$qEX~$p&)n=YMznpd}D=q@%?N)%pEd1b#6B$ga)K)zd}91wb<9}=Rx=K z1*qLoqMXYeUxx56c}Hi@w{4zydAK#Tl(F%Q1bAO`F^C$&hu*&ab^|L2w;r(ORy&G& z-Tz1agxidJ_&+_F9$y0!5{^0lCWB=fLF}Y=YEM+Faj4?ug}5%XV`#TkMKpHb_%Scj zslUC{LXHAsA1hrvOEA6qiRO!nlN)Qb>DlhmXSc3g$~32;WxOv&2vuwLH^w;_uL`)n z^*Dvc9YKc4A*;bDZcH#rq_Srg{urIyKh&mN{9~}tC<1n}hXD!cSebY1xGoxgfvhp; z&9jlTlY&_$50<{Z<1+}$a6-vDmnZaGR>Okv1$_%a7q02|B00og? 
zyK`zy0Vo*_w)*m#@djdq|5d`d>f3)xnTBqBd%^$4;Rp96Hd%F=?ZtI`hT3ycNJcn&un8G13xwP1`AQh!`nrgNN_jO(o?3JeX#WfqO7WoILYuWPyYH41 zqj}w>l{*u^21Y)!XS{Pk-=GvfePGy$HzI2~W}lq-mL}ZVhFkb!Yn(ml+0iF~(DPGC z8%5C3YiB?Wbl&xPVD1_g3ZnY;>ecDeEy;6|X8ir&z^=U+Z@f-f_ve`wcbL9?OT$*d8gJ$pUyGQ&Q}Ze=wx_e#w($4GU)v8D6bC{deC-l zi50d@AFp0FcXK=pxjXtd1>>|LkUL2FY|A(i@I`!RwvdIU7{s)zNG%&_ zq>-?*iZZ4BR8Pn3^6yLX_wQbALWL_@B2i=7tU-%ZM^x*6kg}4zvbt{^&|GqUdgd(A zl8k{{qINlAya@^MwXXThsO$xvMi&iUT8t?r(&A~NWLFi(i)D?b+m1}fgmm0|@cx|+ z!1J&D39sqyC#B+>$~}I%_(Ys;oz$f2F8S3ZHz-XGr>iUu#_$PUW#PvVpiHb0zCS_V>`Q0RhRwqzg+XS-@rh>`$W5(1VGZ$cB7!&gpz|h<=Ev#q-@{c#tHPiJ>lpO6})E z5ilK>E}pUOQ{T44FrZH4C!e@zOE+>iw70UHBHeHO+M?9wu2P*fmS^Fvg2WIF3t6gU z$!q`B*s9{b4XYGyZ?S!Q6jJbf>mb^Aw7m67GgWnzA69!o{Lyl4H8XQ1fLSonKelOF zT)f>2715Oz(8{xs7SyvViS%BeZy^GC4@)Tgf&{r+@vHbq)e}sxEaQC>E|*wkQ~4o> zB>QxIJtJ2-HxrfGpYTimUFC3F4AL5$;_qwEj`-jqUGK6f9%hBuR3QjqR(3;>l^}rn zeYCqLN;vx#Cl!GXN%&{+{(lyNte&AR{u8BD zGki(!uMN^=av7U1y4t?stj>o#7cmU@?g^d-z1AY-sy2nP4HuFY>*DNvw8Z`G8}R%3 zp0*;&Leb6GGl$r0M5U?HW-H@rwz~SJoVEASr*38tUln>cwYT-@&agbgyYdqM;w)s2 zCQDoc*(r`)JkJ`P)B5-I>pMMe-?nQ$8}qD&{K)R{u$~`>&&XIC)U32>gC?> za;+e>xMeov3IG{XTwHrYME}weYqC(cqhw8jpAU>?{N!#X-d9gn?2;kk@X^|#wvkRg zbT@6PG4vr0(n2E5rch{Iu$N9rnyucXj{E_N8@80V#gJU2<+byTxs>q_XS_4n{#LGe z(i$gl?z$UsmUm=@*?Bq~Td!LvFUMbVZas0340dfmB$<=EDS)gw=vRp0I)SzQVj4bp z{JZHL>(1V`h;b3{71+Kr_mh+8|7-~f;fWk}NJ@*;OKUOwQBe$8Y)Qn`Hs2#i1AmG5q30~w zOv0g4BFHww4lc8k>2UKeuS%E|I`cQy-_u z;ch#WN_rk?@xykm3O&?42~Hw=N$|fTj){13NCFjj-2~A1y7SjNcpa0`|2C$F1Py}4 zN8CgM8up^;KS=!s>)t3>JQQjUx?Jw-G?x3_aM32W+9|uXdFuK`RZ{lrZ98aKGAniS z2O|SVC7f)#PftDqRDf#}%)Pu*Go-<5ZitFovYAUVWNHw%B1wXk3E%|uK82tBYUtgo z@cL*;3WI4cdWoT147%K*HP2EqBI|hB-~~z z$9Z2}WYR@^^@1WAiq{0e?5b>I`MFGxQoHnW%ea!@+ga%nY@Nz0j8sBBr)7Ui!SU>u zAy?QOZ`&1`(Vd$@`serj;LZpI6k`u{dj^Z9`+M7WN|+a_0DG9z0>*SJ0gGGcO-3WPJ|LyU+~J`zDu%hUXdU z2|mg_qj73l^?f+L$>q1oVc~)3@Rd%x1AB|9g_|TDgsi!gze(1l zVQ-$X8qHW|6IkBO+E->gBNobZ2wE46lx~76kD5!CnMeH9I6pWzWCU)g>Zp^x$+6_- zKGY3U@BY|W%650da7Yv(RyHM>{<#1zW&xvJoQ&nBxnRnnb-J7ou|(@D^$m~Mm2U8R 
z|4AL3Z)LF{9d#+x5Zk+?%3+yRe+T!>2-Kh@{;c!CEa%g1jRlL?_ObMs$f<9{=mVB~ z0rW3({j-+?%;$j>*QorqJ(`;NB|7*Iw7iav`z`T-*KST+WV)Krm1pvle1mc=p?+#;V6qMU|+X^>^Eey-C1g zC*J)TQE_1~l~J0ejrU4vire2gHxNl5BV7{EYum70Ij++bou^7x^@9%nph^aWe z*;4l;>nwsxHFci1TIA~>KFJhO3O%^}XviH-f#fiP-!B>>l z=xJj2E)fcQc`hkeEhb+5i}LM09z`{>>$TGT10H^Yw7_FhF+y}P{ zaguy=9~9htPre?PDmIpik(yaK>2!(EHJ>EOo-ShXo-H}>_MOJ{f?lsjd*ru;C;pf} z-4oV6=E}XYH%g3l#k?$r*QjcHbG$dzZK@0>D0ESIss zg-j2(>>)Q6Ae~w3Jli}An1;YIHQ&>Vw~-+jlfN%w&7Crmy9^*c-W&p`!S{~O5a#O2 zYb*;R6%Z|1{0wqag8V4786P~8^qYK6vOJXL5mfI!^(z-^{>g9hdenmcad#uN#rd*N zr6AIWeKL@FBe6#|>(&-xUTu=)gJ{lt+Hp@~JtZapisAFiLw3 zT!_PDN!Si73EzsRZQNIR+Y7ybj$T+mRPI4~dd+C^&#bq{RMVRYK2fFdD1y`O=4$q( z;sLSelF*C6Ovk9#S19gK?yfHL6?|tZbDYbkM=>+h=_oQ-ymxd68+vLue&6{)zDnMv zr|$g+L+kq@K9ab@VFfFnv`O>t=xN&*UKS>w*un4!5&!0?zZ+pEoP`B?Ts{6O+_0;A z;`N|;t#UwPKa?j~pd-?Ged6Gt>yC%TIwCE}>0oU0 z$UJO&Yz-=uWpK>syM=%u|!pDvUb&ft5a9*Zw@G$2uK2&~K+xbsE5_+M433PBIe z2YO9D#usIaaGzxvo%CDAHW;lSD24~wn6aZpR%TP1KYbeHbA0C_!o=^`*6Yysu0-*bJf3vF?Im zE#Nz0!cnu1uE_WFJ8-W>f*#8qOByPb7uKXNQimlgC6)Kc^-(BM#L1j{?2>63W9l`i zO!e43i|p+cf32;A>zqXFU=g)0U z?85A;Jv1NIvRq9&oY=k170NF>3=5l8VLPl%-ShlTbN0DysMC2WJ(RV6U$iGvjV11S zj*GX$Rcx;TM8LIr+$we!Hq)UfxYf$DrbuN+mceOh>G6V}RBc?$lkh_OD+jSf{}mp* zF6qR-=RQDPEC4N#9>co!<8CrV!^B9*x5%57ulvC&3cm;i8E-tA8;MqTWYqMyue4+S zRn9hwJePUU)Qkd~bDhgw!pBTFg21?|!napl;pwsuE#0ey>FrMtq*IhvCV*4WUAm7$ z`mm?D8nif62+1IEeEz0E!S?P49|cm`qPmkh*7ib{-`IsPmSU`(rrZ~Jw1!D!<>8k3 z90W^4;}Z2(Tcc>#lTAO58n5~@SG`&nB%3Bm0HI0`wm7~y-o>* z3nhj$*v#JKkKaD>r55|uFf%c(u_&D8iApuQ@Xo5hTdy@KHp=1?>_c_ z@c0vQ4C`LA?zOIU#d)5Wm8v_%>wGZ7HeMlEw%FVycjy~f(rhpihBN@Qqa5tQew!ZP za>*KBb+P~m-^NSz3`U$Sxr?M_WjP~=RB>K1lxj?uv*z%h3A+E8APMf=QD(Zv+O8WC{@=%nJ)MLtW~&jQ>cH{Ixdfy0l!$aH1Fq7X;00F%`I zB8xVgvF2el|Ei$-kDwSYolNYVCD4iy$R>QRM(5RlB=j*8Uj1W{#o6l)E>6Mw?hN++ z0o~!x1|)uG&{w3cI62Gvyaca>ey2zevK665O6k_tA%xr>w~}@ zUyGOZ`8PGu8;5Eq2@Qd;xhW1D_ZJZ-Dr%b104^z7^t1G+Obi((aP$N7CxQfGQ;0hW z{zl_k@p@dgAB*OtnyXa!*(kmxhT`pi82-r+nAI?Kf(-o(K}v{PmB!rAis9L!7n|JX 
z$O(NR0`(|dSVY+g6r;g;UFl)4SpSkR)!9CLNqoZ7L2S7MWP{wHwnm!tXCB?O#8HG7 zpTJp5}})XJKfpoO>i(Tb+ES zAKrk*cI)WU-P6MjY3YUuyq3-bm-n|ig@IxS7_dY^X|4lpO@swdk6+(gCJ~1U+it~( zH1wHMizg*>nV`Ak`ZQGC{Y9_f1) zm^7RSj)woW-Z?;c<@3fyiM|*0c>dYjPDMhR$_@aMkys?8un`V;O^=ulS6iooBTO;9 z0U++iA-xAlvlT@r$csFfOc+w>Z8c^G!dJT+*~wG7@7Z0=U${1i^gz94VBB z6)7WBiL4TS2*O570lAh=lW(^;f>cnoeO?1aK&}@^!@<0FaNHEVqWH91+%jY*;RMhv zUp{TVj}-F2!a6WdpHFzMKYszr?{``E+TOG(nxgOZbmyNi0C`m8#yXEeLtbHZQZ-9y z)Z7!u1LOqFin~~vbdu{q|0Io1)SuyV{#$h#DXfU7D$xRAgxrJR;-W?%iNI>&fbwoK ztv>^E&x*GOvl@+p_!G(AuiuAV{h~Bk0Beb??W4W7HZ%ZLTz+Mc;Y(L6SUQ#}f5C?i zdkv-1^t19>dm#2%UzxXRrxzO$*(XdY<7u#uw!sQnPhOB{;Sj%;C0Uh02{22j;4(! zui?kWk=#RvN%ZV>8UZM$Qxc&83?Hx;ckY6sl)%NE<-qAn^GfI|aeau$%s5TK^lc=% z(iG%5v&pp{ZM1LY`LDeLSJ)r4=5q)+@b{>6&>-_~0^rF;&YdHD0!W7*&VvLXbPNt* zC}1YLk8U2Oq{aBWBlEiG_Ktv>+w2%WOI z*AxROy1?+{lmZz@0ahoAC`~93=-^SmDWL%hYQ>9(_r~}6Gko&BfALApmGUDYmv)Nq z7fHqNSwNE&XTMZdB8j~H72^o&kHXP0K4EY-8uA-;0dPMuxG+uvE1FgQbU%_Mosmh% zPXIWWfseiK*42Q`1U#coW^zqOzTZcnI*v&5&2T6)7>g7YgEXi_-SZtBi|v23WnBoL z4wrpWxgT=*5sa11woqpMdnKs8fX_81Y7H(M!eg}g&va7(GGzV5 zxB0k7qh7Krf@{wCG9YYk#vJ9EiDdfH0re&gWE&@gtGy_Ri3F>4=Kv8!)i8Q>g9P2y z8iFi2>{6FUZ9k-l0CEdnM$3}&SX8{I=-t#tG)BpW6d#-AK%x;$5P1$sAfEgS6Cv`~ zMDN}!$og0NRAJ%Z*3@vCG~z-m{Qfh82-n3lOZs!-W( z#?J0oDef3YiqW8e>E*?Kp{*gor{Q)T1&T>PjfTFhO=->)bFZLh>xA(<7Gjh}3aImJ zQr)O8+0z@;0S|j3n0*CJp8d2V?obwKCSviBSgIpz1{bEgw9(P+cLyrzPy5RB#9Nnk-G+I6gJ9R&(G+GN@f2?$UcD?=w94>=7+b7Snr2OoGwk!^i zL}6^0nS50e2hf$Ad+-r~^&vT*7{K~4mQAejc$AP#3|!%HB-*2@SOuN=(H{{-Qo^%8 zMF{k@0pTcXJ1!Q}w-UfB9bde1j3J|yK^phiTCy`-Dq!51iJuM+GIIWG&In`wtOwUDVSQH51A77l~zm7{W=^ zb21kW#z0>Z=@1!y=9D+=PoZOk{&WP=#4AwwOf)7Cs8?Zy6Cou=02**L}ymicFK9K}? zFCeD?pNoF>Jt#63ye8w0Oo$Fo$bc{qNy%s}@^f}O*}}N#s-Kn_`WsJ$RepoM!B1NVMLM}kxeb8^y#bT0F0gvOvvNXe5GwSTelL6R@UVo;EbC{ges39oxEM>qJ7|^N0HP6i z#2gH+jtUs!DoTUyDFP2ii#KPIOJEOf@T`^XEVH4uNDD&eT!I#DWqk zjW1F%;M^wP%JAJ9d2mwLC*YFtF2Fw-+?Ebbn|ONffl_;LwU5{fcYK98kbqm3_VW7! 
zOZ^B3HK>+k5UFO62{jrYn}^QUO@5eD*@_fhfr8dn{8rfsWm`aphihi{h$}|~Xh4Ir zIau6i0}6KitR9alqb4Vw#O)vD3t*^22iLLPhMCEw+VQuoa z!gmA0CE;VHK}d@Ai@p#^ko`2?jp6WEZsyJFS)wy0huwTzIX}yRRr^{G(eM`j8qFf^ zazFkA@&tSsdj-7_1+wzE5Un28xKJ_SUvxlPguefqW~Rr+aS)OO>fZsG<*%Ju+6xi7 z2cx#h4Kv|%eGWt$J%{@;CV>+41^dezvZdvfU*#L<054#RbP16|(n2=4QwD@-AN&kq z*bA#n&ivv;Gr@3x10u?S7D%E-oSk|%H2D+!1c1LM%~Fwe5$&~#Xw>=3z#=wY|97GF zy3=f?zJhn3kZ42z3_@B}?$|&sWEN2r`UBWh`m?J$P8YBJlO9mefJ|uj z_M*ROrzPR8Ntu%774tI(6woJls(YC32p>Y+hw+I_*r67HE~tt5IKHIfRJkxaUD&g? z5V`LZ0BkTgcGmP2Ni@nUeX&J-OPEFIHmTO(erkv3Tl|BmTk{}^F@k*OPdwWRy|Kf{ zpBDDy?EO{Hz6YALS;nMj^bCgIvu!HVs;e^-MPA%UNEbI0Xaunc-#5Ze8PH8ETAu5V zo&wrT-OXyRWRU~nNzy5nnyY@1XFnvvWqqDR2d@-#nwa<{ZD-I~8V+PoAI z99xfq14)mIPt{!8W6JRrK-C3`h-P*`cT*sOztRZ`6lOm%zv&KMNg?X8R`9wTv@~y% zzZ9qAA}d!8##CkBm^m~M&VDg`&h-$gk1TatC4CTW0t}r}3X>}IU2y=~F|+|xT8a78 z5=i*Tql)!KjLSeFg@ZSGet*QaU4hh9RH|OPunW1?R@`s3&@DAu%eQb6Z~{r`Xo4rz z(o4CN>c40nJ^*fDKS%*z>a!z7B~W63E9AY5#o)4{`<^3ROW506N*v&<9?WqeWe@Wk zOSPxN`j&QI5uGs$AXB25Bp-*y#YH)#wz^FGvxIb;KbM2MOF_wvDGW;Zv z&*iuaG|e%&aQ*r=5r3}GbR|DkQ#E#D@2q39J8-4UHKV=+mE;(IGPr;dDtqNodg)Ac zy5MP#)VAC)m->o^_w*+GRjx|kGE$-5-BByU`DDGw2c#954p%!nJG@Jlfke{Y5{IOe zWCHEzI)G*XwO!M!YvP_IhoWw2HQ5?uj;fMtwXX{9)@!P|8lEAT0|z0hhcBPg#nYUQ z9LprsBNM~V`cx4L+&}1){L1R024W(u`B^BVKs>73fm)ZD~+14QG&a2Mgn4Z%t3KGt}mfkx%hU9*Pw{X~s!4V?J{RY%A12CTzmxC4v z_=Qbgd>EH!SG<6q-yZP=S1lA;_58yFX~kFo$Js{0anzWL7mmadwVN_~b9r2bs_#`E zcNCpEo=zy($g-Ib10mvAfhdCkA2at;?Mk32%wXdz(?MAG$!yq^O6 zk)fA^{i4dTGK}@FTqfx8;WJfVu|PYk40{5ySrZ2(&GozGHDiLsgdo|>wbA|tJ88il zAka&dk?OYgNVtrmtQZwqjBnwbe9%V5|o?uum_dqAO|9N z-=)(k%@q{J8GsNY*v5K#yekW1BGPkdz{ zSg!A=DZ`@J%&*V1r{h#~d2O(A;_z|^Z;%AllC(lt^7$#{byU>E(sr}K4rcg!ocH1Y zweLVf`m-w~>87Dh5H`5^{r+v=a?Xm^I`PZcSYx8&V!9R4ANn0tX@nU0Z6=0zvgHQL zWV&RS*aIEW?k4rX0_YX3u>wS;Riz*Cy7SG%G*%Fi>s7GnT-1kWCJuy~&SuUC3YSQg zn%=~p?p)M4IX^q}+4b7J_Pg0kJiDGXuwFNuKQhCuX;LDx$alr*T67Lf^9V$h&6~z- zixRdOvb0Fs_j#sf0Q&E2tX%8IqzU*4=wIa66!g7WEZIOKIaW*6SK=FF)ndc42rYXj zs)KqoQ);SMYlP!IYZV`esyX4;*i6icu@y4;W2WSIDeOKX^cIX5I`ew$9FwRXZN3-r 
z!%-@kz2G>|5)+KJeW^rrWu>J!a-Z}(O+fR!!IA6o%vx9xqLGi{U%-!@BxXFi;;i#YshWE3ik z?2z}}*s$^}hp`&3dGL2`s`Be9kQ1{E?B)VDzuxZFA}(HY8wO5eN_5X030a7b>$sCy#qRT*=6ytWW#C zkqBpTUAf3qEhuaYf=3 ze28GIm#<^mHty`N$R()rxyMAY+FzLn*HIC-<3qptix|4Rd)PlNf>(e2f*cD1?WHvy z?&AlYyZR;MdLHMv6EnFWFhss7rnNQr9=}3^_Tr2==8HTl&bt7sD((+Ygb=f*`mNlq z&&!05QrTe|+|FhptxZRlX8_~87W?hxyYFq6nwN0w{b0oFu;b100|1;TR4QOiIPSPz ze_gT|o3?}nAtc_*ebXu^iglxXYL8fH!>0iq#F72g(d}6f!yiy*1hlxe#{=?DO=CMZ zP=T{?>?o|$lB|2)lq4h+o4Vowi6hpwXi-|efG3Pr23^0Do71YNyY)$V?o34_OI-22 z`WME8Zlw!!L9~ycv3+)m$Iu4TUMOJ&-3V?kK~?XBu-RKgB%B`_-#YLCbMc(rs>b9!4!yI2mf3`~~Ur><4GX-+X$x_OW66(C~9or7X{=sFcUDrJg!{ zjIRRIyd8C_r8$i1(;XjSx_iSm^S8N$BhAplaX(oV_mM4g*%__D;TJbR~(=-@8m4%yg$SNC!nHk0^s*a}S@*|^z} zS%hxRin{#PH7QL!Ef*?`r*|>PW#l{&qA`-qbon}UX6v0AqC>d#>`s;Uz)$aY_~T{J zt)(aI9E@7}#=R8czTYQMd8yVh@9W)a_`Xhj%7&VM%q<7UGMwRl-J*2IBHtPf1G(eQ zZi|R;C~wdd+xjWFUn*0hfxcW!>08_qIao@yFKw)XRM102O`!S5LnNyYX1{rXx&1mD zs0cy*>GXN59mtVA?dv>8?NwY%a!uuX9MSBW%?7QxD_Qzp9A^lBZ1@$*<$4HzGF%Qk z{!lfRMaXe&Nja{G(8?4XyhQpk*?BgDoC{6n+11eFm^vZa?UiNl{x%?~`}lH#vGUWb z3ET~3pnR_H$Q-$XuDzM}T;-H6&ym0I$Q1 zcOiS6)c$51bf@cRQHfNx(>cY}Ik{Y4yV7%hYp!(QVZ5^V~X% zn4+p3QM;JrY#S&8p)w-z<1cgxLi1fp4$GmkyhwD9!@DuZ1#~*bUo3w}r49vdQsszJXqC>`x5xzCdj1h!|~{+)->t zgt+j8FPcqI%|jS)m@47DymY>usf`_a4Xi7O&)bq0@*K-p5N_fsR$r1g8(ZC1od=zb zRX^zQek9Lb zLruX&!EGf*Y#tB)j6afN9`BQ}n@r?<{VrHnY)WPbtZ@X;XN=QrE}Iin9HWn#;G~u14(&~EV~A+1qs~rGk@8ZN_;_g? 
zu=lp1Y!E`V(Q4cCBS#o+ml-P6L?EIkoVgRSL)JU-7_c`w=(K7~W<>WcFV*)=biP4j6`gp_r`C#Q3@ zm%CXCS!_!wCM8d_=6KUy^YWoQsrO3yV2{X*DSl$=!VgiylitMfweXqz*z2Oo53Mh) zFQ>r%Eelb@0h<)^K`oSZuZ30vv0!TE%1%zfPbutjgFA`a7deoJF3(h4WWw}peRVBB z@{Q*~k?)jY)`}%7Yzw**Ub`+dxgg#_ftT!hzuqfZBGu{FHs+9DGcl{;46I3gq)7OD#=VFZx|QUz1yU<$f*w&?wA;HeNxyjV*P0D zTyFIA-YnNdc0IgM(Kr-{!VU_4{z0=xP>ta+Q6JeDEosM(w)7D6UR2{{CF>H}R|un{ zhN)$zr7b*J@-Y{dA(Z)oI(RZWJGrGG`z4k~W^wRi0QFXd^65kQ&kX%su)g+bX|F5V zA+BBB*|i@c&xi(-6z)Ozku5$y_Jp>52&zcliBmdgDBpkKUeoFv=>3@G!(%k>tNjWt zg3;fOAJyilJt4OA_%qLIkY3gZ|$Wm=rr?_bL;Ul)c98=bN#e9i-Qw0MMnT27*%^Yxy?v`KsOz|K5gxwaa* zyn*Pv*@k8>TQNNA=`?GB!yHo)Tp?MYiSEL)liWWZpQ#!S>-}Os${VH=&m6&Lw}I71 zd0JZ}&p$L`iN0k{j^;<2Jtz>$Sh=U$>1Os1y^nUpiJUoX zL(B5-fQ5V_>KyV4Ewg}|{XjolUp|}godfxL)RfOtTSlAEQRk@?xpXQByTkj7gi#%) zDyoX|`W5O{7bbyoDN7%nQ99A|Hi(5&QB=!UFC0 z_%U!hamI+lFh(!amyymo7L2E^OZQ9buRzUVq?=z~UpQk7)2nc9lNeXuBJUPnoV>DE zazjjJcV~ZolunG7^G=i^fcUzdY`+88u9s=R(VKHy-1#;cskfUw?je0DGQfu??7jLW zI%|O+O&yq}Yz&=3?eJNTEV{DeiO`Z?79y0DG}rEI=Vk3iHK6BtDDz%ol|t2%*P(IA zdc1A{f&@#UaRddlT-dPss?YOwv%cnKYg`ct{N-(s=4{2_8`3c0;M$)?Igbju2(`iZ z@Q#79M0Bhc6_K=y`I5xCu{^Q|dlA%?!BRHUGdNa!fJX^a|8%veed$0TPE$~OboYLT z)T7cbgiuos`?aNh^8s2yYKk6Qgfj(y^}Mm2lkH9+Gzr**p@;631x0Y1zEmuWD-x-F z=*ZU2J&WgNz{Io?ON+=+k58&y2AR$@U03Z52JZ^IK5gzkWc#^+mie{$$%v|zY*WM4 zQOMrS!q%<3u`-yV-TFVP) zBSamBjpjPU>*oDhTznQnaqdCl2$T`IuP5^GkKA}&0*>#>tC_pDT`a_-H@HkT+Vc?k zfxrH!G`F*DFrS4u93IeyA#Ifv^)ZU1$?xe1Q4`d`34>E>t*L@+&k33B+xxry*Hu*& z??nI3xAm&yLQjn4*;62M!m=}W+|c0n)LPi|VsPa(+yH%8lE#LvKEoxQm)I#z2DPFk zEs;7*M353pnLSz-m%8qvctI(3@Mo2wPcNUmA_P`VDKxQovL|8|S*Q zHH=a4_aW#w?;>Z05OGLKeFqviL+`aUz0kWnIug=KP17<)eVz>;`5^Ib6AGk^lG?ObVNSFKn$TzJwHV}y&GcWzT}P=k(|UC zeGt6vHrP<>;5RnCxOy-y^pmd5u%;U>RzdLO~> z6is^Y{aD`U z*;d+ArYJ_NInSpkrHer<$PdbF7bN6XgZT^>R=37;{VKk3UV5f5TsVg2JI;#Rwp z15c(C1k!-aE_7sP7n|{}WK&k)iz?Qbz!CX^6MUL5ER2eHINWr3|0niDLCr_|Qyw=K3xkUPZ;UBBG%WtA||P|^22 z#zC`~@W}AX;VY?y940lq-im!86bLuIH0sH$@9OtG>g)Nym(S0|c^6HZ8)x*2g%C^!rBZp>%iHYlPdb1+8ObBUEAU(L!ttU6fg<;Kot+)x*ly?`( 
z3j5k|h2sf-vHw`GCc~T{TQ?Jel`eC@4L6C^q+9lybE<9#T3b z;?3es>#LjUstBS&QDX*3?k8UbimSsPMfq(4mh1-N^HJ2oL_6hsSMoJsqaYn52jeR= zr0_jvCk{%BqT?Wrx5gM7S-hGr@|&XCnetR^G>gB2ccF)a=$kZe-U_mc8 zZ&7k*qO3NU7(}clu(0@E%!Y#%!g-?x3yCUg9S64}%5ZVdooV04m(xR(L=8m_)b{)k zKhE^M!&AI#Ib#T{+5yqWSvUXo>A9JPwvVrapMxS!Q`zT<0Rf!R1Aej*%C*#y%@le6 zTd)r3AV;VL&$8XmtFOfm3vYjHencWA>C0E>;|jci{t>|s;?l2ZL}|ETl}05GFPku-*b5zvevb?4kYC*g9 zn5s@ZyD4IS^$pp4tgo*C8dwZx*x7#jwn&Y)MT)1_%v!^!!OyCf3bvb+_pDA&1KLlH zgMv?;!BrS|5Hw$_vJCtemRZzW-7jdi5yyHC7u|jr_^hScH;e^R`yji}tl%)3$TKza z9IS&RdcO8EzCsCHOi>4cra-wBzKx0>HGPe}jRR>3xoeAR8#*eUk5C{AMACjk*Gu2R znw9ix9{%LNJ}u89Y9HACaijALM@q~#jP6$@#rFN=sN-NW-Sg%XL><`AD`s$ zja&7gP%UOA|FY{TwzY=-&oXN%&%=X+sLEiZTZ(8bN1kv6d~KX;|GLwS!)DI*v|N$n zgN5y!Pgza{A!*9lHsjP0X_|syF{1O}AD%W{brNDOHlV)zJ$Uv~g6&?x`=RE+op6hm z{uF*Eu?Z->!ste*RkBN~g|5MEx(qt%`>yu_kNRwbs!#4Y#AyIOBR_FyM+`mF=vqTx2V(8P6oB(!_LYn%Gxt$VMy%|;{Cpr#O0KAJ+8)t@i^b5kQ z$dLD3#@RD=AUuOpv&FBhQgDiRG!4k(D$?q@= zvx&2A|E;|YjOeZ6P_?aZ1^4wPnpACpgbE@u69z>K$Iz#cQmYp%l-RFGV8^y_7-Z$p z*5*t6^7Qv{Vtjq^yz%bZ*9;}f9qy}g+X)V^lTjd0G#**-g;WN2)MQ{{ouMX&Ec75qgFAqg$zj5Eca4(>T8Caem2w+(@={ z=(m1|#aA)U9|vkBXdkb?84y^OxX+BGPcGV`3r^l?irrV+P~Y<4sE8sLQkK zepz;|-6#E&KwNTOq=?@ANTLH|9h4GUG0B2$^Sl6{~pQzJ|q8Hl7H>? 
z-!t;>b@T7#{(t*ZDF6B-e~-rBW%<_w{Ohm&$36ddll;3${sw~oMmK*A#oy@c-&pcL mhT`8%0@4-#AAfuA2IbkU7S~TVG_tqAKN$(d7lmTRZ~qU)h7JY* literal 0 HcmV?d00001 diff --git a/server/jupyter_server/static/logo/logo.png b/server/jupyter_server/static/logo/logo.png new file mode 100644 index 0000000000000000000000000000000000000000..00ac191e771f9df4dc853aae6bcb85f89f94f794 GIT binary patch literal 61209 zcmeFa^^VWGy;+miU_ELG*T<5h!PSa z&7w%h(hcA9x)wjreZT*J=coGzU$Sz=oO8|`Gjq(GYnZOKDg_x684(c?g}R#ZH6kLU zKlF$6DEOCG<;$nQU&LNZ!d5S;-(lM)e!+7lt6OTZr{@P~-# zNER{C5%4!L^lw>+zrRKLXC3+feugeAE_!G{M1&?%SH7U{Njx`)bg1QnB z`IAfaQ7{oP0!d2Ejt>0$1G_AycZ!pS`hWjD5POz5n2iPV_qx9~Q0E6W>bwl2XZYV6 z&||7>|9CFpdgU`&cv|~g!I%Hu{`bYYo1Mr0B?olD6Egl?*Z6*7cu}{I8Gv=aT;+$zS0854Zm(O#T;c z{tNE^2F(9*lK)M~{>uRWOI-i@$bb3vfBE)*`8I@*|D{s@|E5ykS?x_5ISIDM$&ftd zKpt^SuJk~*@z?X{V(uL;?{$j+TZgIXFZ&KIs9`;SRvD(o`bVmb^_Nr|GajkN>fYaJp*_|VzZ0YUmQRAP4t5^m;LL;)VFUQ1E*gU=>@B`^Q^)9C9-nFA zUgfQ;9w(>nypfG-uc#0^c}A{K|M>Pm$9hPSQvF6>hl^7`8zpP|YgL9rt)M?3VQ!}W z6P=KKswRw|w-~EDFL-!PdmS0OY`YQarXqJqQ&Us#l42~qAM5^f20mp`fqsANm`-S> zl3!l_{N1~MAC?0+k*6B{F$C5o?+rFQ_{KZiE%fG}m7_7kjn{uhwEZ4ejbWu2-Q23b zeL1Vad%UICx{~v|yfljj0lRx{pvQ9CvD!L>kF6rHSDI)G>n-_f@BdJf4tU|S&f@m| zkFKQGcO~D0e0)pM!f^|KM;C3A{2PS&3H=oaAz*7U2Y<{`M>GvPjYSEDlpd%wDqAq? 
zQ7V>LTWo%8lDlY6r)z)Cp8PP6V5b82d3|{(3DZ*;W#sz-<`HRj`jU&q+%jW(aPy{8##RXGP8EB%kH07L>^Ru;>~s4**P&-t%S%|@ z44>skoju3&6!!oD)d7ZX{!022 z!YzXMSKm|`kC~TqN3NFfIJmyXcAT1(HQ4p)U&iRo-;q1izU2te{tB-8JP4t%6F)qIEE=_7u2F> z|MVEoB-4j$PO4Q)%kPSd8@P5m0uD-D@x@*Nuh&YI2=8^hL(Ju`?PNtoc$g&v=33_@ zSCQhSPFsdd4TFZc-4FP8KcAgA`rcRg&Bt$whbDF(QGZi^(%OY^7w!h8y&@66ZzzIt zNL)Ral8dEJ8RILG6VzHwIc=YjK*>i?rZ%p#}N7e8B@7lkdk`YCfj zP4>`^T}{zrlT|^Phmu}K9B8hu77AV&H+}NM^ue{=l!(;ZgxmnrkEHXriGP zTzS&*;L`@t8D%5k17r!|N^EVsZ6XTm!xsb!%gDLy6siA#~>GqY3U?fygoLaIxXiQSljc@VRCCMh%x55z_dI;y3-g z1K(d02m|-^m#!!Ppw?dpM_r}!(MjL1Oy>Qketxn6i|C15f8sd!Wj3{yp~;vbxN z?V{?iiF=b66sEW${Pbo?7-1WdIkWKMBu$!!Msf-hWxEz)Xl|#SBEKFAjI*xDh`EF* z_Ais}szGdVN?YCP7<_$fBHDKSWRZo>Qe)Xe5u>ss;ThCH4Kuxv!y&Y%a%JJWnLP}7 zV2iA#uR5RBy7hLfeW*gGDdR-dLf4q?cE{iC=I6`8>uEO=94$5(+x4W~b%jZ1>5Rs} zK~n@n&^LyZxoZBcE^YYT-;aa9WRq9J3meCmd~84~ue-?bjI}@rTEKA8%gerGvf`G6 zksxddM3@kA{#}V@LnH*lP$Yqr{#L0}G&8FE^$im3S(lD*C0?Hl`Y%TbOREB~@{0w% zNw5oUU8dhFi$LK5x)$UKZ0=Ubu8XSbk zkc#5p?TPFPgS`lSmc-dehK7N{ovw7rSm8?6o4WJ0lk^cyu9Zce-IlckoJNsv8pOIT z2W!L9c@LO?z~bZUqor_j$x_v``Fcc5+aR5u>8(RlpWp>HASIf2^xmjIk<(0Z91g3u}ep{{S$j|pbr`R(-yG8F3Vjjq#*yLUci z23-@;houk`04h-*BOxHqCCj#78Ll6cJ(xa(-@ArhvW;sbCkUGWguOP+KtL$cv5@N- z<4^A>>F%nZU-<>aG)w{so&UA=`abA=>@t|HT1D2Sbkl)c64NvLyP1X}&nnta!;eC< zn*+6ax(O0sz%>q=CUTmxp#OHgusyly{e4Smm$%|h$WFpKmBBJ-)XT&O%kWs&&vg)G zJ$eCWw6r)9MazA-SR{0nS_w7bDs`M;B300b1<8}hXjJXgGmL8sBHnkFlCU)oz}x%8 zg%n|7@NcmWqGtzGZIeatiy00*9quO<3HIj;40q{|zA}6hjvHgml|HuosgAFplG6#Z zX1gAIKEkI2FZe{6sxh#@SP(BYtz*SYIk{S+j(FlkhDj* zm;>os#>jbvNgejt*R=c!Ewk(8Dm(hQHj6x$(#O`4C?6f>9Go<|n_>JZ7YXj$Oal3< z>$YX{Ql&plo)HejmWzhdiWd$eG<7hrQ964%B)o#nV6=yiCjZ{DGlv)>rh8kikwQ>) z;$Gh0Ak7S#lH+0msmibz6I{C6IessS{1kIt9e*LD3RubOa&}0B;S~j<$4Go%Fe(Nk z2y!)0Pt7_~M99k}9*~y5%2olM!073CNHdlO_{`l(fq(c~KA&RNXJ=yj|Yu zPKdJK<-xv8^w*q&7nzrjw!b)8ubu68Cp>ktWh7RC#ktlnQ=|4YDIA2!L_nm$Bk#a3 zqUJ|wt(FH%6^ae0y5tq0btFHmu*`mT@VRDth+#~it=p~Z%t-jScB0B}-gC-%T8;pFF*(=3n|4Apd+DusC*8G~>FeF2~kS*mm!;c_0#mD8UuO()$W$9R*Q zdM%)^N=L^T(9$E+#RtE04`>k9&k7 
z2zDwGI*@Kyc1aL&^#r7(Z9`C!|EWr9y9De?(pu;@hWk=8_X;l^Mo2V>kVhYDUw}U{ z34cMek6noQaV*|opg2AHCPBSPyx)}=JVyD#({{#f7wn7EvLf|9w5 zTkP0ap0^+b8PcblB`a~Eq=t^`6!OOlxV0jxHCwb^wDsOLjdU$>!dDJ3DL@;$PM%~R8pO7*F3Ifb3 z`Ez9KAu6V;mt&dBwfX(h_ujb7bvqRNN_JNe|2kTDdkMjR2vug&U^-eiQgFUnQ|lLn zv253=!a6JIyCm=i6JdM-&W+0{8LPs21-L%b_`KM$EaE6;2sKpkK*;^%dVEc-9bw=> z8rZ#?X`>FT+gLqJ^j!CL`~BN8PU$u|B2fKk+R&Jn*|US$gx1L30PJC_Eh>?KjJFX* zHSW?vtTQh!uva3aide}N=6QEooE}O~Ar#Z&HLC+5sj~PHb^YUtQblJgb*?Ty^_B~LKe+m{NA6JOe<#n>&b7W!A^4nK z_asKjnyY8EzwxE!C3tlw0AeW92y4Lxl8Dtcbntv5qZj^EqJYA$zreiGHpZj!Fhp#D z%(_KKq$~)II$C52&Py|k=LT#+glHr+O;QMU#(3qE#fwrvUW(ZSPWP+h9VslWCE|CT z({qXZ(qQxnlJ^59P3g1pVTF&YyKSKNJ_dOsUwdtMIP_pLA=lY~rSI`q<{UY6^)ZGP z@^tMdXZY`|l&8V(Rp!B&SnE&I9fu5s&djnC?ZZEO<~E0}6=uCR(X}Haj59Rr5OkQp z;_R*oP{S{|kmVks1(4_X^T!-_F>l)0NGGH3^A-|(gZ;ZN1f7#;azX$;izy9}YkVWZ z^x^_L8KyTe(REQJ+y8~yD|YDjS>J(81Z!u0hT%mE;bB|(Il=kKJL<=mai1oS2SqTt zH`7EDVmX=vLZ1}LC^!-7L$;ruB{LLedn_!pL`{0op2t>4fCdQ=({uB8MF`ny#)-b# zyYgjLm7k4BKk^ojy{#|yzN_PSGb|KV0-XxDSS-Iw7hc(QT-TgIa+mN~2E;p%Rm|}9 z_7GA7GIoHIf!y|-kgh%oLswBtixl(L zki2pij0{H4Av}sUf{Oaw2no@bTmT@7D=)4?022E+uv0ON+c>#7V_{8@SL`P(A%Zu7 ze94}R5KBlUjKF$7;6w=it$}Fp9l^dcv9P07yMhC*`+QjHdry~$ zIrJR67lARuyuWHBpw+w-(0%{oPsZAB`=g>W<0Og4?}yBauxS6yqwJu>HT$I*L8$be}JSIU9|BPG=q41iQtW&5y5p= zEPNCJPX!WZoZufGxVJNKmKjNEh2T<{9|JIO)Hl7yLfhNi$Oa89@F+tUhMHu)yO>Zd8bU@MPMTxq_uZHshwPOY9fR!(jIlNJndp_0&y8le%z+A zB>CHTRY)fC75S8cG?8fs3akC& z7LbGhkqW|A&$AvBgpq-0Qja1ws`>r`L6FS7ruF<#vJa&P_mA}xwvaj*AcU;}(`k6= z*db&)%j|JO3o+@+M=X~E(S5a@d2GW3Ea}kzIy8uDCuAKN^i{S^%64teH{MaRi%DVb zgZhNRcD;Sxl2GdsBf3Ele|6WBaQOtE>(avo*FV)qR^-C6QPiY4h}VZ^lH35I@-9U> zVW+gnJek+HgBg(%JjJ)J>Ug%u?5)U#K7toRgrNq#asscY%Z2&IxDhldvKDbCN;*9R z`%b;GZVZ9XaM^XYg743%h>Zm76W2Z2NWo2{B;Zo?XcOQfUdZo`96ms zcOEI3@q^Jo^z138dFAvltT@8s?dc9)&D=u^ZG}`-crTh@fqKZ+d6?ZfPiG_vWe%Qs zA-3*k;9ZJM1eR~S#UKp9Mb8D?SGS!e9&(=LTaP*9nA#Tm@ncU7Aa>N#6 z2Y*HeFFeThr&&N;gwq8X$d*sE-z3ltQnHh1dLInrVV2VhP>n1->3U4+s$%xCgijx@UmooLnHqI2O$8b;6Vubog;Q}qx} 
zu-ZQ$6h1ZSCm=dApt!|+0I<9bhVEUpuJj%Tg!%!K=wdkR6aOq6v+NPTu~9i;HlUJ{ zP{pnFKd$NCRqA8*g_$x;BPdDfM68*#Afs-hVk5galpU@f6!P)i+%4Lki@72=sTN{8*<033a)PgKdIeXnl#W|uj{ zt$Zntv1(@bgKN8;?J^}1{C2bq2&eaCD+qXJOY+%guyIW=a=nv-bX+~FJO>mfcR6*g zbP-CnSUGU@P>%8==w9|1L}eihl>&>RIUi-G8R5=j@a8NSg|U2sig2YF2C zxLFbuugExDJ=UwHK<0I^>^c|frBJcAx{K$2SA7ENBO#Nq0q~#l7*}Rzh3~upu2Sfs$SrQx%2Gju@ebl&AD23rPYwadB1ySM7Ak z6NGBkG{F!gfxgP`5(@vvXdKitq>u3Ce5_0gLLbuET%5^UE^@mQjuz}rz!u6@M7<%| zlQ~^us{LNsd8%h^{>?BV*dCL@#*38)pU5D;Hx{lo6$ z%<@x157;r>Nqos(rTIL8Fd174x+LlB9t9iRqAU;5k%%K^PGj?ik)KYcCKLSPGLV@5 zY(Gr7-(mp4`4VVi6((PzJB@isQfP6O=Ck);oFuh8glR89vK8;Va2d9t6tvH}y62>9 zHnICr#98uH_p*?|pG-d(-J(CKKzb?#{@Ln$8?HgQ+3^q<#yxLGa?*2R%18j)=4vGI zJwx*06to}g>Od9T-6D37D)>?E73D#Ly-f4jQq*b}vQ`gvh9{OaIDXvXhAfK~=t7%l zDG@UCt`k-MpInW;k3P}Ha`MDhq3soma+>xtbq?8-764Y!i$YE!@2ztyEQs#=`4~Z~H5@TcI#yXWf?qn|x@KSa&ur7Bu(R*NB+=8xp z2CJuLoHZr=F|=xCxM0XQ$ZPa$so@SBHvr3cwHLv#Gj)(2wh?A%6HqZ`(ssML%UNuHuK%v4f)M7S837Om4A64-Q_NfIeNf(*^2-FFM1 z?p-pktF7cr?sYTI)G#Snh!}t$^3`-OFVV{B>LH17NE@B8>3o+;1k=9f83ie4QShZu zdGySSo*oiOUa;7=R4)L-o)At?xVfbh9*Rh=!M-AZ9;v<{U@#d* zD!j`En&t>p^F7Fa2Yc*JLgKex&ywh}U5gv3W?hg*fFz=dqh|PxK2Zr|5k#1iz|C%b zd2temjToIm5YYAZXD#be1_BCPlAV1Cl_Hg+o!3LgyvEf?U{7~ z(SSbnY0q`QU&u|ViAMn3`9SN%KCK0C^&Wd4Y1EO zv!>T4?&n=9B@}gX16mbuoVImWl~$4fXn{xCy#kLHUo(S~`8&L>ZI>@WwMkq1B-BOV zN16J4u#~p#X%O~?Np?Czkiu70cSHV6?P-w3wIfl}WXik^etDnT&dDB?CdG8c=$J9c zQh^GO0`!p}9k7N(%kn>@s3fRV_kz+>&;y8^ToQBN)UBqj1(Fv)Ui4icdw80hFf6-P zmSEbW-@SsZAD)W?<59veWc$yA&bC5o(DeYO^q#f`)_#v3ZkxbQt#7|VQ*n{ z-a>YvNCv z9H8a#KL)C~K$DXKz0$P=Yhn-;^n^9rPdo9Q4KJoiH9Ev&w326NDM<-+@KebJb&QfF z#a?QbwDk;}ZJ0|gHy>^|$P4sdI@$|DR;NXF{6TrhX;CSdp(;TPK42r0+Ut*LQe_>w z>)4Pfe^+dG-_Rpb;j;=t95qxr^6Zv)wGj*XT)Dl`>r7+j$W;Y<~Li$Vt7ZHaqDYO(`@<_zG1pC3W&Fooc4LG=*62Zwxr1~Tq<=Bkx~>|pSG{k}>Jh##avQ;Vbx-YH=dZ_bvA zLZ%{N4xDXiqEHRWCful{eagD6Uc0$!PRHsf)^hC*H^`r#j2%r=7@M8m<5SvmXy|u z9K$~sb;|4zU{1@z+PV06&c5zMaxua^8@kIt5dIXnxz8=+I;Lz04TPb{N$Z|rpJb~x z1#BO%QiYyL`&K=a-^j;0a>{>Md95=1sX~5KC;5=)mfgv@<#O3=W%5`c0|`AIb^ZHC 
zdr7dwYczdjcR9;W&c|Q-`X+o$&tjCmJ|${1|I-P7pSk(rZUvwvfVB5`5OV9xWQ+U0PUPnggwyz@#xfW3$CuYRK(ius z-0N+n({ddnWG`rTQLsZ^EkO?;*u+L6(n>oO84#tY-J}jD3Y~?b4+`ddrnrOsT;Vah z2jnTi=p2LsOTieJA9=dqtORW<`*$&bEFo8GVE$fTu-HZ&?s1fd_A8yhur2hl?Jp1N z^zaP0SXD;*ZP)1Z{u$C{D#G3elQox*%r70~KL%eOzd5^bDQud|LlH4wAbc}Epkd>bJ?utdhi3Rrb;&e1k!?WGFoZ$ld0s{qeM##uyJ4 z>HS_EL@u43eT&MTC+CTmN+%OzPS5Lc7~ZR4X+5JD zkV=1xsXAf8!@pzCXla35}-V&*m-t8bcg%f-k zLA@Mt9bnY3*r{Vo_F46=fYRP@Zlg!Vf?z{@0aBZ~&#|Wi#W@1^MZ`h=kotTf6|xku zZ0vbPieS)>7k4g=13^2cp`z5DdhvZnUgoPY#?{8zjydw#cD#Gz!&f?`PzDqQ^dON> zM+#&K7yh2%$2t7&!P~wIc7A3g8Y^@1n|&ki&9jGEls2Xo@Hv@t0+kU?NNpN?P-DE> zTNvMug6c!!U#5%*ja*N;(L4}`&21P==JR^m?Z4q?^3|y#3abtc&Y*I( z`~gD=^p zszu+XTzMF_;JUVT1B+zhnU{q&O6*G3>Ng#W+)l-A-C7xST&a?;>|9^k=+*NL8&6qo?qpoz zD<32ScnI3>p{MEUY=0AG181P#%`tsKezy$u&&HzqxrT(1Ujco0f4{(y=8wFn4<#!F z+wR5s!DQP8{lN#_GuOrjf8zC5M4iFjN*H7Lw6i~%!5!B|Cx_-z6Bb|OG}#}L{h23x zO&*o=q78bK75+1)YZ`Sjq+=f?+FiVg3@NQ{8%b=tYeVQLM^WPoYEFDDgo)L7M8wg7 zCSc(ipfP|+K~h?~?fBv&KAlu|6Z3h!by!|Vs*jk zaH04LHS|QGy{feB<11JnSrXwBsx`}Nq#N-}GS;6)t#nv-7JB{kdI3FAi?)w0H7Tub zKlBx7T6K-#0Zfx5(@EdX<1%&Y{66j@O4`a@eDM7VeoWLk(_YW{Crg|0I%~H7y|m0) z$vFzakW2O@wuiDnHQL{<2#^H|(v?i};uN3K;*p9=`1WFJkEM)+>)wcdDYj9cYw`xv z$yc%@e)gx&MsBCHHNy-_9)uZA%;~WGf}t8f{#9*1{Gc~y4F+q#;4mdyK%P;y->Zq% zyv%$ib3;yiYJ|D0yX)WY0tjvT^&uL{il7inog5EH^=Ey|aI@$K*-+_C&a#S=^Lhm) zcXk5CKd{{VS?^de{!nCN&(6bhGuMY~z`M;k1g$DL*$?m_?XLY#xCHpeEYx=`3_0KX z#;23(Ql+RuGa^j74^^;^O{EvT-`SbmsZ)-=I~EyHA@Xvk>Fo)-yaqVm{Lt5K5S{`z zzH*xHVgxW)ty-BIxIK$L*7bypD0BRu2&EC)`qkgVBrz9PPJ^KYifWl1k1j&)J;MR) zmFoH=EQ~q^&<#!GiKphpQNKP#B4U*8izTp*v3|SNqa&2!Xv*B+t{qS{RaW@f?E6#5 zXa31GNT^~h-lhFEFME?n#&e7+i8Y+HdHHGP0>y^HogN+Len&03oU2bZnAW;aIvPR* z!!2MJVLYZ)K$&(gv8g;k2cf!JN(vlEk(1YU@%xIg~R*Qq{tQ{3=|6Y2!Tx(utq=I4%$;EsJ< ze#?eA7!k(uw`e5H!CCYVkmBoSTU$We2}X|&Qv>=%mu3kP*8UPv--#U^)ziB>tbGj8 ztWlE;6?U{wv#}anJefX;PvT@W!d^k401uWCtn(aVkpV^PBk6hZilTUYkwM*WHTvFN} z<^mp9OeQHXv?Tjy(^jiizpUsdZUo#kk@o#)s+2!o!*DkvX-{rDocx%>-7}M551_Ux 
z7}Z-Og4%CrVG`p~C7scfy{7{gv!0>tOMZ9F>j((P*u=a&c%z!3C1CZg7Y(|VMP3;8e!oQyBh{&SJ=)gCI?H^cef0ii zvz8xEBvQ>^>d(tO656(=XWr7Ak03uL3z`HfuL_q_p+}JF^8jz6PfCYu(~=j{Ws&XD zwtvQnLs#4K;5pY=lf{6!bJA4h-dz5r=lZ)P)Ojknc?XqCE}<>{uMdlw-HTu*osq#D z4$aIY8Uay?1r4{ORpo@}F{1&Vwwqhsq1R9zz0yi?^bM3&m&M328YE!g8_btQ8Gz~( z4rN#gkoDR#k3cR0YQda_i%I)TuoxSdrm;%4fve6*RuxYDe*C^OX%pM(ykqCdRts*7 z*%p5@++eyZO92)WM}wb!qW_)*w2LtVG>hg>Q2S;+)>N=avQ8j@*%J71zs((g|z03!d+tDirwE9fUQmbm($ETrrAFz7x20kWVek~?K{aJM@i(RAa(WSgMEEl)7^a*BjC zV8(JY;fb*LfMeUl_EJ#MDf_oVT(00i$jzG#gAU)Q`(aq;1HSc)=oj2%)aP~mMxXt> zsM{{{YgyOutipy1`7uhiD~ve0C#Tok$k@|#Xr>%Nd#iv=$-a&rmcddI$k1=~7WS?i zB%e!Vct?^I8i#>nV6)|@f*SlkcE9<0>Z!`!$rIPLzf`0k&F^>WN;_2Des!gn(1?l$ zcB#$z4lcd5dEmZg`reOz=m|9ogG|TQ1^8B5D`FI@hE0`nZAzNy{*KcKIMhOz4$#rNwh%n3L9;$>wGoh+yD?PV9`*z|Z_3INrPd`}`i7G_*LSANTkk?6xARX}pUbre zqtP4A5!2)2`YQL z@2)EG<$3y?!7!$aQ;V2=_j^6;2duUKui_wGqdaOg6v ztu$M#1QmHv^LkBv?e=LR?^t(nk85FsCSuM&fu5*E1lkEROFhX@wWMoXwIIXw zcbe%HAZxXL<3k{zk0y5<5WBmZlZmvU>#N?-1xMt_XvKf^*!^ja+RlGsWt>}W4cPrV zE8L&UN<^TJW-B1I#R*=hX7D%G$46Piz+^oh1Wt=Fw8D5onTvA*n*y05Ku9MUC^mcMbXR867Z^6}Y<8w5eL! 
z+81Wdwb(<18<;%|P7`4(n4pFa)cx~YoRTBZ4w6HY?Q6fG$`yr@f4_p>1mRtc|JOw5 z_LAu7weDRZx*`&n8|P_4YW#*p%_qu|f#%BT@#?xJI?!-3pyCfBy^#BHCW`=!M}WVB zSYPgbyIv9G4aZdWw0M@@BQW*t6##m}0lV7|B`sG$;jWx+{sGuR);%xHmekE4@(IlZ_!)Ei>jd|8{JuN)8%@z>wVJOzX2Z(i)@$3j?y9tYd; zgwMzm7^W!#?|q2= zb>1`r$_+q{0$6~X?lizM=uB|i^ju?Q{ta|=auI7Tdo?B1enM^ZVl%nb`Fw_4$`$uPzRhbPnELHaz;5%&tHr+RuZ~M#!a2QJGU>AM8YOX5f>X$Ty zAw(N2#hxk5JQ$%htA&<81_ThWx)BMMz|ZB{1g1j<9Da42Q*LpQ^EK2|*{VAej@HO#^F5#W9J znz&3hq&8oR7;57@VxqQXOr04ANuP1U)K_CzVSKL%T%Fzp`}r~5q7&NBw^(o(r%Bht z1R%1O2@d}xdu)%&QS&rMXeZn0wH16jbA?v?ve4m3)>S31_9xK5Miwhr&6@6g=x`Ms z+EQ9@6HLOX5gWe_Yl9GO0YbRGR^F<0Aw-A#*tfjQ8=h}7F5|p3cbWjw+VtN!53T1T z7*D6YZSV^^%%7Id^W~sffY&D5XV=(z^q-O}}ja<&0OR72v>% z%HH_7v(PLsMiwAM531K5>~O3tCak`~bf@&lZM5Xyz*7t*cNPYCvIT5%@^>I}# zSz_eD^MRgkLmGX65b`yBU)jytflGug9w3kl8nVK=p^=By>(HdFCrE7E6o&4Gk?yWv zl4~sVQ;DPF7;Z=!Uf?mj{6)Whz;n7WL)ZIj5?mDg0^@UGO3j=geE>zJ9O!0%6Pxz4 zwH#vCEhk5s#%L#^Q#9gce*$4pYyn?PwW_tRX`lA^xgCbIi)}siyyTE3FXks(3PfZ1N-qvaD@@!zqy2fgv+EBy*87L?_(b+uW7sn8 z14ZPYE}(^aRsjf*SW(Rv?ANC?w2tRqRtng*-27d%_*Lgo*-K7LU1)SjjG|qS&Sj$q zsh}CWUTsM|B|+l?+?zTbl>P1k^biYe!ys*2CT=<}S|k43NKODhvQrs+Bs505e&dBS zB4D-L>}Eu6F+JZ+8T;uQP3dC;nyovD@$Pm+U`IL47z2zJ z$B1CmcDL4XIkowTtcbLg9lx+sarBX&O`yQ{*E)VHVGt|KkoPGy^%y4j2+NqE;mNi5 zP*XdIkp4&2j&4UvI<;mD0a@Bh;)*7GZD;H88vp|>qPkLFlTg|`t)cP*$11Kj$*>RA z+6b+dou+>+2r!6U1;o^orDu47nP{CbdRHt5&Osj}oHwVDzJW{69Q0n+a3B-FPmT`I z`!9dQT&yt5PU92n9b_FTxl`sv;c5-;qs~hM${n^#hp>H%y6;)k`a6)4;UO5@@h__x z5~-k90}LYd$N`Dl9v#1Rwf#&~QcMU&q=9vumji09VVF>xNp&i*+Mp!I$df^**?QCa zlbT=I>qR3+_PnzF?j=O0O8hBe^*XmZtDWtC=V8is^`kX|jcsq5X~K>2fD~$A_>?b7 zJ|d0?qf8=ctFv4J384chJNebVk$W|L251bIDsUGn{waE~`&V3+(+r}6Ix{L>RbDy* z7Yh#nfRvZIL%JB(fPevN5>(%b|2(dVo9fIStJ@Hy92XQUcW4{+-^j<;Iqf)jT(s4= z2mtu}P@=*J;n?pIs24uQYd!(m1s!pa2Ck0ET>*zzBBCQ!$E8IS;IvNgp6WOMNg} zM9&W7+bV-IxBLx;21{J(bMN}zf_onAYtHoALi3G0X!cuRXCKE4f`)$=on1>~y+d86 zqPECc~TG3NmmA}0^Zs-xxoT(_$Q7T=lhG^LGiuYs`u<){< z{RU3>t$g0);1J#V4$YGMV;mGsNuqISB`L~%qS!h$fe|x&EIKGgp^Qtktuy*4N*?b5 
z*b=3I;jA~+T0)2dVd0xl0q`+S=PrQaLSUlrs~G{o`~+pCgQM_xyF#K+Qr0W~EJmqJ zVQ$E$YGU$!<1TrvAgJS3?WS*7fMSxob`zX2%nv2er?i2NoQwlN;|o>qkxo)&-(>&y0*l+mrcjY7SnUHW1Z(k5~ll8fIBQxxH7gbbe9eNlm(7)Gl*N^RrI~_=*X!-Ois6KAq0`a)!p!{pSbZzP>MB z_{}Q=UcYW)JLT&b6za$znj+8L9y28PL&`Hw0}R`|EcO1c zK^3o$siSv(54_*e-Sl%)xc<%NKnu433Z%{xQ0k=&ES1ydN*XXb{982mvX82zd)a>O zkCzb*DQ&a2igmoQdn)|rKFp7`_+>048jnn#Uc8)pKQu|0Eg2pk0a~Ddni*nM-=IVw zk-!rSJC)1lxLxMOrx)xr2JA5E+zS^fSfQM3*;8$7W*tymT#>%u4M&s$;ptDpx4>fP zpzqT7(%=06It_HPolMciTqdpzgVPbl7w%MKc=P#i3*f_w=8l_`s5b~BU!MT&W-y4@ z`1}s#J3==adO!=x)eEu^Fgo+x4eB1at!a)`g$BIm^Ihm176>}?S%!43W8uK(fDsVW zN6eN(L1RY9eN&io%by5X#V^uLzM8`mWLjj<(Y{$xad;Bq6bWcfJ^A!i01>d>W;O;o zm#^L~#bNVF7^vXW5sGWk>!3U!!k7XO8t0^7f(&j33C=^vyI1b;)ykHat?%cuDYCG7 zJGvNOh^V-O>3TW3SUox)2#1C^Kg#>%DRZiZP>pc{x!bE7BxKJ84CEE55R)BM(MnRU z0H=8dzw7E$C%~`Wa{(_RHpIyjcn|h_J2u*CJB<;gq|bUsSx2lgs&BZz1?Oha^?xj0(b6{J7n3>gJ#R zoe`KvtOxPIwMoJVFGc`iOL;%YL1bCtsQ|Zm2D|I#mrlG1m80Cvf=c99@}UEO0gU@? z%D6vI9Tl!xzRVQ{o%N}(H+3+3@C@MUxDZDw6lLxm<`NV7{I&_(6-qn3w7j!d>S$Zf z?t6?(u484oc>MZIN(SpfYW6gqxbs1?jn$0!BKHNoml$C7o+`L!o$an`B#)%QFbmWn z!+#JxIWI04OJPk?pVGFnGTN|V1yWdU*B4N0Ub1tg@2?!pkR1G{QlZFq53%8RlBz&h zWhX&g!*7ty*U8p%clr_-&I@2gEVcs_5K@@=b}@?#BUN}3MeFBuo%yCA>~~O0oU7an zI!NqBM<5jzu*@+JjL4uy28zwkRTcCC-$f%p;(KzRUy$9HRG;hn@CcK$g`9aC$2wdB z9R+A4K|PoQ6fI`>w|U_9NAp+J=+^RjF_?JZzC~~4N1_-zEVB7g(J#OL>r(n2G^RQI zh-Ha{7YYx`pxs4#fhHJi_7DPmg94?fQ7@ZE?2EJ{bUtpS#^S8TZS%*J8FZev4xZ?q z**Aj3=ZcT3SAf5cjOOoa6M|{Y_FZx&XN^Rx>;c z4F1l2*-3R}9DV;0(!?ltUYY_i02{i@078C^YiOdV)pT*;gf0R~MPyuqq(a^-HQw4V zr)Dq)eHqbzqaHIMwoqRD+7U7%s~FAxn=Z7ltfd0t3d6 z$w9w39XcMbM*_HG^kzWA+Q**j=CpbF*rJ1#MAz}W=9-^$=a=K3vMLwL_^@N()O#5U znVF{WamrVyTkU&=L?`77I<9`cFUGht7mRb00Z~(c$-^&`8Yn>4kMJF77gs?17XBW0IiQlyi)Z-Q8PqU6|By{%QNYVLLtCbv@1f#UutuW>m zlj>fHI8)Q3hWkx5PYQZW#VWRLsrtx;CDlm%Yr>$2y9daU_!Xo*D>1?zgzQE@s&4QP zm6e$Ll`TpRw zgU$zcBV|hho-7*KmUT$bxx7L#x3Rw28`)`t+m9h&K5@|*>hdoyfet;%qaVz-2o0rw 
z419oo4mfBHzQ3Z`l*+>tfyl}A^z-?)4@RADnUmBnch`+A9{g#kmDH)ZcSiSpa5dh#mtoHg=v(6mkNqbmx$-(Rqeoc#Dn-$D{OMsPj1Ww)NfIb z^qPe&*G1#Wky1)tzDH+2_nq{Oz3PFab`3^9 zmr`{$`D({zC}ETO@O!o(>);2wOo^p?1Kdc~(N6X&W_#+)EgY}Nv`9TKkB+l*1QH=g zt;_?XGh5U2I1!}m)SRxjor3l0ye}?$K0+5`MIT6g8YkqE6qlc0hImC-@ju|vg@T5j#&mt;?3t~Chvi+F5EpOya(cVFr?d36| zh~>z4Hv(AVi4}H3gV4_qQm@lk=}C6?BH|;4p|_s}3K6%t2&ViIq}(F9W(j;hg{{hT ze%+g3zWcULCu6iNu79Ge%8hI9WxnBlRl|>JV%?QUayoCfKkL?!+bz=aB%FbP?-1_0 zCkIy{-SpELRle2OWmSAw+JE$c=+2naT9-JMpRA^mc1?~0-?#3qvgdl1o7;81-}+o( zGQBYE#oM-!PSlD0?{lM?Z<4YHqDv682-a#F{~INtZqJAjnddjYYVH^xf1E%)%!$9D znYlZrPuC1tW)fCIw`&xXXEma}ark2dClbFVoO9VdR?G*iD-eARtcLs3=L^u! z_Ym=#b@#TOb&XR_@N*$Yie?j)*zCkP4Ia8D1+uO+ky$L<>2_EB-O~1CP4G>(+3`tL zB4T7DW7hBHO-=Bh98iCGiQRsB{YAr69Cb0xW~ak_gS0QI*mZ=GPduu{X;e+}4epce zQ7fCA4x929vf!10HAvtrhCVbD2+}?ybilj1@wYUmf5yJHHo>)t%Kou9CRbuc@l}=G z9+Ms_dpX9Ki-Wod?W`T`X#6QJ)3dIR3YeIh3UC*z9_gdd&lfEpZQpv+fP*BcvS^#X zbP+p^=vcIW;7!`2)P%4|I}y9}d-s&c(_g?Mh~2>J&*m0{(Gep{_{DO0Eku&dpM7G| zEgL4;UsJD{+kWwF;m1pV(#SLTqz#jtr2ZDw`?o@MF>P;7t`BDnn_SOKaL#{)JqmXesaKr8gMPrHnmG&hyg?Xnmj;{>%bY=h!-Y{vctF={n6 z%g&bTdLe-}bfTQq?_ev2keB8&Lg{`xS(*30~#foiD# zN7Z)+Qu&8(=XBIDs)OvZx9pWNj$H~NTPQQKGZW{OWRHxHA|fk$mT|sOHVKh=WR)n{ z3cdH^`~AJ|@BN?W`8=O--`92B*Zn#DJOnnGwm7Tls8C!siq&jdSYL7zS$s|UtQp6v zT)r@u=tqnrb%jDZ_vJrspcL+YNVB)}v z5ASXzUZ;bBh%=BWS~mLLm^Rh@r(BeAd+vvW{P?Gzs{#~#R|&(%CI6``a8>?PCs$nj zXgs6**@GDx(UC#*w|#F~{;5FT`~OeCTNGn2rhVLVKQwOUkT?2SK&3v*n{EADx=~M< zzfE!%8#_95#VIbnF}@o4&|1j$;Vd<>hiOSONnJ1+?1geX+tZSV1&OftKICvb3&`nr zmfFG8Y1;hk7aeX@OZk)S&cjH?yVAaST|dm2|iHaR#NKQrf!55+(0E3AxbV@b@K ziBCk$kq3AUe4$i07{YrFTw5R0%2_ytSPa95#4EZ&zmS{&>EdYZr;EzX1hvclGrrb%BQ0(eH&mybI4Pr)3EV@(gARv5je_JOsP290OT_K`|CNjJlL?%}ZvnphMLO z#?WeFoK+E>!)E#B^{6{YQgt9dOTmZ)LC0NaPe0`qSsJQ+kXm$%ylgLK7pN%j|l;K0%+eg?74~RxS~zI~W`Vf7HDik*LAz`EZq3|Ff7@ zo;(r>8&93`!!Npk6Qu(d=H=ZMd=%HGFYk2#QHPl%7U5smq0Z3w7y!T03-6*UQ$ z93Qt2dQCC)y`4rjycxVab!bu@UM_f2!0@z^Tz*@_1&k@h?i{XY>HH_x>a-wqG*)13 
z&1WDytoKTMK)rlL0DVrdWUGz@Od2iOKJA+qoD_%2(2`c*0l5MG$ODBPk~C*ZsZHS7|qPk-l3lpY;59KcQsNC7CJ*1a;NuLY&tg{ z#{d-HiZ}4487-oNxYmkyeqrU9Cj*RO(DmgiCes=)BSQWIz52!#zbmZZ+=LS2Hq?H zZ>F<|IR|exCjW8uQpX~%{Y8vA-Pa}7B=(=T!fPTF(9v|M%FkliXIK>PlZl;gykM*& z+HDhY@&T;eSx6V`jy0^+uz$`_TsB@avXp%(eJR(lm<|F~4g{HtLY=XsE1^S(G4`E$mmSccwU) z`Pk~MF0m=M$b;`cdDc(pw6dmb5W~GsiQ(2>4i_1;^De;u6PaQ2oy~cg4;jzS%tlwI z$&z?w^-S%(#;6c=k*P^3al4<|2ItX|JS3wj@yB|3GyH1fCv(uBN{eYfT_YJFAuk(n zAl<|ROJp0V+a$6qC$>0cV(BKMF+r2#!eYxVY#k(eR2WsUL0ka+;v_pQikUXm{70hj zLdCXfSe+mMjZ=I?YrBUsOhXzFc&W~0kRE%#l<>5hpL=Tf<*>!~+{}Wz83Q(h(#xpF zz4J%?M-v1&R7rn?3tPL&GBJ#jNZ30UTz3S?g8JxnCwPT1>geK|xxSyZ=B|9~WHV?; zQ>#X7F)}3+%h;7ryh+m?-?Y<(s;7~EwV?R$%eH7fOA^CjBS)Zr$=FFbUr=UIIZ~*! z)o9G$y<}U&_gvD9q0UJYx69Ua!=auzT4cW_H`#!6o%4vT*ddhGAwoWC*C@7Al@CII zhbo@;Mm_BfiiW#&m-crqY877_3B!A$hDB{!A9%*N<=~4V6})D$DOrcY3prrrJ_F|C z|Ko}~1`Cphg2VA>G~$8mtJ6aj_la?X$md|9F<<5Td)chTAN?bYuHf)k`$9=EyC%g? zwSF?<5K5MwMAx5pOsyK0;16VO$y&Wli+`GA{FiOsES(#?IYnt8h5VPT2)9TDQ8(c_ zDtBL@Uc+ZU{vc!MLf~vkhJj$zY3i=@=u7xzD0ebcV9#=T{7nfIfQMeN2`bkTO=re* zPV=HZ)77c?s2ZP~wTIM-5Sft+aEMrCju^o)xP$pB$O*urLd!n?fe88j%)c?L>$Sch zx`MaN%`Dg7EA6VB#eNd2P?pK#fnlBlUV{sbWiLS?Tj(gHf=ls5{MBPVs{v zW?OY)_%P}pRA5zK?Se#scNs-#rI^Pn^U=E*uVpVH2k9FUMT5z)$|y`IX13tT&_V9T zP`az4gSXtGg2qQ!AGNWfFEiHJ2IOw99~Gb=W2KnA<29$dpEs7TkxvN+;1V3BTOEd? 
z@I?ls$ogE#>TyRHc)=3Y`f=(GZUA73dns8*d}&inza$GUNRGDYx~g8&M3-C*8RmZ+ zs3qx-gcK$uBoShC+aVcTBNi#q{50)#%KUn6)|p_v?DnX|bdu{~u)?xk^gkv6s9!QP0PT)yb-Kzw&J4GuD+Z0K+6)=5&J) zwAM{)JHzkch_Ng#>1R%kp7+so(Z2-n>EWT%%HOWcWk8BJO9=9T+Z-f+V9u3HOPWq+ zHhva}xptWOOhPucteN=N(<)|9_~Nk`#`d0$1f8P77{5bsu)6_`wvE^DdjXE1t#i9x zKUlV^6^{|SPoDnf=*R|PO+{Fw3ARUavaUc_QDXh#~- z!LcCjk0f&BY+m)II|F-@&eXz!bB_F()01{J+(CSsbW=qkp#9Tj!AE9BJ1X)G>co9i z(U8v@JKCJVr743!EE(EB1lOils3Fe|(O#P)K2umu#GxuKhef0%`R;A4#`S#rao9oP zTpnw*N^)A#{&PHJntNvps`G_Wm>1fxC9YodVgvQz1yQ7M;ALy(Li=mOZ|hV@aBKcN zma%_NfQ;-YMTXH+F=2j2`TX}+=?OH1cZrm9;{}$@+*}O+1?doC^gKBN5>_^4Qo->| z)~V5O6oqpvkAp5 zQ!`%6juK~sdn*5%R@9l0V&{y$Y^~6Ion&CMuybes@hBvCT&X6}LmDMY2Y9V>XziQ0 zpIIdfYwvnzQ2njrIm6M7hcKteTF~1=wx;*#;An+kruLsL4A}LbHisentUmWW=9UWa z3c+Rmu{K9$$I3C+V3V-B!;wPMZ1KWRQnHYc0E-}wodmVu52SeO^&o{p(ten(9z+ zOaeMS+@|_=LIrRR0fjtc?k}8;ishW&&-lCrZXS_w{#%6jnw#GJw7r$X1Aw_@X(UoF4{!?4Ia5dcIpvZN$9kt9vYt5MyovZIe{^Uqtb^D} zyG|7I#!Q6+FJJ@=!bmXpAHxB(BFM|^c>S|}W9!HTJ6qbXr{D^4aY|yk$GgXH5ya=L~YV1mKZ@6=jll& z4y4ioHjiJeSU;yt!=aqJ(aq=^D24hWmC?W>wm(`l(U(u!xUth&JWZ{}i-~D{DzzIt zVR9J$4U!4FUp0UKL97oLhCU3MiXSe&A?1tSYRXWQq8qJw7w;Q@Zg17dx7C=Zec=ul}L*IskbSRJ7P z7MY;D?0&5Giwp`QKnO9+d|Fw^B-rr|Pb|Z;k=1&j{_=+@ZZrP>wBjULHPgfK>U-xu z={vQbgQJ^r?z#iJ=uxyPxxykVCpWK}j{ieEgU#THVKSI~6_|awDlQ3-Bm@av3uUxz zg49;(m?fog-oM**id>o02owmkO3Sc%FQl1geYBNbuxxjo%S=9vUX|Ov9gqw^q@I}< zpR!9Jj~hZB?{DgJ%1qVFb8pa#*#umV%g0Y8wm3QpQ2fMVXhy0c>w{Hs{t&8TLE~-n z)X{tZih}k*_AY330TS0jUK2FRd0y$Kv?XDMCj21g4rx4VKQ;@a^dykeQO*_(M8R3o}+ko>b<%ib$+-!31bN*Y8q3; zh;sr7%z&g3=RF#k&V|QO6Ce;>(vk2hw#70xek*0K>X^)#&}79b06BBJW-iFP-+-jk zLj(?jRMDPSM;amPloc1k$$;6k$-8Rkz(=TNeR#qRB5~P>cFi}`eKoaJ5(LiF;M3TU z7dRjNXX9DtdLUR~Gm!w9ZE}Jap(DD;EZh{)yi0@QP_=P&eUQWm5egL+J%efvsnt+& zUebWUzC|}RH)vC?7jMIP4ZmdLT(4J>YQZwxv;^OX8M%mW-t=`owg`d%1r>U5_au_L z_1=1fo%(QIoacnQyEu>eTXl|L!T?CE$>wjL!hc0jd_8U`))GvsX6p{D>NSLKGk25_ z49S+J&dlexxbizw^u*nzs5|MiU7wWvj$XWt;o&Uvq*t~|w3)%8?Xt}k*GDVANzWnl z#u3>C#hckR@O?a2^7wcf^%dteR5{XdNc3t->{sf))h=aWV{Fuv{-UN834}J(sPBU> 
zz6PI2+>0i}tHJlTKp3)P$^elR^v&-nM%oE+api*%ufJbb4gpc@+|hCRll>g=C}6fG zmvoOSn;YUf1gYusg;v`~&G|Its&*(?$+Rd}qB;_KG;<+~m1?d{kW*LBYln+|_^8OC z9FQaNThoh1+%(GqXvWsvJ*x9FRl(=<1hOD_3^@_o0rKI_4_zVG!jfN=?Y)|C+#db3gbkk*k==< z!p4Ne@?{{r<#*}yRUj@f)_H3rZ<}h%WIWj7CubY85}Vjl^M`<$N_5ct6B@C?Kq#m9 z8gOJoz3(8J|A1*6X?&0mNG=oudA!r!TU!kZIm9yJ^AxL{jmi1{iRPlu=;J@j1cM+3 zN@ifwKP)nzr4XJ`RtdYo*X{Ho$b~eh?A4&12QKlPkNk|8%$SauW2hDs``}i4 z=4S2j<~fj1&kC+A2iQyIc0W7{F!CZoC%nuHftk8RbvkbB1?+8nDW$>?vNY>C}q-1K9zX>J=AV7<2{6s|%+ zCyac4aJGwlAnM`Nsu+qB3;=!|Q_9XKkdZ;5>57G@%)4lHk~xlJ0`)kLOCYSIkoRT^Nn*YOZL5 z)bzPK|9~f!J%@qI-a1(b1LsC{>4_!Hy=065BmjZTMCn#yqt=`IUj9Ih%@NFM@O93G z4YG0?gQ-;Y1qMB#_S@x24sAyr zIO5WhuFs3E5YBp>yjd`p3js3z^Z_q8C9{e|aLkAXB&j!5=ndEe7{W2(dRqp9>U&SZ zB!M%)|BfOHyo4|7apL}`rAK9!fPQg#_QtW!ne$A`bUk4TmE;W3TV2D0ULWD$l|97oi>k>LD~{eX=deNIX`VG%YaIR< z`F|4oSee7sj^n7lrXikVPY?U%Q~KbD#wcaemYMPzXRMHI12G!dK$4bNO!M(WIO1{S z7(b`a2b7dS`(Z7i1ZwJnC#*s#t8SlY*RyiR^vs?{VcHi3A5<*e=1YVyOmqXYPCo64 z=DMX%X9kagp0mv)S<*qlM0SVDQt9Q@yri9_YZsj#%aBcc8~7xZTD>aBMD~Vqyz%w> z5~Ca|43mh*>)*QGLk*HS$1rTL(V*6+w>gGqfs=TVtn|0~@21yMnn+N;rq#S_i3IpV zge!#7A_}_=^$KJPUJ_4t3BP;!P#OsWpcI1A%X#Qa9sn{u1&62mrVEz_@e^uiQFr9E z{jPtx54U}}*F*C_4Y6t6gAoW1&Xf-brBW+jec*=~(X*MTpcEWbdx9_n90PiLy#sYl z12)DBq7PM9Zr%u8J>e8OiKsTVh)8C241h~M#Knp;2V*GQv1PAt0R*k4+Hig48dnaX zFnkxHJq8@6V?*g;{}tPX5v?jttjZVaN$dhcXq?9_0boC_^v}=WksmL;>A$EdaScf=}&P2kLc8&_soH zB1QOVvNj6z>e}q5+}&m$%NG6XXiF$p#8I&%V6h$*INZd}>x&a)P?yxP*_syJX=_{V zmrZyG)G09A^jvogT%R!c95q~|HC9BqcFO!tVGIBbM5ZVZ(Q2geh#>YL6upm;US4kUO^x$2ujwgI=a)P(ff>Jj%slX=IbxFjA38Q$ck34QghT0Jx# zg@>B!GDf`yS;-z!U1gl>@EeMhMqc0GTXxNt4Gi7VtgGD&+r6Ld4UAeZkX4qVviCI76*#{k1xLg>nAiwEa8NnXH5uSPU^ z>%f^J&YWM&u}?%6(V3H@-l!)^6C{zX1G{^u@$5WWj3Y8anDT9KtVK=yFsr38B6#-u@;vQ;2SL6|Ma`Eq( z{S-dc0dmN@XO*Vnz8!Ck15}5UBNR(AAeX*H%NF$io6n9N>!3T&lI_ZMf^3G|&FZZa z`-~XsP&81M!p>Av3aArG0^QJaXcRm~#!bF{t=Rh|JuCBhl{nFrw!K%YDu;9KYCgVk zVgS8^Moh(ciiJSM|$x6ao#pL znBrOb?ti{>5P}s)9T|lK$P-p*35F;rPbYraZ7}>90BYtuMX`mkVDO2Y{ce{4E=>+a 
zWSg`lS8~}j5MjL~{W7Nfj~>bf+qrJ#Dd+1|dbdw|=`gJ1yF0kJ)DO6?%HWnTPJ~6S zxbb#JGDgUsFF<+EJgMTvg2pn@eAs#^TWrT$N<(8sPPKkayjHL5XekYbq0Ulg@thLr zU&75KC+DKWu3Rj@4=VoO)ZK@_ufJsfeqafkKFDbQ!wD`J!yMYpodp!lg)T9^gw;E9-QXjJ zNivp7;g50RpbvG{!5sWPI^jcQ)&Y0x*8-+B;IJ0i@>`$;OOgsC<5gHm9{08c_zIr* zQogPFyUdA?jwGx|UdSo}$vyyXin@2fKLW&vo}!}G(qT`2gd;PcOnOBU2ej+!9sW_1 zg(?phu{K&CKLSOSUld>849UEsR*Ti^YG@4J1gXncVzjLxh5t)Gx;KBN1C%1Iv6yAR z;dOYE37awE7v?Z)^3b^dH53nne~G(&y}oGpR}@qeS1aY79Jmo$$b;+h%vZgJyuuM` zWpk1ca=u7Cuc-G1vMBNSFsl)fLMRRyu9q}y% zx!3+W&R_N?7)YQ5RxdAK;c4VE=xT>Z2<4zcMdY@~!rnB(iKzq)+|c|JO=c>;Sm}nhoNtVZ)sa| z1k&E@6b!TG<~n|`JjA6%R3*6#MA`p(p&Iqd4uQ9y0EPJbX?Q~M94`JE|BGB8$={>_3pVWoHPZ-BQuvN^&nY-S3DrdiF#EtkUopXCoM za)wW=kY zPtLhc_vCj!cJC3rIJP4%0g9g*qPJFY>vP0Q5MrF^BN{A!h}ct)Unf_5-DnkfWSeZJ z%pvDBGj^ovwf1TnqsZZpy@J^VMb^RN#O1YBOzZs2yA&O@Tx?kk z?4O#OltTCLd*luUa}S#(q8vMxrwl$9OoJh4r>7}p>7;G}c%7%%l}ofJ)HlQkdbcKnnQWNi)~CLN z1wax$$DU;4*(Qy}8lyKaCh)!VOoSq#xWPvq_=QVTq~H~$l1Ta<(7)eh9ktBv7v?qlGs+eW*-5WrU8WVK_K$?|RU{p0~ytvKacV^PcYtvWqQuG^ZTAw)8&RU2DEFg zmxOaabhR%f*?7^SEfL>o7fX@{lqZRgE|OO`YwMrW?UBF6krnb{*Hby?uH}cM(D)mK zQeCkWsG%7K8S5_GCOoVeGV@LPE+4YrXW~EG_02Wo)o+uNfwNPf3l#Zxg4c)@k!5Dy zKX(RJZIfu;0Y0x};(ceg%wVYS{EBza$2ToBrS<8mSxlW<(tBd0wR7jyr}OzDW6D31 z7^fnXxCG`2x72YI<5fUyR(B%W-oep(jtUYA4P1f5@D|;hhDcg7u|7;x?s}9#dBLp> z4a%EwOl4Sc{e$yw_$u^*Du*eFAmT&YcWV3Zp0&^FgFv2W#HvTwRj4;oT-89*=RG*j z6rV_-eIxZ8Gx`LrBcL7q+7D-*%nYFsSHYDQdoyM}<&*YNl?c8v#T_F5fniwUsq@FG zzrSl+JeRw40o#t3m01c=t-Y-b0MvH``F4*Px^t);3d^%Ort>jkOs;Y@tU@anbAHpT zsRSwCpyuVerAGegQ%YATe)z*ziZN}{{wp6zyJ`vT{S-a8OqLK^ao!B*xWLMM=jkpc zxXL6HlpA896%xL%*KmD$<i29{6!mB}?{E=Mu$HLfw87(WJ+J<;WjG>z`yOZ0 zonnjqef>N4v?6w!{{~!MJolSUDfTe7ZvNH;*)kbaYT*ZJh~ey_Xb4b39vx=9l0}{D z2+j%``M_N&KKhV~3{c3}(6vni9Im3&OXq}dslm7nssg-yx<$gEA+5Pr! 
zlx)Lte}p&!fsBHVRu29Wz<=#VEq8x>=|T;wQ4{_QqI0P?Bgk6$xXGQtZPA!+aX%Cc z-xk)s9q_^TDrp1vIv={O4xdO>SLT%VRTA~s$uLMWe{cKJ@;GdJg~fN!wTwzY1hp}E*O>SMCov`jL$0Ud)7^c>}Qb$hnrbMEuB1}Q6C z-1_HV0Sl?87tbG!?c0)?r}IS3pqu1{v)Cs3K(rzl@KrTQd7z9 ztCk%a@vE}fUrU^2H!C|U@)>j1iu6ooDEntgBGhpF{VndUj7c=^PrMOX9aT{ZX$y5C z{UZ+vJAhtpo%&XwswM|wCc++{^W5TuNE1Y zN^X?gAX19B6cQW0B4kPeLtmS;%FUH}@DMyf2m2rQcB(5-G^Fop88d`yf9rIuYc?(k z%~P`|vHwly_SkY(df9!`l}m%e08gWwz&Fcb^8)ugeS86=MWoLC3tZYV8_GAn2&Xk- z%SwqRFB!YOTzs(kibJL=l4yOrs`E`-;75Z^Z+DrFN$!Hv7n4%phcGQD5Z>0MJ5$+d z9HVNFZO?*;&)k|PfL=P|G4RDpP|T((?&+Ao6Q6;cy9N>}H)U&qxUSufTmi`26&B_dSxJZ()UY4J-Lhs`wk+RL| z&nUReHTtPwn?k!2yJ33zSvqXp4Yz`ByXp*dYxl?oXfK^dk2T&rw%PcaExRch2q(WeT^i1kH11 z0g_?jw7Y4Cw)_#mRRyK!TiM8o7BF&O>It~I}>b!PCFGHQaPMES1$;THvs5FOa{yfZ;S#6gml~)vU zVSh|HE>Gcz?pHTLW#Qgxk6E$m*B{1w=g?dThLPqxN|u8LbD&`04MzkzSSgN(mWKSp zlZ{kROgOUG;)b|+FG3>&;Ak3DoYWR9*qa zNf6&Woxg4V>=ug}#~lY*lWk%W4+V!%it(mmgU($AHrDqycE=dEq$8}{6bHWj-T9;c zZJE6r#NP!Q!cF=efF1@%wQxgFqS1%xq@@QdwIz?oLZr~IfNdrZ{lq-!q>!L-QFnHz zI%(_6r_GSxekdQ;cCLxpkPR2N=pS&y(f3^EMvgsFfti!=RQ03!`V&vR?d!)%cN+B% zYWE8j66%joWEd|d3)hX=UWwvoTeQ7t8GH8Es0SLgiTx0FVf&kSt(NUbK0{yQ1kc9P z!)8VOaYio(`Zv3~MUtvH1kdJz@v_0I=X3Y3RD-L!VfYxB&T`M0D>CShf>|LA{zHWa ziqY1$c9+qvb+D!zZcvP8gBA*E;) zZa_Q{l()=tO}t6R^|Iaami|6I{A%?_Ih)~IUS0GT+We=c9M(^JY1=)oGglc}^x=0e z`|OVJ?$w5{RbDd6f2f9b(-%I;u(N&fYy5n6I<%z`|E|9Hb{0$KNQqc)7<)-=QBoFS zd(~_#ERWiJ?HuoYT-VZ~!0?D=Y5j?ZT9d!C$^EL05l0OXMb5vzNlW&6=svF0hhql+MWnZKcPzeQN<6F|U; z?ax-@8Fc#V-*fe~Ph1({me@u6D?VQmvEe49DaPceABVVLxBlmKre%f#3l;zNT*J+G z{}Sx=6A!dZJEEe<2w1YcD@!MUV%>QAgbo$Bv)QZk2jYp_;}a}_v-UDmCnx}OM+OB{ zxH0Dhf7!U9{J(6k@h^yZMhb=TJWpvJ?oBru%a07Ys28Ina1AJvZjof>% zY*Q%^QG1=moTH#2B9Is8mocjOPVEB_XwJ^E3PU?qJv3_}zMCAXpn`Ko!Ii^JId{Wc z_Uh8}n{$SPx=uj`WP}XZIO5@)x#FyCJB;65Kc?xWNsPZPNHWSf zB`4n5|MB55%vm#F5&X4mK3W{QP?1R+|NF)v^-_5zma{kY0Lu;Y*y9A&m2~t8qpGz?^z?YH`Y0N+u4({H+^mb-< zvxrHi;sGu*J?BfrwQ-sn4cjXeW0=8w3SIXraRCX)Xgyq>j`+i97ul?Rmh?B;8Ieuk zAi!cVjaqG3YO^?qyTy9#E|5- 
zkl<#Kvt2Z$)>ji1*qyJx)6|kq(Gdzj>Tw0vWz?QXA05JR@S=i>E>dvws}P@|0_#M( zuw#|3OIL&&2Y(tyZG%$)CV^3P=J5VFV3fHO39hM!(*ff%p%EqJ`=}_WQKb3i1LO}_WiB*ZW$9QAZ$NC z6f|t2A*T&LStHNFtR&w;rQws20#U`i`>TWBV|)u~eN;HUADq0Rfx~8{N9oHliLgiF z1_l~S&nX{ySxXpL?8|3=S`&G}I!VA@U=j|4r_f|d4q#MKfx|Q!^ivKmF6FX9;eI~OJ~~~QHYd%X)x{-EovGS z=*C|Ag1s@YTjV)bD-11L)bRsRt%D#hv({knU&-?XvG&`1*K8l3w5d(x1&1$h0A4SC7g9tzu&Q84Z2!=8& zQW?K_WPTlEXHI%Q>Qx}LYWLc-(>omZ(tL6*O)hP$jk8BXMvBMd-MB%T(YufDn7{a5 zl{n$*J>_f~*py9B;V;pf&%e}Wlsr*ZVV=4=X{4hqegw9@txXPH#T|%W^y8|1x0`)e-~Z~18=HG2_QoJU+alw8VxCpN zP3tI;MOEGmPH{t@=)YrTyuJBns-ZaL;Iw|?72rp{ zs?HnYE75MIro9Kc`Y%I)fBRso6ZY~;Qr$I87dG4J6KW1X~k00*2 zD`;VBwL6b~;`6d2nL%TkmeQ8IPdYB>?BsNTZSoT-ew5F;hu=@;f{R?-KA)J=WWtoS zmEiARrl4@0)r0-IP|XY-0;*Ys;(8?(M5jzpE_Epsay9={ZHSL99E>?%Kan(*#n-~m zi4|RWiD>$2dj2)vCu<9vDU^+$D)XSf&@-{Gbow#EZ-rTjZK@G zCmEmh+(Dlqhq8gHSWfU6!U!I(L%#jqb|CDzIs7h;I43E=W!`&>*Gi0TYoKN4d>~}l z8)J0dw+T1rlsgys>sd)Hzx$Kti4cGN|8F?gOO2vwr8m@mG z74f2QFh25j1<*Vsy>)&uLy2(b)748>qJG5#{7cSql_lR=S2A(9|XzyG|&vrhzloHR*6+#|Tbwf)C1|tifH) zfU>deeTfz+Wn5qKuPov5F#wFp^=+@vqiN|scwhCeb9pN8MB;*&sbF8APRtLM4 zX0D#7QY8;!a|%R$TbBQIqT~0!+ zZ$nDDb?P|Xm{MBm-Dj5dF7`%}551_t3SkNamioToDO3T-eCf&+1s=D*%g0u2bhr-+ ziNFy*%Sr0$qEX~$p&)n=YMznpd}D=q@%?N)%pEd1b#6B$ga)K)zd}91wb<9}=Rx=K z1*qLoqMXYeUxx56c}Hi@w{4zydAK#Tl(F%Q1bAO`F^C$&hu*&ab^|L2w;r(ORy&G& z-Tz1agxidJ_&+_F9$y0!5{^0lCWB=fLF}Y=YEM+Faj4?ug}5%XV`#TkMKpHb_%Scj zslUC{LXHAsA1hrvOEA6qiRO!nlN)Qb>DlhmXSc3g$~32;WxOv&2vuwLH^w;_uL`)n z^*Dvc9YKc4A*;bDZcH#rq_Srg{urIyKh&mN{9~}tC<1n}hXD!cSebY1xGoxgfvhp; z&9jlTlY&_$50<{Z<1+}$a6-vDmnZaGR>Okv1$_%a7q02|B00og? 
zyK`zy0Vo*_w)*m#@djdq|5d`d>f3)xnTBqBd%^$4;Rp96Hd%F=?ZtI`hT3ycNJcn&un8G13xwP1`AQh!`nrgNN_jO(o?3JeX#WfqO7WoILYuWPyYH41 zqj}w>l{*u^21Y)!XS{Pk-=GvfePGy$HzI2~W}lq-mL}ZVhFkb!Yn(ml+0iF~(DPGC z8%5C3YiB?Wbl&xPVD1_g3ZnY;>ecDeEy;6|X8ir&z^=U+Z@f-f_ve`wcbL9?OT$*d8gJ$pUyGQ&Q}Ze=wx_e#w($4GU)v8D6bC{deC-l zi50d@AFp0FcXK=pxjXtd1>>|LkUL2FY|A(i@I`!RwvdIU7{s)zNG%&_ zq>-?*iZZ4BR8Pn3^6yLX_wQbALWL_@B2i=7tU-%ZM^x*6kg}4zvbt{^&|GqUdgd(A zl8k{{qINlAya@^MwXXThsO$xvMi&iUT8t?r(&A~NWLFi(i)D?b+m1}fgmm0|@cx|+ z!1J&D39sqyC#B+>$~}I%_(Ys;oz$f2F8S3ZHz-XGr>iUu#_$PUW#PvVpiHb0zCS_V>`Q0RhRwqzg+XS-@rh>`$W5(1VGZ$cB7!&gpz|h<=Ev#q-@{c#tHPiJ>lpO6})E z5ilK>E}pUOQ{T44FrZH4C!e@zOE+>iw70UHBHeHO+M?9wu2P*fmS^Fvg2WIF3t6gU z$!q`B*s9{b4XYGyZ?S!Q6jJbf>mb^Aw7m67GgWnzA69!o{Lyl4H8XQ1fLSonKelOF zT)f>2715Oz(8{xs7SyvViS%BeZy^GC4@)Tgf&{r+@vHbq)e}sxEaQC>E|*wkQ~4o> zB>QxIJtJ2-HxrfGpYTimUFC3F4AL5$;_qwEj`-jqUGK6f9%hBuR3QjqR(3;>l^}rn zeYCqLN;vx#Cl!GXN%&{+{(lyNte&AR{u8BD zGki(!uMN^=av7U1y4t?stj>o#7cmU@?g^d-z1AY-sy2nP4HuFY>*DNvw8Z`G8}R%3 zp0*;&Leb6GGl$r0M5U?HW-H@rwz~SJoVEASr*38tUln>cwYT-@&agbgyYdqM;w)s2 zCQDoc*(r`)JkJ`P)B5-I>pMMe-?nQ$8}qD&{K)R{u$~`>&&XIC)U32>gC?> za;+e>xMeov3IG{XTwHrYME}weYqC(cqhw8jpAU>?{N!#X-d9gn?2;kk@X^|#wvkRg zbT@6PG4vr0(n2E5rch{Iu$N9rnyucXj{E_N8@80V#gJU2<+byTxs>q_XS_4n{#LGe z(i$gl?z$UsmUm=@*?Bq~Td!LvFUMbVZas0340dfmB$<=EDS)gw=vRp0I)SzQVj4bp z{JZHL>(1V`h;b3{71+Kr_mh+8|7-~f;fWk}NJ@*;OKUOwQBe$8Y)Qn`Hs2#i1AmG5q30~w zOv0g4BFHww4lc8k>2UKeuS%E|I`cQy-_u z;ch#WN_rk?@xykm3O&?42~Hw=N$|fTj){13NCFjj-2~A1y7SjNcpa0`|2C$F1Py}4 zN8CgM8up^;KS=!s>)t3>JQQjUx?Jw-G?x3_aM32W+9|uXdFuK`RZ{lrZ98aKGAniS z2O|SVC7f)#PftDqRDf#}%)Pu*Go-<5ZitFovYAUVWNHw%B1wXk3E%|uK82tBYUtgo z@cL*;3WI4cdWoT147%K*HP2EqBI|hB-~~z z$9Z2}WYR@^^@1WAiq{0e?5b>I`MFGxQoHnW%ea!@+ga%nY@Nz0j8sBBr)7Ui!SU>u zAy?QOZ`&1`(Vd$@`serj;LZpI6k`u{dj^Z9`+M7WN|+a_0DG9z0>*SJ0gGGcO-3WPJ|LyU+~J`zDu%hUXdU z2|mg_qj73l^?f+L$>q1oVc~)3@Rd%x1AB|9g_|TDgsi!gze(1l zVQ-$X8qHW|6IkBO+E->gBNobZ2wE46lx~76kD5!CnMeH9I6pWzWCU)g>Zp^x$+6_- zKGY3U@BY|W%650da7Yv(RyHM>{<#1zW&xvJoQ&nBxnRnnb-J7ou|(@D^$m~Mm2U8R 
z|4AL3Z)LF{9d#+x5Zk+?%3+yRe+T!>2-Kh@{;c!CEa%g1jRlL?_ObMs$f<9{=mVB~ z0rW3({j-+?%;$j>*QorqJ(`;NB|7*Iw7iav`z`T-*KST+WV)Krm1pvle1mc=p?+#;V6qMU|+X^>^Eey-C1g zC*J)TQE_1~l~J0ejrU4vire2gHxNl5BV7{EYum70Ij++bou^7x^@9%nph^aWe z*;4l;>nwsxHFci1TIA~>KFJhO3O%^}XviH-f#fiP-!B>>l z=xJj2E)fcQc`hkeEhb+5i}LM09z`{>>$TGT10H^Yw7_FhF+y}P{ zaguy=9~9htPre?PDmIpik(yaK>2!(EHJ>EOo-ShXo-H}>_MOJ{f?lsjd*ru;C;pf} z-4oV6=E}XYH%g3l#k?$r*QjcHbG$dzZK@0>D0ESIss zg-j2(>>)Q6Ae~w3Jli}An1;YIHQ&>Vw~-+jlfN%w&7Crmy9^*c-W&p`!S{~O5a#O2 zYb*;R6%Z|1{0wqag8V4786P~8^qYK6vOJXL5mfI!^(z-^{>g9hdenmcad#uN#rd*N zr6AIWeKL@FBe6#|>(&-xUTu=)gJ{lt+Hp@~JtZapisAFiLw3 zT!_PDN!Si73EzsRZQNIR+Y7ybj$T+mRPI4~dd+C^&#bq{RMVRYK2fFdD1y`O=4$q( z;sLSelF*C6Ovk9#S19gK?yfHL6?|tZbDYbkM=>+h=_oQ-ymxd68+vLue&6{)zDnMv zr|$g+L+kq@K9ab@VFfFnv`O>t=xN&*UKS>w*un4!5&!0?zZ+pEoP`B?Ts{6O+_0;A z;`N|;t#UwPKa?j~pd-?Ged6Gt>yC%TIwCE}>0oU0 z$UJO&Yz-=uWpK>syM=%u|!pDvUb&ft5a9*Zw@G$2uK2&~K+xbsE5_+M433PBIe z2YO9D#usIaaGzxvo%CDAHW;lSD24~wn6aZpR%TP1KYbeHbA0C_!o=^`*6Yysu0-*bJf3vF?Im zE#Nz0!cnu1uE_WFJ8-W>f*#8qOByPb7uKXNQimlgC6)Kc^-(BM#L1j{?2>63W9l`i zO!e43i|p+cf32;A>zqXFU=g)0U z?85A;Jv1NIvRq9&oY=k170NF>3=5l8VLPl%-ShlTbN0DysMC2WJ(RV6U$iGvjV11S zj*GX$Rcx;TM8LIr+$we!Hq)UfxYf$DrbuN+mceOh>G6V}RBc?$lkh_OD+jSf{}mp* zF6qR-=RQDPEC4N#9>co!<8CrV!^B9*x5%57ulvC&3cm;i8E-tA8;MqTWYqMyue4+S zRn9hwJePUU)Qkd~bDhgw!pBTFg21?|!napl;pwsuE#0ey>FrMtq*IhvCV*4WUAm7$ z`mm?D8nif62+1IEeEz0E!S?P49|cm`qPmkh*7ib{-`IsPmSU`(rrZ~Jw1!D!<>8k3 z90W^4;}Z2(Tcc>#lTAO58n5~@SG`&nB%3Bm0HI0`wm7~y-o>* z3nhj$*v#JKkKaD>r55|uFf%c(u_&D8iApuQ@Xo5hTdy@KHp=1?>_c_ z@c0vQ4C`LA?zOIU#d)5Wm8v_%>wGZ7HeMlEw%FVycjy~f(rhpihBN@Qqa5tQew!ZP za>*KBb+P~m-^NSz3`U$Sxr?M_WjP~=RB>K1lxj?uv*z%h3A+E8APMf=QD(Zv+O8WC{@=%nJ)MLtW~&jQ>cH{Ixdfy0l!$aH1Fq7X;00F%`I zB8xVgvF2el|Ei$-kDwSYolNYVCD4iy$R>QRM(5RlB=j*8Uj1W{#o6l)E>6Mw?hN++ z0o~!x1|)uG&{w3cI62Gvyaca>ey2zevK665O6k_tA%xr>w~}@ zUyGOZ`8PGu8;5Eq2@Qd;xhW1D_ZJZ-Dr%b104^z7^t1G+Obi((aP$N7CxQfGQ;0hW z{zl_k@p@dgAB*OtnyXa!*(kmxhT`pi82-r+nAI?Kf(-o(K}v{PmB!rAis9L!7n|JX 
z$O(NR0`(|dSVY+g6r;g;UFl)4SpSkR)!9CLNqoZ7L2S7MWP{wHwnm!tXCB?O#8HG7 zpTJp5}})XJKfpoO>i(Tb+ES zAKrk*cI)WU-P6MjY3YUuyq3-bm-n|ig@IxS7_dY^X|4lpO@swdk6+(gCJ~1U+it~( zH1wHMizg*>nV`Ak`ZQGC{Y9_f1) zm^7RSj)woW-Z?;c<@3fyiM|*0c>dYjPDMhR$_@aMkys?8un`V;O^=ulS6iooBTO;9 z0U++iA-xAlvlT@r$csFfOc+w>Z8c^G!dJT+*~wG7@7Z0=U${1i^gz94VBB z6)7WBiL4TS2*O570lAh=lW(^;f>cnoeO?1aK&}@^!@<0FaNHEVqWH91+%jY*;RMhv zUp{TVj}-F2!a6WdpHFzMKYszr?{``E+TOG(nxgOZbmyNi0C`m8#yXEeLtbHZQZ-9y z)Z7!u1LOqFin~~vbdu{q|0Io1)SuyV{#$h#DXfU7D$xRAgxrJR;-W?%iNI>&fbwoK ztv>^E&x*GOvl@+p_!G(AuiuAV{h~Bk0Beb??W4W7HZ%ZLTz+Mc;Y(L6SUQ#}f5C?i zdkv-1^t19>dm#2%UzxXRrxzO$*(XdY<7u#uw!sQnPhOB{;Sj%;C0Uh02{22j;4(! zui?kWk=#RvN%ZV>8UZM$Qxc&83?Hx;ckY6sl)%NE<-qAn^GfI|aeau$%s5TK^lc=% z(iG%5v&pp{ZM1LY`LDeLSJ)r4=5q)+@b{>6&>-_~0^rF;&YdHD0!W7*&VvLXbPNt* zC}1YLk8U2Oq{aBWBlEiG_Ktv>+w2%WOI z*AxROy1?+{lmZz@0ahoAC`~93=-^SmDWL%hYQ>9(_r~}6Gko&BfALApmGUDYmv)Nq z7fHqNSwNE&XTMZdB8j~H72^o&kHXP0K4EY-8uA-;0dPMuxG+uvE1FgQbU%_Mosmh% zPXIWWfseiK*42Q`1U#coW^zqOzTZcnI*v&5&2T6)7>g7YgEXi_-SZtBi|v23WnBoL z4wrpWxgT=*5sa11woqpMdnKs8fX_81Y7H(M!eg}g&va7(GGzV5 zxB0k7qh7Krf@{wCG9YYk#vJ9EiDdfH0re&gWE&@gtGy_Ri3F>4=Kv8!)i8Q>g9P2y z8iFi2>{6FUZ9k-l0CEdnM$3}&SX8{I=-t#tG)BpW6d#-AK%x;$5P1$sAfEgS6Cv`~ zMDN}!$og0NRAJ%Z*3@vCG~z-m{Qfh82-n3lOZs!-W( z#?J0oDef3YiqW8e>E*?Kp{*gor{Q)T1&T>PjfTFhO=->)bFZLh>xA(<7Gjh}3aImJ zQr)O8+0z@;0S|j3n0*CJp8d2V?obwKCSviBSgIpz1{bEgw9(P+cLyrzPy5RB#9Nnk-G+I6gJ9R&(G+GN@f2?$UcD?=w94>=7+b7Snr2OoGwk!^i zL}6^0nS50e2hf$Ad+-r~^&vT*7{K~4mQAejc$AP#3|!%HB-*2@SOuN=(H{{-Qo^%8 zMF{k@0pTcXJ1!Q}w-UfB9bde1j3J|yK^phiTCy`-Dq!51iJuM+GIIWG&In`wtOwUDVSQH51A77l~zm7{W=^ zb21kW#z0>Z=@1!y=9D+=PoZOk{&WP=#4AwwOf)7Cs8?Zy6Cou=02**L}ymicFK9K}? zFCeD?pNoF>Jt#63ye8w0Oo$Fo$bc{qNy%s}@^f}O*}}N#s-Kn_`WsJ$RepoM!B1NVMLM}kxeb8^y#bT0F0gvOvvNXe5GwSTelL6R@UVo;EbC{ges39oxEM>qJ7|^N0HP6i z#2gH+jtUs!DoTUyDFP2ii#KPIOJEOf@T`^XEVH4uNDD&eT!I#DWqk zjW1F%;M^wP%JAJ9d2mwLC*YFtF2Fw-+?Ebbn|ONffl_;LwU5{fcYK98kbqm3_VW7! 
zOZ^B3HK>+k5UFO62{jrYn}^QUO@5eD*@_fhfr8dn{8rfsWm`aphihi{h$}|~Xh4Ir zIau6i0}6KitR9alqb4Vw#O)vD3t*^22iLLPhMCEw+VQuoa z!gmA0CE;VHK}d@Ai@p#^ko`2?jp6WEZsyJFS)wy0huwTzIX}yRRr^{G(eM`j8qFf^ zazFkA@&tSsdj-7_1+wzE5Un28xKJ_SUvxlPguefqW~Rr+aS)OO>fZsG<*%Ju+6xi7 z2cx#h4Kv|%eGWt$J%{@;CV>+41^dezvZdvfU*#L<054#RbP16|(n2=4QwD@-AN&kq z*bA#n&ivv;Gr@3x10u?S7D%E-oSk|%H2D+!1c1LM%~Fwe5$&~#Xw>=3z#=wY|97GF zy3=f?zJhn3kZ42z3_@B}?$|&sWEN2r`UBWh`m?J$P8YBJlO9mefJ|uj z_M*ROrzPR8Ntu%774tI(6woJls(YC32p>Y+hw+I_*r67HE~tt5IKHIfRJkxaUD&g? z5V`LZ0BkTgcGmP2Ni@nUeX&J-OPEFIHmTO(erkv3Tl|BmTk{}^F@k*OPdwWRy|Kf{ zpBDDy?EO{Hz6YALS;nMj^bCgIvu!HVs;e^-MPA%UNEbI0Xaunc-#5Ze8PH8ETAu5V zo&wrT-OXyRWRU~nNzy5nnyY@1XFnvvWqqDR2d@-#nwa<{ZD-I~8V+PoAI z99xfq14)mIPt{!8W6JRrK-C3`h-P*`cT*sOztRZ`6lOm%zv&KMNg?X8R`9wTv@~y% zzZ9qAA}d!8##CkBm^m~M&VDg`&h-$gk1TatC4CTW0t}r}3X>}IU2y=~F|+|xT8a78 z5=i*Tql)!KjLSeFg@ZSGet*QaU4hh9RH|OPunW1?R@`s3&@DAu%eQb6Z~{r`Xo4rz z(o4CN>c40nJ^*fDKS%*z>a!z7B~W63E9AY5#o)4{`<^3ROW506N*v&<9?WqeWe@Wk zOSPxN`j&QI5uGs$AXB25Bp-*y#YH)#wz^FGvxIb;KbM2MOF_wvDGW;Zv z&*iuaG|e%&aQ*r=5r3}GbR|DkQ#E#D@2q39J8-4UHKV=+mE;(IGPr;dDtqNodg)Ac zy5MP#)VAC)m->o^_w*+GRjx|kGE$-5-BByU`DDGw2c#954p%!nJG@Jlfke{Y5{IOe zWCHEzI)G*XwO!M!YvP_IhoWw2HQ5?uj;fMtwXX{9)@!P|8lEAT0|z0hhcBPg#nYUQ z9LprsBNM~V`cx4L+&}1){L1R024W(u`B^BVKs>73fm)ZD~+14QG&a2Mgn4Z%t3KGt}mfkx%hU9*Pw{X~s!4V?J{RY%A12CTzmxC4v z_=Qbgd>EH!SG<6q-yZP=S1lA;_58yFX~kFo$Js{0anzWL7mmadwVN_~b9r2bs_#`E zcNCpEo=zy($g-Ib10mvAfhdCkA2at;?Mk32%wXdz(?MAG$!yq^O6 zk)fA^{i4dTGK}@FTqfx8;WJfVu|PYk40{5ySrZ2(&GozGHDiLsgdo|>wbA|tJ88il zAka&dk?OYgNVtrmtQZwqjBnwbe9%V5|o?uum_dqAO|9N z-=)(k%@q{J8GsNY*v5K#yekW1BGPkdz{ zSg!A=DZ`@J%&*V1r{h#~d2O(A;_z|^Z;%AllC(lt^7$#{byU>E(sr}K4rcg!ocH1Y zweLVf`m-w~>87Dh5H`5^{r+v=a?Xm^I`PZcSYx8&V!9R4ANn0tX@nU0Z6=0zvgHQL zWV&RS*aIEW?k4rX0_YX3u>wS;Riz*Cy7SG%G*%Fi>s7GnT-1kWCJuy~&SuUC3YSQg zn%=~p?p)M4IX^q}+4b7J_Pg0kJiDGXuwFNuKQhCuX;LDx$alr*T67Lf^9V$h&6~z- zixRdOvb0Fs_j#sf0Q&E2tX%8IqzU*4=wIa66!g7WEZIOKIaW*6SK=FF)ndc42rYXj zs)KqoQ);SMYlP!IYZV`esyX4;*i6icu@y4;W2WSIDeOKX^cIX5I`ew$9FwRXZN3-r 
z!%-@kz2G>|5)+KJeW^rrWu>J!a-Z}(O+fR!!IA6o%vx9xqLGi{U%-!@BxXFi;;i#YshWE3ik z?2z}}*s$^}hp`&3dGL2`s`Be9kQ1{E?B)VDzuxZFA}(HY8wO5eN_5X030a7b>$sCy#qRT*=6ytWW#C zkqBpTUAf3qEhuaYf=3 ze28GIm#<^mHty`N$R()rxyMAY+FzLn*HIC-<3qptix|4Rd)PlNf>(e2f*cD1?WHvy z?&AlYyZR;MdLHMv6EnFWFhss7rnNQr9=}3^_Tr2==8HTl&bt7sD((+Ygb=f*`mNlq z&&!05QrTe|+|FhptxZRlX8_~87W?hxyYFq6nwN0w{b0oFu;b100|1;TR4QOiIPSPz ze_gT|o3?}nAtc_*ebXu^iglxXYL8fH!>0iq#F72g(d}6f!yiy*1hlxe#{=?DO=CMZ zP=T{?>?o|$lB|2)lq4h+o4Vowi6hpwXi-|efG3Pr23^0Do71YNyY)$V?o34_OI-22 z`WME8Zlw!!L9~ycv3+)m$Iu4TUMOJ&-3V?kK~?XBu-RKgB%B`_-#YLCbMc(rs>b9!4!yI2mf3`~~Ur><4GX-+X$x_OW66(C~9or7X{=sFcUDrJg!{ zjIRRIyd8C_r8$i1(;XjSx_iSm^S8N$BhAplaX(oV_mM4g*%__D;TJbR~(=-@8m4%yg$SNC!nHk0^s*a}S@*|^z} zS%hxRin{#PH7QL!Ef*?`r*|>PW#l{&qA`-qbon}UX6v0AqC>d#>`s;Uz)$aY_~T{J zt)(aI9E@7}#=R8czTYQMd8yVh@9W)a_`Xhj%7&VM%q<7UGMwRl-J*2IBHtPf1G(eQ zZi|R;C~wdd+xjWFUn*0hfxcW!>08_qIao@yFKw)XRM102O`!S5LnNyYX1{rXx&1mD zs0cy*>GXN59mtVA?dv>8?NwY%a!uuX9MSBW%?7QxD_Qzp9A^lBZ1@$*<$4HzGF%Qk z{!lfRMaXe&Nja{G(8?4XyhQpk*?BgDoC{6n+11eFm^vZa?UiNl{x%?~`}lH#vGUWb z3ET~3pnR_H$Q-$XuDzM}T;-H6&ym0I$Q1 zcOiS6)c$51bf@cRQHfNx(>cY}Ik{Y4yV7%hYp!(QVZ5^V~X% zn4+p3QM;JrY#S&8p)w-z<1cgxLi1fp4$GmkyhwD9!@DuZ1#~*bUo3w}r49vdQsszJXqC>`x5xzCdj1h!|~{+)->t zgt+j8FPcqI%|jS)m@47DymY>usf`_a4Xi7O&)bq0@*K-p5N_fsR$r1g8(ZC1od=zb zRX^zQek9Lb zLruX&!EGf*Y#tB)j6afN9`BQ}n@r?<{VrHnY)WPbtZ@X;XN=QrE}Iin9HWn#;G~u14(&~EV~A+1qs~rGk@8ZN_;_g? 
zu=lp1Y!E`V(Q4cCBS#o+ml-P6L?EIkoVgRSL)JU-7_c`w=(K7~W<>WcFV*)=biP4j6`gp_r`C#Q3@ zm%CXCS!_!wCM8d_=6KUy^YWoQsrO3yV2{X*DSl$=!VgiylitMfweXqz*z2Oo53Mh) zFQ>r%Eelb@0h<)^K`oSZuZ30vv0!TE%1%zfPbutjgFA`a7deoJF3(h4WWw}peRVBB z@{Q*~k?)jY)`}%7Yzw**Ub`+dxgg#_ftT!hzuqfZBGu{FHs+9DGcl{;46I3gq)7OD#=VFZx|QUz1yU<$f*w&?wA;HeNxyjV*P0D zTyFIA-YnNdc0IgM(Kr-{!VU_4{z0=xP>ta+Q6JeDEosM(w)7D6UR2{{CF>H}R|un{ zhN)$zr7b*J@-Y{dA(Z)oI(RZWJGrGG`z4k~W^wRi0QFXd^65kQ&kX%su)g+bX|F5V zA+BBB*|i@c&xi(-6z)Ozku5$y_Jp>52&zcliBmdgDBpkKUeoFv=>3@G!(%k>tNjWt zg3;fOAJyilJt4OA_%qLIkY3gZ|$Wm=rr?_bL;Ul)c98=bN#e9i-Qw0MMnT27*%^Yxy?v`KsOz|K5gxwaa* zyn*Pv*@k8>TQNNA=`?GB!yHo)Tp?MYiSEL)liWWZpQ#!S>-}Os${VH=&m6&Lw}I71 zd0JZ}&p$L`iN0k{j^;<2Jtz>$Sh=U$>1Os1y^nUpiJUoX zL(B5-fQ5V_>KyV4Ewg}|{XjolUp|}godfxL)RfOtTSlAEQRk@?xpXQByTkj7gi#%) zDyoX|`W5O{7bbyoDN7%nQ99A|Hi(5&QB=!UFC0 z_%U!hamI+lFh(!amyymo7L2E^OZQ9buRzUVq?=z~UpQk7)2nc9lNeXuBJUPnoV>DE zazjjJcV~ZolunG7^G=i^fcUzdY`+88u9s=R(VKHy-1#;cskfUw?je0DGQfu??7jLW zI%|O+O&yq}Yz&=3?eJNTEV{DeiO`Z?79y0DG}rEI=Vk3iHK6BtDDz%ol|t2%*P(IA zdc1A{f&@#UaRddlT-dPss?YOwv%cnKYg`ct{N-(s=4{2_8`3c0;M$)?Igbju2(`iZ z@Q#79M0Bhc6_K=y`I5xCu{^Q|dlA%?!BRHUGdNa!fJX^a|8%veed$0TPE$~OboYLT z)T7cbgiuos`?aNh^8s2yYKk6Qgfj(y^}Mm2lkH9+Gzr**p@;631x0Y1zEmuWD-x-F z=*ZU2J&WgNz{Io?ON+=+k58&y2AR$@U03Z52JZ^IK5gzkWc#^+mie{$$%v|zY*WM4 zQOMrS!q%<3u`-yV-TFVP) zBSamBjpjPU>*oDhTznQnaqdCl2$T`IuP5^GkKA}&0*>#>tC_pDT`a_-H@HkT+Vc?k zfxrH!G`F*DFrS4u93IeyA#Ifv^)ZU1$?xe1Q4`d`34>E>t*L@+&k33B+xxry*Hu*& z??nI3xAm&yLQjn4*;62M!m=}W+|c0n)LPi|VsPa(+yH%8lE#LvKEoxQm)I#z2DPFk zEs;7*M353pnLSz-m%8qvctI(3@Mo2wPcNUmA_P`VDKxQovL|8|S*Q zHH=a4_aW#w?;>Z05OGLKeFqviL+`aUz0kWnIug=KP17<)eVz>;`5^Ib6AGk^lG?ObVNSFKn$TzJwHV}y&GcWzT}P=k(|UC zeGt6vHrP<>;5RnCxOy-y^pmd5u%;U>RzdLO~> z6is^Y{aD`U z*;d+ArYJ_NInSpkrHer<$PdbF7bN6XgZT^>R=37;{VKk3UV5f5TsVg2JI;#Rwp z15c(C1k!-aE_7sP7n|{}WK&k)iz?Qbz!CX^6MUL5ER2eHINWr3|0niDLCr_|Qyw=K3xkUPZ;UBBG%WtA||P|^22 z#zC`~@W}AX;VY?y940lq-im!86bLuIH0sH$@9OtG>g)Nym(S0|c^6HZ8)x*2g%C^!rBZp>%iHYlPdb1+8ObBUEAU(L!ttU6fg<;Kot+)x*ly?`( 
z3j5k|h2sf-vHw`GCc~T{TQ?Jel`eC@4L6C^q+9lybE<9#T3b z;?3es>#LjUstBS&QDX*3?k8UbimSsPMfq(4mh1-N^HJ2oL_6hsSMoJsqaYn52jeR= zr0_jvCk{%BqT?Wrx5gM7S-hGr@|&XCnetR^G>gB2ccF)a=$kZe-U_mc8 zZ&7k*qO3NU7(}clu(0@E%!Y#%!g-?x3yCUg9S64}%5ZVdooV04m(xR(L=8m_)b{)k zKhE^M!&AI#Ib#T{+5yqWSvUXo>A9JPwvVrapMxS!Q`zT<0Rf!R1Aej*%C*#y%@le6 zTd)r3AV;VL&$8XmtFOfm3vYjHencWA>C0E>;|jci{t>|s;?l2ZL}|ETl}05GFPku-*b5zvevb?4kYC*g9 zn5s@ZyD4IS^$pp4tgo*C8dwZx*x7#jwn&Y)MT)1_%v!^!!OyCf3bvb+_pDA&1KLlH zgMv?;!BrS|5Hw$_vJCtemRZzW-7jdi5yyHC7u|jr_^hScH;e^R`yji}tl%)3$TKza z9IS&RdcO8EzCsCHOi>4cra-wBzKx0>HGPe}jRR>3xoeAR8#*eUk5C{AMACjk*Gu2R znw9ix9{%LNJ}u89Y9HACaijALM@q~#jP6$@#rFN=sN-NW-Sg%XL><`AD`s$ zja&7gP%UOA|FY{TwzY=-&oXN%&%=X+sLEiZTZ(8bN1kv6d~KX;|GLwS!)DI*v|N$n zgN5y!Pgza{A!*9lHsjP0X_|syF{1O}AD%W{brNDOHlV)zJ$Uv~g6&?x`=RE+op6hm z{uF*Eu?Z->!ste*RkBN~g|5MEx(qt%`>yu_kNRwbs!#4Y#AyIOBR_FyM+`mF=vqTx2V(8P6oB(!_LYn%Gxt$VMy%|;{Cpr#O0KAJ+8)t@i^b5kQ z$dLD3#@RD=AUuOpv&FBhQgDiRG!4k(D$?q@= zvx&2A|E;|YjOeZ6P_?aZ1^4wPnpACpgbE@u69z>K$Iz#cQmYp%l-RFGV8^y_7-Z$p z*5*t6^7Qv{Vtjq^yz%bZ*9;}f9qy}g+X)V^lTjd0G#**-g;WN2)MQ{{ouMX&Ec75qgFAqg$zj5Eca4(>T8Caem2w+(@={ z=(m1|#aA)U9|vkBXdkb?84y^OxX+BGPcGV`3r^l?irrV+P~Y<4sE8sLQkK zepz;|-6#E&KwNTOq=?@ANTLH|9h4GUG0B2$^Sl6{~pQzJ|q8Hl7H>? 
z-!t;>b@T7#{(t*ZDF6B-e~-rBW%<_w{Ohm&$36ddll;3${sw~oMmK*A#oy@c-&pcL mhT`8%0@4-#AAfuA2IbkU7S~TVG_tqAKN$(d7lmTRZ~qU)h7JY* literal 0 HcmV?d00001 diff --git a/server/jupyter_server/static/style/index.css b/server/jupyter_server/static/style/index.css new file mode 100644 index 0000000..3e50941 --- /dev/null +++ b/server/jupyter_server/static/style/index.css @@ -0,0 +1,91 @@ +#jupyter_server { + padding-left: 0px; + padding-top: 1px; + padding-bottom: 1px; +} + +#jupyter_server img { + height: 28px; +} + +#jupyter-main-app { + padding-top: 50px; + text-align: center; +} + +body { + font-family: "Helvetica Neue", Helvetica, Arial, sans-serif; + font-size: 13px; + line-height: 1.42857143; + color: #000; +} + +body > #header { + display: block; + background-color: #fff; + position: relative; + z-index: 100; +} + +body > #header #header-container { + display: flex; + flex-direction: row; + justify-content: space-between; + padding: 5px; + padding-top: 5px; + padding-bottom: 5px; + padding-bottom: 5px; + padding-top: 5px; + box-sizing: border-box; + -moz-box-sizing: border-box; + -webkit-box-sizing: border-box; +} + +body > #header .header-bar { + width: 100%; + height: 1px; + background: #e7e7e7; + margin-bottom: -1px; +} + +.navbar-brand { + float: left; + height: 30px; + padding: 6px 0px; + padding-top: 6px; + padding-bottom: 6px; + padding-left: 0px; + font-size: 17px; + line-height: 18px; +} + +.navbar-brand, +.navbar-nav > li > a { + text-shadow: 0 1px 0 rgba(255, 255, 255, 0.25); +} + +.nav { + padding-left: 0; + margin-bottom: 0; + list-style: none; +} + +.center-nav { + display: inline-block; + margin-bottom: -4px; +} + +div.error { + margin: 2em; + text-align: center; +} + +div.error > h1 { + font-size: 500%; + line-height: normal; +} + +div.error > p { + font-size: 200%; + line-height: normal; +} diff --git a/server/jupyter_server/templates/404.html b/server/jupyter_server/templates/404.html new file mode 100644 index 0000000..e403081 --- /dev/null +++ 
b/server/jupyter_server/templates/404.html @@ -0,0 +1,4 @@ +{% extends "error.html" %} +{% block error_detail %} +

{% trans %}You are requesting a page that does not exist!{% endtrans %}

+{% endblock %} diff --git a/server/jupyter_server/templates/browser-open.html b/server/jupyter_server/templates/browser-open.html new file mode 100644 index 0000000..4217b0f --- /dev/null +++ b/server/jupyter_server/templates/browser-open.html @@ -0,0 +1,18 @@ +{# This template is not served, but written as a file to open in the browser, + passing the token without putting it in a command-line argument. #} + + + + + + Opening ElixirNote Application + + + +

+ This page should redirect you to an ElixirNote application. If it doesn't, click here to go to ElixirNote.

+ + + diff --git a/server/jupyter_server/templates/error.html b/server/jupyter_server/templates/error.html new file mode 100644 index 0000000..17adc7d --- /dev/null +++ b/server/jupyter_server/templates/error.html @@ -0,0 +1,32 @@ +{% extends "page.html" %} + +{% block stylesheet %} +{{super()}} + +{% endblock %} +{% block site %} + +
+ {% block h1_error %} +

{{status_code}} : {{status_message}}

+ {% endblock h1_error %} + {% block error_detail %} + {% if message %} +

{% trans %}The error was:{% endtrans %}

+
+
{{message}}
+
+ {% endif %} + {% endblock error_detail %} +
+ +{% endblock %} + +{% block script %} +{% endblock script %} diff --git a/server/jupyter_server/templates/login.html b/server/jupyter_server/templates/login.html new file mode 100644 index 0000000..d375132 --- /dev/null +++ b/server/jupyter_server/templates/login.html @@ -0,0 +1,117 @@ +{% extends "page.html" %} + + +{% block stylesheet %} +{% endblock %} + +{% block site %} + +
+ {% if login_available %} + {# login_available means password-login is allowed. Show the form. #} +
+ +
+ {% else %} +

{% trans %}No login available, you shouldn't be seeing this page.{% endtrans %}

+ {% endif %} + {% if message %} +
+ {% for key in message %} +
+ {{message[key]}} +
+ {% endfor %} +
+ {% endif %} + {% if token_available %} + {% block token_message %} +
+

+ Token authentication is enabled +

+

+ If no password has been configured, you need to open the + server with its login token in the URL, or paste it above. + This requirement will be lifted if you + + enable a password. +

+

+ The command: +

elixirnote server list
+ will show you the URLs of running servers with their tokens, + which you can copy and paste into your browser. For example: +

+
Currently running servers:
+http://localhost:8888/?token=c8de56fa... :: /Users/you/notebooks
+
+

+ or you can paste just the token value into the password field on this + page. +

+

+ See + + the documentation on how to enable a password + + in place of token authentication, + if you would like to avoid dealing with random tokens. +

+

+ Cookies are required for authenticated access to the ElixirNote server.

+ {% if allow_password_change %} +

{% trans %}Setup a Password{% endtrans %}

+

You can also setup a password by entering your token and a new password + on the fields below:

+
+ {{ xsrf_form_html() | safe }} +
+ + +
+
+ + +
+
+ +
+
+ {% endif %} + +
+ {% endblock token_message %} + {% endif %} +
+ +{% endblock %} + + +{% block script %} +{% endblock %} diff --git a/server/jupyter_server/templates/logout.html b/server/jupyter_server/templates/logout.html new file mode 100644 index 0000000..81121e1 --- /dev/null +++ b/server/jupyter_server/templates/logout.html @@ -0,0 +1,34 @@ +{% extends "page.html" %} + +{# This template is rendered in response to an authenticated request, so the +user is technically logged in. But when the user sees it, the cookie is +cleared by the Javascript, so we should render this as if the user was logged +out, without e.g. authentication tokens. +#} +{% set logged_in = False %} + +{% block stylesheet %} +{% endblock %} + +{% block site %} + +
+ + {% if message %} + {% for key in message %} +
+ {{message[key]}} +
+ {% endfor %} + {% endif %} + + {% if not login_available %} + {% trans %}Proceed to the dashboard{% endtrans %}. + {% else %} + {% trans %}Proceed to the login page{% endtrans %}. + {% endif %} + + +
+ + {% endblock %} diff --git a/server/jupyter_server/templates/main.html b/server/jupyter_server/templates/main.html new file mode 100644 index 0000000..0aafe76 --- /dev/null +++ b/server/jupyter_server/templates/main.html @@ -0,0 +1,7 @@ +{% extends "page.html" %} + +{% block site %} +
+

An ElixirNote Server is running.

+
+{% endblock site %} diff --git a/server/jupyter_server/templates/page.html b/server/jupyter_server/templates/page.html new file mode 100644 index 0000000..6c9fcda --- /dev/null +++ b/server/jupyter_server/templates/page.html @@ -0,0 +1,93 @@ + + + + + + + + {% block title %}ElixirNote Server{% endblock %} + {% block favicon %} + {% endblock %} + + + + + + + {% block stylesheet %} + {% endblock stylesheet %} + + {% block meta %} + {% endblock meta %} + + + + + + + + + +
+ {% block site %} + {% endblock site %} +
+ + {% block after_site %} + {% endblock after_site %} + + {% block script %} + {% endblock script %} + + + + + diff --git a/server/jupyter_server/templates/view.html b/server/jupyter_server/templates/view.html new file mode 100644 index 0000000..553712a --- /dev/null +++ b/server/jupyter_server/templates/view.html @@ -0,0 +1,35 @@ + + + + + + ElixirNote + + + + + +
+ +
import os
import sys
from shutil import which

import terminado

from ..utils import check_version

# Fail fast at import time if the installed terminado is too old to provide
# the APIs (NamedTermManager customization) relied on by .terminalmanager.
if not check_version(terminado.__version__, "0.8.3"):
    raise ImportError("terminado >= 0.8.3 required, found %s" % terminado.__version__)

from jupyter_server.utils import url_path_join as ujoin
from . import api_handlers
from .handlers import TermSocket
from .terminalmanager import TerminalManager


def initialize(webapp, root_dir, connection_url, settings):
    """Wire terminal support into the server's Tornado web application.

    Creates a :class:`TerminalManager`, stores it in
    ``webapp.settings["terminal_manager"]``, and registers the websocket
    and REST handlers for terminals under the server's base URL.

    Parameters
    ----------
    webapp : tornado.web.Application
        The server application to attach handlers and settings to.
    root_dir : str
        Server root directory; exported to terminals as
        ``JUPYTER_SERVER_ROOT``.
    connection_url : str
        URL of the running server; exported to terminals as
        ``JUPYTER_SERVER_URL``.
    settings : dict
        Server settings; an optional ``shell_command`` entry overrides the
        default shell used for new terminals.
    """
    if os.name == "nt":
        default_shell = "powershell.exe"
    else:
        # shutil.which replaces the deprecated ipython_genutils helper.
        default_shell = which("sh")
    shell_override = settings.get("shell_command")
    shell = [os.environ.get("SHELL") or default_shell] if shell_override is None else shell_override
    # When the notebook server is not running in a terminal (e.g. when
    # it's launched by a JupyterHub spawner), it's likely that the user
    # environment hasn't been fully set up. In that case, run a login
    # shell to automatically source /etc/profile and the like, unless
    # the user has specifically set a preferred shell command.
    if os.name != "nt" and shell_override is None and not sys.stdout.isatty():
        shell.append("-l")
    terminal_manager = webapp.settings["terminal_manager"] = TerminalManager(
        shell_command=shell,
        extra_env={
            "JUPYTER_SERVER_ROOT": root_dir,
            "JUPYTER_SERVER_URL": connection_url,
        },
        parent=webapp.settings["serverapp"],
    )
    terminal_manager.log = webapp.settings["serverapp"].log
    base_url = webapp.settings["base_url"]
    handlers = [
        (
            ujoin(base_url, r"/terminals/websocket/(\w+)"),
            TermSocket,
            {"term_manager": terminal_manager},
        ),
        (ujoin(base_url, r"/api/terminals"), api_handlers.TerminalRootHandler),
        (ujoin(base_url, r"/api/terminals/(\w+)"), api_handlers.TerminalHandler),
    ]
    webapp.add_handlers(".*$", handlers)
self.set_status(204) + self.finish() diff --git a/server/jupyter_server/terminal/handlers.py b/server/jupyter_server/terminal/handlers.py new file mode 100644 index 0000000..7c11170 --- /dev/null +++ b/server/jupyter_server/terminal/handlers.py @@ -0,0 +1,50 @@ +# encoding: utf-8 +"""Tornado handlers for the terminal emulator.""" +# Copyright (c) Jupyter Development Team. +# Distributed under the terms of the Modified BSD License. +import terminado +from tornado import web + +from ..base.handlers import JupyterHandler +from ..base.zmqhandlers import WebSocketMixin +from jupyter_server._tz import utcnow + +AUTH_RESOURCE = "terminals" + + +class TermSocket(WebSocketMixin, JupyterHandler, terminado.TermSocket): + + auth_resource = AUTH_RESOURCE + + def origin_check(self): + """Terminado adds redundant origin_check + Tornado already calls check_origin, so don't do anything here. + """ + return True + + def get(self, *args, **kwargs): + user = self.current_user + + if not user: + raise web.HTTPError(403) + + if not self.authorizer.is_authorized(self, user, "execute", self.auth_resource): + raise web.HTTPError(403) + + if not args[0] in self.term_manager.terminals: + raise web.HTTPError(404) + return super(TermSocket, self).get(*args, **kwargs) + + def on_message(self, message): + super(TermSocket, self).on_message(message) + self._update_activity() + + def write_message(self, message, binary=False): + super(TermSocket, self).write_message(message, binary=binary) + self._update_activity() + + def _update_activity(self): + self.application.settings["terminal_last_activity"] = utcnow() + # terminal may not be around on deletion/cull + if self.term_name in self.terminal_manager.terminals: + self.terminal_manager.terminals[self.term_name].last_activity = utcnow() diff --git a/server/jupyter_server/terminal/terminalmanager.py b/server/jupyter_server/terminal/terminalmanager.py new file mode 100644 index 0000000..1db545e --- /dev/null +++ 
b/server/jupyter_server/terminal/terminalmanager.py @@ -0,0 +1,169 @@ +"""A MultiTerminalManager for use in the notebook webserver +- raises HTTPErrors +- creates REST API models +""" +# Copyright (c) Jupyter Development Team. +# Distributed under the terms of the Modified BSD License. +from datetime import timedelta + +import terminado +from tornado import web +from tornado.ioloop import IOLoop +from tornado.ioloop import PeriodicCallback +from traitlets import Integer +from traitlets.config import LoggingConfigurable + +from ..prometheus.metrics import TERMINAL_CURRENTLY_RUNNING_TOTAL +from jupyter_server._tz import isoformat +from jupyter_server._tz import utcnow + + +class TerminalManager(LoggingConfigurable, terminado.NamedTermManager): + """ """ + + _culler_callback = None + + _initialized_culler = False + + cull_inactive_timeout = Integer( + 0, + config=True, + help="""Timeout (in seconds) in which a terminal has been inactive and ready to be culled. + Values of 0 or lower disable culling.""", + ) + + cull_interval_default = 300 # 5 minutes + cull_interval = Integer( + cull_interval_default, + config=True, + help="""The interval (in seconds) on which to check for terminals exceeding the inactive timeout value.""", + ) + + # ------------------------------------------------------------------------- + # Methods for managing terminals + # ------------------------------------------------------------------------- + def __init__(self, *args, **kwargs): + super(TerminalManager, self).__init__(*args, **kwargs) + + def create(self, **kwargs): + """Create a new terminal.""" + name, term = self.new_named_terminal(**kwargs) + # Monkey-patch last-activity, similar to kernels. Should we need + # more functionality per terminal, we can look into possible sub- + # classing or containment then. 
+ term.last_activity = utcnow() + model = self.get_terminal_model(name) + # Increase the metric by one because a new terminal was created + TERMINAL_CURRENTLY_RUNNING_TOTAL.inc() + # Ensure culler is initialized + self._initialize_culler() + return model + + def get(self, name): + """Get terminal 'name'.""" + model = self.get_terminal_model(name) + return model + + def list(self): + """Get a list of all running terminals.""" + models = [self.get_terminal_model(name) for name in self.terminals] + + # Update the metric below to the length of the list 'models' + TERMINAL_CURRENTLY_RUNNING_TOTAL.set(len(models)) + return models + + async def terminate(self, name, force=False): + """Terminate terminal 'name'.""" + self._check_terminal(name) + await super(TerminalManager, self).terminate(name, force=force) + + # Decrease the metric below by one + # because a terminal has been shutdown + TERMINAL_CURRENTLY_RUNNING_TOTAL.dec() + + async def terminate_all(self): + """Terminate all terminals.""" + terms = [name for name in self.terminals] + for term in terms: + await self.terminate(term, force=True) + + def get_terminal_model(self, name): + """Return a JSON-safe dict representing a terminal. + For use in representing terminals in the JSON APIs. + """ + self._check_terminal(name) + term = self.terminals[name] + model = { + "name": name, + "last_activity": isoformat(term.last_activity), + } + return model + + def _check_terminal(self, name): + """Check that a terminal 'name' exists and raise 404 if not.""" + if name not in self.terminals: + raise web.HTTPError(404, "Terminal not found: %s" % name) + + def _initialize_culler(self): + """Start culler if 'cull_inactive_timeout' is greater than zero. + Regardless of that value, set flag that we've been here. 
+ """ + if not self._initialized_culler and self.cull_inactive_timeout > 0: + if self._culler_callback is None: + loop = IOLoop.current() + if self.cull_interval <= 0: # handle case where user set invalid value + self.log.warning( + "Invalid value for 'cull_interval' detected (%s) - using default value (%s).", + self.cull_interval, + self.cull_interval_default, + ) + self.cull_interval = self.cull_interval_default + self._culler_callback = PeriodicCallback( + self._cull_terminals, 1000 * self.cull_interval + ) + self.log.info( + "Culling terminals with inactivity > %s seconds at %s second intervals ...", + self.cull_inactive_timeout, + self.cull_interval, + ) + self._culler_callback.start() + + self._initialized_culler = True + + async def _cull_terminals(self): + self.log.debug( + "Polling every %s seconds for terminals inactive for > %s seconds...", + self.cull_interval, + self.cull_inactive_timeout, + ) + # Create a separate list of terminals to avoid conflicting updates while iterating + for name in list(self.terminals): + try: + await self._cull_inactive_terminal(name) + except Exception as e: + self.log.exception( + "The following exception was encountered while checking the " + "activity of terminal {}: {}".format(name, e) + ) + + async def _cull_inactive_terminal(self, name): + try: + term = self.terminals[name] + except KeyError: + return # KeyErrors are somewhat expected since the terminal can be terminated as the culling check is made. 
+ + self.log.debug("name=%s, last_activity=%s", name, term.last_activity) + if hasattr(term, "last_activity"): + dt_now = utcnow() + dt_inactive = dt_now - term.last_activity + # Compute idle properties + is_time = dt_inactive > timedelta(seconds=self.cull_inactive_timeout) + # Cull the terminal if the inactivity threshold is exceeded + if is_time: + inactivity = int(dt_inactive.total_seconds()) + self.log.warning( + "Culling terminal '%s' due to %s seconds of inactivity.", + name, + inactivity, + ) + await self.terminate(name, force=True) diff --git a/server/jupyter_server/tests/__init__.py b/server/jupyter_server/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/server/jupyter_server/tests/auth/__init__.py b/server/jupyter_server/tests/auth/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/server/jupyter_server/tests/auth/test_authorizer.py b/server/jupyter_server/tests/auth/test_authorizer.py new file mode 100644 index 0000000..a045320 --- /dev/null +++ b/server/jupyter_server/tests/auth/test_authorizer.py @@ -0,0 +1,277 @@ +"""Tests for authorization""" +import json + +import pytest +from jupyter_client.kernelspec import NATIVE_KERNEL_NAME +from nbformat import writes +from nbformat.v4 import new_notebook +from tornado.httpclient import HTTPClientError +from tornado.websocket import WebSocketHandler + +from jupyter_server.auth.authorizer import Authorizer +from jupyter_server.auth.utils import HTTP_METHOD_TO_AUTH_ACTION +from jupyter_server.auth.utils import match_url_to_resource +from jupyter_server.services.security import csp_report_uri + + +class AuthorizerforTesting(Authorizer): + + # Set these class attributes from within a test + # to verify that they match the arguments passed + # by the REST API. 
+ permissions = {} + + def normalize_url(self, path): + """Drop the base URL and make sure path leads with a /""" + base_url = self.parent.base_url + # Remove base_url + if path.startswith(base_url): + path = path[len(base_url) :] + # Make sure path starts with / + if not path.startswith("/"): + path = "/" + path + return path + + def is_authorized(self, handler, user, action, resource): + # Parse Request + if isinstance(handler, WebSocketHandler): + method = "WEBSOCKET" + else: + method = handler.request.method + url = self.normalize_url(handler.request.path) + + # Map request parts to expected action and resource. + expected_action = HTTP_METHOD_TO_AUTH_ACTION[method] + expected_resource = match_url_to_resource(url) + + # Assert that authorization layer returns the + # correct action + resource. + assert action == expected_action + assert resource == expected_resource + + # Now, actually apply the authorization layer. + return all( + [ + action in self.permissions.get("actions", []), + resource in self.permissions.get("resources", []), + ] + ) + + +@pytest.fixture +def jp_server_config(): + return {"ServerApp": {"authorizer_class": AuthorizerforTesting}} + + +@pytest.fixture +def send_request(jp_fetch, jp_ws_fetch): + """Send to Jupyter Server and return response code.""" + + async def _(url, **fetch_kwargs): + if url.endswith("channels") or "/websocket/" in url: + fetch = jp_ws_fetch + else: + fetch = jp_fetch + + try: + r = await fetch(url, **fetch_kwargs, allow_nonstandard_methods=True) + code = r.code + except HTTPClientError as err: + code = err.code + else: + if fetch is jp_ws_fetch: + r.close() + + print(code, url, fetch_kwargs) + return code + + return _ + + +HTTP_REQUESTS = [ + { + "method": "GET", + "url": "/view/{nbpath}", + }, + { + "method": "GET", + "url": "/api/contents", + }, + { + "method": "POST", + "url": "/api/contents", + "body": json.dumps({"type": "directory"}), + }, + { + "method": "PUT", + "url": "/api/contents/foo", + "body": 
json.dumps({"type": "directory"}), + }, + { + "method": "PATCH", + "url": "/api/contents/{nbpath}", + "body": json.dumps({"path": "/newpath"}), + }, + { + "method": "DELETE", + "url": "/api/contents/{nbpath}", + }, + { + "method": "GET", + "url": "/api/kernels", + }, + { + "method": "GET", + "url": "/api/kernels/{kernel_id}", + }, + { + "method": "GET", + "url": "/api/kernels/{kernel_id}/channels", + }, + { + "method": "POST", + "url": "/api/kernels/{kernel_id}/interrupt", + }, + { + "method": "POST", + "url": "/api/kernels/{kernel_id}/restart", + }, + { + "method": "DELETE", + "url": "/api/kernels/{kernel_id}", + }, + { + "method": "POST", + "url": "/api/kernels", + }, + {"method": "GET", "url": "/api/kernelspecs"}, + {"method": "GET", "url": "/api/kernelspecs/{kernelspec}"}, + {"method": "GET", "url": "/api/nbconvert"}, + {"method": "GET", "url": "/api/spec.yaml"}, + {"method": "GET", "url": "/api/status"}, + {"method": "GET", "url": "/api/config/foo"}, + {"method": "PUT", "url": "/api/config/foo", "body": "{}"}, + {"method": "PATCH", "url": "/api/config/foo", "body": "{}"}, + { + "method": "POST", + "url": "/".join(tuple(csp_report_uri.split("/")[1:])), + }, + { + "method": "GET", + "url": "/api/sessions", + }, + { + "method": "GET", + "url": "/api/sessions/{session_id}", + }, + {"method": "PATCH", "url": "/api/sessions/{session_id}", "body": "{}"}, + { + "method": "DELETE", + "url": "/api/sessions/{session_id}", + }, + { + "method": "POST", + "url": "/api/sessions", + "body": json.dumps({"path": "foo", "type": "bar"}), + }, + { + "method": "POST", + "url": "/api/terminals", + "body": "", + }, + { + "method": "GET", + "url": "/api/terminals", + }, + { + "method": "GET", + "url": "/terminals/websocket/{term_name}", + }, + { + "method": "DELETE", + "url": "/api/terminals/{term_name}", + }, +] + +HTTP_REQUESTS_PARAMETRIZED = [(req["method"], req["url"], req.get("body")) for req in HTTP_REQUESTS] + +# -------- Test scenarios ----------- + + 
+@pytest.mark.parametrize("method, url, body", HTTP_REQUESTS_PARAMETRIZED) +@pytest.mark.parametrize("allowed", (True, False)) +async def test_authorized_requests( + request, + io_loop, + send_request, + tmp_path, + jp_serverapp, + method, + url, + body, + allowed, +): + ### Setup stuff for the Contents API + # Add a notebook on disk + contents_dir = tmp_path / jp_serverapp.root_dir + p = contents_dir / "dir_for_testing" + p.mkdir(parents=True, exist_ok=True) + + # Create a notebook + nb = writes(new_notebook(), version=4) + nbname = p.joinpath("nb_for_testing.ipynb") + nbname.write_text(nb, encoding="utf-8") + + ### Setup + nbpath = "dir_for_testing/nb_for_testing.ipynb" + kernelspec = NATIVE_KERNEL_NAME + km = jp_serverapp.kernel_manager + + if "session" in url: + request.addfinalizer(lambda: io_loop.run_sync(km.shutdown_all)) + session_model = await jp_serverapp.session_manager.create_session(path="foo") + session_id = session_model["id"] + + if "kernel" in url: + request.addfinalizer(lambda: io_loop.run_sync(km.shutdown_all)) + kernel_id = await km.start_kernel() + kernel = km.get_kernel(kernel_id) + # kernels take a moment to be ready + # wait for it to respond + kc = kernel.client() + kc.start_channels() + await kc.wait_for_ready() + kc.stop_channels() + + if "terminal" in url: + term_manager = jp_serverapp.web_app.settings["terminal_manager"] + request.addfinalizer(lambda: io_loop.run_sync(term_manager.terminate_all)) + term_model = term_manager.create() + term_name = term_model["name"] + + url = url.format(**locals()) + if allowed: + # Create a server with full permissions + permissions = { + "actions": ["read", "write", "execute"], + "resources": [ + "contents", + "kernels", + "kernelspecs", + "nbconvert", + "sessions", + "api", + "config", + "csp", + "server", + "terminals", + ], + } + expected_codes = {200, 201, 204, None} # Websockets don't return a code + else: + permissions = {"actions": [], "resources": []} + expected_codes = {403} + 
jp_serverapp.authorizer.permissions = permissions + + code = await send_request(url, body=body, method=method) + assert code in expected_codes diff --git a/server/jupyter_server/tests/auth/test_login.py b/server/jupyter_server/tests/auth/test_login.py new file mode 100644 index 0000000..6f1c358 --- /dev/null +++ b/server/jupyter_server/tests/auth/test_login.py @@ -0,0 +1,95 @@ +"""Tests for login redirects""" +from functools import partial +from urllib.parse import urlencode + +import pytest +from tornado.httpclient import HTTPClientError +from tornado.httputil import parse_cookie +from tornado.httputil import url_concat + +from jupyter_server.utils import url_path_join + + +# override default config to ensure a non-empty base url is used +@pytest.fixture +def jp_base_url(): + return "/a%40b/" + + +@pytest.fixture +def jp_server_config(jp_base_url): + return { + "ServerApp": { + "base_url": jp_base_url, + }, + } + + +async def _login(jp_serverapp, http_server_client, jp_base_url, next): + # first: request login page with no creds + login_url = url_path_join(jp_base_url, "login") + first = await http_server_client.fetch(login_url) + cookie_header = first.headers["Set-Cookie"] + cookies = parse_cookie(cookie_header) + + # second, submit login form with credentials + try: + resp = await http_server_client.fetch( + url_concat(login_url, {"next": next}), + method="POST", + body=urlencode( + { + "password": jp_serverapp.token, + "_xsrf": cookies.get("_xsrf", ""), + } + ), + headers={"Cookie": cookie_header}, + follow_redirects=False, + ) + except HTTPClientError as e: + if e.code != 302: + raise + return e.response.headers["Location"] + else: + assert resp.code == 302, "Should have returned a redirect!" 
+ + +@pytest.fixture +def login(jp_serverapp, http_server_client, jp_base_url): + """Fixture to return a function to login to a Jupyter server + + by submitting the login page form + """ + yield partial(_login, jp_serverapp, http_server_client, jp_base_url) + + +@pytest.mark.parametrize( + "bad_next", + ( + r"\\tree", + "//some-host", + "//host{base_url}tree", + "https://google.com", + "/absolute/not/base_url", + ), +) +async def test_next_bad(login, jp_base_url, bad_next): + bad_next = bad_next.format(base_url=jp_base_url) + url = await login(bad_next) + assert url == jp_base_url + + +@pytest.mark.parametrize( + "next_path", + ( + "tree/", + "//{base_url}tree", + "notebooks/notebook.ipynb", + "tree//something", + ), +) +async def test_next_ok(login, jp_base_url, next_path): + next_path = next_path.format(base_url=jp_base_url) + expected = jp_base_url + next_path + actual = await login(next=expected) + assert actual == expected diff --git a/server/jupyter_server/tests/auth/test_security.py b/server/jupyter_server/tests/auth/test_security.py new file mode 100644 index 0000000..1a3f172 --- /dev/null +++ b/server/jupyter_server/tests/auth/test_security.py @@ -0,0 +1,31 @@ +from jupyter_server.auth.security import passwd +from jupyter_server.auth.security import passwd_check + + +def test_passwd_structure(): + p = passwd("passphrase") + algorithm, hashed = p.split(":") + assert algorithm == "argon2", algorithm + assert hashed.startswith("$argon2id$"), hashed + + +def test_roundtrip(): + p = passwd("passphrase") + assert passwd_check(p, "passphrase") + + +def test_bad(): + p = passwd("passphrase") + assert not passwd_check(p, p) + assert not passwd_check(p, "a:b:c:d") + assert not passwd_check(p, "a:b") + + +def test_passwd_check_unicode(): + # GH issue #4524 + phash = "sha1:23862bc21dd3:7a415a95ae4580582e314072143d9c382c491e4f" + assert passwd_check(phash, "łe¶ŧ←↓→") + phash = ( + "argon2:$argon2id$v=19$m=10240,t=10,p=8$" "qjjDiZUofUVVnrVYxacnbA$l5pQq1bJ8zglGT2uXP6iOg" 
+ ) + assert passwd_check(phash, "łe¶ŧ←↓→") diff --git a/server/jupyter_server/tests/auth/test_utils.py b/server/jupyter_server/tests/auth/test_utils.py new file mode 100644 index 0000000..4927c22 --- /dev/null +++ b/server/jupyter_server/tests/auth/test_utils.py @@ -0,0 +1,37 @@ +import pytest + +from jupyter_server.auth.utils import match_url_to_resource + + +@pytest.mark.parametrize( + "url,expected_resource", + [ + ("/api/kernels", "kernels"), + ("/api/kernelspecs", "kernelspecs"), + ("/api/contents", "contents"), + ("/api/sessions", "sessions"), + ("/api/terminals", "terminals"), + ("/api/nbconvert", "nbconvert"), + ("/api/config/x", "config"), + ("/api/shutdown", "server"), + ("/nbconvert/py", "nbconvert"), + ], +) +def test_match_url_to_resource(url, expected_resource): + resource = match_url_to_resource(url) + assert resource == expected_resource + + +@pytest.mark.parametrize( + "url", + [ + "/made/up/url", + # Misspell. + "/api/kernel", + # Not a resource + "/tree", + ], +) +def test_bad_match_url_to_resource(url): + resource = match_url_to_resource(url) + assert resource is None diff --git a/server/jupyter_server/tests/extension/__init__.py b/server/jupyter_server/tests/extension/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/server/jupyter_server/tests/extension/conftest.py b/server/jupyter_server/tests/extension/conftest.py new file mode 100644 index 0000000..af7b714 --- /dev/null +++ b/server/jupyter_server/tests/extension/conftest.py @@ -0,0 +1,51 @@ +import pytest + +from .mockextensions.app import MockExtensionApp + + +mock_html = """ + + + + + {% block title %}Jupyter Server 1{% endblock %} + + + {% block meta %} + {% endblock %} + + +
+ {% block site %} + {% endblock site %} +
+ {% block after_site %} + {% endblock after_site %} + + +""" + + +@pytest.fixture +def mock_template(jp_template_dir): + index = jp_template_dir.joinpath("index.html") + index.write_text(mock_html) + + +@pytest.fixture +def extension_manager(jp_serverapp): + return jp_serverapp.extension_manager + + +@pytest.fixture +def config_file(jp_config_dir): + """""" + f = jp_config_dir.joinpath("jupyter_mockextension_config.py") + f.write_text("c.MockExtensionApp.mock_trait ='config from file'") + return f + + +@pytest.fixture(autouse=True) +def jp_mockextension_cleanup(): + yield + MockExtensionApp.clear_instance() diff --git a/server/jupyter_server/tests/extension/mockextensions/__init__.py b/server/jupyter_server/tests/extension/mockextensions/__init__.py new file mode 100644 index 0000000..7b60ae5 --- /dev/null +++ b/server/jupyter_server/tests/extension/mockextensions/__init__.py @@ -0,0 +1,18 @@ +"""A mock extension module with a list of extensions +to load in various tests. +""" +from .app import MockExtensionApp + + +# Function that makes these extensions discoverable +# by the test functions. 
+def _jupyter_server_extension_points(): + return [ + { + "module": "jupyter_server.tests.extension.mockextensions.app", + "app": MockExtensionApp, + }, + {"module": "jupyter_server.tests.extension.mockextensions.mock1"}, + {"module": "jupyter_server.tests.extension.mockextensions.mock2"}, + {"module": "jupyter_server.tests.extension.mockextensions.mock3"}, + ] diff --git a/server/jupyter_server/tests/extension/mockextensions/app.py b/server/jupyter_server/tests/extension/mockextensions/app.py new file mode 100644 index 0000000..7045417 --- /dev/null +++ b/server/jupyter_server/tests/extension/mockextensions/app.py @@ -0,0 +1,55 @@ +import os + +from traitlets import List +from traitlets import Unicode + +from jupyter_server.base.handlers import JupyterHandler +from jupyter_server.extension.application import ExtensionApp +from jupyter_server.extension.application import ExtensionAppJinjaMixin +from jupyter_server.extension.handler import ExtensionHandlerJinjaMixin +from jupyter_server.extension.handler import ExtensionHandlerMixin + +STATIC_PATH = os.path.join(os.path.dirname(__file__), "static") + +# Function that makes these extensions discoverable +# by the test functions. 
+def _jupyter_server_extension_points(): + return [{"module": __name__, "app": MockExtensionApp}] + + +class MockExtensionHandler(ExtensionHandlerMixin, JupyterHandler): + def get(self): + self.finish(self.config.mock_trait) + + +class MockExtensionTemplateHandler( + ExtensionHandlerJinjaMixin, ExtensionHandlerMixin, JupyterHandler +): + def get(self): + self.write(self.render_template("index.html")) + + +class MockExtensionApp(ExtensionAppJinjaMixin, ExtensionApp): + + name = "mockextension" + template_paths = List().tag(config=True) + static_paths = [STATIC_PATH] + mock_trait = Unicode("mock trait", config=True) + loaded = False + + serverapp_config = { + "jpserver_extensions": {"jupyter_server.tests.extension.mockextensions.mock1": True} + } + + @staticmethod + def get_extension_package(): + return "jupyter_server.tests.extension.mockextensions" + + def initialize_handlers(self): + self.handlers.append(("/mock", MockExtensionHandler)) + self.handlers.append(("/mock_template", MockExtensionTemplateHandler)) + self.loaded = True + + +if __name__ == "__main__": + MockExtensionApp.launch_instance() diff --git a/server/jupyter_server/tests/extension/mockextensions/mock1.py b/server/jupyter_server/tests/extension/mockextensions/mock1.py new file mode 100644 index 0000000..49f6d77 --- /dev/null +++ b/server/jupyter_server/tests/extension/mockextensions/mock1.py @@ -0,0 +1,10 @@ +"""A mock extension named `mock1` for testing purposes. +""" +# by the test functions. 
+def _jupyter_server_extension_paths(): + return [{"module": "jupyter_server.tests.extension.mockextensions.mock1"}] + + +def _load_jupyter_server_extension(serverapp): + serverapp.mockI = True + serverapp.mock_shared = "I" diff --git a/server/jupyter_server/tests/extension/mockextensions/mock2.py b/server/jupyter_server/tests/extension/mockextensions/mock2.py new file mode 100644 index 0000000..87b8f45 --- /dev/null +++ b/server/jupyter_server/tests/extension/mockextensions/mock2.py @@ -0,0 +1,10 @@ +"""A mock extension named `mock2` for testing purposes. +""" +# by the test functions. +def _jupyter_server_extension_paths(): + return [{"module": "jupyter_server.tests.extension.mockextensions.mock2"}] + + +def _load_jupyter_server_extension(serverapp): + serverapp.mockII = True + serverapp.mock_shared = "II" diff --git a/server/jupyter_server/tests/extension/mockextensions/mock3.py b/server/jupyter_server/tests/extension/mockextensions/mock3.py new file mode 100644 index 0000000..cdbffef --- /dev/null +++ b/server/jupyter_server/tests/extension/mockextensions/mock3.py @@ -0,0 +1,6 @@ +"""A mock extension named `mock3` for testing purposes. +""" + + +def _load_jupyter_server_extension(serverapp): + pass diff --git a/server/jupyter_server/tests/extension/mockextensions/mockext_both.py b/server/jupyter_server/tests/extension/mockextensions/mockext_both.py new file mode 100644 index 0000000..38076e5 --- /dev/null +++ b/server/jupyter_server/tests/extension/mockextensions/mockext_both.py @@ -0,0 +1,10 @@ +"""A mock extension named `mockext_both` for testing purposes. +""" +# Function that makes these extensions discoverable +# by the test functions. 
+def _jupyter_server_extension_paths(): + return [{"module": "jupyter_server.tests.extension.mockextensions.mockext_both"}] + + +def _load_jupyter_server_extension(serverapp): + pass diff --git a/server/jupyter_server/tests/extension/mockextensions/mockext_py.py b/server/jupyter_server/tests/extension/mockextensions/mockext_py.py new file mode 100644 index 0000000..d0cf69b --- /dev/null +++ b/server/jupyter_server/tests/extension/mockextensions/mockext_py.py @@ -0,0 +1,10 @@ +"""A mock extension named `mockext_py` for testing purposes. +""" +# Function that makes these extensions discoverable +# by the test functions. +def _jupyter_server_extension_paths(): + return [{"module": "jupyter_server.tests.extension.mockextensions.mockext_py"}] + + +def _load_jupyter_server_extension(serverapp): + pass diff --git a/server/jupyter_server/tests/extension/mockextensions/mockext_sys.py b/server/jupyter_server/tests/extension/mockextensions/mockext_sys.py new file mode 100644 index 0000000..70506e2 --- /dev/null +++ b/server/jupyter_server/tests/extension/mockextensions/mockext_sys.py @@ -0,0 +1,10 @@ +"""A mock extension named `mockext_py` for testing purposes. +""" +# Function that makes these extensions discoverable +# by the test functions. +def _jupyter_server_extension_paths(): + return [{"module": "jupyter_server.tests.extension.mockextensions.mockext_sys"}] + + +def _load_jupyter_server_extension(serverapp): + pass diff --git a/server/jupyter_server/tests/extension/mockextensions/mockext_user.py b/server/jupyter_server/tests/extension/mockextensions/mockext_user.py new file mode 100644 index 0000000..c1e8eaf --- /dev/null +++ b/server/jupyter_server/tests/extension/mockextensions/mockext_user.py @@ -0,0 +1,10 @@ +"""A mock extension named `mockext_user` for testing purposes. +""" +# Function that makes these extensions discoverable +# by the test functions. 
+def _jupyter_server_extension_paths(): + return [{"module": "jupyter_server.tests.extension.mockextensions.mockext_user"}] + + +def _load_jupyter_server_extension(serverapp): + pass diff --git a/server/jupyter_server/tests/extension/mockextensions/static/mock.txt b/server/jupyter_server/tests/extension/mockextensions/static/mock.txt new file mode 100644 index 0000000..36dd88b --- /dev/null +++ b/server/jupyter_server/tests/extension/mockextensions/static/mock.txt @@ -0,0 +1 @@ +mock static content diff --git a/server/jupyter_server/tests/extension/test_app.py b/server/jupyter_server/tests/extension/test_app.py new file mode 100644 index 0000000..5078a5c --- /dev/null +++ b/server/jupyter_server/tests/extension/test_app.py @@ -0,0 +1,158 @@ +import pytest +from traitlets.config import Config + +from .mockextensions.app import MockExtensionApp +from jupyter_server.serverapp import ServerApp +from jupyter_server.utils import run_sync + + +@pytest.fixture +def jp_server_config(jp_template_dir): + config = { + "ServerApp": { + "jpserver_extensions": {"jupyter_server.tests.extension.mockextensions": True}, + }, + "MockExtensionApp": { + "template_paths": [str(jp_template_dir)], + "log_level": "DEBUG", + }, + } + return config + + +@pytest.fixture +def mock_extension(extension_manager): + name = "jupyter_server.tests.extension.mockextensions" + pkg = extension_manager.extensions[name] + point = pkg.extension_points["mockextension"] + app = point.app + return app + + +def test_initialize(jp_serverapp, jp_template_dir, mock_extension): + # Check that settings and handlers were added to the mock extension. 
+ assert isinstance(mock_extension.serverapp, ServerApp) + assert len(mock_extension.handlers) > 0 + assert mock_extension.loaded + assert mock_extension.template_paths == [str(jp_template_dir)] + + +@pytest.mark.parametrize( + "trait_name, trait_value, jp_argv", + ( + [ + "mock_trait", + "test mock trait", + ["--MockExtensionApp.mock_trait=test mock trait"], + ], + ), +) +def test_instance_creation_with_argv( + trait_name, + trait_value, + jp_argv, + mock_extension, +): + assert getattr(mock_extension, trait_name) == trait_value + + +def test_extensionapp_load_config_file( + config_file, + jp_serverapp, + mock_extension, +): + # Assert default config_file_paths is the same in the app and extension. + assert mock_extension.config_file_paths == jp_serverapp.config_file_paths + assert mock_extension.config_dir == jp_serverapp.config_dir + assert mock_extension.config_file_name == "jupyter_mockextension_config" + # Assert that the trait is updated by config file + assert mock_extension.mock_trait == "config from file" + + +OPEN_BROWSER_COMBINATIONS = ( + (True, {}), + (True, {"ServerApp": {"open_browser": True}}), + (False, {"ServerApp": {"open_browser": False}}), + (True, {"MockExtensionApp": {"open_browser": True}}), + (False, {"MockExtensionApp": {"open_browser": False}}), + ( + True, + { + "ServerApp": {"open_browser": True}, + "MockExtensionApp": {"open_browser": True}, + }, + ), + ( + False, + { + "ServerApp": {"open_browser": True}, + "MockExtensionApp": {"open_browser": False}, + }, + ), + ( + True, + { + "ServerApp": {"open_browser": False}, + "MockExtensionApp": {"open_browser": True}, + }, + ), + ( + False, + { + "ServerApp": {"open_browser": False}, + "MockExtensionApp": {"open_browser": False}, + }, + ), +) + + +@pytest.mark.parametrize("expected_value, config", OPEN_BROWSER_COMBINATIONS) +def test_browser_open(monkeypatch, jp_environ, config, expected_value): + serverapp = MockExtensionApp.initialize_server(config=Config(config)) + assert 
serverapp.open_browser == expected_value + + +def test_load_parallel_extensions(monkeypatch, jp_environ): + serverapp = MockExtensionApp.initialize_server() + exts = serverapp.extension_manager.extensions + assert "jupyter_server.tests.extension.mockextensions.mock1" in exts + assert "jupyter_server.tests.extension.mockextensions" in exts + + exts = serverapp.jpserver_extensions + assert exts["jupyter_server.tests.extension.mockextensions.mock1"] + assert exts["jupyter_server.tests.extension.mockextensions"] + + +def test_stop_extension(jp_serverapp, caplog): + """Test the stop_extension method. + + This should be fired by ServerApp.cleanup_extensions. + """ + calls = 0 + + # load extensions (make sure we only have the one extension loaded + jp_serverapp.extension_manager.load_all_extensions() + extension_name = "jupyter_server.tests.extension.mockextensions" + assert list(jp_serverapp.extension_manager.extension_apps) == [extension_name] + + # add a stop_extension method for the extension app + async def _stop(*args): + nonlocal calls + calls += 1 + + for apps in jp_serverapp.extension_manager.extension_apps.values(): + for app in apps: + if app: + app.stop_extension = _stop + + # call cleanup_extensions, check the logging is correct + caplog.clear() + run_sync(jp_serverapp.cleanup_extensions()) + assert [msg for *_, msg in caplog.record_tuples] == [ + "Shutting down 1 extension", + '{} | extension app "mockextension" stopping'.format(extension_name), + '{} | extension app "mockextension" stopped'.format(extension_name), + ] + + # check the shutdown method was called once + assert calls == 1 diff --git a/server/jupyter_server/tests/extension/test_config.py b/server/jupyter_server/tests/extension/test_config.py new file mode 100644 index 0000000..8669697 --- /dev/null +++ b/server/jupyter_server/tests/extension/test_config.py @@ -0,0 +1,60 @@ +import pytest +from jupyter_core.paths import jupyter_config_path + +from jupyter_server.extension.config import ( + 
ExtensionConfigManager, +) + +# Use ServerApps environment because it monkeypatches +# jupyter_core.paths and provides a config directory +# that's not cross contaminating the user config directory. +pytestmark = pytest.mark.usefixtures("jp_environ") + + +@pytest.fixture +def configd(jp_env_config_path): + """A pathlib.Path object that acts like a jupyter_server_config.d folder.""" + configd = jp_env_config_path.joinpath("jupyter_server_config.d") + configd.mkdir() + return configd + + +ext1_json_config = """\ +{ + "ServerApp": { + "jpserver_extensions": { + "ext1_config": true + } + } +} +""" + + +@pytest.fixture +def ext1_config(configd): + config = configd.joinpath("ext1_config.json") + config.write_text(ext1_json_config) + + +ext2_json_config = """\ +{ + "ServerApp": { + "jpserver_extensions": { + "ext2_config": false + } + } +} +""" + + +@pytest.fixture +def ext2_config(configd): + config = configd.joinpath("ext2_config.json") + config.write_text(ext2_json_config) + + +def test_list_extension_from_configd(ext1_config, ext2_config): + manager = ExtensionConfigManager(read_config_path=jupyter_config_path()) + extensions = manager.get_jpserver_extensions() + assert "ext2_config" in extensions + assert "ext1_config" in extensions diff --git a/server/jupyter_server/tests/extension/test_entrypoint.py b/server/jupyter_server/tests/extension/test_entrypoint.py new file mode 100644 index 0000000..5f7d10d --- /dev/null +++ b/server/jupyter_server/tests/extension/test_entrypoint.py @@ -0,0 +1,15 @@ +import pytest + + +# All test coroutines will be treated as marked. 
+pytestmark = pytest.mark.script_launch_mode("subprocess") + + +def test_server_extension_list(jp_environ, script_runner): + ret = script_runner.run( + "jupyter", + "server", + "extension", + "list", + ) + assert ret.success diff --git a/server/jupyter_server/tests/extension/test_handler.py b/server/jupyter_server/tests/extension/test_handler.py new file mode 100644 index 0000000..d920c66 --- /dev/null +++ b/server/jupyter_server/tests/extension/test_handler.py @@ -0,0 +1,85 @@ +import pytest + + +@pytest.fixture +def jp_server_config(jp_template_dir): + return { + "ServerApp": { + "jpserver_extensions": {"jupyter_server.tests.extension.mockextensions": True} + }, + "MockExtensionApp": {"template_paths": [str(jp_template_dir)]}, + } + + +async def test_handler(jp_fetch): + r = await jp_fetch("mock", method="GET") + assert r.code == 200 + assert r.body.decode() == "mock trait" + + +async def test_handler_template(jp_fetch, mock_template): + r = await jp_fetch("mock_template", method="GET") + assert r.code == 200 + + +@pytest.mark.parametrize( + "jp_server_config", + [ + { + "ServerApp": { + "jpserver_extensions": {"jupyter_server.tests.extension.mockextensions": True} + }, + "MockExtensionApp": { + # Change a trait in the MockExtensionApp using + # the following config value. + "mock_trait": "test mock trait" + }, + } + ], +) +async def test_handler_setting(jp_fetch, jp_server_config): + # Test that the extension trait was picked up by the webapp. + r = await jp_fetch("mock", method="GET") + assert r.code == 200 + assert r.body.decode() == "test mock trait" + + +@pytest.mark.parametrize("jp_argv", (["--MockExtensionApp.mock_trait=test mock trait"],)) +async def test_handler_argv(jp_fetch, jp_argv): + # Test that the extension trait was picked up by the webapp. 
+ r = await jp_fetch("mock", method="GET") + assert r.code == 200 + assert r.body.decode() == "test mock trait" + + +@pytest.mark.parametrize( + "jp_server_config,jp_base_url", + [ + ( + { + "ServerApp": { + "jpserver_extensions": {"jupyter_server.tests.extension.mockextensions": True}, + # Move extension handlers behind a url prefix + "base_url": "test_prefix", + }, + "MockExtensionApp": { + # Change a trait in the MockExtensionApp using + # the following config value. + "mock_trait": "test mock trait" + }, + }, + "/test_prefix/", + ) + ], +) +async def test_base_url(jp_fetch, jp_server_config, jp_base_url): + # Test that the extension's handlers were properly prefixed + r = await jp_fetch("mock", method="GET") + assert r.code == 200 + assert r.body.decode() == "test mock trait" + + # Test that the static namespace was prefixed by base_url + r = await jp_fetch("static", "mockextension", "mock.txt", method="GET") + assert r.code == 200 + body = r.body.decode() + assert "mock static content" in body diff --git a/server/jupyter_server/tests/extension/test_launch.py b/server/jupyter_server/tests/extension/test_launch.py new file mode 100644 index 0000000..e5cc12e --- /dev/null +++ b/server/jupyter_server/tests/extension/test_launch.py @@ -0,0 +1,111 @@ +"""Test launching Jupyter Server Applications +through as ExtensionApp launch_instance. 
+""" +import os +import subprocess +import sys +import time +from binascii import hexlify +from pathlib import Path + +import pytest +import requests + + +HERE = os.path.dirname(os.path.abspath(__file__)) + + +@pytest.fixture +def port(): + return 9999 + + +@pytest.fixture +def token(): + return hexlify(os.urandom(4)).decode("ascii") + + +@pytest.fixture +def auth_header(token): + return {"Authorization": "token %s" % token} + + +def wait_up(url, interval=0.1, check=None): + while True: + try: + r = requests.get(url) + except Exception: + if check: + assert check() + # print("waiting for %s" % url) + time.sleep(interval) + else: + break + + +@pytest.fixture +def launch_instance(request, port, token): + def _run_in_subprocess(argv=[], add_token=True): + def _kill_extension_app(): + try: + process.terminate() + except OSError: + # Already dead. + pass + process.wait(10) + + if add_token: + f'--ServerApp.token="{token}"', + + process = subprocess.Popen( + [ + sys.executable, + "-m", + "mockextensions.app", + f"--port={port}", + "--ip=127.0.0.1", + "--no-browser", + *argv, + ], + cwd=HERE, + ) + + request.addfinalizer(_kill_extension_app) + url = f"http://127.0.0.1:{port}" + wait_up(url, check=lambda: process.poll() is None) + return process + + return _run_in_subprocess + + +@pytest.fixture +def fetch(port, auth_header): + def _get(endpoint): + url = f"http://127.0.0.1:{port}" + endpoint + return requests.get(url, headers=auth_header) + + return _get + + +def test_launch_instance(launch_instance, fetch): + launch_instance() + r = fetch("/mock") + assert r.status_code == 200 + + +def test_base_url(launch_instance, fetch): + launch_instance(["--ServerApp.base_url=/foo"]) + r = fetch("/foo/mock") + assert r.status_code == 200 + + +def test_token_file(launch_instance, fetch, token): + token_file = HERE / Path("token_file.txt") + os.environ["JUPYTER_TOKEN_FILE"] = str(token_file) + token_file.write_text(token, encoding="utf-8") + + launch_instance(add_token=False) + r = 
fetch("/mock") + del os.environ["JUPYTER_TOKEN_FILE"] + token_file.unlink() + assert r.status_code == 200 diff --git a/server/jupyter_server/tests/extension/test_manager.py b/server/jupyter_server/tests/extension/test_manager.py new file mode 100644 index 0000000..148b6ea --- /dev/null +++ b/server/jupyter_server/tests/extension/test_manager.py @@ -0,0 +1,132 @@ +import os +import unittest.mock as mock + +import pytest +from jupyter_core.paths import jupyter_config_path + +from jupyter_server.extension.manager import ExtensionManager +from jupyter_server.extension.manager import ExtensionMetadataError +from jupyter_server.extension.manager import ExtensionModuleNotFound +from jupyter_server.extension.manager import ExtensionPackage +from jupyter_server.extension.manager import ExtensionPoint + +# Use ServerApps environment because it monkeypatches +# jupyter_core.paths and provides a config directory +# that's not cross contaminating the user config directory. +pytestmark = pytest.mark.usefixtures("jp_environ") + + +def test_extension_point_api(): + # Import mock extension metadata + from .mockextensions import _jupyter_server_extension_points + + # Testing the first path (which is an extension app). + metadata_list = _jupyter_server_extension_points() + point = metadata_list[0] + + module = point["module"] + app = point["app"] + + e = ExtensionPoint(metadata=point) + assert e.module_name == module + assert e.name == app.name + assert app is not None + assert callable(e.load) + assert callable(e.link) + assert e.validate() + + +def test_extension_point_metadata_error(): + # Missing the "module" key. 
+ bad_metadata = {"name": "nonexistent"} + with pytest.raises(ExtensionMetadataError): + ExtensionPoint(metadata=bad_metadata) + + +def test_extension_point_notfound_error(): + bad_metadata = {"module": "nonexistent"} + with pytest.raises(ExtensionModuleNotFound): + ExtensionPoint(metadata=bad_metadata) + + +def test_extension_package_api(): + # Import mock extension metadata + from .mockextensions import _jupyter_server_extension_points + + # Testing the first path (which is an extension app). + metadata_list = _jupyter_server_extension_points() + path1 = metadata_list[0] + app = path1["app"] + + e = ExtensionPackage(name="jupyter_server.tests.extension.mockextensions") + e.extension_points + assert hasattr(e, "extension_points") + assert len(e.extension_points) == len(metadata_list) + assert app.name in e.extension_points + assert e.validate() + + +def test_extension_package_notfound_error(): + with pytest.raises(ExtensionModuleNotFound): + ExtensionPackage(name="nonexistent") + + +def _normalize_path(path_list): + return [p.rstrip(os.path.sep) for p in path_list] + + +def test_extension_manager_api(jp_serverapp): + jpserver_extensions = {"jupyter_server.tests.extension.mockextensions": True} + manager = ExtensionManager(serverapp=jp_serverapp) + assert manager.config_manager + expected = _normalize_path(os.path.join(jupyter_config_path()[0], "serverconfig")) + assert _normalize_path(manager.config_manager.read_config_path[0]) == expected + manager.from_jpserver_extensions(jpserver_extensions) + assert len(manager.extensions) == 1 + assert "jupyter_server.tests.extension.mockextensions" in manager.extensions + + +def test_extension_manager_linked_extensions(jp_serverapp): + name = "jupyter_server.tests.extension.mockextensions" + manager = ExtensionManager(serverapp=jp_serverapp) + manager.add_extension(name, enabled=True) + manager.link_extension(name) + assert name in manager.linked_extensions + + +def test_extension_manager_fail_add(jp_serverapp): + name = 
"jupyter_server.tests.extension.notanextension" + manager = ExtensionManager(serverapp=jp_serverapp) + manager.add_extension(name, enabled=True) # should only warn + jp_serverapp.reraise_server_extension_failures = True + with pytest.raises(ExtensionModuleNotFound): + manager.add_extension(name, enabled=True) + + +def test_extension_manager_fail_link(jp_serverapp): + name = "jupyter_server.tests.extension.mockextensions.app" + with mock.patch( + "jupyter_server.tests.extension.mockextensions.app.MockExtensionApp.parse_command_line", + side_effect=RuntimeError, + ): + manager = ExtensionManager(serverapp=jp_serverapp) + manager.add_extension(name, enabled=True) + manager.link_extension(name) # should only warn + jp_serverapp.reraise_server_extension_failures = True + with pytest.raises(RuntimeError): + manager.link_extension(name) + + +def test_extension_manager_fail_load(jp_serverapp): + name = "jupyter_server.tests.extension.mockextensions.app" + with mock.patch( + "jupyter_server.tests.extension.mockextensions.app.MockExtensionApp.initialize_handlers", + side_effect=RuntimeError, + ): + manager = ExtensionManager(serverapp=jp_serverapp) + manager.add_extension(name, enabled=True) + manager.link_extension(name) + manager.load_extension(name) # should only warn + jp_serverapp.reraise_server_extension_failures = True + with pytest.raises(RuntimeError): + manager.load_extension(name) diff --git a/server/jupyter_server/tests/extension/test_serverextension.py b/server/jupyter_server/tests/extension/test_serverextension.py new file mode 100644 index 0000000..5140cdf --- /dev/null +++ b/server/jupyter_server/tests/extension/test_serverextension.py @@ -0,0 +1,106 @@ +from collections import OrderedDict + +import pytest +from traitlets.tests.utils import check_help_all_output + +from jupyter_server.config_manager import BaseJSONConfigManager +from jupyter_server.extension.serverextension import _get_config_dir +from jupyter_server.extension.serverextension import 
toggle_server_extension_python + + +# Use ServerApps environment because it monkeypatches +# jupyter_core.paths and provides a config directory +# that's not cross contaminating the user config directory. +pytestmark = pytest.mark.usefixtures("jp_environ") + + +def test_help_output(): + check_help_all_output("jupyter_server.extension.serverextension") + check_help_all_output("jupyter_server.extension.serverextension", ["enable"]) + check_help_all_output("jupyter_server.extension.serverextension", ["disable"]) + check_help_all_output("jupyter_server.extension.serverextension", ["install"]) + check_help_all_output("jupyter_server.extension.serverextension", ["uninstall"]) + + +def get_config(sys_prefix=True): + cm = BaseJSONConfigManager(config_dir=_get_config_dir(sys_prefix=sys_prefix)) + data = cm.get("jupyter_server_config") + return data.get("ServerApp", {}).get("jpserver_extensions", {}) + + +def test_enable(jp_env_config_path, jp_extension_environ): + toggle_server_extension_python("mock1", True) + config = get_config() + assert config["mock1"] + + +def test_disable(jp_env_config_path, jp_extension_environ): + toggle_server_extension_python("mock1", True) + toggle_server_extension_python("mock1", False) + + config = get_config() + assert not config["mock1"] + + +def test_merge_config(jp_env_config_path, jp_configurable_serverapp, jp_extension_environ): + # Toggle each extension module with a JSON config file + # at the sys-prefix config dir. + toggle_server_extension_python( + "jupyter_server.tests.extension.mockextensions.mockext_sys", + enabled=True, + sys_prefix=True, + ) + toggle_server_extension_python( + "jupyter_server.tests.extension.mockextensions.mockext_user", + enabled=True, + user=True, + ) + + # Write this configuration in two places, sys-prefix and user. + # sys-prefix supercedes users, so the extension should be disabled + # when these two configs merge. 
+ toggle_server_extension_python( + "jupyter_server.tests.extension.mockextensions.mockext_both", + enabled=True, + sys_prefix=True, + ) + toggle_server_extension_python( + "jupyter_server.tests.extension.mockextensions.mockext_both", + enabled=False, + user=True, + ) + + arg = "--ServerApp.jpserver_extensions={{'{mockext_py}': True}}".format( + mockext_py="jupyter_server.tests.extension.mockextensions.mockext_py" + ) + + # Enable the last extension, mockext_py, using the CLI interface. + app = jp_configurable_serverapp(config_dir=str(jp_env_config_path), argv=[arg]) + # Verify that extensions are enabled and merged in proper order. + extensions = app.jpserver_extensions + assert extensions["jupyter_server.tests.extension.mockextensions.mockext_user"] + assert extensions["jupyter_server.tests.extension.mockextensions.mockext_sys"] + assert extensions["jupyter_server.tests.extension.mockextensions.mockext_py"] + # Merging should causes this extension to be disabled. + assert not extensions["jupyter_server.tests.extension.mockextensions.mockext_both"] + + +@pytest.mark.parametrize( + "jp_server_config", + [ + { + "ServerApp": { + "jpserver_extensions": OrderedDict( + [ + ("jupyter_server.tests.extension.mockextensions.mock2", True), + ("jupyter_server.tests.extension.mockextensions.mock1", True), + ] + ) + } + } + ], +) +def test_load_ordered(jp_serverapp, jp_server_config): + assert jp_serverapp.mockII is True, "Mock II should have been loaded" + assert jp_serverapp.mockI is True, "Mock I should have been loaded" + assert jp_serverapp.mock_shared == "II", "Mock II should be loaded after Mock I" diff --git a/server/jupyter_server/tests/extension/test_utils.py b/server/jupyter_server/tests/extension/test_utils.py new file mode 100644 index 0000000..425b6ba --- /dev/null +++ b/server/jupyter_server/tests/extension/test_utils.py @@ -0,0 +1,20 @@ +import pytest + +from jupyter_server.extension.utils import validate_extension + + +# Use ServerApps environment because it 
monkeypatches +# jupyter_core.paths and provides a config directory +# that's not cross contaminating the user config directory. +pytestmark = pytest.mark.usefixtures("jp_environ") + + +def test_validate_extension(): + # enabled at sys level + assert validate_extension("jupyter_server.tests.extension.mockextensions.mockext_sys") + # enabled at sys, disabled at user + assert validate_extension("jupyter_server.tests.extension.mockextensions.mockext_both") + # enabled at user + assert validate_extension("jupyter_server.tests.extension.mockextensions.mockext_user") + # enabled at Python + assert validate_extension("jupyter_server.tests.extension.mockextensions.mockext_py") diff --git a/server/jupyter_server/tests/namespace-package-test/README.md b/server/jupyter_server/tests/namespace-package-test/README.md new file mode 100644 index 0000000..f72158b --- /dev/null +++ b/server/jupyter_server/tests/namespace-package-test/README.md @@ -0,0 +1,3 @@ +Blank namespace package for use in testing. + +https://www.python.org/dev/peps/pep-0420/ diff --git a/server/jupyter_server/tests/namespace-package-test/setup.cfg b/server/jupyter_server/tests/namespace-package-test/setup.cfg new file mode 100644 index 0000000..105be78 --- /dev/null +++ b/server/jupyter_server/tests/namespace-package-test/setup.cfg @@ -0,0 +1,5 @@ +[metadata] +name = namespace-package-test + +[options] +packages = find_namespace: diff --git a/server/jupyter_server/tests/namespace-package-test/test_namespace/test_package/__init__.py b/server/jupyter_server/tests/namespace-package-test/test_namespace/test_package/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/server/jupyter_server/tests/nbconvert/__init__.py b/server/jupyter_server/tests/nbconvert/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/server/jupyter_server/tests/nbconvert/test_handlers.py b/server/jupyter_server/tests/nbconvert/test_handlers.py new file mode 100644 index 0000000..75d2372 --- /dev/null +++ 
b/server/jupyter_server/tests/nbconvert/test_handlers.py @@ -0,0 +1,150 @@ +# coding: utf-8 +import json +from base64 import encodebytes +from shutil import which + +import pytest +import tornado +from nbformat import writes +from nbformat.v4 import new_code_cell +from nbformat.v4 import new_markdown_cell +from nbformat.v4 import new_notebook +from nbformat.v4 import new_output + +from ..utils import expected_http_error + + +png_green_pixel = encodebytes( + b"\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00" + b"\x00\x00\x01\x00\x00x00\x01\x08\x02\x00\x00\x00\x90wS\xde\x00\x00\x00\x0cIDAT" + b"\x08\xd7c\x90\xfb\xcf\x00\x00\x02\\\x01\x1e.~d\x87\x00\x00\x00\x00IEND\xaeB`\x82" +).decode("ascii") + + +@pytest.fixture +def notebook(jp_root_dir): + # Build sub directory. + subdir = jp_root_dir / "foo" + if not jp_root_dir.joinpath("foo").is_dir(): + subdir.mkdir() + + # Build a notebook programmatically. + nb = new_notebook() + nb.cells.append(new_markdown_cell("Created by test ³")) + cc1 = new_code_cell(source="print(2*6)") + cc1.outputs.append(new_output(output_type="stream", text="12")) + cc1.outputs.append( + new_output( + output_type="execute_result", + data={"image/png": png_green_pixel}, + execution_count=1, + ) + ) + nb.cells.append(cc1) + + # Write file to tmp dir. 
+ nbfile = subdir / "testnb.ipynb" + nbfile.write_text(writes(nb, version=4), encoding="utf-8") + + +pytestmark = pytest.mark.skipif(not which("pandoc"), reason="Command 'pandoc' is not available") + + +async def test_from_file(jp_fetch, notebook): + r = await jp_fetch( + "nbconvert", + "html", + "foo", + "testnb.ipynb", + method="GET", + params={"download": False}, + ) + + assert r.code == 200 + assert "text/html" in r.headers["Content-Type"] + assert "Created by test" in r.body.decode() + assert "print" in r.body.decode() + + r = await jp_fetch( + "nbconvert", + "python", + "foo", + "testnb.ipynb", + method="GET", + params={"download": False}, + ) + + assert r.code == 200 + assert "text/x-python" in r.headers["Content-Type"] + assert "print(2*6)" in r.body.decode() + + +async def test_from_file_404(jp_fetch, notebook): + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await jp_fetch( + "nbconvert", + "html", + "foo", + "thisdoesntexist.ipynb", + method="GET", + params={"download": False}, + ) + assert expected_http_error(e, 404) + + +async def test_from_file_download(jp_fetch, notebook): + r = await jp_fetch( + "nbconvert", + "python", + "foo", + "testnb.ipynb", + method="GET", + params={"download": True}, + ) + content_disposition = r.headers["Content-Disposition"] + assert "attachment" in content_disposition + assert "testnb.py" in content_disposition + + +async def test_from_file_zip(jp_fetch, notebook): + r = await jp_fetch( + "nbconvert", + "latex", + "foo", + "testnb.ipynb", + method="GET", + params={"download": True}, + ) + assert "application/zip" in r.headers["Content-Type"] + assert ".zip" in r.headers["Content-Disposition"] + + +async def test_from_post(jp_fetch, notebook): + r = await jp_fetch( + "api/contents/foo/testnb.ipynb", + method="GET", + ) + nbmodel = json.loads(r.body.decode()) + + r = await jp_fetch("nbconvert", "html", method="POST", body=json.dumps(nbmodel)) + assert r.code == 200 + assert "text/html" in 
r.headers["Content-Type"] + assert "Created by test" in r.body.decode() + assert "print" in r.body.decode() + + r = await jp_fetch("nbconvert", "python", method="POST", body=json.dumps(nbmodel)) + assert r.code == 200 + assert "text/x-python" in r.headers["Content-Type"] + assert "print(2*6)" in r.body.decode() + + +async def test_from_post_zip(jp_fetch, notebook): + r = await jp_fetch( + "api/contents/foo/testnb.ipynb", + method="GET", + ) + nbmodel = json.loads(r.body.decode()) + + r = await jp_fetch("nbconvert", "latex", method="POST", body=json.dumps(nbmodel)) + assert "application/zip" in r.headers["Content-Type"] + assert ".zip" in r.headers["Content-Disposition"] diff --git a/server/jupyter_server/tests/services/__init__.py b/server/jupyter_server/tests/services/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/server/jupyter_server/tests/services/api/__init__.py b/server/jupyter_server/tests/services/api/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/server/jupyter_server/tests/services/api/test_api.py b/server/jupyter_server/tests/services/api/test_api.py new file mode 100644 index 0000000..c1620ff --- /dev/null +++ b/server/jupyter_server/tests/services/api/test_api.py @@ -0,0 +1,23 @@ +import json + + +async def test_get_spec(jp_fetch): + response = await jp_fetch("api", "spec.yaml", method="GET") + assert response.code == 200 + + +async def test_get_status(jp_fetch): + response = await jp_fetch("api", "status", method="GET") + assert response.code == 200 + assert response.headers.get("Content-Type") == "application/json" + status = json.loads(response.body.decode("utf8")) + assert sorted(status.keys()) == [ + "connections", + "kernels", + "last_activity", + "started", + ] + assert status["connections"] == 0 + assert status["kernels"] == 0 + assert status["last_activity"].endswith("Z") + assert status["started"].endswith("Z") diff --git a/server/jupyter_server/tests/services/config/__init__.py 
b/server/jupyter_server/tests/services/config/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/server/jupyter_server/tests/services/config/test_api.py b/server/jupyter_server/tests/services/config/test_api.py new file mode 100644 index 0000000..9d4a4b2 --- /dev/null +++ b/server/jupyter_server/tests/services/config/test_api.py @@ -0,0 +1,50 @@ +import json + + +async def test_create_retrieve_config(jp_fetch): + sample = {"foo": "bar", "baz": 73} + response = await jp_fetch("api", "config", "example", method="PUT", body=json.dumps(sample)) + assert response.code == 204 + + response2 = await jp_fetch( + "api", + "config", + "example", + method="GET", + ) + assert response2.code == 200 + assert json.loads(response2.body.decode()) == sample + + +async def test_modify(jp_fetch): + sample = {"foo": "bar", "baz": 73, "sub": {"a": 6, "b": 7}, "sub2": {"c": 8}} + + modified_sample = { + "foo": None, # should delete foo + "baz": 75, + "wib": [1, 2, 3], + "sub": {"a": 8, "b": None, "d": 9}, + "sub2": {"c": None}, # should delete sub2 + } + + diff = {"baz": 75, "wib": [1, 2, 3], "sub": {"a": 8, "d": 9}} + + await jp_fetch("api", "config", "example", method="PUT", body=json.dumps(sample)) + + response2 = await jp_fetch( + "api", "config", "example", method="PATCH", body=json.dumps(modified_sample) + ) + + assert response2.code == 200 + assert json.loads(response2.body.decode()) == diff + + +async def test_get_unknown(jp_fetch): + response = await jp_fetch( + "api", + "config", + "nonexistant", + method="GET", + ) + assert response.code == 200 + assert json.loads(response.body.decode()) == {} diff --git a/server/jupyter_server/tests/services/contents/__init__.py b/server/jupyter_server/tests/services/contents/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/server/jupyter_server/tests/services/contents/test_api.py b/server/jupyter_server/tests/services/contents/test_api.py new file mode 100644 index 0000000..2d15cc2 --- /dev/null +++ 
b/server/jupyter_server/tests/services/contents/test_api.py @@ -0,0 +1,802 @@ +import json +import pathlib +import sys +from base64 import decodebytes +from base64 import encodebytes +from unicodedata import normalize + +import pytest +import tornado +from nbformat import from_dict +from nbformat import writes +from nbformat.v4 import new_markdown_cell +from nbformat.v4 import new_notebook + +from ...utils import expected_http_error +from jupyter_server.utils import url_path_join + + +def notebooks_only(dir_model): + return [nb for nb in dir_model["content"] if nb["type"] == "notebook"] + + +def dirs_only(dir_model): + return [x for x in dir_model["content"] if x["type"] == "directory"] + + +dirs = [ + ("", "inroot"), + ("Directory with spaces in", "inspace"), + ("unicodé", "innonascii"), + ("foo", "a"), + ("foo", "b"), + ("foo", "name with spaces"), + ("foo", "unicodé"), + ("foo/bar", "baz"), + ("ordering", "A"), + ("ordering", "b"), + ("ordering", "C"), + ("å b", "ç d"), +] + + +@pytest.fixture(params=["FileContentsManager", "AsyncFileContentsManager"]) +def jp_argv(request): + return [ + "--ServerApp.contents_manager_class=jupyter_server.services.contents.filemanager." 
+ + request.param + ] + + +@pytest.fixture +def contents_dir(tmp_path, jp_serverapp): + return tmp_path / jp_serverapp.root_dir + + +@pytest.fixture +def contents(contents_dir): + # Create files in temporary directory + paths = { + "notebooks": [], + "textfiles": [], + "blobs": [], + } + for d, name in dirs: + p = contents_dir / d + p.mkdir(parents=True, exist_ok=True) + + # Create a notebook + nb = writes(new_notebook(), version=4) + nbname = p.joinpath("{}.ipynb".format(name)) + nbname.write_text(nb, encoding="utf-8") + paths["notebooks"].append(nbname.relative_to(contents_dir)) + + # Create a text file + txt = "{} text file".format(name) + txtname = p.joinpath("{}.txt".format(name)) + txtname.write_text(txt, encoding="utf-8") + paths["textfiles"].append(txtname.relative_to(contents_dir)) + + # Create a random blob + blob = name.encode("utf-8") + b"\xFF" + blobname = p.joinpath("{}.blob".format(name)) + blobname.write_bytes(blob) + paths["blobs"].append(blobname.relative_to(contents_dir)) + paths["all"] = list(paths.values()) + return paths + + +@pytest.fixture +def folders(): + return list(set(item[0] for item in dirs)) + + +@pytest.mark.parametrize("path,name", dirs) +async def test_list_notebooks(jp_fetch, contents, path, name): + response = await jp_fetch( + "api", + "contents", + path, + method="GET", + ) + data = json.loads(response.body.decode()) + nbs = notebooks_only(data) + assert len(nbs) > 0 + assert name + ".ipynb" in [normalize("NFC", n["name"]) for n in nbs] + assert url_path_join(path, name + ".ipynb") in [normalize("NFC", n["path"]) for n in nbs] + + +@pytest.mark.parametrize("path,name", dirs) +async def test_get_dir_no_contents(jp_fetch, contents, path, name): + response = await jp_fetch( + "api", + "contents", + path, + method="GET", + params=dict( + content="0", + ), + ) + model = json.loads(response.body.decode()) + assert model["path"] == path + assert model["type"] == "directory" + assert "content" in model + assert model["content"] is 
None + + +async def test_list_nonexistant_dir(jp_fetch, contents): + with pytest.raises(tornado.httpclient.HTTPClientError): + await jp_fetch( + "api", + "contents", + "nonexistant", + method="GET", + ) + + +@pytest.mark.parametrize("path,name", dirs) +async def test_get_nb_contents(jp_fetch, contents, path, name): + nbname = name + ".ipynb" + nbpath = (path + "/" + nbname).lstrip("/") + r = await jp_fetch("api", "contents", nbpath, method="GET", params=dict(content="1")) + model = json.loads(r.body.decode()) + assert model["name"] == nbname + assert model["path"] == nbpath + assert model["type"] == "notebook" + assert "content" in model + assert model["format"] == "json" + assert "metadata" in model["content"] + assert isinstance(model["content"]["metadata"], dict) + + +@pytest.mark.parametrize("path,name", dirs) +async def test_get_nb_no_contents(jp_fetch, contents, path, name): + nbname = name + ".ipynb" + nbpath = (path + "/" + nbname).lstrip("/") + r = await jp_fetch("api", "contents", nbpath, method="GET", params=dict(content="0")) + model = json.loads(r.body.decode()) + assert model["name"] == nbname + assert model["path"] == nbpath + assert model["type"] == "notebook" + assert "content" in model + assert model["content"] is None + + +async def test_get_nb_invalid(contents_dir, jp_fetch, contents): + nb = { + "nbformat": 4, + "metadata": {}, + "cells": [ + { + "cell_type": "wrong", + "metadata": {}, + } + ], + } + nbpath = "å b/Validate tést.ipynb" + (contents_dir / nbpath).write_text(json.dumps(nb)) + r = await jp_fetch( + "api", + "contents", + nbpath, + method="GET", + ) + model = json.loads(r.body.decode()) + assert model["path"] == nbpath + assert model["type"] == "notebook" + assert "content" in model + assert "message" in model + assert "validation failed" in model["message"].lower() + + +async def test_get_contents_no_such_file(jp_fetch): + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await jp_fetch( + "api", + "contents", + 
"foo/q.ipynb", + method="GET", + ) + assert e.value.code == 404 + + +@pytest.mark.parametrize("path,name", dirs) +async def test_get_text_file_contents(jp_fetch, contents, path, name): + txtname = name + ".txt" + txtpath = (path + "/" + txtname).lstrip("/") + r = await jp_fetch("api", "contents", txtpath, method="GET", params=dict(content="1")) + model = json.loads(r.body.decode()) + assert model["name"] == txtname + assert model["path"] == txtpath + assert "content" in model + assert model["format"] == "text" + assert model["type"] == "file" + assert model["content"] == "{} text file".format(name) + + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await jp_fetch( + "api", + "contents", + "foo/q.txt", + method="GET", + ) + assert expected_http_error(e, 404) + + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await jp_fetch( + "api", + "contents", + "foo/bar/baz.blob", + method="GET", + params=dict(type="file", format="text"), + ) + assert expected_http_error(e, 400) + + +@pytest.mark.parametrize("path,name", dirs) +async def test_get_binary_file_contents(jp_fetch, contents, path, name): + blobname = name + ".blob" + blobpath = (path + "/" + blobname).lstrip("/") + r = await jp_fetch("api", "contents", blobpath, method="GET", params=dict(content="1")) + model = json.loads(r.body.decode()) + assert model["name"] == blobname + assert model["path"] == blobpath + assert "content" in model + assert model["format"] == "base64" + assert model["type"] == "file" + data_out = decodebytes(model["content"].encode("ascii")) + data_in = name.encode("utf-8") + b"\xFF" + assert data_in == data_out + + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await jp_fetch( + "api", + "contents", + "foo/q.txt", + method="GET", + ) + assert expected_http_error(e, 404) + + +async def test_get_bad_type(jp_fetch, contents): + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + path = "unicodé" + type = "file" + await jp_fetch( + "api", + 
"contents", + path, + method="GET", + params=dict(type=type), # This should be a directory, and thus throw an error + ) + assert expected_http_error(e, 400, "%s is a directory, not a %s" % (path, type)) + + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + path = "unicodé/innonascii.ipynb" + type = "directory" + await jp_fetch( + "api", + "contents", + path, + method="GET", + params=dict(type=type), # This should be a file, and thus throw an error + ) + assert expected_http_error(e, 400, "%s is not a directory" % path) + + +@pytest.fixture +def _check_created(jp_base_url): + def _inner(r, contents_dir, path, name, type="notebook"): + fpath = path + "/" + name + assert r.code == 201 + location = jp_base_url + "api/contents/" + tornado.escape.url_escape(fpath, plus=False) + assert r.headers["Location"] == location + model = json.loads(r.body.decode()) + assert model["name"] == name + assert model["path"] == fpath + assert model["type"] == type + path = contents_dir + "/" + fpath + if type == "directory": + assert pathlib.Path(path).is_dir() + else: + assert pathlib.Path(path).is_file() + + return _inner + + +async def test_create_untitled(jp_fetch, contents, contents_dir, _check_created): + path = "å b" + name = "Untitled.ipynb" + r = await jp_fetch("api", "contents", path, method="POST", body=json.dumps({"ext": ".ipynb"})) + _check_created(r, str(contents_dir), path, name, type="notebook") + + name = "Untitled1.ipynb" + r = await jp_fetch("api", "contents", path, method="POST", body=json.dumps({"ext": ".ipynb"})) + _check_created(r, str(contents_dir), path, name, type="notebook") + + path = "foo/bar" + name = "Untitled.ipynb" + r = await jp_fetch("api", "contents", path, method="POST", body=json.dumps({"ext": ".ipynb"})) + _check_created(r, str(contents_dir), path, name, type="notebook") + + +async def test_create_untitled_txt(jp_fetch, contents, contents_dir, _check_created): + name = "untitled.txt" + path = "foo/bar" + r = await jp_fetch("api", 
"contents", path, method="POST", body=json.dumps({"ext": ".txt"})) + _check_created(r, str(contents_dir), path, name, type="file") + + r = await jp_fetch("api", "contents", path, name, method="GET") + model = json.loads(r.body.decode()) + assert model["type"] == "file" + assert model["format"] == "text" + assert model["content"] == "" + + +async def test_upload(jp_fetch, contents, contents_dir, _check_created): + nb = new_notebook() + nbmodel = {"content": nb, "type": "notebook"} + path = "å b" + name = "Upload tést.ipynb" + r = await jp_fetch("api", "contents", path, name, method="PUT", body=json.dumps(nbmodel)) + _check_created(r, str(contents_dir), path, name) + + +async def test_mkdir_untitled(jp_fetch, contents, contents_dir, _check_created): + name = "Untitled Folder" + path = "å b" + r = await jp_fetch( + "api", "contents", path, method="POST", body=json.dumps({"type": "directory"}) + ) + _check_created(r, str(contents_dir), path, name, type="directory") + + name = "Untitled Folder 1" + r = await jp_fetch( + "api", "contents", path, method="POST", body=json.dumps({"type": "directory"}) + ) + _check_created(r, str(contents_dir), path, name, type="directory") + + name = "Untitled Folder" + path = "foo/bar" + r = await jp_fetch( + "api", "contents", path, method="POST", body=json.dumps({"type": "directory"}) + ) + _check_created(r, str(contents_dir), path, name, type="directory") + + +async def test_mkdir(jp_fetch, contents, contents_dir, _check_created): + name = "New ∂ir" + path = "å b" + r = await jp_fetch( + "api", + "contents", + path, + name, + method="PUT", + body=json.dumps({"type": "directory"}), + ) + _check_created(r, str(contents_dir), path, name, type="directory") + + +async def test_mkdir_hidden_400(jp_fetch): + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await jp_fetch( + "api", + "contents", + "å b/.hidden", + method="PUT", + body=json.dumps({"type": "directory"}), + ) + assert expected_http_error(e, 400) + + +async def 
test_upload_txt(jp_fetch, contents, contents_dir, _check_created): + body = "ünicode téxt" + model = { + "content": body, + "format": "text", + "type": "file", + } + path = "å b" + name = "Upload tést.txt" + await jp_fetch("api", "contents", path, name, method="PUT", body=json.dumps(model)) + + # check roundtrip + r = await jp_fetch("api", "contents", path, name, method="GET") + model = json.loads(r.body.decode()) + assert model["type"] == "file" + assert model["format"] == "text" + assert model["path"] == path + "/" + name + assert model["content"] == body + + +async def test_upload_b64(jp_fetch, contents, contents_dir, _check_created): + body = b"\xFFblob" + b64body = encodebytes(body).decode("ascii") + model = { + "content": b64body, + "format": "base64", + "type": "file", + } + path = "å b" + name = "Upload tést.blob" + await jp_fetch("api", "contents", path, name, method="PUT", body=json.dumps(model)) + # check roundtrip + r = await jp_fetch("api", "contents", path, name, method="GET") + model = json.loads(r.body.decode()) + assert model["type"] == "file" + assert model["path"] == path + "/" + name + assert model["format"] == "base64" + decoded = decodebytes(model["content"].encode("ascii")) + assert decoded == body + + +async def test_copy(jp_fetch, contents, contents_dir, _check_created): + path = "å b" + name = "ç d.ipynb" + copy = "ç d-Copy1.ipynb" + r = await jp_fetch( + "api", + "contents", + path, + method="POST", + body=json.dumps({"copy_from": path + "/" + name}), + ) + _check_created(r, str(contents_dir), path, copy, type="notebook") + + # Copy the same file name + copy2 = "ç d-Copy2.ipynb" + r = await jp_fetch( + "api", + "contents", + path, + method="POST", + body=json.dumps({"copy_from": path + "/" + name}), + ) + _check_created(r, str(contents_dir), path, copy2, type="notebook") + + # copy a copy. 
+ copy3 = "ç d-Copy3.ipynb" + r = await jp_fetch( + "api", + "contents", + path, + method="POST", + body=json.dumps({"copy_from": path + "/" + copy2}), + ) + _check_created(r, str(contents_dir), path, copy3, type="notebook") + + +async def test_copy_path(jp_fetch, contents, contents_dir, _check_created): + path1 = "foo" + path2 = "å b" + name = "a.ipynb" + copy = "a-Copy1.ipynb" + r = await jp_fetch( + "api", + "contents", + path2, + method="POST", + body=json.dumps({"copy_from": path1 + "/" + name}), + ) + _check_created(r, str(contents_dir), path2, name, type="notebook") + + r = await jp_fetch( + "api", + "contents", + path2, + method="POST", + body=json.dumps({"copy_from": path1 + "/" + name}), + ) + _check_created(r, str(contents_dir), path2, copy, type="notebook") + + +async def test_copy_put_400(jp_fetch, contents, contents_dir, _check_created): + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await jp_fetch( + "api", + "contents", + "å b/cøpy.ipynb", + method="PUT", + body=json.dumps({"copy_from": "å b/ç d.ipynb"}), + ) + assert expected_http_error(e, 400) + + +async def test_copy_dir_400(jp_fetch, contents, contents_dir, _check_created): + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await jp_fetch( + "api", + "contents", + "foo", + method="POST", + body=json.dumps({"copy_from": "å b"}), + ) + assert expected_http_error(e, 400) + + +@pytest.mark.parametrize("path,name", dirs) +async def test_delete(jp_fetch, contents, contents_dir, path, name, _check_created): + nbname = name + ".ipynb" + nbpath = (path + "/" + nbname).lstrip("/") + r = await jp_fetch( + "api", + "contents", + nbpath, + method="DELETE", + ) + assert r.code == 204 + + +async def test_delete_dirs(jp_fetch, contents, folders): + # Iterate over folders + for name in sorted(folders + ["/"], key=len, reverse=True): + r = await jp_fetch("api", "contents", name, method="GET") + # Get JSON blobs for each content. 
+ listing = json.loads(r.body.decode())["content"] + # Delete all content + for model in listing: + await jp_fetch("api", "contents", model["path"], method="DELETE") + # Make sure all content has been deleted. + r = await jp_fetch("api", "contents", method="GET") + model = json.loads(r.body.decode()) + assert model["content"] == [] + + +@pytest.mark.skipif(sys.platform == "win32", reason="Disabled deleting non-empty dirs on Windows") +async def test_delete_non_empty_dir(jp_fetch, contents): + # Delete a folder + await jp_fetch("api", "contents", "å b", method="DELETE") + # Check that the folder has been deleted. + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await jp_fetch("api", "contents", "å b", method="GET") + assert expected_http_error(e, 404) + + +async def test_rename(jp_fetch, jp_base_url, contents, contents_dir): + path = "foo" + name = "a.ipynb" + new_name = "z.ipynb" + # Rename the file + r = await jp_fetch( + "api", + "contents", + path, + name, + method="PATCH", + body=json.dumps({"path": path + "/" + new_name}), + ) + fpath = path + "/" + new_name + assert r.code == 200 + location = url_path_join(jp_base_url, "api/contents/", fpath) + assert r.headers["Location"] == location + model = json.loads(r.body.decode()) + assert model["name"] == new_name + assert model["path"] == fpath + fpath = str(contents_dir / fpath) + assert pathlib.Path(fpath).is_file() + + # Check that the files have changed + r = await jp_fetch("api", "contents", path, method="GET") + listing = json.loads(r.body.decode()) + nbnames = [name["name"] for name in listing["content"]] + assert "z.ipynb" in nbnames + assert "a.ipynb" not in nbnames + + +async def test_checkpoints_follow_file(jp_fetch, contents): + path = "foo" + name = "a.ipynb" + + # Read initial file. 
+ r = await jp_fetch("api", "contents", path, name, method="GET") + model = json.loads(r.body.decode()) + + # Create a checkpoint of initial state + r = await jp_fetch( + "api", + "contents", + path, + name, + "checkpoints", + method="POST", + allow_nonstandard_methods=True, + ) + cp1 = json.loads(r.body.decode()) + + # Modify file and save. + nbcontent = model["content"] + nb = from_dict(nbcontent) + hcell = new_markdown_cell("Created by test") + nb.cells.append(hcell) + nbmodel = {"content": nb, "type": "notebook"} + await jp_fetch("api", "contents", path, name, method="PUT", body=json.dumps(nbmodel)) + + # List checkpoints + r = await jp_fetch( + "api", + "contents", + path, + name, + "checkpoints", + method="GET", + ) + cps = json.loads(r.body.decode()) + assert cps == [cp1] + + r = await jp_fetch("api", "contents", path, name, method="GET") + model = json.loads(r.body.decode()) + nbcontent = model["content"] + nb = from_dict(nbcontent) + assert nb.cells[0].source == "Created by test" + + +async def test_rename_existing(jp_fetch, contents): + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + path = "foo" + name = "a.ipynb" + new_name = "b.ipynb" + # Rename the file + await jp_fetch( + "api", + "contents", + path, + name, + method="PATCH", + body=json.dumps({"path": path + "/" + new_name}), + ) + assert expected_http_error(e, 409) + + +async def test_save(jp_fetch, contents): + r = await jp_fetch("api", "contents", "foo/a.ipynb", method="GET") + model = json.loads(r.body.decode()) + nbmodel = model["content"] + nb = from_dict(nbmodel) + nb.cells.append(new_markdown_cell("Created by test ³")) + nbmodel = {"content": nb, "type": "notebook"} + await jp_fetch("api", "contents", "foo/a.ipynb", method="PUT", body=json.dumps(nbmodel)) + # Round trip. 
+ r = await jp_fetch("api", "contents", "foo/a.ipynb", method="GET") + model = json.loads(r.body.decode()) + newnb = from_dict(model["content"]) + assert newnb.cells[0].source == "Created by test ³" + + +async def test_checkpoints(jp_fetch, contents): + path = "foo/a.ipynb" + resp = await jp_fetch("api", "contents", path, method="GET") + model = json.loads(resp.body.decode()) + r = await jp_fetch( + "api", + "contents", + path, + "checkpoints", + method="POST", + allow_nonstandard_methods=True, + ) + assert r.code == 201 + cp1 = json.loads(r.body.decode()) + assert set(cp1) == {"id", "last_modified"} + assert r.headers["Location"].split("/")[-1] == cp1["id"] + + # Modify it. + nbcontent = model["content"] + nb = from_dict(nbcontent) + hcell = new_markdown_cell("Created by test") + nb.cells.append(hcell) + + # Save it. + nbmodel = {"content": nb, "type": "notebook"} + await jp_fetch("api", "contents", path, method="PUT", body=json.dumps(nbmodel)) + + # List checkpoints + r = await jp_fetch("api", "contents", path, "checkpoints", method="GET") + cps = json.loads(r.body.decode()) + assert cps == [cp1] + + r = await jp_fetch("api", "contents", path, method="GET") + nbcontent = json.loads(r.body.decode())["content"] + nb = from_dict(nbcontent) + assert nb.cells[0].source == "Created by test" + + # Restore Checkpoint cp1 + r = await jp_fetch( + "api", + "contents", + path, + "checkpoints", + cp1["id"], + method="POST", + allow_nonstandard_methods=True, + ) + assert r.code == 204 + + r = await jp_fetch("api", "contents", path, method="GET") + nbcontent = json.loads(r.body.decode())["content"] + nb = from_dict(nbcontent) + assert nb.cells == [] + + # Delete cp1 + r = await jp_fetch("api", "contents", path, "checkpoints", cp1["id"], method="DELETE") + assert r.code == 204 + + r = await jp_fetch("api", "contents", path, "checkpoints", method="GET") + cps = json.loads(r.body.decode()) + assert cps == [] + + +async def test_file_checkpoints(jp_fetch, contents): + path = 
"foo/a.txt" + resp = await jp_fetch("api", "contents", path, method="GET") + orig_content = json.loads(resp.body.decode())["content"] + r = await jp_fetch( + "api", + "contents", + path, + "checkpoints", + method="POST", + allow_nonstandard_methods=True, + ) + assert r.code == 201 + cp1 = json.loads(r.body.decode()) + assert set(cp1) == {"id", "last_modified"} + assert r.headers["Location"].split("/")[-1] == cp1["id"] + + # Modify it. + new_content = orig_content + "\nsecond line" + model = { + "content": new_content, + "type": "file", + "format": "text", + } + + # Save it. + await jp_fetch("api", "contents", path, method="PUT", body=json.dumps(model)) + + # List checkpoints + r = await jp_fetch("api", "contents", path, "checkpoints", method="GET") + cps = json.loads(r.body.decode()) + assert cps == [cp1] + + r = await jp_fetch("api", "contents", path, method="GET") + content = json.loads(r.body.decode())["content"] + assert content == new_content + + # Restore Checkpoint cp1 + r = await jp_fetch( + "api", + "contents", + path, + "checkpoints", + cp1["id"], + method="POST", + allow_nonstandard_methods=True, + ) + assert r.code == 204 + + r = await jp_fetch("api", "contents", path, method="GET") + restored_content = json.loads(r.body.decode())["content"] + assert restored_content == orig_content + + # Delete cp1 + r = await jp_fetch("api", "contents", path, "checkpoints", cp1["id"], method="DELETE") + assert r.code == 204 + + r = await jp_fetch("api", "contents", path, "checkpoints", method="GET") + cps = json.loads(r.body.decode()) + assert cps == [] + + +async def test_trust(jp_fetch, contents): + # It should be able to trust a notebook that exists + for path in contents["notebooks"]: + r = await jp_fetch( + "api", + "contents", + str(path), + "trust", + method="POST", + allow_nonstandard_methods=True, + ) + assert r.code == 201 diff --git a/server/jupyter_server/tests/services/contents/test_config.py b/server/jupyter_server/tests/services/contents/test_config.py 
new file mode 100644 index 0000000..741c10c --- /dev/null +++ b/server/jupyter_server/tests/services/contents/test_config.py @@ -0,0 +1,27 @@ +import pytest + +from jupyter_server.services.contents.checkpoints import AsyncCheckpoints +from jupyter_server.services.contents.filecheckpoints import AsyncGenericFileCheckpoints +from jupyter_server.services.contents.filecheckpoints import GenericFileCheckpoints +from jupyter_server.services.contents.manager import AsyncContentsManager + + +@pytest.fixture(params=[AsyncGenericFileCheckpoints, GenericFileCheckpoints]) +def jp_server_config(request): + return {"FileContentsManager": {"checkpoints_class": request.param}} + + +def test_config_did_something(jp_server_config, jp_serverapp): + assert isinstance( + jp_serverapp.contents_manager.checkpoints, + jp_server_config["FileContentsManager"]["checkpoints_class"], + ) + + +async def test_async_contents_manager(jp_configurable_serverapp): + config = {"ContentsManager": {"checkpoints_class": AsyncCheckpoints}} + argv = [ + "--ServerApp.contents_manager_class=jupyter_server.services.contents.manager.AsyncContentsManager" + ] + app = jp_configurable_serverapp(config=config, argv=argv) + assert isinstance(app.contents_manager, AsyncContentsManager) diff --git a/server/jupyter_server/tests/services/contents/test_fileio.py b/server/jupyter_server/tests/services/contents/test_fileio.py new file mode 100644 index 0000000..98f3a31 --- /dev/null +++ b/server/jupyter_server/tests/services/contents/test_fileio.py @@ -0,0 +1,139 @@ +import functools +import io +import os +import stat +import sys + +import decorator +import pytest +from ipython_genutils.testing.decorators import skip_win32 as _skip_win32 + +from jupyter_server.services.contents.fileio import atomic_writing + + +@functools.wraps(_skip_win32) +def skip_win32(f): + # Patches the "skip_win32" method to allow pytest fixtures + # in methods wrapped by this decorator. 
+ def inner(f, *args, **kwargs): + decorated_f = _skip_win32(f) + return decorated_f(*args, **kwargs) + + return decorator.decorator(inner, f) + + +umask = 0 + + +def test_atomic_writing(tmp_path): + class CustomExc(Exception): + pass + + f1 = tmp_path / "penguin" + f1.write_text("Before") + + if os.name != "nt": + os.chmod(str(f1), 0o701) + orig_mode = stat.S_IMODE(os.stat(str(f1)).st_mode) + + f2 = tmp_path / "flamingo" + try: + os.symlink(str(f1), str(f2)) + have_symlink = True + except (AttributeError, NotImplementedError, OSError): + # AttributeError: Python doesn't support it + # NotImplementedError: The system doesn't support it + # OSError: The user lacks the privilege (Windows) + have_symlink = False + + with pytest.raises(CustomExc): + with atomic_writing(str(f1)) as f: + f.write("Failing write") + raise CustomExc + + with io.open(str(f1), "r") as f: + assert f.read() == "Before" + + with atomic_writing(str(f1)) as f: + f.write("Overwritten") + + with io.open(str(f1), "r") as f: + assert f.read() == "Overwritten" + + if os.name != "nt": + mode = stat.S_IMODE(os.stat(str(f1)).st_mode) + assert mode == orig_mode + + if have_symlink: + # Check that writing over a file preserves a symlink + with atomic_writing(str(f2)) as f: + f.write("written from symlink") + + with io.open(str(f1), "r") as f: + assert f.read() == "written from symlink" + + +@pytest.fixture +def handle_umask(): + global umask + umask = os.umask(0) + os.umask(umask) + yield + os.umask(umask) + + +@pytest.mark.skipif(sys.platform.startswith("win"), reason="Windows") +def test_atomic_writing_umask(handle_umask, tmp_path): + + os.umask(0o022) + f1 = str(tmp_path / "1") + with atomic_writing(f1) as f: + f.write("1") + mode = stat.S_IMODE(os.stat(f1).st_mode) + assert mode == 0o644 + + os.umask(0o057) + f2 = str(tmp_path / "2") + + with atomic_writing(f2) as f: + f.write("2") + + mode = stat.S_IMODE(os.stat(f2).st_mode) + assert mode == 0o620 + + +def test_atomic_writing_newlines(tmp_path): + path 
= str(tmp_path / "testfile") + + lf = "a\nb\nc\n" + plat = lf.replace("\n", os.linesep) + crlf = lf.replace("\n", "\r\n") + + # test default + with io.open(path, "w") as f: + f.write(lf) + with io.open(path, "r", newline="") as f: + read = f.read() + assert read == plat + + # test newline=LF + with io.open(path, "w", newline="\n") as f: + f.write(lf) + with io.open(path, "r", newline="") as f: + read = f.read() + assert read == lf + + # test newline=CRLF + with atomic_writing(str(path), newline="\r\n") as f: + f.write(lf) + with io.open(path, "r", newline="") as f: + read = f.read() + assert read == crlf + + # test newline=no convert + text = "crlf\r\ncr\rlf\n" + with atomic_writing(str(path), newline="") as f: + f.write(text) + with io.open(path, "r", newline="") as f: + read = f.read() + assert read == text diff --git a/server/jupyter_server/tests/services/contents/test_largefilemanager.py b/server/jupyter_server/tests/services/contents/test_largefilemanager.py new file mode 100644 index 0000000..a53ae66 --- /dev/null +++ b/server/jupyter_server/tests/services/contents/test_largefilemanager.py @@ -0,0 +1,109 @@ +import pytest +import tornado + +from ...utils import expected_http_error +from jupyter_server.services.contents.largefilemanager import AsyncLargeFileManager +from jupyter_server.services.contents.largefilemanager import LargeFileManager +from jupyter_server.utils import ensure_async + + +@pytest.fixture(params=[LargeFileManager, AsyncLargeFileManager]) +def jp_large_contents_manager(request, tmp_path): + """Returns a LargeFileManager instance.""" + file_manager = request.param + return file_manager(root_dir=str(tmp_path)) + + +async def test_save(jp_large_contents_manager): + cm = jp_large_contents_manager + model = await ensure_async(cm.new_untitled(type="notebook")) + name = model["name"] + path = model["path"] + + # Get the model with 'content' + full_model = await ensure_async(cm.get(path)) + # Save the notebook + model = await 
ensure_async(cm.save(full_model, path)) + assert isinstance(model, dict) + assert "name" in model + assert "path" in model + assert model["name"] == name + assert model["path"] == path + + +@pytest.mark.parametrize( + "model,err_message", + [ + ( + {"name": "test", "path": "test", "chunk": 1}, + "HTTP 400: Bad Request (No file type provided)", + ), + ( + {"name": "test", "path": "test", "chunk": 1, "type": "notebook"}, + 'HTTP 400: Bad Request (File type "notebook" is not supported for large file transfer)', + ), + ( + {"name": "test", "path": "test", "chunk": 1, "type": "file"}, + "HTTP 400: Bad Request (No file content provided)", + ), + ( + { + "name": "test", + "path": "test", + "chunk": 2, + "type": "file", + "content": "test", + "format": "json", + }, + "HTTP 400: Bad Request (Must specify format of file contents as 'text' or 'base64')", + ), + ], +) +async def test_bad_save(jp_large_contents_manager, model, err_message): + with pytest.raises(tornado.web.HTTPError) as e: + await ensure_async(jp_large_contents_manager.save(model, model["path"])) + assert expected_http_error(e, 400, expected_message=err_message) + + +async def test_saving_different_chunks(jp_large_contents_manager): + cm = jp_large_contents_manager + model = { + "name": "test", + "path": "test", + "type": "file", + "content": "test==", + "format": "text", + } + name = model["name"] + path = model["path"] + await ensure_async(cm.save(model, path)) + + for chunk in (1, 2, -1): + for fm in ("text", "base64"): + full_model = await ensure_async(cm.get(path)) + full_model["chunk"] = chunk + full_model["format"] = fm + model_res = await ensure_async(cm.save(full_model, path)) + assert isinstance(model_res, dict) + assert "name" in model_res + assert "path" in model_res + assert "chunk" not in model_res + assert model_res["name"] == name + assert model_res["path"] == path + + +async def test_save_in_subdirectory(jp_large_contents_manager, tmp_path): + cm = jp_large_contents_manager + sub_dir = tmp_path 
/ "foo" + sub_dir.mkdir() + model = await ensure_async(cm.new_untitled(path="/foo/", type="notebook")) + path = model["path"] + model = await ensure_async(cm.get(path)) + + # Change the name in the model for rename + model = await ensure_async(cm.save(model, path)) + assert isinstance(model, dict) + assert "name" in model + assert "path" in model + assert model["name"] == "Untitled.ipynb" + assert model["path"] == "foo/Untitled.ipynb" diff --git a/server/jupyter_server/tests/services/contents/test_manager.py b/server/jupyter_server/tests/services/contents/test_manager.py new file mode 100644 index 0000000..4da26d3 --- /dev/null +++ b/server/jupyter_server/tests/services/contents/test_manager.py @@ -0,0 +1,669 @@ +import os +import sys +import time +from itertools import combinations + +import pytest +from nbformat import v4 as nbformat +from tornado.web import HTTPError +from traitlets import TraitError + +from ...utils import expected_http_error +from jupyter_server.services.contents.filemanager import AsyncFileContentsManager +from jupyter_server.services.contents.filemanager import FileContentsManager +from jupyter_server.utils import ensure_async + + +@pytest.fixture( + params=[ + (FileContentsManager, True), + (FileContentsManager, False), + (AsyncFileContentsManager, True), + (AsyncFileContentsManager, False), + ] +) +def jp_contents_manager(request, tmp_path): + contents_manager, use_atomic_writing = request.param + return contents_manager(root_dir=str(tmp_path), use_atomic_writing=use_atomic_writing) + + +@pytest.fixture(params=[FileContentsManager, AsyncFileContentsManager]) +def jp_file_contents_manager_class(request, tmp_path): + return request.param + + +# -------------- Functions ---------------------------- + + +def _make_dir(jp_contents_manager, api_path): + """ + Make a directory. 
+ """ + os_path = jp_contents_manager._get_os_path(api_path) + try: + os.makedirs(os_path) + except OSError: + print("Directory already exists: %r" % os_path) + + +def symlink(jp_contents_manager, src, dst): + """Make a symlink to src from dst + + src and dst are api_paths + """ + src_os_path = jp_contents_manager._get_os_path(src) + dst_os_path = jp_contents_manager._get_os_path(dst) + print(src_os_path, dst_os_path, os.path.isfile(src_os_path)) + os.symlink(src_os_path, dst_os_path) + + +def add_code_cell(notebook): + output = nbformat.new_output("display_data", {"application/javascript": "alert('hi');"}) + cell = nbformat.new_code_cell("print('hi')", outputs=[output]) + notebook.cells.append(cell) + + +async def new_notebook(jp_contents_manager): + cm = jp_contents_manager + model = await ensure_async(cm.new_untitled(type="notebook")) + name = model["name"] + path = model["path"] + + full_model = await ensure_async(cm.get(path)) + nb = full_model["content"] + nb["metadata"]["counter"] = int(1e6 * time.time()) + add_code_cell(nb) + + await ensure_async(cm.save(full_model, path)) + return nb, name, path + + +async def make_populated_dir(jp_contents_manager, api_path): + cm = jp_contents_manager + _make_dir(cm, api_path) + await ensure_async(cm.new(path="/".join([api_path, "nb.ipynb"]))) + await ensure_async(cm.new(path="/".join([api_path, "file.txt"]))) + + +async def check_populated_dir_files(jp_contents_manager, api_path): + dir_model = await ensure_async(jp_contents_manager.get(api_path)) + + assert dir_model["path"] == api_path + assert dir_model["type"] == "directory" + + for entry in dir_model["content"]: + if entry["type"] == "directory": + continue + elif entry["type"] == "file": + assert entry["name"] == "file.txt" + complete_path = "/".join([api_path, "file.txt"]) + assert entry["path"] == complete_path + elif entry["type"] == "notebook": + assert entry["name"] == "nb.ipynb" + complete_path = "/".join([api_path, "nb.ipynb"]) + assert entry["path"] == 
complete_path + + +# ----------------- Tests ---------------------------------- + + +def test_root_dir(jp_file_contents_manager_class, tmp_path): + fm = jp_file_contents_manager_class(root_dir=str(tmp_path)) + assert fm.root_dir == str(tmp_path) + + +def test_missing_root_dir(jp_file_contents_manager_class, tmp_path): + root = tmp_path / "notebook" / "dir" / "is" / "missing" + with pytest.raises(TraitError): + jp_file_contents_manager_class(root_dir=str(root)) + + +def test_invalid_root_dir(jp_file_contents_manager_class, tmp_path): + temp_file = tmp_path / "file.txt" + temp_file.write_text("") + with pytest.raises(TraitError): + jp_file_contents_manager_class(root_dir=str(temp_file)) + + +def test_get_os_path(jp_file_contents_manager_class, tmp_path): + fm = jp_file_contents_manager_class(root_dir=str(tmp_path)) + path = fm._get_os_path("/path/to/notebook/test.ipynb") + rel_path_list = "/path/to/notebook/test.ipynb".split("/") + fs_path = os.path.join(fm.root_dir, *rel_path_list) + assert path == fs_path + + fm = jp_file_contents_manager_class(root_dir=str(tmp_path)) + path = fm._get_os_path("test.ipynb") + fs_path = os.path.join(fm.root_dir, "test.ipynb") + assert path == fs_path + + fm = jp_file_contents_manager_class(root_dir=str(tmp_path)) + path = fm._get_os_path("////test.ipynb") + fs_path = os.path.join(fm.root_dir, "test.ipynb") + assert path == fs_path + + +def test_checkpoint_subdir(jp_file_contents_manager_class, tmp_path): + subd = "sub ∂ir" + cp_name = "test-cp.ipynb" + fm = jp_file_contents_manager_class(root_dir=str(tmp_path)) + tmp_path.joinpath(subd).mkdir() + cpm = fm.checkpoints + cp_dir = cpm.checkpoint_path("cp", "test.ipynb") + cp_subdir = cpm.checkpoint_path("cp", "/%s/test.ipynb" % subd) + assert cp_dir != cp_subdir + assert cp_dir == os.path.join(str(tmp_path), cpm.checkpoint_dir, cp_name) + + +async def test_bad_symlink(jp_file_contents_manager_class, tmp_path): + td = str(tmp_path) + + cm = jp_file_contents_manager_class(root_dir=td) + 
path = "test bad symlink" + _make_dir(cm, path) + + file_model = await ensure_async(cm.new_untitled(path=path, ext=".txt")) + + # create a broken symlink + symlink(cm, "target", "%s/%s" % (path, "bad symlink")) + model = await ensure_async(cm.get(path)) + + contents = {content["name"]: content for content in model["content"]} + assert "untitled.txt" in contents + assert contents["untitled.txt"] == file_model + assert "bad symlink" in contents + + +@pytest.mark.skipif(sys.platform.startswith("win"), reason="Windows doesn't detect symlink loops") +async def test_recursive_symlink(jp_file_contents_manager_class, tmp_path): + td = str(tmp_path) + + cm = jp_file_contents_manager_class(root_dir=td) + path = "test recursive symlink" + _make_dir(cm, path) + + file_model = await ensure_async(cm.new_untitled(path=path, ext=".txt")) + + # create recursive symlink + symlink(cm, "%s/%s" % (path, "recursive"), "%s/%s" % (path, "recursive")) + model = await ensure_async(cm.get(path)) + + contents = {content["name"]: content for content in model["content"]} + assert "untitled.txt" in contents + assert contents["untitled.txt"] == file_model + # recursive symlinks should not be shown in the contents manager + assert "recursive" not in contents + + +async def test_good_symlink(jp_file_contents_manager_class, tmp_path): + td = str(tmp_path) + cm = jp_file_contents_manager_class(root_dir=td) + parent = "test good symlink" + name = "good symlink" + path = "{0}/{1}".format(parent, name) + _make_dir(cm, parent) + + file_model = await ensure_async(cm.new(path=parent + "/zfoo.txt")) + + # create a good symlink + symlink(cm, file_model["path"], path) + symlink_model = await ensure_async(cm.get(path, content=False)) + dir_model = await ensure_async(cm.get(parent)) + assert sorted(dir_model["content"], key=lambda x: x["name"]) == [ + symlink_model, + file_model, + ] + + +@pytest.mark.skipif(sys.platform.startswith("win"), reason="Can't test permissions on Windows") +async def 
test_403(jp_file_contents_manager_class, tmp_path): + if hasattr(os, "getuid"): + if os.getuid() == 0: + raise pytest.skip("Can't test permissions as root") + + td = str(tmp_path) + cm = jp_file_contents_manager_class(root_dir=td) + model = await ensure_async(cm.new_untitled(type="file")) + os_path = cm._get_os_path(model["path"]) + + os.chmod(os_path, 0o400) + try: + with cm.open(os_path, "w") as f: + f.write("don't care") + except HTTPError as e: + assert e.status_code == 403 + + +async def test_escape_root(jp_file_contents_manager_class, tmp_path): + td = str(tmp_path) + cm = jp_file_contents_manager_class(root_dir=td) + # make foo, bar next to root + with open(os.path.join(cm.root_dir, "..", "foo"), "w") as f: + f.write("foo") + with open(os.path.join(cm.root_dir, "..", "bar"), "w") as f: + f.write("bar") + + with pytest.raises(HTTPError) as e: + await ensure_async(cm.get("..")) + expected_http_error(e, 404) + + with pytest.raises(HTTPError) as e: + await ensure_async(cm.get("foo/../../../bar")) + expected_http_error(e, 404) + + with pytest.raises(HTTPError) as e: + await ensure_async(cm.delete("../foo")) + expected_http_error(e, 404) + + with pytest.raises(HTTPError) as e: + await ensure_async(cm.rename("../foo", "../bar")) + expected_http_error(e, 404) + + with pytest.raises(HTTPError) as e: + await ensure_async( + cm.save( + model={ + "type": "file", + "content": "", + "format": "text", + }, + path="../foo", + ) + ) + expected_http_error(e, 404) + + +async def test_new_untitled(jp_contents_manager): + cm = jp_contents_manager + # Test in root directory + model = await ensure_async(cm.new_untitled(type="notebook")) + assert isinstance(model, dict) + assert "name" in model + assert "path" in model + assert "type" in model + assert model["type"] == "notebook" + assert model["name"] == "Untitled.ipynb" + assert model["path"] == "Untitled.ipynb" + + # Test in sub-directory + model = await ensure_async(cm.new_untitled(type="directory")) + assert isinstance(model, 
dict) + assert "name" in model + assert "path" in model + assert "type" in model + assert model["type"] == "directory" + assert model["name"] == "Untitled Folder" + assert model["path"] == "Untitled Folder" + sub_dir = model["path"] + + model = await ensure_async(cm.new_untitled(path=sub_dir)) + assert isinstance(model, dict) + assert "name" in model + assert "path" in model + assert "type" in model + assert model["type"] == "file" + assert model["name"] == "untitled" + assert model["path"] == "%s/untitled" % sub_dir + + # Test with a compound extension + model = await ensure_async(cm.new_untitled(path=sub_dir, ext=".foo.bar")) + assert model["name"] == "untitled.foo.bar" + model = await ensure_async(cm.new_untitled(path=sub_dir, ext=".foo.bar")) + assert model["name"] == "untitled1.foo.bar" + + +async def test_modified_date(jp_contents_manager): + cm = jp_contents_manager + # Create a new notebook. + nb, name, path = await new_notebook(cm) + model = await ensure_async(cm.get(path)) + + # Add a cell and save. + add_code_cell(model["content"]) + await ensure_async(cm.save(model, path)) + + # Reload notebook and verify that last_modified incremented. + saved = await ensure_async(cm.get(path)) + assert saved["last_modified"] >= model["last_modified"] + + # Move the notebook and verify that last_modified stayed the same. + # (The frontend fires a warning if last_modified increases on the + # renamed file.) 
+ new_path = "renamed.ipynb" + await ensure_async(cm.rename(path, new_path)) + renamed = await ensure_async(cm.get(new_path)) + assert renamed["last_modified"] >= saved["last_modified"] + + +async def test_get(jp_contents_manager): + cm = jp_contents_manager + # Create a notebook + model = await ensure_async(cm.new_untitled(type="notebook")) + name = model["name"] + path = model["path"] + + # Check that we 'get' on the notebook we just created + model2 = await ensure_async(cm.get(path)) + assert isinstance(model2, dict) + assert "name" in model2 + assert "path" in model2 + assert model["name"] == name + assert model["path"] == path + + nb_as_file = await ensure_async(cm.get(path, content=True, type="file")) + assert nb_as_file["path"] == path + assert nb_as_file["type"] == "file" + assert nb_as_file["format"] == "text" + assert not isinstance(nb_as_file["content"], dict) + + nb_as_bin_file = await ensure_async(cm.get(path, content=True, type="file", format="base64")) + assert nb_as_bin_file["format"] == "base64" + + # Test in sub-directory + sub_dir = "/foo/" + _make_dir(cm, "foo") + await ensure_async(cm.new_untitled(path=sub_dir, ext=".ipynb")) + model2 = await ensure_async(cm.get(sub_dir + name)) + assert isinstance(model2, dict) + assert "name" in model2 + assert "path" in model2 + assert "content" in model2 + assert model2["name"] == "Untitled.ipynb" + assert model2["path"] == "{0}/{1}".format(sub_dir.strip("/"), name) + + # Test with a regular file. 
+ file_model_path = (await ensure_async(cm.new_untitled(path=sub_dir, ext=".txt")))["path"] + file_model = await ensure_async(cm.get(file_model_path)) + expected_model = { + "content": "", + "format": "text", + "mimetype": "text/plain", + "name": "untitled.txt", + "path": "foo/untitled.txt", + "type": "file", + "writable": True, + } + # Assert expected model is in file_model + for key, value in expected_model.items(): + assert file_model[key] == value + assert "created" in file_model + assert "last_modified" in file_model + + # Create a sub-sub directory to test getting directory contents with a + # subdir. + _make_dir(cm, "foo/bar") + dirmodel = await ensure_async(cm.get("foo")) + assert dirmodel["type"] == "directory" + assert isinstance(dirmodel["content"], list) + assert len(dirmodel["content"]) == 3 + assert dirmodel["path"] == "foo" + assert dirmodel["name"] == "foo" + + # Directory contents should match the contents of each individual entry + # when requested with content=False. + model2_no_content = await ensure_async(cm.get(sub_dir + name, content=False)) + file_model_no_content = await ensure_async(cm.get("foo/untitled.txt", content=False)) + sub_sub_dir_no_content = await ensure_async(cm.get("foo/bar", content=False)) + assert sub_sub_dir_no_content["path"] == "foo/bar" + assert sub_sub_dir_no_content["name"] == "bar" + + for entry in dirmodel["content"]: + # Order isn't guaranteed by the spec, so this is a hacky way of + # verifying that all entries are matched. 
+ if entry["path"] == sub_sub_dir_no_content["path"]: + assert entry == sub_sub_dir_no_content + elif entry["path"] == model2_no_content["path"]: + assert entry == model2_no_content + elif entry["path"] == file_model_no_content["path"]: + assert entry == file_model_no_content + else: + assert False, "Unexpected directory entry: %s" % entry() + + with pytest.raises(HTTPError): + await ensure_async(cm.get("foo", type="file")) + + +async def test_update(jp_contents_manager): + cm = jp_contents_manager + # Create a notebook. + model = await ensure_async(cm.new_untitled(type="notebook")) + name = model["name"] + path = model["path"] + + # Change the name in the model for rename + model["path"] = "test.ipynb" + model = await ensure_async(cm.update(model, path)) + assert isinstance(model, dict) + assert "name" in model + assert "path" in model + assert model["name"] == "test.ipynb" + + # Make sure the old name is gone + with pytest.raises(HTTPError): + await ensure_async(cm.get(path)) + + # Test in sub-directory + # Create a directory and notebook in that directory + sub_dir = "/foo/" + _make_dir(cm, "foo") + model = await ensure_async(cm.new_untitled(path=sub_dir, type="notebook")) + path = model["path"] + + # Change the name in the model for rename + d = path.rsplit("/", 1)[0] + new_path = model["path"] = d + "/test_in_sub.ipynb" + model = await ensure_async(cm.update(model, path)) + assert isinstance(model, dict) + assert "name" in model + assert "path" in model + assert model["name"] == "test_in_sub.ipynb" + assert model["path"] == new_path + + # Make sure the old name is gone + with pytest.raises(HTTPError): + await ensure_async(cm.get(path)) + + +async def test_save(jp_contents_manager): + cm = jp_contents_manager + # Create a notebook + model = await ensure_async(cm.new_untitled(type="notebook")) + name = model["name"] + path = model["path"] + + # Get the model with 'content' + full_model = await ensure_async(cm.get(path)) + + # Save the notebook + model = await 
ensure_async(cm.save(full_model, path)) + assert isinstance(model, dict) + assert "name" in model + assert "path" in model + assert model["name"] == name + assert model["path"] == path + + # Test in sub-directory + # Create a directory and notebook in that directory + sub_dir = "/foo/" + _make_dir(cm, "foo") + model = await ensure_async(cm.new_untitled(path=sub_dir, type="notebook")) + path = model["path"] + model = await ensure_async(cm.get(path)) + + # Change the name in the model for rename + model = await ensure_async(cm.save(model, path)) + assert isinstance(model, dict) + assert "name" in model + assert "path" in model + assert model["name"] == "Untitled.ipynb" + assert model["path"] == "foo/Untitled.ipynb" + + +async def test_delete(jp_contents_manager): + cm = jp_contents_manager + # Create a notebook + nb, name, path = await new_notebook(cm) + + # Delete the notebook + await ensure_async(cm.delete(path)) + + # Check that deleting a non-existent path raises an error. + with pytest.raises(HTTPError): + await ensure_async(cm.delete(path)) + + # Check that a 'get' on the deleted notebook raises and error + with pytest.raises(HTTPError): + await ensure_async(cm.get(path)) + + +@pytest.mark.parametrize( + "delete_to_trash, always_delete, error", + ( + [True, True, False], + # on linux test folder may not be on home folder drive + # => if this is the case, _check_trash will be False + [True, False, None], + [False, True, False], + [False, False, True], + ), +) +async def test_delete_non_empty_folder(delete_to_trash, always_delete, error, jp_contents_manager): + cm = jp_contents_manager + cm.delete_to_trash = delete_to_trash + cm.always_delete_dir = always_delete + + dir = "to_delete" + + await make_populated_dir(cm, dir) + await check_populated_dir_files(cm, dir) + + if error is None: + error = False + if sys.platform == "win32": + error = True + elif sys.platform == "linux": + file_dev = os.stat(cm.root_dir).st_dev + home_dev = 
os.stat(os.path.expanduser("~")).st_dev + error = file_dev != home_dev + + if error: + with pytest.raises( + HTTPError, + match=r"HTTP 400: Bad Request \(Directory .*?to_delete not empty\)", + ): + await ensure_async(cm.delete_file(dir)) + else: + await ensure_async(cm.delete_file(dir)) + assert await ensure_async(cm.dir_exists(dir)) == False + + +async def test_rename(jp_contents_manager): + cm = jp_contents_manager + # Create a new notebook + nb, name, path = await new_notebook(cm) + + # Rename the notebook + await ensure_async(cm.rename(path, "changed_path")) + + # Attempting to get the notebook under the old name raises an error + with pytest.raises(HTTPError): + await ensure_async(cm.get(path)) + # Fetching the notebook under the new name is successful + assert isinstance(await ensure_async(cm.get("changed_path")), dict) + + # Ported tests on nested directory renaming from pgcontents + all_dirs = ["foo", "bar", "foo/bar", "foo/bar/foo", "foo/bar/foo/bar"] + unchanged_dirs = all_dirs[:2] + changed_dirs = all_dirs[2:] + + for _dir in all_dirs: + await make_populated_dir(cm, _dir) + await check_populated_dir_files(cm, _dir) + + # Renaming to an existing directory should fail + for src, dest in combinations(all_dirs, 2): + with pytest.raises(HTTPError) as e: + await ensure_async(cm.rename(src, dest)) + assert expected_http_error(e, 409) + + # Creating a notebook in a non_existant directory should fail + with pytest.raises(HTTPError) as e: + await ensure_async(cm.new_untitled("foo/bar_diff", ext=".ipynb")) + assert expected_http_error(e, 404) + + await ensure_async(cm.rename("foo/bar", "foo/bar_diff")) + + # Assert that unchanged directories remain so + for unchanged in unchanged_dirs: + await check_populated_dir_files(cm, unchanged) + + # Assert changed directories can no longer be accessed under old names + for changed_dirname in changed_dirs: + with pytest.raises(HTTPError) as e: + await ensure_async(cm.get(changed_dirname)) + assert expected_http_error(e, 404) 
+ new_dirname = changed_dirname.replace("foo/bar", "foo/bar_diff", 1) + await check_populated_dir_files(cm, new_dirname) + + # Created a notebook in the renamed directory should work + await ensure_async(cm.new_untitled("foo/bar_diff", ext=".ipynb")) + + +async def test_delete_root(jp_contents_manager): + cm = jp_contents_manager + with pytest.raises(HTTPError) as e: + await ensure_async(cm.delete("")) + assert expected_http_error(e, 400) + + +async def test_copy(jp_contents_manager): + cm = jp_contents_manager + parent = "å b" + name = "nb √.ipynb" + path = "{0}/{1}".format(parent, name) + _make_dir(cm, parent) + + orig = await ensure_async(cm.new(path=path)) + # copy with unspecified name + copy = await ensure_async(cm.copy(path)) + assert copy["name"] == orig["name"].replace(".ipynb", "-Copy1.ipynb") + + # copy with specified name + copy2 = await ensure_async(cm.copy(path, "å b/copy 2.ipynb")) + assert copy2["name"] == "copy 2.ipynb" + assert copy2["path"] == "å b/copy 2.ipynb" + # copy with specified path + copy2 = await ensure_async(cm.copy(path, "/")) + assert copy2["name"] == name + assert copy2["path"] == name + + +async def test_mark_trusted_cells(jp_contents_manager): + cm = jp_contents_manager + nb, name, path = await new_notebook(cm) + + cm.mark_trusted_cells(nb, path) + for cell in nb.cells: + if cell.cell_type == "code": + assert not cell.metadata.trusted + + await ensure_async(cm.trust_notebook(path)) + nb = (await ensure_async(cm.get(path)))["content"] + for cell in nb.cells: + if cell.cell_type == "code": + assert cell.metadata.trusted + + +async def test_check_and_sign(jp_contents_manager): + cm = jp_contents_manager + nb, name, path = await new_notebook(cm) + + cm.mark_trusted_cells(nb, path) + cm.check_and_sign(nb, path) + assert not cm.notary.check_signature(nb) + + await ensure_async(cm.trust_notebook(path)) + nb = (await ensure_async(cm.get(path)))["content"] + cm.mark_trusted_cells(nb, path) + cm.check_and_sign(nb, path) + assert 
cm.notary.check_signature(nb) diff --git a/server/jupyter_server/tests/services/kernels/__init__.py b/server/jupyter_server/tests/services/kernels/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/server/jupyter_server/tests/services/kernels/test_api.py b/server/jupyter_server/tests/services/kernels/test_api.py new file mode 100644 index 0000000..50be5fb --- /dev/null +++ b/server/jupyter_server/tests/services/kernels/test_api.py @@ -0,0 +1,277 @@ +import json +import os +import time + +import jupyter_client +import pytest +import tornado +from jupyter_client.kernelspec import NATIVE_KERNEL_NAME +from tornado.httpclient import HTTPClientError + +from ...utils import expected_http_error +from jupyter_server.utils import url_path_join + + +TEST_TIMEOUT = 20 + + +@pytest.fixture +def pending_kernel_is_ready(jp_serverapp): + async def _(kernel_id): + km = jp_serverapp.kernel_manager + if getattr(km, "use_pending_kernels", False): + kernel = km.get_kernel(kernel_id) + if getattr(kernel, "ready"): + await kernel.ready + + return _ + + +configs = [ + { + "ServerApp": { + "kernel_manager_class": "jupyter_server.services.kernels.kernelmanager.MappingKernelManager" + } + }, + { + "ServerApp": { + "kernel_manager_class": "jupyter_server.services.kernels.kernelmanager.AsyncMappingKernelManager" + } + }, +] + + +# Pending kernels was released in Jupyter Client 7.1 +# It is currently broken on Windows (Jan 2022). When fixed, we can remove the Windows check. 
+# See https://github.com/jupyter-server/jupyter_server/issues/672 +if os.name != "nt" and jupyter_client._version.version_info >= (7, 1): + # Add a pending kernels condition + c = { + "ServerApp": { + "kernel_manager_class": "jupyter_server.services.kernels.kernelmanager.AsyncMappingKernelManager" + }, + "AsyncMappingKernelManager": {"use_pending_kernels": True}, + } + configs.append(c) + + +@pytest.fixture(params=configs) +def jp_server_config(request): + return request.param + + +async def test_no_kernels(jp_fetch): + r = await jp_fetch("api", "kernels", method="GET") + kernels = json.loads(r.body.decode()) + assert kernels == [] + + +@pytest.mark.timeout(TEST_TIMEOUT) +async def test_default_kernels(jp_fetch, jp_base_url, jp_cleanup_subprocesses): + r = await jp_fetch("api", "kernels", method="POST", allow_nonstandard_methods=True) + kernel = json.loads(r.body.decode()) + assert r.headers["location"] == url_path_join(jp_base_url, "/api/kernels/", kernel["id"]) + assert r.code == 201 + assert isinstance(kernel, dict) + + report_uri = url_path_join(jp_base_url, "/api/security/csp-report") + expected_csp = "; ".join( + ["frame-ancestors 'self'", "report-uri " + report_uri, "default-src 'none'"] + ) + assert r.headers["Content-Security-Policy"] == expected_csp + await jp_cleanup_subprocesses() + + +@pytest.mark.timeout(TEST_TIMEOUT) +async def test_main_kernel_handler( + jp_fetch, jp_base_url, jp_cleanup_subprocesses, jp_serverapp, pending_kernel_is_ready +): + # Start the first kernel + r = await jp_fetch( + "api", "kernels", method="POST", body=json.dumps({"name": NATIVE_KERNEL_NAME}) + ) + kernel1 = json.loads(r.body.decode()) + assert r.headers["location"] == url_path_join(jp_base_url, "/api/kernels/", kernel1["id"]) + assert r.code == 201 + assert isinstance(kernel1, dict) + + report_uri = url_path_join(jp_base_url, "/api/security/csp-report") + expected_csp = "; ".join( + ["frame-ancestors 'self'", "report-uri " + report_uri, "default-src 'none'"] + ) + 
assert r.headers["Content-Security-Policy"] == expected_csp + + # Check that the kernel is found in the kernel list + r = await jp_fetch("api", "kernels", method="GET") + kernel_list = json.loads(r.body.decode()) + assert r.code == 200 + assert isinstance(kernel_list, list) + assert kernel_list[0]["id"] == kernel1["id"] + assert kernel_list[0]["name"] == kernel1["name"] + + # Start a second kernel + r = await jp_fetch( + "api", "kernels", method="POST", body=json.dumps({"name": NATIVE_KERNEL_NAME}) + ) + kernel2 = json.loads(r.body.decode()) + assert isinstance(kernel2, dict) + + # Get kernel list again + r = await jp_fetch("api", "kernels", method="GET") + kernel_list = json.loads(r.body.decode()) + assert r.code == 200 + assert isinstance(kernel_list, list) + assert len(kernel_list) == 2 + + # Interrupt a kernel + await pending_kernel_is_ready(kernel2["id"]) + r = await jp_fetch( + "api", + "kernels", + kernel2["id"], + "interrupt", + method="POST", + allow_nonstandard_methods=True, + ) + assert r.code == 204 + + # Restart a kernel + await pending_kernel_is_ready(kernel2["id"]) + r = await jp_fetch( + "api", + "kernels", + kernel2["id"], + "restart", + method="POST", + allow_nonstandard_methods=True, + ) + restarted_kernel = json.loads(r.body.decode()) + assert restarted_kernel["id"] == kernel2["id"] + assert restarted_kernel["name"] == kernel2["name"] + + # Start a kernel with a path + r = await jp_fetch( + "api", + "kernels", + method="POST", + body=json.dumps({"name": NATIVE_KERNEL_NAME, "path": "/foo"}), + ) + kernel3 = json.loads(r.body.decode()) + assert isinstance(kernel3, dict) + await jp_cleanup_subprocesses() + + +@pytest.mark.timeout(TEST_TIMEOUT) +async def test_kernel_handler(jp_fetch, jp_cleanup_subprocesses, pending_kernel_is_ready): + # Create a kernel + r = await jp_fetch( + "api", "kernels", method="POST", body=json.dumps({"name": NATIVE_KERNEL_NAME}) + ) + kernel_id = json.loads(r.body.decode())["id"] + r = await jp_fetch("api", "kernels", 
kernel_id, method="GET") + kernel = json.loads(r.body.decode()) + assert r.code == 200 + assert isinstance(kernel, dict) + assert "id" in kernel + assert kernel["id"] == kernel_id + + # Requests a bad kernel id. + bad_id = "111-111-111-111-111" + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await jp_fetch("api", "kernels", bad_id, method="GET") + assert expected_http_error(e, 404) + + # Delete kernel with id. + await pending_kernel_is_ready(kernel_id) + r = await jp_fetch( + "api", + "kernels", + kernel_id, + method="DELETE", + ) + assert r.code == 204 + + # Get list of kernels + try: + await pending_kernel_is_ready(kernel_id) + # If the kernel is already deleted, no need to await. + except tornado.web.HTTPError: + pass + r = await jp_fetch("api", "kernels", method="GET") + kernel_list = json.loads(r.body.decode()) + assert kernel_list == [] + + # Request to delete a non-existent kernel id + bad_id = "111-111-111-111-111" + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await jp_fetch("api", "kernels", bad_id, method="DELETE") + assert expected_http_error(e, 404, "Kernel does not exist: " + bad_id) + await jp_cleanup_subprocesses() + + +@pytest.mark.timeout(TEST_TIMEOUT) +async def test_kernel_handler_startup_error( + jp_fetch, jp_cleanup_subprocesses, jp_serverapp, jp_kernelspecs +): + if getattr(jp_serverapp.kernel_manager, "use_pending_kernels", False): + return + + # Create a kernel + with pytest.raises(HTTPClientError): + await jp_fetch("api", "kernels", method="POST", body=json.dumps({"name": "bad"})) + + +@pytest.mark.timeout(TEST_TIMEOUT) +async def test_kernel_handler_startup_error_pending( + jp_fetch, jp_ws_fetch, jp_cleanup_subprocesses, jp_serverapp, jp_kernelspecs +): + if not getattr(jp_serverapp.kernel_manager, "use_pending_kernels", False): + return + + jp_serverapp.kernel_manager.use_pending_kernels = True + # Create a kernel + r = await jp_fetch("api", "kernels", method="POST", body=json.dumps({"name": "bad"})) + 
kid = json.loads(r.body.decode())["id"] + + with pytest.raises(HTTPClientError): + await jp_ws_fetch("api", "kernels", kid, "channels") + + +@pytest.mark.timeout(TEST_TIMEOUT) +async def test_connection( + jp_fetch, jp_ws_fetch, jp_http_port, jp_auth_header, jp_cleanup_subprocesses +): + # Create kernel + r = await jp_fetch( + "api", "kernels", method="POST", body=json.dumps({"name": NATIVE_KERNEL_NAME}) + ) + kid = json.loads(r.body.decode())["id"] + + # Get kernel info + r = await jp_fetch("api", "kernels", kid, method="GET") + model = json.loads(r.body.decode()) + assert model["connections"] == 0 + + # Open a websocket connection. + ws = await jp_ws_fetch("api", "kernels", kid, "channels") + + # Test that it was opened. + r = await jp_fetch("api", "kernels", kid, method="GET") + model = json.loads(r.body.decode()) + assert model["connections"] == 1 + + # Close websocket + ws.close() + # give it some time to close on the other side: + for i in range(10): + r = await jp_fetch("api", "kernels", kid, method="GET") + model = json.loads(r.body.decode()) + if model["connections"] > 0: + time.sleep(0.1) + else: + break + + r = await jp_fetch("api", "kernels", kid, method="GET") + model = json.loads(r.body.decode()) + assert model["connections"] == 0 + await jp_cleanup_subprocesses() diff --git a/server/jupyter_server/tests/services/kernels/test_config.py b/server/jupyter_server/tests/services/kernels/test_config.py new file mode 100644 index 0000000..9b58a8c --- /dev/null +++ b/server/jupyter_server/tests/services/kernels/test_config.py @@ -0,0 +1,23 @@ +import pytest +from traitlets.config import Config + +from jupyter_server.services.kernels.kernelmanager import AsyncMappingKernelManager + + +@pytest.fixture +def jp_server_config(): + return Config( + {"ServerApp": {"MappingKernelManager": {"allowed_message_types": ["kernel_info_request"]}}} + ) + + +def test_config(jp_serverapp): + assert jp_serverapp.kernel_manager.allowed_message_types == ["kernel_info_request"] + 
+ +async def test_async_kernel_manager(jp_configurable_serverapp): + argv = [ + "--ServerApp.kernel_manager_class=jupyter_server.services.kernels.kernelmanager.AsyncMappingKernelManager" + ] + app = jp_configurable_serverapp(argv=argv) + assert isinstance(app.kernel_manager, AsyncMappingKernelManager) diff --git a/server/jupyter_server/tests/services/kernels/test_cull.py b/server/jupyter_server/tests/services/kernels/test_cull.py new file mode 100644 index 0000000..e2b5f0f --- /dev/null +++ b/server/jupyter_server/tests/services/kernels/test_cull.py @@ -0,0 +1,127 @@ +import asyncio +import json +import os +import platform + +import jupyter_client +import pytest +from tornado.httpclient import HTTPClientError +from traitlets.config import Config + + +CULL_TIMEOUT = 30 if platform.python_implementation() == "PyPy" else 5 +CULL_INTERVAL = 1 + + +@pytest.mark.parametrize( + "jp_server_config", + [ + # Test the synchronous case + Config( + { + "ServerApp": { + "kernel_manager_class": "jupyter_server.services.kernels.kernelmanager.MappingKernelManager", + "MappingKernelManager": { + "cull_idle_timeout": CULL_TIMEOUT, + "cull_interval": CULL_INTERVAL, + "cull_connected": False, + }, + } + } + ), + # Test the async case + Config( + { + "ServerApp": { + "kernel_manager_class": "jupyter_server.services.kernels.kernelmanager.AsyncMappingKernelManager", + "AsyncMappingKernelManager": { + "cull_idle_timeout": CULL_TIMEOUT, + "cull_interval": CULL_INTERVAL, + "cull_connected": False, + }, + } + } + ), + ], +) +async def test_cull_idle(jp_fetch, jp_ws_fetch, jp_cleanup_subprocesses): + r = await jp_fetch("api", "kernels", method="POST", allow_nonstandard_methods=True) + kernel = json.loads(r.body.decode()) + kid = kernel["id"] + + # Open a websocket connection. 
+ ws = await jp_ws_fetch("api", "kernels", kid, "channels") + + r = await jp_fetch("api", "kernels", kid, method="GET") + model = json.loads(r.body.decode()) + assert model["connections"] == 1 + culled = await get_cull_status(kid, jp_fetch) # connected, should not be culled + assert not culled + ws.close() + culled = await get_cull_status(kid, jp_fetch) # not connected, should be culled + assert culled + await jp_cleanup_subprocesses() + + +# Pending kernels was released in Jupyter Client 7.1 +# It is currently broken on Windows (Jan 2022). When fixed, we can remove the Windows check. +# See https://github.com/jupyter-server/jupyter_server/issues/672 +@pytest.mark.skipif( + os.name == "nt" or jupyter_client._version.version_info < (7, 1), + reason="Pending kernels require jupyter_client >= 7.1 on non-Windows", +) +@pytest.mark.parametrize( + "jp_server_config", + [ + Config( + { + "ServerApp": { + "kernel_manager_class": "jupyter_server.services.kernels.kernelmanager.AsyncMappingKernelManager", + "AsyncMappingKernelManager": { + "cull_idle_timeout": CULL_TIMEOUT, + "cull_interval": CULL_INTERVAL, + "cull_connected": False, + "default_kernel_name": "bad", + "use_pending_kernels": True, + }, + } + } + ) + ], +) +@pytest.mark.timeout(30) +async def test_cull_dead( + jp_fetch, jp_ws_fetch, jp_serverapp, jp_cleanup_subprocesses, jp_kernelspecs +): + r = await jp_fetch("api", "kernels", method="POST", allow_nonstandard_methods=True) + kernel = json.loads(r.body.decode()) + kid = kernel["id"] + + # Open a websocket connection. 
+ with pytest.raises(HTTPClientError): + await jp_ws_fetch("api", "kernels", kid, "channels") + + r = await jp_fetch("api", "kernels", kid, method="GET") + model = json.loads(r.body.decode()) + assert model["connections"] == 0 + culled = await get_cull_status(kid, jp_fetch) # kernel failed to start, should be culled + assert culled + await jp_cleanup_subprocesses() + + +async def get_cull_status(kid, jp_fetch): + frequency = 0.5 + culled = False + for _ in range( + int((CULL_TIMEOUT + CULL_INTERVAL) / frequency) + ): # Timeout + Interval will ensure cull + try: + r = await jp_fetch("api", "kernels", kid, method="GET") + json.loads(r.body.decode()) + except HTTPClientError as e: + assert e.code == 404 + culled = True + break + else: + await asyncio.sleep(frequency) + return culled diff --git a/server/jupyter_server/tests/services/kernelspecs/__init__.py b/server/jupyter_server/tests/services/kernelspecs/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/server/jupyter_server/tests/services/kernelspecs/test_api.py b/server/jupyter_server/tests/services/kernelspecs/test_api.py new file mode 100644 index 0000000..9471420 --- /dev/null +++ b/server/jupyter_server/tests/services/kernelspecs/test_api.py @@ -0,0 +1,79 @@ +import json + +import pytest +import tornado +from jupyter_client.kernelspec import NATIVE_KERNEL_NAME + +from ...utils import expected_http_error +from ...utils import some_resource + + +async def test_list_kernelspecs_bad(jp_fetch, jp_kernelspecs, jp_data_dir): + bad_kernel_dir = jp_data_dir.joinpath(jp_data_dir, "kernels", "bad2") + bad_kernel_dir.mkdir(parents=True) + bad_kernel_json = bad_kernel_dir.joinpath("kernel.json") + bad_kernel_json.write_text("garbage") + + r = await jp_fetch("api", "kernelspecs", method="GET") + model = json.loads(r.body.decode()) + assert isinstance(model, dict) + assert model["default"] == NATIVE_KERNEL_NAME + specs = model["kernelspecs"] + assert isinstance(specs, dict) + assert len(specs) > 2 + + +async def
test_list_kernelspecs(jp_fetch, jp_kernelspecs): + r = await jp_fetch("api", "kernelspecs", method="GET") + model = json.loads(r.body.decode()) + assert isinstance(model, dict) + assert model["default"] == NATIVE_KERNEL_NAME + specs = model["kernelspecs"] + assert isinstance(specs, dict) + assert len(specs) > 2 + + def is_sample_kernelspec(s): + return s["name"] == "sample" and s["spec"]["display_name"] == "Test kernel" + + def is_default_kernelspec(s): + return s["name"] == NATIVE_KERNEL_NAME and s["spec"]["display_name"].startswith("Python") + + assert any(is_sample_kernelspec(s) for s in specs.values()), specs + assert any(is_default_kernelspec(s) for s in specs.values()), specs + + +async def test_get_kernelspecs(jp_fetch, jp_kernelspecs): + r = await jp_fetch("api", "kernelspecs", "Sample", method="GET") + model = json.loads(r.body.decode()) + assert model["name"].lower() == "sample" + assert isinstance(model["spec"], dict) + assert model["spec"]["display_name"] == "Test kernel" + assert isinstance(model["resources"], dict) + + +async def test_get_kernelspec_spaces(jp_fetch, jp_kernelspecs): + r = await jp_fetch("api", "kernelspecs", "sample%202", method="GET") + model = json.loads(r.body.decode()) + assert model["name"].lower() == "sample 2" + + +async def test_get_nonexistant_kernelspec(jp_fetch, jp_kernelspecs): + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await jp_fetch("api", "kernelspecs", "nonexistant", method="GET") + assert expected_http_error(e, 404) + + +async def test_get_kernel_resource_file(jp_fetch, jp_kernelspecs): + r = await jp_fetch("kernelspecs", "sAmple", "resource.txt", method="GET") + res = r.body.decode("utf-8") + assert res == some_resource + + +async def test_get_nonexistant_resource(jp_fetch, jp_kernelspecs): + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await jp_fetch("kernelspecs", "nonexistant", "resource.txt", method="GET") + assert expected_http_error(e, 404) + + with 
pytest.raises(tornado.httpclient.HTTPClientError) as e: + await jp_fetch("kernelspecs", "sample", "nonexistant.txt", method="GET") + assert expected_http_error(e, 404) diff --git a/server/jupyter_server/tests/services/nbconvert/__init__.py b/server/jupyter_server/tests/services/nbconvert/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/server/jupyter_server/tests/services/nbconvert/test_api.py b/server/jupyter_server/tests/services/nbconvert/test_api.py new file mode 100644 index 0000000..ae028dd --- /dev/null +++ b/server/jupyter_server/tests/services/nbconvert/test_api.py @@ -0,0 +1,14 @@ +import json + + +async def test_list_formats(jp_fetch): + r = await jp_fetch("api", "nbconvert", method="GET") + formats = json.loads(r.body.decode()) + # Verify the type of the response. + assert isinstance(formats, dict) + # Verify that all returned formats have an + # output mimetype defined. + required_keys_present = [] + for name, data in formats.items(): + required_keys_present.append("output_mimetype" in data) + assert all(required_keys_present), "All returned formats must have a `output_mimetype` key." 
diff --git a/server/jupyter_server/tests/services/sessions/__init__.py b/server/jupyter_server/tests/services/sessions/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/server/jupyter_server/tests/services/sessions/test_api.py b/server/jupyter_server/tests/services/sessions/test_api.py new file mode 100644 index 0000000..3bbc5d0 --- /dev/null +++ b/server/jupyter_server/tests/services/sessions/test_api.py @@ -0,0 +1,600 @@ +import json +import os +import shutil +import time + +import jupyter_client +import pytest +import tornado +from jupyter_client.ioloop import AsyncIOLoopKernelManager +from nbformat import writes +from nbformat.v4 import new_notebook +from tornado.httpclient import HTTPClientError +from traitlets import default + +from ...utils import expected_http_error +from jupyter_server.services.kernels.kernelmanager import AsyncMappingKernelManager +from jupyter_server.utils import url_path_join + + +TEST_TIMEOUT = 20 + + +j = lambda r: json.loads(r.body.decode()) + + +class NewPortsKernelManager(AsyncIOLoopKernelManager): + @default("cache_ports") + def _default_cache_ports(self) -> bool: + return False + + async def restart_kernel(self, now: bool = False, newports: bool = True, **kw) -> None: + self.log.debug(f"DEBUG**** calling super().restart_kernel with newports={newports}") + return await super().restart_kernel(now=now, newports=newports, **kw) + + +class NewPortsMappingKernelManager(AsyncMappingKernelManager): + @default("kernel_manager_class") + def _default_kernel_manager_class(self): + self.log.debug("NewPortsMappingKernelManager in _default_kernel_manager_class!") + return "jupyter_server.tests.services.sessions.test_api.NewPortsKernelManager" + + +configs = [ + { + "ServerApp": { + "kernel_manager_class": "jupyter_server.services.kernels.kernelmanager.MappingKernelManager" + } + }, + { + "ServerApp": { + "kernel_manager_class": "jupyter_server.services.kernels.kernelmanager.AsyncMappingKernelManager" + } + }, + { + 
class SessionClient:
    """Thin convenience wrapper around ``jp_fetch`` for the
    ``/api/sessions`` REST endpoints used throughout these tests."""

    def __init__(self, fetch_callable):
        self.jp_fetch = fetch_callable

    async def _req(self, *args, method, body=None):
        # Serialize the payload once here so callers pass plain dicts.
        payload = None if body is None else json.dumps(body)
        return await self.jp_fetch(
            "api",
            "sessions",
            *args,
            method=method,
            body=payload,
            allow_nonstandard_methods=True,
        )

    async def list(self):
        return await self._req(method="GET")

    async def get(self, id):
        return await self._req(id, method="GET")

    async def create(self, path, type="notebook", kernel_name=None, kernel_id=None):
        kernel = {"name": kernel_name, "id": kernel_id}
        return await self._req(
            method="POST", body={"path": path, "type": type, "kernel": kernel}
        )

    def create_deprecated(self, path):
        # Legacy payload shape: the session is keyed by "notebook".
        return self._req(
            method="POST",
            body={"notebook": {"path": path}, "kernel": {"name": "python", "id": "foo"}},
        )

    def modify_path(self, id, path):
        return self._req(id, method="PATCH", body={"path": path})

    def modify_path_deprecated(self, id, path):
        return self._req(id, method="PATCH", body={"notebook": {"path": path}})

    def modify_type(self, id, type):
        return self._req(id, method="PATCH", body={"type": type})

    def modify_kernel_name(self, id, kernel_name):
        return self._req(id, method="PATCH", body={"kernel": {"name": kernel_name}})

    def modify_kernel_id(self, id, kernel_id):
        # Also send a dummy name to show that id takes precedence.
        return self._req(id, method="PATCH", body={"kernel": {"id": kernel_id, "name": "foo"}})

    async def delete(self, id):
        return await self._req(id, method="DELETE")

    async def cleanup(self):
        # Delete every session, pausing briefly between deletions.
        for session in j(await self.list()):
            await self.delete(session["id"])
            time.sleep(0.1)
@pytest.mark.timeout(TEST_TIMEOUT)
async def test_create(session_client, jp_base_url, jp_cleanup_subprocesses, jp_serverapp):
    """POSTing a new session creates it, starts its kernel, and lists it."""
    # Start from a clean slate: no sessions registered yet.
    assert len(j(await session_client.list())) == 0

    # Create a session and validate the payload and the Location header.
    resp = await session_client.create("foo/nb1.ipynb")
    assert resp.code == 201
    created = j(resp)
    assert "id" in created
    assert created["path"] == "foo/nb1.ipynb"
    assert created["type"] == "notebook"
    assert resp.headers["Location"] == url_path_join(
        jp_base_url, "/api/sessions/", created["id"]
    )

    # Make sure kernel is in expected state
    kid = created["kernel"]["id"]
    kernel = jp_serverapp.kernel_manager.get_kernel(kid)
    if hasattr(kernel, "ready") and os.name != "nt":
        manager = jp_serverapp.kernel_manager
        if isinstance(manager, AsyncMappingKernelManager):
            # With pending kernels enabled, ready is not yet done here.
            assert kernel.ready.done() == (not manager.use_pending_kernels)
        else:
            assert kernel.ready.done()

    # The new session shows up in the listing...
    listing = j(await session_client.list())
    assert len(listing) == 1
    assert_session_equality(listing[0], created)

    # ...and can be retrieved individually.
    fetched = j(await session_client.get(created["id"]))
    assert_session_equality(fetched, created)

    # Need to find a better solution to this.
    await jp_cleanup_subprocesses()
@pytest.mark.timeout(TEST_TIMEOUT)
async def test_create_deprecated(session_client, jp_cleanup_subprocesses, jp_serverapp):
    """The deprecated "notebook"-keyed POST payload still creates a session."""
    resp = await session_client.create_deprecated("foo/nb1.ipynb")
    assert resp.code == 201
    newsession = j(resp)
    assert newsession["path"] == "foo/nb1.ipynb"
    assert newsession["type"] == "notebook"
    # The response echoes the deprecated "notebook" field for old clients.
    assert newsession["notebook"]["path"] == "foo/nb1.ipynb"
    # (Removed an unused `sid = newsession["id"]` local.)
    # Need to find a better solution to this.
    await jp_cleanup_subprocesses()
@pytest.mark.timeout(TEST_TIMEOUT)
async def test_delete(session_client, jp_cleanup_subprocesses, jp_serverapp, session_is_ready):
    """Deleting a session removes it from the listing and 404s on lookup."""
    created = j(await session_client.create("foo/nb1.ipynb"))
    sid = created["id"]
    await session_is_ready(sid)

    # DELETE responds 204 No Content on success.
    delete_resp = await session_client.delete(sid)
    assert delete_resp.code == 204

    # The session is gone from the listing...
    assert j(await session_client.list()) == []

    # ...and fetching it directly is a 404.
    with pytest.raises(tornado.httpclient.HTTPClientError) as e:
        await session_client.get(sid)
    assert expected_http_error(e, 404)
    # Need to find a better solution to this.
    await jp_cleanup_subprocesses()
@pytest.mark.timeout(TEST_TIMEOUT)
async def test_modify_path_deprecated(
    session_client, jp_cleanup_subprocesses, jp_serverapp, session_is_ready
):
    """PATCHing the deprecated "notebook" field updates the session's path."""
    created = j(await session_client.create("foo/nb1.ipynb"))
    sid = created["id"]
    await session_is_ready(sid)

    patched = j(await session_client.modify_path_deprecated(sid, "nb2.ipynb"))
    assert patched["id"] == sid
    # The deprecated response shape nests the path under "notebook".
    assert patched["notebook"]["path"] == "nb2.ipynb"
    # Need to find a better solution to this.
    await jp_cleanup_subprocesses()
@pytest.mark.timeout(TEST_TIMEOUT)
async def test_modify_kernel_name(
    session_client, jp_fetch, jp_cleanup_subprocesses, jp_serverapp, session_is_ready
):
    """PATCHing the kernel name swaps in a brand-new kernel for the session."""
    before = j(await session_client.create("foo/nb1.ipynb"))
    sid = before["id"]
    await session_is_ready(sid)

    # Re-request the same kernel name; the server still starts a new kernel.
    after = j(await session_client.modify_kernel_name(sid, before["kernel"]["name"]))
    assert after["id"] == sid
    assert after["path"] == before["path"]
    assert after["type"] == before["type"]
    assert after["kernel"]["id"] != before["kernel"]["id"]

    # check kernel list, to be sure previous kernel was cleaned up
    kernel_list = j(await jp_fetch("api/kernels", method="GET"))
    after["kernel"].pop("last_activity")
    for kernel in kernel_list:
        kernel.pop("last_activity")
    if not getattr(jp_serverapp.kernel_manager, "use_pending_kernels", False):
        assert kernel_list == [after["kernel"]]

    # Need to find a better solution to this.
    await jp_cleanup_subprocesses()
kernel_list] + if not getattr(jp_serverapp.kernel_manager, "use_pending_kernels", False): + assert kernel_list == [kernel] + + # Need to find a better solution to this. + await jp_cleanup_subprocesses() + + +@pytest.mark.timeout(TEST_TIMEOUT) +async def test_restart_kernel( + session_client, jp_base_url, jp_fetch, jp_ws_fetch, jp_cleanup_subprocesses, session_is_ready +): + # Create a session. + resp = await session_client.create("foo/nb1.ipynb") + assert resp.code == 201 + new_session = j(resp) + assert "id" in new_session + assert new_session["path"] == "foo/nb1.ipynb" + assert new_session["type"] == "notebook" + assert resp.headers["Location"] == url_path_join( + jp_base_url, "/api/sessions/", new_session["id"] + ) + sid = new_session["id"] + await session_is_ready(sid) + + kid = new_session["kernel"]["id"] + + # Get kernel info + r = await jp_fetch("api", "kernels", kid, method="GET") + model = json.loads(r.body.decode()) + assert model["connections"] == 0 + + # Open a websocket connection. + ws = await jp_ws_fetch("api", "kernels", kid, "channels") + + # Test that it was opened. + r = await jp_fetch("api", "kernels", kid, method="GET") + model = json.loads(r.body.decode()) + assert model["connections"] == 1 + + # Restart kernel + r = await jp_fetch( + "api", "kernels", kid, "restart", method="POST", allow_nonstandard_methods=True + ) + restarted_kernel = json.loads(r.body.decode()) + assert restarted_kernel["id"] == kid + + # Close/open websocket + ws.close() + # give it some time to close on the other side: + for i in range(10): + r = await jp_fetch("api", "kernels", kid, method="GET") + model = json.loads(r.body.decode()) + if model["connections"] > 0: + time.sleep(0.1) + else: + break + + r = await jp_fetch("api", "kernels", kid, method="GET") + model = json.loads(r.body.decode()) + assert model["connections"] == 0 + + # Open a websocket connection. 
async def create_multiple_sessions(session_manager, *kwargs_list):
    """Create one session per kwargs dict, defaulting ``type`` to "notebook".

    Returns the created session models in the same order as the inputs.
    """
    created = []
    for spec in kwargs_list:
        # Most callers create notebook sessions; fill in the common default.
        spec.setdefault("type", "notebook")
        created.append(await session_manager.create_session(**spec))
    return created
async def test_list_sessions_dead_kernel(session_manager):
    """list_sessions() culls sessions whose kernel has been shut down."""
    sessions = await create_multiple_sessions(
        session_manager,
        dict(path="/path/to/1/test1.ipynb", kernel_name="python"),
        dict(path="/path/to/2/test2.ipynb", kernel_name="python"),
    )
    # kill one of the kernels
    await session_manager.kernel_manager.shutdown_kernel(sessions[0]["kernel"]["id"])
    listed = await session_manager.list_sessions()
    # Only the second session survives; its kernel id is "B" because
    # DummyMKM hands out ids from the sequence "ABCDEFGHIJK".
    expected = [
        {
            "id": sessions[1]["id"],
            "path": "/path/to/2/test2.ipynb",
            "type": "notebook",
            "name": None,
            "notebook": {"path": "/path/to/2/test2.ipynb", "name": None},
            "kernel": {
                "id": "B",
                "name": "python",
                "connections": 0,
                "last_activity": dummy_date_s,
                "execution_state": "idle",
            },
        }
    ]
    assert listed == expected
async def test_bad_update_session(session_manager):
    """update_session() raises TypeError for an unrecognized keyword."""
    # try to update a session with a bad keyword ~ raise error
    session = await session_manager.create_session(
        # Fixed typo: was type="notegbook".
        path="/path/to/test.ipynb", kernel_name="ir", type="notebook"
    )
    session_id = session["id"]
    with pytest.raises(TypeError):
        await session_manager.update_session(
            session_id=session_id, bad_kw="test.ipynb"
        )  # Bad keyword
async def test_bad_database_filepath(jp_runtime_dir):
    """SessionManager rejects database paths that are not usable SQLite files."""
    kernel_manager = DummyMKM()

    # Try to write to a path that's a directory, not a file.
    path_id_directory = str(jp_runtime_dir)
    # Should raise an error because the path is a directory.
    # (Dropped the unused `as err` binding.)
    with pytest.raises(TraitError):
        SessionManager(
            kernel_manager=kernel_manager,
            contents_manager=ContentsManager(),
            database_filepath=str(path_id_directory),
        )

    # Try writing to file that's not a valid SQLite 3 database file.
    non_db_file = jp_runtime_dir.joinpath("non_db_file.db")
    non_db_file.write_bytes(b"this is a bad file")

    # Should raise an error because the file doesn't
    # start with an SQLite database file header.
    with pytest.raises(TraitError):
        SessionManager(
            kernel_manager=kernel_manager,
            contents_manager=ContentsManager(),
            database_filepath=str(non_db_file),
        )
async def test_session_persistence(jp_runtime_dir):
    """A session created through one SessionManager is visible to a second
    SessionManager backed by the same database file."""
    session_db_path = jp_runtime_dir.joinpath("test-session.db")
    # Kernel manager needs to persist.
    kernel_manager = DummyMKM()

    # Initialize a session and start a connection.
    # This should create the session database the first time.
    session_manager = SessionManager(
        kernel_manager=kernel_manager,
        contents_manager=ContentsManager(),
        database_filepath=str(session_db_path),
    )

    session = await session_manager.create_session(
        path="/path/to/test.ipynb", kernel_name="python", type="notebook"
    )

    # Assert that the database file exists
    assert session_db_path.exists()

    # The file must carry the magic header of an SQLite 3 database.
    with open(session_db_path, "rb") as f:
        header = f.read(100)

    assert header.startswith(b"SQLite format 3")

    # Close the current session manager
    del session_manager

    # Get a new session_manager
    session_manager = SessionManager(
        kernel_manager=kernel_manager,
        contents_manager=ContentsManager(),
        database_filepath=str(session_db_path),
    )

    # Assert that the session database persists.
    session = await session_manager.get_session(session_id=session["id"])
async def fetch_expect_200(jp_fetch, *path_parts):
    """GET ``files/<path>`` and assert the body equals the last path part.

    The fixture files in these tests are written so each file's content is
    its own name, which makes this round-trip check possible.
    """
    response = await jp_fetch("files", *path_parts, method="GET")
    body = response.body
    assert body.decode() == path_parts[-1], (path_parts, body)
new_markdown_cell("Created by test ³"), + new_code_cell( + "print(2*6)", + outputs=[ + new_output("stream", text="12"), + ], + ), + ] + ) + jp_root_dir.joinpath("testnb.ipynb").write_text(writes(nb, version=4), encoding="utf-8") + jp_root_dir.joinpath("test.bin").write_bytes(b"\xff" + os.urandom(5)) + jp_root_dir.joinpath("test.txt").write_text("foobar") + + r = await jp_fetch("files/testnb.ipynb", method="GET") + assert r.code == 200 + assert "print(2*6)" in r.body.decode("utf-8") + + r = await jp_fetch("files/test.bin", method="GET") + assert r.code == 200 + assert r.headers["content-type"] == "application/octet-stream" + assert r.body[:1] == b"\xff" + assert len(r.body) == 6 + + r = await jp_fetch("files/test.txt", method="GET") + assert r.code == 200 + assert r.headers["content-type"] == "text/plain; charset=UTF-8" + assert r.body.decode() == "foobar" + + +async def test_download(jp_fetch, jp_serverapp, jp_root_dir): + text = "hello" + jp_root_dir.joinpath("test.txt").write_text(text) + + r = await jp_fetch("files", "test.txt", method="GET") + disposition = r.headers.get("Content-Disposition", "") + assert "attachment" not in disposition + + r = await jp_fetch("files", "test.txt", method="GET", params={"download": True}) + disposition = r.headers.get("Content-Disposition", "") + assert "attachment" in disposition + assert "filename*=utf-8''test.txt" in disposition + + +async def test_old_files_redirect(jp_fetch, jp_serverapp, jp_root_dir): + """pre-2.0 'files/' prefixed links are properly redirected""" + jp_root_dir.joinpath("files").mkdir(parents=True, exist_ok=True) + jp_root_dir.joinpath("sub", "files").mkdir(parents=True, exist_ok=True) + + for prefix in ("", "sub"): + jp_root_dir.joinpath(prefix, "files", "f1.txt").write_text(prefix + "/files/f1") + jp_root_dir.joinpath(prefix, "files", "f2.txt").write_text(prefix + "/files/f2") + jp_root_dir.joinpath(prefix, "f2.txt").write_text(prefix + "/f2") + jp_root_dir.joinpath(prefix, "f3.txt").write_text(prefix + 
"/f3") + + # These depend on the tree handlers + # + # def test_download(self): + # rootdir = self.root_dir + + # text = 'hello' + # with open(pjoin(rootdir, 'test.txt'), 'w') as f: + # f.write(text) + + # r = self.request('GET', 'files/test.txt') + # disposition = r.headers.get('Content-Disposition', '') + # self.assertNotIn('attachment', disposition) + + # r = self.request('GET', 'files/test.txt?download=1') + # disposition = r.headers.get('Content-Disposition', '') + # self.assertIn('attachment', disposition) + # self.assertIn("filename*=utf-8''test.txt", disposition) diff --git a/server/jupyter_server/tests/test_gateway.py b/server/jupyter_server/tests/test_gateway.py new file mode 100644 index 0000000..ec279f3 --- /dev/null +++ b/server/jupyter_server/tests/test_gateway.py @@ -0,0 +1,411 @@ +"""Test GatewayClient""" +import json +import os +import uuid +from datetime import datetime +from io import StringIO +from unittest.mock import patch + +import pytest +import tornado +from tornado.httpclient import HTTPRequest +from tornado.httpclient import HTTPResponse +from tornado.web import HTTPError + +from .utils import expected_http_error +from jupyter_server.gateway.managers import GatewayClient +from jupyter_server.utils import ensure_async + + +def generate_kernelspec(name): + argv_stanza = ["python", "-m", "ipykernel_launcher", "-f", "{connection_file}"] + spec_stanza = { + "spec": { + "argv": argv_stanza, + "env": {}, + "display_name": name, + "language": "python", + "interrupt_mode": "signal", + "metadata": {}, + } + } + kernelspec_stanza = {"name": name, "spec": spec_stanza, "resources": {}} + return kernelspec_stanza + + +# We'll mock up two kernelspecs - kspec_foo and kspec_bar +kernelspecs = { + "default": "kspec_foo", + "kernelspecs": { + "kspec_foo": generate_kernelspec("kspec_foo"), + "kspec_bar": generate_kernelspec("kspec_bar"), + }, +} + + +# maintain a dictionary of expected running kernels. Key = kernel_id, Value = model. 
# Expected running kernels for the mocked gateway.
# Key = kernel_id (str), value = kernel model (dict) as returned by generate_model().
running_kernels = {}


def generate_model(name):
    """Generate a mocked kernel model.

    Caller is responsible for adding the model to the ``running_kernels``
    dictionary when simulating a started kernel.

    Parameters
    ----------
    name : str
        The kernelspec name to embed in the model.

    Returns
    -------
    dict
        A kernel model with a fresh UUID ``id`` and idle state.
    """
    # NOTE(review): utcnow() is naive and deprecated in 3.12; the trailing "Z"
    # fakes a UTC marker. Kept as-is to match the gateway's wire format.
    dt = datetime.utcnow().isoformat() + "Z"
    kernel_id = str(uuid.uuid4())
    model = {
        "id": kernel_id,
        "name": name,
        "last_activity": str(dt),
        "execution_state": "idle",
        "connections": 1,
    }
    return model


async def mock_gateway_request(url, **kwargs):
    """Stand-in for ``gateway_request`` that serves canned gateway responses.

    Dispatches on the request's endpoint suffix and HTTP method, mutating the
    module-level ``running_kernels`` registry to simulate kernel lifecycle.

    Raises
    ------
    tornado.web.HTTPError
        404 for unknown kernelspecs, kernels, or unrecognized actions.
    """
    # BUG FIX: the original read kwargs["method"] unconditionally, which raised
    # KeyError when the caller omitted it. Default to GET (the falsy-value
    # fallback of the original is preserved via ``or``).
    method = kwargs.get("method") or "GET"

    request = HTTPRequest(url=url, **kwargs)

    endpoint = str(url)

    # Fetch all kernelspecs
    if endpoint.endswith("/api/kernelspecs") and method == "GET":
        response_buf = StringIO(json.dumps(kernelspecs))
        response = await ensure_async(HTTPResponse(request, 200, buffer=response_buf))
        return response

    # Fetch named kernelspec
    if endpoint.rfind("/api/kernelspecs/") >= 0 and method == "GET":
        requested_kernelspec = endpoint.rpartition("/")[2]
        kspecs = kernelspecs.get("kernelspecs")
        if requested_kernelspec in kspecs:
            response_buf = StringIO(json.dumps(kspecs.get(requested_kernelspec)))
            response = await ensure_async(HTTPResponse(request, 200, buffer=response_buf))
            return response
        else:
            raise HTTPError(404, message="Kernelspec does not exist: %s" % requested_kernelspec)

    # Create kernel
    if endpoint.endswith("/api/kernels") and method == "POST":
        json_body = json.loads(kwargs["body"])
        name = json_body.get("name")
        env = json_body.get("env")
        kspec_name = env.get("KERNEL_KSPEC_NAME")
        assert name == kspec_name  # Ensure that KERNEL_ env values get propagated
        model = generate_model(name)
        running_kernels[model.get("id")] = model  # Register model as a running kernel
        response_buf = StringIO(json.dumps(model))
        response = await ensure_async(HTTPResponse(request, 201, buffer=response_buf))
        return response

    # Fetch list of running kernels
    if endpoint.endswith("/api/kernels") and method == "GET":
        # Idiom: snapshot the registered models directly instead of a manual
        # keys()-loop-and-append.
        kernels = list(running_kernels.values())
        response_buf = StringIO(json.dumps(kernels))
        response = await ensure_async(HTTPResponse(request, 200, buffer=response_buf))
        return response

    # Interrupt or restart existing kernel
    if endpoint.rfind("/api/kernels/") >= 0 and method == "POST":
        requested_kernel_id, sep, action = endpoint.rpartition("/api/kernels/")[2].rpartition("/")

        if action == "interrupt":
            if requested_kernel_id in running_kernels:
                response = await ensure_async(HTTPResponse(request, 204))
                return response
            else:
                raise HTTPError(404, message="Kernel does not exist: %s" % requested_kernel_id)
        elif action == "restart":
            if requested_kernel_id in running_kernels:
                response_buf = StringIO(json.dumps(running_kernels.get(requested_kernel_id)))
                response = await ensure_async(HTTPResponse(request, 204, buffer=response_buf))
                return response
            else:
                raise HTTPError(404, message="Kernel does not exist: %s" % requested_kernel_id)
        else:
            raise HTTPError(404, message="Bad action detected: %s" % action)

    # Shutdown existing kernel
    if endpoint.rfind("/api/kernels/") >= 0 and method == "DELETE":
        requested_kernel_id = endpoint.rpartition("/")[2]
        # BUG FIX: the original popped unconditionally, raising a bare KeyError
        # for an unknown kernel; raise the same HTTP 404 as the other branches.
        if requested_kernel_id not in running_kernels:
            raise HTTPError(404, message="Kernel does not exist: %s" % requested_kernel_id)
        # Simulate shutdown by removing kernel from running set
        running_kernels.pop(requested_kernel_id)
        response = await ensure_async(HTTPResponse(request, 204))
        return response

    # Fetch existing kernel
    if endpoint.rfind("/api/kernels/") >= 0 and method == "GET":
        requested_kernel_id = endpoint.rpartition("/")[2]
        if requested_kernel_id in running_kernels:
            response_buf = StringIO(json.dumps(running_kernels.get(requested_kernel_id)))
            response = await ensure_async(HTTPResponse(request, 200, buffer=response_buf))
            return response
        else:
            raise HTTPError(404, message="Kernel does not exist: %s" % requested_kernel_id)


# Patch object that routes the server's gateway traffic through the mock above.
mocked_gateway = patch("jupyter_server.gateway.managers.gateway_request", mock_gateway_request)
+mock_gateway_url = "http://mock-gateway-server:8889" +mock_http_user = "alice" + + +@pytest.fixture +def init_gateway(monkeypatch): + """Initializes the server for use as a gateway client.""" + # Clear the singleton first since previous tests may not have used a gateway. + GatewayClient.clear_instance() + monkeypatch.setenv("JUPYTER_GATEWAY_URL", mock_gateway_url) + monkeypatch.setenv("JUPYTER_GATEWAY_HTTP_USER", mock_http_user) + monkeypatch.setenv("JUPYTER_GATEWAY_REQUEST_TIMEOUT", "44.4") + monkeypatch.setenv("JUPYTER_GATEWAY_CONNECT_TIMEOUT", "44.4") + yield + GatewayClient.clear_instance() + + +async def test_gateway_env_options(init_gateway, jp_serverapp): + assert jp_serverapp.gateway_config.gateway_enabled is True + assert jp_serverapp.gateway_config.url == mock_gateway_url + assert jp_serverapp.gateway_config.http_user == mock_http_user + assert ( + jp_serverapp.gateway_config.connect_timeout == jp_serverapp.gateway_config.request_timeout + ) + assert jp_serverapp.gateway_config.connect_timeout == 44.4 + + GatewayClient.instance().init_static_args() + assert GatewayClient.instance().KERNEL_LAUNCH_TIMEOUT == int( + jp_serverapp.gateway_config.request_timeout + ) + + +async def test_gateway_cli_options(jp_configurable_serverapp): + argv = [ + "--gateway-url=" + mock_gateway_url, + "--GatewayClient.http_user=" + mock_http_user, + "--GatewayClient.connect_timeout=44.4", + "--GatewayClient.request_timeout=96.0", + ] + + GatewayClient.clear_instance() + app = jp_configurable_serverapp(argv=argv) + + assert app.gateway_config.gateway_enabled is True + assert app.gateway_config.url == mock_gateway_url + assert app.gateway_config.http_user == mock_http_user + assert app.gateway_config.connect_timeout == 44.4 + assert app.gateway_config.request_timeout == 96.0 + GatewayClient.instance().init_static_args() + assert ( + GatewayClient.instance().KERNEL_LAUNCH_TIMEOUT == 96 + ) # Ensure KLT gets set from request-timeout + GatewayClient.clear_instance() + + +async def 
test_gateway_class_mappings(init_gateway, jp_serverapp): + # Ensure appropriate class mappings are in place. + assert jp_serverapp.kernel_manager_class.__name__ == "GatewayMappingKernelManager" + assert jp_serverapp.session_manager_class.__name__ == "GatewaySessionManager" + assert jp_serverapp.kernel_spec_manager_class.__name__ == "GatewayKernelSpecManager" + + +async def test_gateway_get_kernelspecs(init_gateway, jp_fetch): + # Validate that kernelspecs come from gateway. + with mocked_gateway: + r = await jp_fetch("api", "kernelspecs", method="GET") + assert r.code == 200 + content = json.loads(r.body.decode("utf-8")) + kspecs = content.get("kernelspecs") + assert len(kspecs) == 2 + assert kspecs.get("kspec_bar").get("name") == "kspec_bar" + + +async def test_gateway_get_named_kernelspec(init_gateway, jp_fetch): + # Validate that a specific kernelspec can be retrieved from gateway (and an invalid spec can't) + with mocked_gateway: + r = await jp_fetch("api", "kernelspecs", "kspec_foo", method="GET") + assert r.code == 200 + kspec_foo = json.loads(r.body.decode("utf-8")) + assert kspec_foo.get("name") == "kspec_foo" + + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await jp_fetch("api", "kernelspecs", "no_such_spec", method="GET") + assert expected_http_error(e, 404) + + +async def test_gateway_session_lifecycle(init_gateway, jp_root_dir, jp_fetch): + # Validate session lifecycle functions; create and delete. 
+ + # create + session_id, kernel_id = await create_session(jp_root_dir, jp_fetch, "kspec_foo") + + # ensure kernel still considered running + assert await is_kernel_running(jp_fetch, kernel_id) is True + + # interrupt + await interrupt_kernel(jp_fetch, kernel_id) + + # ensure kernel still considered running + assert await is_kernel_running(jp_fetch, kernel_id) is True + + # restart + await restart_kernel(jp_fetch, kernel_id) + + # ensure kernel still considered running + assert await is_kernel_running(jp_fetch, kernel_id) is True + + # delete + await delete_session(jp_fetch, session_id) + assert await is_kernel_running(jp_fetch, kernel_id) is False + + +async def test_gateway_kernel_lifecycle(init_gateway, jp_fetch): + # Validate kernel lifecycle functions; create, interrupt, restart and delete. + + # create + kernel_id = await create_kernel(jp_fetch, "kspec_bar") + + # ensure kernel still considered running + assert await is_kernel_running(jp_fetch, kernel_id) is True + + # interrupt + await interrupt_kernel(jp_fetch, kernel_id) + + # ensure kernel still considered running + assert await is_kernel_running(jp_fetch, kernel_id) is True + + # restart + await restart_kernel(jp_fetch, kernel_id) + + # ensure kernel still considered running + assert await is_kernel_running(jp_fetch, kernel_id) is True + + # delete + await delete_kernel(jp_fetch, kernel_id) + assert await is_kernel_running(jp_fetch, kernel_id) is False + + +# +# Test methods below... +# +async def create_session(root_dir, jp_fetch, kernel_name): + """Creates a session for a kernel. The session is created against the server + which then uses the gateway for kernel management. 
+ """ + with mocked_gateway: + nb_path = root_dir / "testgw.ipynb" + body = json.dumps( + {"path": str(nb_path), "type": "notebook", "kernel": {"name": kernel_name}} + ) + + # add a KERNEL_ value to the current env and we'll ensure that that value exists in the mocked method + os.environ["KERNEL_KSPEC_NAME"] = kernel_name + + # Create the kernel... (also tests get_kernel) + r = await jp_fetch("api", "sessions", method="POST", body=body) + assert r.code == 201 + model = json.loads(r.body.decode("utf-8")) + assert model.get("path") == str(nb_path) + kernel_id = model.get("kernel").get("id") + # ensure its in the running_kernels and name matches. + running_kernel = running_kernels.get(kernel_id) + assert kernel_id == running_kernel.get("id") + assert model.get("kernel").get("name") == running_kernel.get("name") + session_id = model.get("id") + + # restore env + os.environ.pop("KERNEL_KSPEC_NAME") + return session_id, kernel_id + + +async def delete_session(jp_fetch, session_id): + """Deletes a session corresponding to the given session id.""" + with mocked_gateway: + # Delete the session (and kernel) + r = await jp_fetch("api", "sessions", session_id, method="DELETE") + assert r.code == 204 + assert r.reason == "No Content" + + +async def is_kernel_running(jp_fetch, kernel_id): + """Issues request to get the set of running kernels""" + with mocked_gateway: + # Get list of running kernels + r = await jp_fetch("api", "kernels", method="GET") + assert r.code == 200 + kernels = json.loads(r.body.decode("utf-8")) + assert len(kernels) == len(running_kernels) + for model in kernels: + if model.get("id") == kernel_id: + return True + return False + + +async def create_kernel(jp_fetch, kernel_name): + """Issues request to retart the given kernel""" + with mocked_gateway: + body = json.dumps({"name": kernel_name}) + + # add a KERNEL_ value to the current env and we'll ensure that that value exists in the mocked method + os.environ["KERNEL_KSPEC_NAME"] = kernel_name + + r = 
await jp_fetch("api", "kernels", method="POST", body=body) + assert r.code == 201 + model = json.loads(r.body.decode("utf-8")) + kernel_id = model.get("id") + # ensure its in the running_kernels and name matches. + running_kernel = running_kernels.get(kernel_id) + assert kernel_id == running_kernel.get("id") + assert model.get("name") == kernel_name + + # restore env + os.environ.pop("KERNEL_KSPEC_NAME") + return kernel_id + + +async def interrupt_kernel(jp_fetch, kernel_id): + """Issues request to interrupt the given kernel""" + with mocked_gateway: + r = await jp_fetch( + "api", + "kernels", + kernel_id, + "interrupt", + method="POST", + allow_nonstandard_methods=True, + ) + assert r.code == 204 + assert r.reason == "No Content" + + +async def restart_kernel(jp_fetch, kernel_id): + """Issues request to retart the given kernel""" + with mocked_gateway: + r = await jp_fetch( + "api", + "kernels", + kernel_id, + "restart", + method="POST", + allow_nonstandard_methods=True, + ) + assert r.code == 200 + model = json.loads(r.body.decode("utf-8")) + restarted_kernel_id = model.get("id") + # ensure its in the running_kernels and name matches. 
+ running_kernel = running_kernels.get(restarted_kernel_id) + assert restarted_kernel_id == running_kernel.get("id") + assert model.get("name") == running_kernel.get("name") + + +async def delete_kernel(jp_fetch, kernel_id): + """Deletes kernel corresponding to the given kernel id.""" + with mocked_gateway: + # Delete the session (and kernel) + r = await jp_fetch("api", "kernels", kernel_id, method="DELETE") + assert r.code == 204 + assert r.reason == "No Content" diff --git a/server/jupyter_server/tests/test_paths.py b/server/jupyter_server/tests/test_paths.py new file mode 100644 index 0000000..0789be4 --- /dev/null +++ b/server/jupyter_server/tests/test_paths.py @@ -0,0 +1,68 @@ +import re + +import pytest +import tornado + +from jupyter_server.base.handlers import path_regex +from jupyter_server.utils import url_path_join + +# build regexps that tornado uses: +path_pat = re.compile("^" + "/x%s" % path_regex + "$") + + +def test_path_regex(): + for path in ( + "/x", + "/x/", + "/x/foo", + "/x/foo.ipynb", + "/x/foo/bar", + "/x/foo/bar.txt", + ): + assert re.match(path_pat, path) + + +def test_path_regex_bad(): + for path in ( + "/xfoo", + "/xfoo/", + "/xfoo/bar", + "/xfoo/bar/", + "/x/foo/bar/", + "/x//foo", + "/y", + "/y/x/foo", + ): + assert re.match(path_pat, path) is None + + +@pytest.mark.parametrize( + "uri,expected", + [ + ("/notebooks/mynotebook/", "/notebooks/mynotebook"), + ("////foo///", "/foo"), + ("//example.com/", "/example.com"), + ("/has/param/?hasparam=true", "/has/param?hasparam=true"), + ], +) +async def test_trailing_slash( + jp_ensure_app_fixture, + uri, + expected, + http_server_client, + jp_auth_header, + jp_base_url, +): + # http_server_client raises an exception when follow_redirects=False + with pytest.raises(tornado.httpclient.HTTPClientError) as err: + await http_server_client.fetch( + url_path_join(jp_base_url, uri), + headers=jp_auth_header, + request_timeout=20, + follow_redirects=False, + ) + # Capture the response from the raised 
exception value. + response = err.value.response + assert response.code == 302 + assert "Location" in response.headers + assert response.headers["Location"] == url_path_join(jp_base_url, expected) diff --git a/server/jupyter_server/tests/test_serialize.py b/server/jupyter_server/tests/test_serialize.py new file mode 100644 index 0000000..56314df --- /dev/null +++ b/server/jupyter_server/tests/test_serialize.py @@ -0,0 +1,24 @@ +"""Test serialize/deserialize messages with buffers""" +import os + +from jupyter_client.session import Session + +from jupyter_server.base.zmqhandlers import deserialize_binary_message +from jupyter_server.base.zmqhandlers import serialize_binary_message + + +def test_serialize_binary(): + s = Session() + msg = s.msg("data_pub", content={"a": "b"}) + msg["buffers"] = [memoryview(os.urandom(3)) for i in range(3)] + bmsg = serialize_binary_message(msg) + assert isinstance(bmsg, bytes) + + +def test_deserialize_binary(): + s = Session() + msg = s.msg("data_pub", content={"a": "b"}) + msg["buffers"] = [memoryview(os.urandom(2)) for i in range(3)] + bmsg = serialize_binary_message(msg) + msg2 = deserialize_binary_message(bmsg) + assert msg2 == msg diff --git a/server/jupyter_server/tests/test_serverapp.py b/server/jupyter_server/tests/test_serverapp.py new file mode 100644 index 0000000..3808eb4 --- /dev/null +++ b/server/jupyter_server/tests/test_serverapp.py @@ -0,0 +1,338 @@ +import getpass +import logging +import os +import pathlib +from unittest.mock import patch + +import pytest +from jupyter_core.application import NoStart +from traitlets import TraitError +from traitlets.tests.utils import check_help_all_output + +from jupyter_server.auth.security import passwd_check +from jupyter_server.serverapp import JupyterPasswordApp +from jupyter_server.serverapp import list_running_servers +from jupyter_server.serverapp import ServerApp + + +def test_help_output(): + """jupyter server --help-all works""" + check_help_all_output("jupyter_server") 
+ + +def test_server_info_file(tmp_path, jp_configurable_serverapp): + app = jp_configurable_serverapp(log=logging.getLogger()) + + app.write_server_info_file() + servers = list(list_running_servers(app.runtime_dir)) + + assert len(servers) == 1 + sinfo = servers[0] + + assert sinfo["port"] == app.port + assert sinfo["url"] == app.connection_url + assert sinfo["version"] == app.version + + app.remove_server_info_file() + + assert list(list_running_servers(app.runtime_dir)) == [] + app.remove_server_info_file + + +def test_root_dir(tmp_path, jp_configurable_serverapp): + app = jp_configurable_serverapp(root_dir=str(tmp_path)) + assert app.root_dir == str(tmp_path) + + +# Build a list of invalid paths +@pytest.fixture(params=[("notebooks",), ("root", "dir", "is", "missing"), ("test.txt",)]) +def invalid_root_dir(tmp_path, request): + path = tmp_path.joinpath(*request.param) + # If the path is a file, create it. + if os.path.splitext(str(path))[1] != "": + path.write_text("") + return str(path) + + +def test_invalid_root_dir(invalid_root_dir, jp_configurable_serverapp): + app = jp_configurable_serverapp() + with pytest.raises(TraitError): + app.root_dir = invalid_root_dir + + +@pytest.fixture(params=[("/",), ("first-level",), ("first-level", "second-level")]) +def valid_root_dir(tmp_path, request): + path = tmp_path.joinpath(*request.param) + if not path.exists(): + # Create path in temporary directory + path.mkdir(parents=True) + return str(path) + + +def test_valid_root_dir(valid_root_dir, jp_configurable_serverapp): + app = jp_configurable_serverapp(root_dir=valid_root_dir) + root_dir = valid_root_dir + # If nested path, the last slash should + # be stripped by the root_dir trait. 
+ if root_dir != "/": + root_dir = valid_root_dir.rstrip("/") + assert app.root_dir == root_dir + + +def test_generate_config(tmp_path, jp_configurable_serverapp): + app = jp_configurable_serverapp(config_dir=str(tmp_path)) + app.initialize(["--generate-config", "--allow-root"]) + with pytest.raises(NoStart): + app.start() + assert tmp_path.joinpath("jupyter_server_config.py").exists() + + +def test_server_password(tmp_path, jp_configurable_serverapp): + password = "secret" + with patch.dict("os.environ", {"JUPYTER_CONFIG_DIR": str(tmp_path)}), patch.object( + getpass, "getpass", return_value=password + ): + app = JupyterPasswordApp(log_level=logging.ERROR) + app.initialize([]) + app.start() + sv = jp_configurable_serverapp() + sv.load_config_file() + assert sv.password != "" + passwd_check(sv.password, password) + + +def test_list_running_servers(jp_serverapp, jp_web_app): + servers = list(list_running_servers(jp_serverapp.runtime_dir)) + assert len(servers) >= 1 + + +@pytest.fixture +def prefix_path(jp_root_dir, tmp_path): + """If a given path is prefixed with the literal + strings `/jp_root_dir` or `/tmp_path`, replace those + strings with these fixtures. + + Returns a pathlib Path object. 
+ """ + + def _inner(rawpath): + path = pathlib.PurePosixPath(rawpath) + if rawpath.startswith("/jp_root_dir"): + path = jp_root_dir.joinpath(*path.parts[2:]) + elif rawpath.startswith("/tmp_path"): + path = tmp_path.joinpath(*path.parts[2:]) + return pathlib.Path(path) + + return _inner + + +@pytest.mark.parametrize( + "root_dir,file_to_run,expected_output", + [ + (None, "notebook.ipynb", "notebook.ipynb"), + (None, "/tmp_path/path/to/notebook.ipynb", "notebook.ipynb"), + ("/jp_root_dir", "/tmp_path/path/to/notebook.ipynb", SystemExit), + ("/tmp_path", "/tmp_path/path/to/notebook.ipynb", "path/to/notebook.ipynb"), + ("/jp_root_dir", "notebook.ipynb", "notebook.ipynb"), + ("/jp_root_dir", "path/to/notebook.ipynb", "path/to/notebook.ipynb"), + ], +) +def test_resolve_file_to_run_and_root_dir(prefix_path, root_dir, file_to_run, expected_output): + # Verify that the Singleton instance is cleared before the test runs. + ServerApp.clear_instance() + + # Setup the file_to_run path, in case the server checks + # if the directory exists before initializing the server. + file_to_run = prefix_path(file_to_run) + if file_to_run.is_absolute(): + file_to_run.parent.mkdir(parents=True, exist_ok=True) + kwargs = {"file_to_run": str(file_to_run)} + + # Setup the root_dir path, in case the server checks + # if the directory exists before initializing the server. + if root_dir: + root_dir = prefix_path(root_dir) + if root_dir.is_absolute(): + root_dir.parent.mkdir(parents=True, exist_ok=True) + kwargs["root_dir"] = str(root_dir) + + # Create the notebook in the given location + serverapp = ServerApp.instance(**kwargs) + + if expected_output is SystemExit: + with pytest.raises(SystemExit): + serverapp._resolve_file_to_run_and_root_dir() + else: + relpath = serverapp._resolve_file_to_run_and_root_dir() + assert relpath == str(pathlib.Path(expected_output)) + + # Clear the singleton instance after each run. + ServerApp.clear_instance() + + +# Test the URLs returned by ServerApp. 
The `` piece +# in urls shown below will be replaced with the token +# generated by the ServerApp on instance creation. +@pytest.mark.parametrize( + "config,public_url,local_url,connection_url", + [ + # Token is hidden when configured. + ( + {"token": "test"}, + "http://localhost:8888/?token=...", + "http://127.0.0.1:8888/?token=...", + "http://localhost:8888/", + ), + # Verify port number has changed + ( + {"port": 9999}, + "http://localhost:9999/?token=", + "http://127.0.0.1:9999/?token=", + "http://localhost:9999/", + ), + ( + {"ip": "1.1.1.1"}, + "http://1.1.1.1:8888/?token=", + "http://127.0.0.1:8888/?token=", + "http://1.1.1.1:8888/", + ), + # Verify that HTTPS is returned when certfile is given + ( + {"certfile": "/path/to/dummy/file"}, + "https://localhost:8888/?token=", + "https://127.0.0.1:8888/?token=", + "https://localhost:8888/", + ), + # Verify changed port and a custom display URL + ( + {"port": 9999, "custom_display_url": "http://test.org"}, + "http://test.org/?token=", + "http://127.0.0.1:9999/?token=", + "http://localhost:9999/", + ), + ( + {"base_url": "/", "default_url": "/test/"}, + "http://localhost:8888/test/?token=", + "http://127.0.0.1:8888/test/?token=", + "http://localhost:8888/", + ), + # Verify unix socket URLs are handled properly + ( + {"sock": "/tmp/jp-test.sock"}, + "http+unix://%2Ftmp%2Fjp-test.sock/?token=", + "http+unix://%2Ftmp%2Fjp-test.sock/?token=", + "http+unix://%2Ftmp%2Fjp-test.sock/", + ), + ( + {"base_url": "/", "default_url": "/test/", "sock": "/tmp/jp-test.sock"}, + "http+unix://%2Ftmp%2Fjp-test.sock/test/?token=", + "http+unix://%2Ftmp%2Fjp-test.sock/test/?token=", + "http+unix://%2Ftmp%2Fjp-test.sock/", + ), + ], +) +def test_urls(config, public_url, local_url, connection_url): + # Verify we're working with a clean instance. + ServerApp.clear_instance() + serverapp = ServerApp.instance(**config) + # If a token is generated (not set by config), update + # expected_url with token. 
+ if serverapp._token_generated: + public_url = public_url.replace("", serverapp.token) + local_url = local_url.replace("", serverapp.token) + connection_url = connection_url.replace("", serverapp.token) + assert serverapp.public_url == public_url + assert serverapp.local_url == local_url + assert serverapp.connection_url == connection_url + # Cleanup singleton after test. + ServerApp.clear_instance() + + +# Preferred dir tests +# ---------------------------------------------------------------------------- +def test_valid_preferred_dir(tmp_path, jp_configurable_serverapp): + path = str(tmp_path) + app = jp_configurable_serverapp(root_dir=path, preferred_dir=path) + assert app.root_dir == path + assert app.preferred_dir == path + assert app.root_dir == app.preferred_dir + + +def test_valid_preferred_dir_is_root_subdir(tmp_path, jp_configurable_serverapp): + path = str(tmp_path) + path_subdir = str(tmp_path / "subdir") + os.makedirs(path_subdir, exist_ok=True) + app = jp_configurable_serverapp(root_dir=path, preferred_dir=path_subdir) + assert app.root_dir == path + assert app.preferred_dir == path_subdir + assert app.preferred_dir.startswith(app.root_dir) + + +def test_valid_preferred_dir_does_not_exist(tmp_path, jp_configurable_serverapp): + path = str(tmp_path) + path_subdir = str(tmp_path / "subdir") + with pytest.raises(TraitError) as error: + app = jp_configurable_serverapp(root_dir=path, preferred_dir=path_subdir) + + assert "No such preferred dir:" in str(error) + + +def test_invalid_preferred_dir_does_not_exist(tmp_path, jp_configurable_serverapp): + path = str(tmp_path) + path_subdir = str(tmp_path / "subdir") + with pytest.raises(TraitError) as error: + app = jp_configurable_serverapp(root_dir=path, preferred_dir=path_subdir) + + assert "No such preferred dir:" in str(error) + + +def test_invalid_preferred_dir_does_not_exist_set(tmp_path, jp_configurable_serverapp): + path = str(tmp_path) + path_subdir = str(tmp_path / "subdir") + + app = 
jp_configurable_serverapp(root_dir=path) + with pytest.raises(TraitError) as error: + app.preferred_dir = path_subdir + + assert "No such preferred dir:" in str(error) + + +def test_invalid_preferred_dir_not_root_subdir(tmp_path, jp_configurable_serverapp): + path = str(tmp_path / "subdir") + os.makedirs(path, exist_ok=True) + not_subdir_path = str(tmp_path) + + with pytest.raises(TraitError) as error: + app = jp_configurable_serverapp(root_dir=path, preferred_dir=not_subdir_path) + + assert "preferred_dir must be equal or a subdir of root_dir:" in str(error) + + +def test_invalid_preferred_dir_not_root_subdir_set(tmp_path, jp_configurable_serverapp): + path = str(tmp_path / "subdir") + os.makedirs(path, exist_ok=True) + not_subdir_path = str(tmp_path) + + app = jp_configurable_serverapp(root_dir=path) + with pytest.raises(TraitError) as error: + app.preferred_dir = not_subdir_path + + assert "preferred_dir must be equal or a subdir of root_dir:" in str(error) + + +def test_observed_root_dir_updates_preferred_dir(tmp_path, jp_configurable_serverapp): + path = str(tmp_path) + new_path = str(tmp_path / "subdir") + os.makedirs(new_path, exist_ok=True) + + app = jp_configurable_serverapp(root_dir=path, preferred_dir=path) + app.root_dir = new_path + assert app.preferred_dir == new_path + + +def test_observed_root_dir_does_not_update_preferred_dir(tmp_path, jp_configurable_serverapp): + path = str(tmp_path) + new_path = str(tmp_path.parent) + app = jp_configurable_serverapp(root_dir=path, preferred_dir=path) + app.root_dir = new_path + assert app.preferred_dir == path diff --git a/server/jupyter_server/tests/test_terminal.py b/server/jupyter_server/tests/test_terminal.py new file mode 100644 index 0000000..b9754e1 --- /dev/null +++ b/server/jupyter_server/tests/test_terminal.py @@ -0,0 +1,179 @@ +import asyncio +import json +import os +import shutil + +import pytest +from tornado.httpclient import HTTPClientError +from traitlets.config import Config + + +@pytest.fixture 
+def terminal_path(tmp_path): + subdir = tmp_path.joinpath("terminal_path") + subdir.mkdir() + + yield subdir + + shutil.rmtree(str(subdir), ignore_errors=True) + + +CULL_TIMEOUT = 10 +CULL_INTERVAL = 3 + + +@pytest.fixture +def jp_server_config(): + return Config( + { + "ServerApp": { + "TerminalManager": { + "cull_inactive_timeout": CULL_TIMEOUT, + "cull_interval": CULL_INTERVAL, + } + } + } + ) + + +async def test_no_terminals(jp_fetch): + resp_list = await jp_fetch( + "api", + "terminals", + method="GET", + allow_nonstandard_methods=True, + ) + + data = json.loads(resp_list.body.decode()) + + assert len(data) == 0 + + +async def test_terminal_create(jp_fetch, jp_cleanup_subprocesses): + resp = await jp_fetch( + "api", + "terminals", + method="POST", + allow_nonstandard_methods=True, + ) + term = json.loads(resp.body.decode()) + assert term["name"] == "1" + + resp_list = await jp_fetch( + "api", + "terminals", + method="GET", + allow_nonstandard_methods=True, + ) + + data = json.loads(resp_list.body.decode()) + + assert len(data) == 1 + assert data[0] == term + await jp_cleanup_subprocesses() + + +async def test_terminal_create_with_kwargs( + jp_fetch, jp_ws_fetch, terminal_path, jp_cleanup_subprocesses +): + resp_create = await jp_fetch( + "api", + "terminals", + method="POST", + body=json.dumps({"cwd": str(terminal_path)}), + allow_nonstandard_methods=True, + ) + + data = json.loads(resp_create.body.decode()) + term_name = data["name"] + + resp_get = await jp_fetch( + "api", + "terminals", + term_name, + method="GET", + allow_nonstandard_methods=True, + ) + + data = json.loads(resp_get.body.decode()) + + assert data["name"] == term_name + await jp_cleanup_subprocesses() + + +async def test_terminal_create_with_cwd( + jp_fetch, jp_ws_fetch, terminal_path, jp_cleanup_subprocesses +): + resp = await jp_fetch( + "api", + "terminals", + method="POST", + body=json.dumps({"cwd": str(terminal_path)}), + allow_nonstandard_methods=True, + ) + + data = 
json.loads(resp.body.decode()) + term_name = data["name"] + + ws = await jp_ws_fetch("terminals", "websocket", term_name) + + ws.write_message(json.dumps(["stdin", "pwd\r\n"])) + + message_stdout = "" + while True: + try: + message = await asyncio.wait_for(ws.read_message(), timeout=5.0) + except asyncio.TimeoutError: + break + + message = json.loads(message) + + if message[0] == "stdout": + message_stdout += message[1] + + ws.close() + + assert os.path.basename(terminal_path) in message_stdout + await jp_cleanup_subprocesses() + + +async def test_culling_config(jp_server_config, jp_configurable_serverapp): + terminal_mgr_config = jp_configurable_serverapp().config.ServerApp.TerminalManager + assert terminal_mgr_config.cull_inactive_timeout == CULL_TIMEOUT + assert terminal_mgr_config.cull_interval == CULL_INTERVAL + terminal_mgr_settings = jp_configurable_serverapp().web_app.settings["terminal_manager"] + assert terminal_mgr_settings.cull_inactive_timeout == CULL_TIMEOUT + assert terminal_mgr_settings.cull_interval == CULL_INTERVAL + + +async def test_culling(jp_server_config, jp_fetch, jp_cleanup_subprocesses): + # POST request + resp = await jp_fetch( + "api", + "terminals", + method="POST", + allow_nonstandard_methods=True, + ) + term = json.loads(resp.body.decode()) + term_1 = term["name"] + last_activity = term["last_activity"] + + culled = False + for i in range(CULL_TIMEOUT + CULL_INTERVAL): + try: + resp = await jp_fetch( + "api", + "terminals", + term_1, + method="GET", + allow_nonstandard_methods=True, + ) + except HTTPClientError as e: + assert e.code == 404 + culled = True + break + else: + await asyncio.sleep(1) + + assert culled + await jp_cleanup_subprocesses() diff --git a/server/jupyter_server/tests/test_traittypes.py b/server/jupyter_server/tests/test_traittypes.py new file mode 100644 index 0000000..0b1849f --- /dev/null +++ b/server/jupyter_server/tests/test_traittypes.py @@ -0,0 +1,72 @@ +import pytest +from traitlets import HasTraits +from 
traitlets import TraitError +from traitlets.utils.importstring import import_item + +from jupyter_server.services.contents.largefilemanager import LargeFileManager +from jupyter_server.traittypes import InstanceFromClasses +from jupyter_server.traittypes import TypeFromClasses + + +class DummyClass: + """Dummy class for testing Instance""" + + +class DummyInt(int): + """Dummy class for testing types.""" + + +class Thing(HasTraits): + + a = InstanceFromClasses( + default_value=2, + klasses=[ + int, + str, + DummyClass, + ], + ) + + b = TypeFromClasses( + default_value=None, + allow_none=True, + klasses=[ + DummyClass, + int, + "jupyter_server.services.contents.manager.ContentsManager", + ], + ) + + +class TestInstanceFromClasses: + @pytest.mark.parametrize("value", [1, "test", DummyClass()]) + def test_good_values(self, value): + thing = Thing(a=value) + assert thing.a == value + + @pytest.mark.parametrize("value", [2.4, object()]) + def test_bad_values(self, value): + with pytest.raises(TraitError) as e: + thing = Thing(a=value) + + +class TestTypeFromClasses: + @pytest.mark.parametrize( + "value", + [ + DummyClass, + DummyInt, + LargeFileManager, + "jupyter_server.services.contents.manager.ContentsManager", + ], + ) + def test_good_values(self, value): + thing = Thing(b=value) + if isinstance(value, str): + value = import_item(value) + assert thing.b == value + + @pytest.mark.parametrize("value", [float, object]) + def test_bad_values(self, value): + with pytest.raises(TraitError) as e: + thing = Thing(b=value) diff --git a/server/jupyter_server/tests/test_utils.py b/server/jupyter_server/tests/test_utils.py new file mode 100644 index 0000000..c49be09 --- /dev/null +++ b/server/jupyter_server/tests/test_utils.py @@ -0,0 +1,63 @@ +from pathlib import Path +from unittest.mock import patch + +import pytest +from traitlets.tests.utils import check_help_all_output + +from jupyter_server.utils import is_namespace_package +from jupyter_server.utils import url_escape 
+from jupyter_server.utils import url_unescape + + +def test_help_output(): + check_help_all_output("jupyter_server") + + +@pytest.mark.parametrize( + "unescaped,escaped", + [ + ("/this is a test/for spaces/", "/this%20is%20a%20test/for%20spaces/"), + ("notebook with space.ipynb", "notebook%20with%20space.ipynb"), + ( + "/path with a/notebook and space.ipynb", + "/path%20with%20a/notebook%20and%20space.ipynb", + ), + ( + "/ !@$#%^&* / test %^ notebook @#$ name.ipynb", + "/%20%21%40%24%23%25%5E%26%2A%20/%20test%20%25%5E%20notebook%20%40%23%24%20name.ipynb", + ), + ], +) +def test_url_escaping(unescaped, escaped): + # Test escaping. + path = url_escape(unescaped) + assert path == escaped + # Test unescaping. + path = url_unescape(escaped) + assert path == unescaped + + +@pytest.mark.parametrize( + "name, expected", + [ + # returns True if it is a namespace package + ("test_namespace", True), + # returns False if it isn't a namespace package + ("sys", False), + ("jupyter_server", False), + # returns None if it isn't importable + ("not_a_python_namespace", None), + ], +) +def test_is_namespace_package(monkeypatch, name, expected): + monkeypatch.syspath_prepend(Path(__file__).parent / "namespace-package-test") + + assert is_namespace_package(name) is expected + + +def test_is_namespace_package_no_spec(): + with patch("importlib.util.find_spec") as mocked_spec: + mocked_spec.side_effect = ValueError() + + assert is_namespace_package("dummy") is None + mocked_spec.assert_called_once_with("dummy") diff --git a/server/jupyter_server/tests/test_version.py b/server/jupyter_server/tests/test_version.py new file mode 100644 index 0000000..879c257 --- /dev/null +++ b/server/jupyter_server/tests/test_version.py @@ -0,0 +1,51 @@ +import re + +import pytest + +from jupyter_server import __version__ + + +pep440re = re.compile(r"^(\d+)\.(\d+)\.(\d+((a|b|rc)\d+)?)(\.post\d+)?(\.dev\d*)?$") + + +def raise_on_bad_version(version): + if not pep440re.match(version): + raise ValueError( + 
"Versions String does apparently not match Pep 440 specification, " + "which might lead to sdist and wheel being seen as 2 different release. " + "E.g: do not use dots for beta/alpha/rc markers." + ) + + +# --------- Meta test to test the versioning tests ------------- + + +@pytest.mark.parametrize( + "version", + [ + "4.1.0.b1", + "4.1.b1", + "4.2", + "X.y.z", + "1.2.3.dev1.post2", + ], +) +def test_invalid_pep440_versions(version): + with pytest.raises(ValueError): + raise_on_bad_version(version) + + +@pytest.mark.parametrize( + "version", + [ + "4.1.1", + "4.2.1b3", + ], +) +def test_valid_pep440_versions(version): + assert raise_on_bad_version(version) is None + + +# --------- Test current version -------------- +def test_current_version(): + raise_on_bad_version(__version__) diff --git a/server/jupyter_server/tests/test_view.py b/server/jupyter_server/tests/test_view.py new file mode 100644 index 0000000..f6fbca5 --- /dev/null +++ b/server/jupyter_server/tests/test_view.py @@ -0,0 +1,60 @@ +"""test view handler""" +from html.parser import HTMLParser + +import pytest +import tornado + +from .utils import expected_http_error +from jupyter_server.utils import url_path_join + + +class IFrameSrcFinder(HTMLParser): + """Minimal HTML parser to find iframe.src attr""" + + def __init__(self): + super().__init__() + self.iframe_src = None + + def handle_starttag(self, tag, attrs): + if tag.lower() == "iframe": + for attr, value in attrs: + if attr.lower() == "src": + self.iframe_src = value + return + + +def find_iframe_src(html): + """Find the src= attr of an iframe on the page + + Assumes only one iframe + """ + finder = IFrameSrcFinder() + finder.feed(html) + return finder.iframe_src + + +@pytest.mark.parametrize( + "exists, name", + [ + (False, "nosuchfile.html"), + (False, "nosuchfile.bin"), + (True, "exists.html"), + (True, "exists.bin"), + ], +) +async def test_view(jp_fetch, jp_serverapp, jp_root_dir, exists, name): + """Test /view/$path for a few cases""" + if 
exists: + jp_root_dir.joinpath(name).write_text(name) + + if not exists: + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await jp_fetch("view", name, method="GET") + assert expected_http_error(e, 404), [name, e] + else: + r = await jp_fetch("view", name, method="GET") + assert r.code == 200 + assert r.headers["content-type"] == "text/html; charset=UTF-8" + html = r.body.decode() + src = find_iframe_src(html) + assert src == url_path_join(jp_serverapp.base_url, f"/files/{name}") diff --git a/server/jupyter_server/tests/unix_sockets/__init__.py b/server/jupyter_server/tests/unix_sockets/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/server/jupyter_server/tests/unix_sockets/conftest.py b/server/jupyter_server/tests/unix_sockets/conftest.py new file mode 100644 index 0000000..dffd4bb --- /dev/null +++ b/server/jupyter_server/tests/unix_sockets/conftest.py @@ -0,0 +1,34 @@ +import os +import pathlib + +import pytest + +from jupyter_server import DEFAULT_JUPYTER_SERVER_PORT + + +@pytest.fixture +def jp_process_id(): + """Choose a random unused process ID.""" + return os.getpid() + + +@pytest.fixture +def jp_unix_socket_file(jp_process_id): + """Define a temporary socket connection""" + # Rely on `/tmp` to avoid any Linux socket length max buffer + # issues. Key on PID for process-wise concurrency. + tmp_path = pathlib.Path("/tmp") + filename = "jupyter_server.{}.sock".format(jp_process_id) + jp_unix_socket_file = tmp_path.joinpath(filename) + yield str(jp_unix_socket_file) + # Clean up the file after the test runs. + if jp_unix_socket_file.exists(): + jp_unix_socket_file.unlink() + + +@pytest.fixture +def jp_http_port(): + """Set the port to the default value, since sock + and port cannot both be configured at the same time. 
+ """ + return DEFAULT_JUPYTER_SERVER_PORT diff --git a/server/jupyter_server/tests/unix_sockets/test_api.py b/server/jupyter_server/tests/unix_sockets/test_api.py new file mode 100644 index 0000000..1653a90 --- /dev/null +++ b/server/jupyter_server/tests/unix_sockets/test_api.py @@ -0,0 +1,69 @@ +import sys + +import pytest + +# Skip this module if on Windows. Unix sockets are not available on Windows. +pytestmark = pytest.mark.skipif( + sys.platform.startswith("win"), reason="Unix sockets are not available on Windows." +) + +import urllib + +if not sys.platform.startswith("win"): + from tornado.netutil import bind_unix_socket + +import jupyter_server.serverapp +from jupyter_server.utils import ( + url_path_join, + urlencode_unix_socket, + async_fetch, +) + + +@pytest.fixture +def jp_server_config(jp_unix_socket_file): + """Configure the serverapp fixture with the unix socket.""" + return {"ServerApp": {"sock": jp_unix_socket_file, "allow_remote_access": True}} + + +@pytest.fixture +def http_server_port(jp_unix_socket_file, jp_process_id): + """Unix socket and process ID used by tornado's HTTP Server. + + Overrides the http_server_port fixture from pytest-tornasync and replaces + it with a tuple: (unix socket, process id) + """ + return (bind_unix_socket(jp_unix_socket_file), jp_process_id) + + +@pytest.fixture +def jp_unix_socket_fetch(jp_unix_socket_file, jp_auth_header, jp_base_url, http_server, io_loop): + """A fetch fixture for Jupyter Server tests that use the unix_serverapp fixture""" + + async def client(*parts, headers={}, params={}, **kwargs): + # Handle URL strings + host_url = urlencode_unix_socket(jp_unix_socket_file) + path_url = url_path_join(jp_base_url, *parts) + params_url = urllib.parse.urlencode(params) + url = url_path_join(host_url, path_url + "?" 
+ params_url) + r = await async_fetch(url, headers=headers, io_loop=io_loop, **kwargs) + return r + + return client + + +async def test_get_spec(jp_unix_socket_fetch): + # Handle URL strings + parts = ["api", "spec.yaml"] + + # Make request and verify it succeeds.' + response = await jp_unix_socket_fetch(*parts) + assert response.code == 200 + assert response.body != None + + +async def test_list_running_servers(jp_unix_socket_file, http_server): + """Test that a server running on unix sockets is discovered by the server list""" + servers = list(jupyter_server.serverapp.list_running_servers()) + assert len(servers) >= 1 + assert jp_unix_socket_file in {info["sock"] for info in servers} diff --git a/server/jupyter_server/tests/unix_sockets/test_serverapp_integration.py b/server/jupyter_server/tests/unix_sockets/test_serverapp_integration.py new file mode 100644 index 0000000..69be15b --- /dev/null +++ b/server/jupyter_server/tests/unix_sockets/test_serverapp_integration.py @@ -0,0 +1,165 @@ +import stat +import sys + +import pytest + +# Skip this module if on Windows. Unix sockets are not available on Windows. +pytestmark = pytest.mark.skipif( + sys.platform.startswith("win"), reason="Unix sockets are not available on Windows." 
+) + +import os +import subprocess +import time + +from jupyter_server.utils import urlencode_unix_socket, urlencode_unix_socket_path + + +@pytest.mark.integration_test +def test_shutdown_sock_server_integration(jp_unix_socket_file): + url = urlencode_unix_socket(jp_unix_socket_file).encode() + encoded_sock_path = urlencode_unix_socket_path(jp_unix_socket_file) + p = subprocess.Popen( + ["jupyter-server", "--sock=%s" % jp_unix_socket_file, "--sock-mode=0700"], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + + complete = False + for line in iter(p.stderr.readline, b""): + if url in line: + complete = True + break + + assert complete, "did not find socket URL in stdout when launching notebook" + + socket_path = encoded_sock_path.encode() + assert socket_path in subprocess.check_output(["jupyter-server", "list"]) + + # Ensure umask is properly applied. + assert stat.S_IMODE(os.lstat(jp_unix_socket_file).st_mode) == 0o700 + + try: + subprocess.check_output(["jupyter-server", "stop"], stderr=subprocess.STDOUT) + except subprocess.CalledProcessError as e: + assert "There is currently no server running on" in e.output.decode() + else: + raise AssertionError("expected stop command to fail due to target mis-match") + + assert encoded_sock_path.encode() in subprocess.check_output(["jupyter-server", "list"]) + + subprocess.check_output(["jupyter-server", "stop", jp_unix_socket_file]) + + assert encoded_sock_path.encode() not in subprocess.check_output(["jupyter-server", "list"]) + + p.wait() + + +@pytest.mark.integration_test +def test_sock_server_validate_sockmode_type(): + try: + subprocess.check_output( + ["jupyter-server", "--sock=/tmp/nonexistent", "--sock-mode=badbadbad"], + stderr=subprocess.STDOUT, + ) + except subprocess.CalledProcessError as e: + assert "badbadbad" in e.output.decode() + else: + raise AssertionError("expected execution to fail due to validation of --sock-mode param") + + +@pytest.mark.integration_test +def 
test_sock_server_validate_sockmode_accessible(): + try: + subprocess.check_output( + ["jupyter-server", "--sock=/tmp/nonexistent", "--sock-mode=0444"], + stderr=subprocess.STDOUT, + ) + except subprocess.CalledProcessError as e: + assert "0444" in e.output.decode() + else: + raise AssertionError("expected execution to fail due to validation of --sock-mode param") + + +def _ensure_stopped(check_msg="There are no running servers"): + try: + subprocess.check_output(["jupyter-server", "stop"], stderr=subprocess.STDOUT) + except subprocess.CalledProcessError as e: + assert check_msg in e.output.decode() + else: + raise AssertionError("expected all servers to be stopped") + + +@pytest.mark.integration_test +def test_stop_multi_integration(jp_unix_socket_file, jp_http_port): + """Tests lifecycle behavior for mixed-mode server types w/ default ports. + + Mostly suitable for local dev testing due to reliance on default port binding. + """ + TEST_PORT = "9797" + MSG_TMPL = "Shutting down server on {}..." + + _ensure_stopped() + + # Default port. + p1 = subprocess.Popen(["jupyter-server", "--no-browser"]) + + # Unix socket. 
+ p2 = subprocess.Popen(["jupyter-server", "--sock=%s" % jp_unix_socket_file]) + + # Specified port + p3 = subprocess.Popen(["jupyter-server", "--no-browser", "--port=%s" % TEST_PORT]) + + time.sleep(3) + + shutdown_msg = MSG_TMPL.format(jp_http_port) + assert shutdown_msg in subprocess.check_output(["jupyter-server", "stop"]).decode() + + _ensure_stopped("There is currently no server running on 8888") + + assert ( + MSG_TMPL.format(jp_unix_socket_file) + in subprocess.check_output(["jupyter-server", "stop", jp_unix_socket_file]).decode() + ) + + assert ( + MSG_TMPL.format(TEST_PORT) + in subprocess.check_output(["jupyter-server", "stop", TEST_PORT]).decode() + ) + + _ensure_stopped() + + p1.wait() + p2.wait() + p3.wait() + + +@pytest.mark.integration_test +def test_launch_socket_collision(jp_unix_socket_file): + """Tests UNIX socket in-use detection for lifecycle correctness.""" + sock = jp_unix_socket_file + check_msg = "socket %s is already in use" % sock + + _ensure_stopped() + + # Start a server. + cmd = ["jupyter-server", "--sock=%s" % sock] + p1 = subprocess.Popen(cmd) + time.sleep(3) + + # Try to start a server bound to the same UNIX socket. + try: + subprocess.check_output(cmd, stderr=subprocess.STDOUT) + except subprocess.CalledProcessError as cpe: + assert check_msg in cpe.output.decode() + except Exception as ex: + raise AssertionError(f"expected 'already in use' error, got '{ex}'!") + else: + raise AssertionError("expected 'already in use' error, got success instead!") + + # Stop the background server, ensure it's stopped and wait on the process to exit. 
+ subprocess.check_call(["jupyter-server", "stop", sock]) + + _ensure_stopped() + + p1.wait() diff --git a/server/jupyter_server/tests/utils.py b/server/jupyter_server/tests/utils.py new file mode 100644 index 0000000..6e6649a --- /dev/null +++ b/server/jupyter_server/tests/utils.py @@ -0,0 +1,41 @@ +import json + +import tornado + +some_resource = "The very model of a modern major general" + +sample_kernel_json = { + "argv": ["cat", "{connection_file}"], + "display_name": "Test kernel", +} + + +def mkdir(tmp_path, *parts): + path = tmp_path.joinpath(*parts) + if not path.exists(): + path.mkdir(parents=True) + return path + + +def expected_http_error(error, expected_code, expected_message=None): + """Check that the error matches the expected output error.""" + e = error.value + if isinstance(e, tornado.web.HTTPError): + if expected_code != e.status_code: + return False + if expected_message is not None and expected_message != str(e): + return False + return True + elif any( + [ + isinstance(e, tornado.httpclient.HTTPClientError), + isinstance(e, tornado.httpclient.HTTPError), + ] + ): + if expected_code != e.code: + return False + if expected_message: + message = json.loads(e.response.body.decode())["message"] + if expected_message != message: + return False + return True diff --git a/server/jupyter_server/traittypes.py b/server/jupyter_server/traittypes.py new file mode 100644 index 0000000..5c6ede5 --- /dev/null +++ b/server/jupyter_server/traittypes.py @@ -0,0 +1,224 @@ +import inspect +from ast import literal_eval + +from traitlets import ClassBasedTraitType +from traitlets import TraitError +from traitlets import Undefined +from traitlets.utils.descriptions import describe + + +class TypeFromClasses(ClassBasedTraitType): + """A trait whose value must be a subclass of a class in a specified list of classes.""" + + def __init__(self, default_value=Undefined, klasses=None, **kwargs): + """Construct a Type trait + A Type trait specifies that its values must be 
subclasses of + a class in a list of possible classes. + If only ``default_value`` is given, it is used for the ``klasses`` as + well. If neither are given, both default to ``object``. + Parameters + ---------- + default_value : class, str or None + The default value must be a subclass of klass. If an str, + the str must be a fully specified class name, like 'foo.bar.Bah'. + The string is resolved into real class, when the parent + :class:`HasTraits` class is instantiated. + klasses : list of class, str [ default object ] + Values of this trait must be a subclass of klass. The klass + may be specified in a string like: 'foo.bar.MyClass'. + The string is resolved into real class, when the parent + :class:`HasTraits` class is instantiated. + allow_none : bool [ default False ] + Indicates whether None is allowed as an assignable value. + """ + if default_value is Undefined: + new_default_value = object if (klasses is None) else klasses + else: + new_default_value = default_value + + if klasses is None: + if (default_value is None) or (default_value is Undefined): + klasses = [object] + else: + klasses = [default_value] + + # OneOfType requires a list of klasses to be specified (different than Type). + if not isinstance(klasses, (list, tuple, set)): + raise TraitError("`klasses` must be a list of class names (type is str) or classes.") + + for klass in klasses: + if not (inspect.isclass(klass) or isinstance(klass, str)): + raise TraitError("A OneOfType trait must specify a list of classes.") + + # Store classes. + self.klasses = klasses + + super().__init__(new_default_value, **kwargs) + + def subclass_from_klasses(self, value): + "Check that a given class is a subclasses found in the klasses list." 
+ return any(issubclass(value, klass) for klass in self.importable_klasses) + + def validate(self, obj, value): + """Validates that the value is a valid object instance.""" + if isinstance(value, str): + try: + value = self._resolve_string(value) + except ImportError: + raise TraitError( + "The '%s' trait of %s instance must be a type, but " + "%r could not be imported" % (self.name, obj, value) + ) + try: + if self.subclass_from_klasses(value): + return value + except Exception: + pass + + self.error(obj, value) + + def info(self): + """Returns a description of the trait.""" + result = "a subclass of " + for klass in self.klasses: + if not isinstance(klass, str): + klass = klass.__module__ + "." + klass.__name__ + result += f"{klass} or " + # Strip the last "or" + result = result.strip(" or ") + if self.allow_none: + return result + " or None" + return result + + def instance_init(self, obj): + self._resolve_classes() + super().instance_init(obj) + + def _resolve_classes(self): + # Resolve all string names to actual classes. + self.importable_klasses = [] + for klass in self.klasses: + if isinstance(klass, str): + # Try importing the classes to compare. Silently, ignore if not importable. + try: + klass = self._resolve_string(klass) + self.importable_klasses.append(klass) + except: + pass + else: + self.importable_klasses.append(klass) + + if isinstance(self.default_value, str): + self.default_value = self._resolve_string(self.default_value) + + def default_value_repr(self): + value = self.default_value + if isinstance(value, str): + return repr(value) + else: + return repr(f"{value.__module__}.{value.__name__}") + + +class InstanceFromClasses(ClassBasedTraitType): + """A trait whose value must be an instance of a class in a specified list of classes. + The value can also be an instance of a subclass of the specified classes. 
+ Subclasses can declare default classes by overriding the klass attribute + """ + + def __init__(self, klasses=None, args=None, kw=None, **kwargs): + """Construct an Instance trait. + This trait allows values that are instances of a particular + class or its subclasses. Our implementation is quite different + from that of enthough.traits as we don't allow instances to be used + for klass and we handle the ``args`` and ``kw`` arguments differently. + Parameters + ---------- + klasses : list of classes or class_names (str) + The class that forms the basis for the trait. Class names + can also be specified as strings, like 'foo.bar.Bar'. + args : tuple + Positional arguments for generating the default value. + kw : dict + Keyword arguments for generating the default value. + allow_none : bool [ default False ] + Indicates whether None is allowed as a value. + Notes + ----- + If both ``args`` and ``kw`` are None, then the default value is None. + If ``args`` is a tuple and ``kw`` is a dict, then the default is + created as ``klass(*args, **kw)``. If exactly one of ``args`` or ``kw`` is + None, the None is replaced by ``()`` or ``{}``, respectively. + """ + # If class + if klasses is None: + self.klasses = klasses + # Verify all elements are either classes or strings. + elif all(inspect.isclass(k) or isinstance(k, str) for k in klasses): + self.klasses = klasses + else: + raise TraitError( + "The klasses attribute must be a list of class names or classes" + " not: %r" % klasses + ) + + if (kw is not None) and not isinstance(kw, dict): + raise TraitError("The 'kw' argument must be a dict or None.") + if (args is not None) and not isinstance(args, tuple): + raise TraitError("The 'args' argument must be a tuple or None.") + + self.default_args = args + self.default_kwargs = kw + + super(InstanceFromClasses, self).__init__(**kwargs) + + def instance_from_importable_klasses(self, value): + "Check that a given class is a subclasses found in the klasses list." 
+ return any(isinstance(value, klass) for klass in self.importable_klasses) + + def validate(self, obj, value): + if self.instance_from_importable_klasses(value): + return value + else: + self.error(obj, value) + + def info(self): + result = "an instance of " + for klass in self.klasses: + if isinstance(klass, str): + result += klass + else: + result += describe("a", klass) + result += " or " + result = result.strip(" or ") + if self.allow_none: + result += " or None" + return result + + def instance_init(self, obj): + self._resolve_classes() + super().instance_init(obj) + + def _resolve_classes(self): + # Resolve all string names to actual classes. + self.importable_klasses = [] + for klass in self.klasses: + if isinstance(klass, str): + # Try importing the classes to compare. Silently, ignore if not importable. + try: + klass = self._resolve_string(klass) + self.importable_klasses.append(klass) + except: + pass + else: + self.importable_klasses.append(klass) + + def make_dynamic_default(self): + if (self.default_args is None) and (self.default_kwargs is None): + return None + return self.klass(*(self.default_args or ()), **(self.default_kwargs or {})) + + def default_value_repr(self): + return repr(self.make_dynamic_default()) + + def from_string(self, s): + return literal_eval(s) diff --git a/server/jupyter_server/transutils.py b/server/jupyter_server/transutils.py new file mode 100644 index 0000000..2ca30e4 --- /dev/null +++ b/server/jupyter_server/transutils.py @@ -0,0 +1,21 @@ +"""Translation related utilities. When imported, injects _ to builtins""" +# Copyright (c) Jupyter Development Team. +# Distributed under the terms of the Modified BSD License. +import gettext +import os +import warnings + + +def _trans_gettext_deprecation_helper(*args, **kwargs): + warn_msg = "The alias `_()` will be deprecated. Use `_i18n()` instead." 
def url_path_join(*pieces):
    """Join components of url into a relative url

    Use to prevent double slash when joining subpath. This will leave the
    initial and final / in place
    """
    # Remember whether the edges carried slashes, join the stripped
    # interior segments, then restore the edges.
    leading = "/" if pieces[0].startswith("/") else ""
    trailing = "/" if pieces[-1].endswith("/") else ""
    interior = "/".join(part for part in (p.strip("/") for p in pieces) if part)
    joined = leading + interior + trailing
    # A lone "/" piece would otherwise collapse to "//".
    return "/" if joined == "//" else joined


def url_is_absolute(url):
    """Determine whether a given URL is absolute"""
    return urlparse(url).path.startswith("/")


def path2url(path):
    """Convert a local file path to a URL"""
    quoted = [quote(segment) for segment in path.split(os.sep)]
    # preserve trailing /
    if quoted[-1] == "":
        quoted[-1] = "/"
    return url_path_join(*quoted)


def url2path(url):
    """Convert a URL to a local file path"""
    return os.path.join(*[unquote(segment) for segment in url.split("/")])


def url_escape(path):
    """Escape special characters in a URL path

    Turns '/foo bar/' into '/foo%20bar/'
    """
    return "/".join(quote(segment) for segment in path.split("/"))


def url_unescape(path):
    """Unescape special characters in a URL path

    Turns '/foo%20bar/' into '/foo bar/'
    """
    return "/".join(unquote(segment) for segment in path.split("/"))
+ + Parameters + ---------- + path : String representing a path to a file + other_path : String representing a path to another file + + Returns + ------- + same: Boolean that is True if both path and other path are the same + """ + path_stat = os.stat(path) + other_path_stat = os.stat(other_path) + return path.lower() == other_path.lower() and path_stat == other_path_stat + + +def to_os_path(path, root=""): + """Convert an API path to a filesystem path + + If given, root will be prepended to the path. + root must be a filesystem path already. + """ + parts = path.strip("/").split("/") + parts = [p for p in parts if p != ""] # remove duplicate splits + path = os.path.join(root, *parts) + return path + + +def to_api_path(os_path, root=""): + """Convert a filesystem path to an API path + + If given, root will be removed from the path. + root must be a filesystem path already. + """ + if os_path.startswith(root): + os_path = os_path[len(root) :] + parts = os_path.strip(os.path.sep).split(os.path.sep) + parts = [p for p in parts if p != ""] # remove duplicate splits + path = "/".join(parts) + return path + + +def check_version(v, check): + """check version string v >= check + + If dev/prerelease tags result in TypeError for string-number comparison, + it is assumed that the dependency is satisfied. + Users on dev branches are responsible for keeping their own packages up to date. 
+ """ + try: + return Version(v) >= Version(check) + except TypeError: + return True + + +# Copy of IPython.utils.process.check_pid: + + +def _check_pid_win32(pid): + import ctypes + + # OpenProcess returns 0 if no such process (of ours) exists + # positive int otherwise + return bool(ctypes.windll.kernel32.OpenProcess(1, 0, pid)) + + +def _check_pid_posix(pid): + """Copy of IPython.utils.process.check_pid""" + try: + os.kill(pid, 0) + except OSError as err: + if err.errno == errno.ESRCH: + return False + elif err.errno == errno.EPERM: + # Don't have permission to signal the process - probably means it exists + return True + raise + else: + return True + + +if sys.platform == "win32": + check_pid = _check_pid_win32 +else: + check_pid = _check_pid_posix + + +async def ensure_async(obj): + """Convert a non-awaitable object to a coroutine if needed, + and await it if it was not already awaited. + """ + if inspect.isawaitable(obj): + try: + result = await obj + except RuntimeError as e: + if str(e) == "cannot reuse already awaited coroutine": + # obj is already the coroutine's result + return obj + raise + return result + # obj doesn't need to be awaited + return obj + + +def run_sync(maybe_async): + """If async, runs maybe_async and blocks until it has executed, + possibly creating an event loop. + If not async, just returns maybe_async as it is the result of something + that has already executed. + + Parameters + ---------- + maybe_async : async or non-async object + The object to be executed, if it is async. + + Returns + ------- + result + Whatever the async object returns, or the object itself. 
+ """ + if not inspect.isawaitable(maybe_async): + # that was not something async, just return it + return maybe_async + # it is async, we need to run it in an event loop + def wrapped(): + create_new_event_loop = False + try: + loop = asyncio.get_event_loop() + except RuntimeError: + create_new_event_loop = True + else: + if loop.is_closed(): + create_new_event_loop = True + if create_new_event_loop: + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + try: + result = loop.run_until_complete(maybe_async) + except RuntimeError as e: + if str(e) == "This event loop is already running": + # just return a Future, hoping that it will be awaited + result = asyncio.ensure_future(maybe_async) + else: + raise e + return result + + return wrapped() + + +async def run_sync_in_loop(maybe_async): + """Runs a function synchronously whether it is an async function or not. + + If async, runs maybe_async and blocks until it has executed. + + If not async, just returns maybe_async as it is the result of something + that has already executed. + + Parameters + ---------- + maybe_async : async or non-async object + The object to be executed, if it is async. + + Returns + ------- + result + Whatever the async object returns, or the object itself. 
+ """ + if not inspect.isawaitable(maybe_async): + return maybe_async + return await maybe_async + + +def urlencode_unix_socket_path(socket_path): + """Encodes a UNIX socket path string from a socket path for the `http+unix` URI form.""" + return socket_path.replace("/", "%2F") + + +def urldecode_unix_socket_path(socket_path): + """Decodes a UNIX sock path string from an encoded sock path for the `http+unix` URI form.""" + return socket_path.replace("%2F", "/") + + +def urlencode_unix_socket(socket_path): + """Encodes a UNIX socket URL from a socket path for the `http+unix` URI form.""" + return "http+unix://%s" % urlencode_unix_socket_path(socket_path) + + +def unix_socket_in_use(socket_path): + """Checks whether a UNIX socket path on disk is in use by attempting to connect to it.""" + if not os.path.exists(socket_path): + return False + + try: + sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + sock.connect(socket_path) + except socket.error: + return False + else: + return True + finally: + sock.close() + + +@contextmanager +def _request_for_tornado_client(urlstring, method="GET", body=None, headers=None): + """A utility that provides a context that handles + HTTP, HTTPS, and HTTP+UNIX request. + Creates a tornado HTTPRequest object with a URL + that tornado's HTTPClients can accept. + If the request is made to a unix socket, temporarily + configure the AsyncHTTPClient to resolve the URL + and connect to the proper socket. + """ + parts = urlsplit(urlstring) + if parts.scheme in ["http", "https"]: + pass + elif parts.scheme == "http+unix": + # If unix socket, mimic HTTP. + parts = SplitResult( + scheme="http", + netloc=parts.netloc, + path=parts.path, + query=parts.query, + fragment=parts.fragment, + ) + + class UnixSocketResolver(Resolver): + """A resolver that routes HTTP requests to unix sockets + in tornado HTTP clients. + Due to constraints in Tornados' API, the scheme of the + must be `http` (not `http+unix`). 
Applications should replace + the scheme in URLS before making a request to the HTTP client. + """ + + def initialize(self, resolver): + self.resolver = resolver + + def close(self): + self.resolver.close() + + async def resolve(self, host, port, *args, **kwargs): + return [(socket.AF_UNIX, urldecode_unix_socket_path(host))] + + resolver = UnixSocketResolver(resolver=Resolver()) + AsyncHTTPClient.configure(None, resolver=resolver) + else: + raise Exception("Unknown URL scheme.") + + # Yield the request for the given client. + url = urlunsplit(parts) + request = HTTPRequest(url, method=method, body=body, headers=headers) + yield request + + +def fetch(urlstring, method="GET", body=None, headers=None): + """ + Send a HTTP, HTTPS, or HTTP+UNIX request + to a Tornado Web Server. Returns a tornado HTTPResponse. + """ + with _request_for_tornado_client(urlstring) as request: + response = HTTPClient(AsyncHTTPClient).fetch(request) + return response + + +async def async_fetch(urlstring, method="GET", body=None, headers=None, io_loop=None): + """ + Send an asynchronous HTTP, HTTPS, or HTTP+UNIX request + to a Tornado Web Server. Returns a tornado HTTPResponse. + """ + with _request_for_tornado_client(urlstring) as request: + response = await AsyncHTTPClient(io_loop).fetch(request) + return response + + +def is_namespace_package(namespace): + """Is the provided namespace a Python Namespace Package (PEP420). + + https://www.python.org/dev/peps/pep-0420/#specification + + Returns `None` if module is not importable. + + """ + # NOTE: using submodule_search_locations because the loader can be None + try: + spec = importlib.util.find_spec(namespace) + except ValueError: # spec is not set - see https://docs.python.org/3/library/importlib.html#importlib.util.find_spec + return None + + if not spec: + # e.g. 
# NOTE(review): residue from the fused diff line — the tail of
# is_namespace_package() in utils.py plus the patch headers creating
# server/jupyter_server/view/__init__.py (empty) and
# server/jupyter_server/view/handlers.py; preserved as comments because
# this chunk is a mangled unified diff.
#     module not installed
#     return None
#     return isinstance(spec.submodule_search_locations, _NamespacePath)
# --- new file: server/jupyter_server/view/handlers.py ---

# encoding: utf-8
"""Tornado handlers for viewing HTML files."""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
from tornado import web

from ..base.handlers import JupyterHandler, path_regex
from ..utils import ensure_async, url_escape, url_path_join
from jupyter_server.auth import authorized


# Authorization resource these handlers are checked against.
AUTH_RESOURCE = "contents"


class ViewHandler(JupyterHandler):
    """Serve a minimal page that renders a served file inside an iframe."""

    auth_resource = AUTH_RESOURCE

    @web.authenticated
    @authorized
    async def get(self, path):
        # Normalise the API path, then verify the target exists before
        # rendering anything (contents_manager may be sync or async).
        path = path.strip("/")
        exists = await ensure_async(self.contents_manager.file_exists(path))
        if not exists:
            raise web.HTTPError(404, "File does not exist: %s" % path)

        page_name = path.rsplit("/", 1)[-1]
        iframe_src = url_path_join(self.base_url, "files", url_escape(path))
        body = self.render_template(
            "view.html", file_url=iframe_src, page_title=page_name
        )
        self.write(body)


default_handlers = [
    (r"/view%s" % path_regex, ViewHandler),
]

# NOTE(review): residue — head of server/package-lock.json from the same
# fused diff line (lockfileVersion 1; the JSON continues in the next chunk):
# { "name": "jupyter_server", "version": "1.0.0", "lockfileVersion": 1,
#   "requires": true, "dependencies": { "ansi-regex": { "version": "5.0.1",
#   "resolved": ".../ansi-regex-5.0.1.tgz", "integrity": ...
"sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" + }, + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "requires": { + "color-convert": "^2.0.1" + } + }, + "balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" + }, + "bootstrap": { + "version": "3.4.1", + "resolved": "https://registry.npmjs.org/bootstrap/-/bootstrap-3.4.1.tgz", + "integrity": "sha512-yN5oZVmRCwe5aKwzRj6736nSmKDX7pLYwsXiCj/EYmo16hODaBiT4En5btW/jhBF/seV+XMx3aYwukYC3A49DA==" + }, + "brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "cliui": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", + "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", + "requires": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^7.0.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": 
"sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + }, + "concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" + }, + "copyfiles": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/copyfiles/-/copyfiles-2.4.1.tgz", + "integrity": "sha512-fereAvAvxDrQDOXybk3Qu3dPbOoKoysFMWtkY3mv5BsL8//OSZVL5DCLYqgRfY5cWirgRzlC+WSrxp6Bo3eNZg==", + "requires": { + "glob": "^7.0.5", + "minimatch": "^3.0.3", + "mkdirp": "^1.0.4", + "noms": "0.0.0", + "through2": "^2.0.1", + "untildify": "^4.0.0", + "yargs": "^16.1.0" + } + }, + "core-util-is": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" + }, + "emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + }, + "escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==" + }, + "fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" + }, + "get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==" + }, + "glob": { + "version": "7.1.7", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz", + "integrity": "sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==", + "requires": { + 
"fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "requires": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==" + }, + "isarray": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" + }, + "minimatch": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "requires": { + "brace-expansion": "^1.1.7" + } + }, + "mkdirp": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", + "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==" + }, + "noms": { + "version": "0.0.0", + "resolved": "https://registry.npmjs.org/noms/-/noms-0.0.0.tgz", + "integrity": "sha1-2o69nzr51nYJGbJ9nNyAkqczKFk=", + "requires": { + "inherits": "^2.0.1", + "readable-stream": "~1.0.31" + } + }, + "once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "requires": { + "wrappy": "1" + } + }, + 
"path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=" + }, + "process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" + }, + "readable-stream": { + "version": "1.0.34", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz", + "integrity": "sha1-Elgg40vIQtLyqq+v5MKRbuMsFXw=", + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + } + }, + "require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=" + }, + "safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" + }, + "string-width": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.2.tgz", + "integrity": "sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA==", + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.0" + } + }, + "string_decoder": { + "version": "0.10.31", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=" + }, + "strip-ansi": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", + "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", + "requires": { + 
"ansi-regex": "^5.0.0" + } + }, + "through2": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", + "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", + "requires": { + "readable-stream": "~2.3.6", + "xtend": "~4.0.1" + }, + "dependencies": { + "isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" + }, + "readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "requires": { + "safe-buffer": "~5.1.0" + } + } + } + }, + "untildify": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/untildify/-/untildify-4.0.0.tgz", + "integrity": "sha512-KK8xQ1mkzZeg9inewmFVDNkg3l5LUhoq9kN6iWYB/CC9YMG8HA+c1Q8HwDe6dEX7kErrEVNVBO3fWsVq5iDgtw==" + }, + "util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" + }, + "wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "requires": { + "ansi-styles": "^4.0.0", + "string-width": 
"^4.1.0", + "strip-ansi": "^6.0.0" + } + }, + "wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" + }, + "xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==" + }, + "y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==" + }, + "yargs": { + "version": "16.2.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", + "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", + "requires": { + "cliui": "^7.0.2", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.0", + "y18n": "^5.0.5", + "yargs-parser": "^20.2.2" + } + }, + "yargs-parser": { + "version": "20.2.9", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz", + "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==" + } + } +} diff --git a/server/package.json b/server/package.json new file mode 100644 index 0000000..d4c9c73 --- /dev/null +++ b/server/package.json @@ -0,0 +1,13 @@ +{ + "name": "jupyter_server", + "private": true, + "version": "1.0.0", + "license": "BSD", + "scripts": { + "build": "copyfiles -f node_modules/bootstrap/dist/css/*.min.* jupyter_server/static/style" + }, + "dependencies": { + "bootstrap": "^3.4.0", + "copyfiles": "^2.4.1" + } +} diff --git a/server/pyproject.toml b/server/pyproject.toml new file mode 100644 index 0000000..7e7bfa8 --- /dev/null +++ b/server/pyproject.toml @@ -0,0 +1,43 @@ +[build-system] +requires = ["jupyter_packaging~=0.9"] +build-backend = 
"jupyter_packaging.build_api" + +[tool.jupyter-packaging.builder] +factory = "jupyter_packaging.npm_builder" + +[tool.check-manifest] +ignore = ["tbump.toml", ".*", "*.yml", "package-lock.json", "bootstrap*", "conftest.py"] + +[tool.pytest.ini_options] +addopts = "--doctest-modules" +testpaths = [ + "jupyter_server/" +] +timeout = 300 +timeout_method = "thread" + +[tool.jupyter-releaser] +skip = ["check-links"] + +[tool.tbump.version] +current = "1.14.0.dev0" +regex = ''' + (?P\d+)\.(?P\d+)\.(?P\d+) + ((?Pa|b|rc|.dev)(?P\d+))? +''' + +[tool.tbump.git] +message_template = "Bump to {new_version}" +tag_template = "v{new_version}" + +[[tool.tbump.file]] +src = "jupyter_server/_version.py" +version_template = '({major}, {minor}, {patch}, "{channel}", "{release}")' + +[[tool.tbump.field]] +name = "channel" +default = "" + +[[tool.tbump.field]] +name = "release" +default = "" diff --git a/server/readthedocs.yml b/server/readthedocs.yml new file mode 100644 index 0000000..011118f --- /dev/null +++ b/server/readthedocs.yml @@ -0,0 +1,11 @@ +version: 2 +sphinx: + configuration: docs/source/conf.py +conda: + environment: docs/environment.yml +python: + version: 3.8 + install: + # install itself with pip install . + - method: pip + path: . diff --git a/server/setup.cfg b/server/setup.cfg new file mode 100644 index 0000000..322c510 --- /dev/null +++ b/server/setup.cfg @@ -0,0 +1,76 @@ +[metadata] +name = jupyter_server +version = attr: jupyter_server.__version__ +description = The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications. 
+long_description = file: README.md +long_description_content_type = text/markdown +license_files = COPYING.md +author = Jupyter Development Team +author_email = jupyter@googlegroups.com +url = https://jupyter.org +platforms = Linux, Mac OS X, Windows +keywords = ipython, jupyter +classifiers = + Intended Audience :: Developers + Intended Audience :: System Administrators + Intended Audience :: Science/Research + License :: OSI Approved :: BSD License + Programming Language :: Python + Programming Language :: Python :: 3.7 + Programming Language :: Python :: 3.8 + Programming Language :: Python :: 3.9 + Programming Language :: Python :: 3.10 + +[options] +zip_safe = False +include_package_data = True +packages = find: +python_requires = >=3.7 +install_requires = + jinja2 + tornado>=6.1.0 + pyzmq>=17 + argon2-cffi + ipython_genutils + traitlets>=5 + jupyter_core>=4.6.0 + jupyter_client>=6.1.1 + nbformat + nbconvert + Send2Trash + terminado>=0.8.3 + prometheus_client + anyio>=3.1.0,<4 + websocket-client + packaging + pywinpty(<2);os_name=='nt' + +[options.extras_require] +test = + coverage + pytest>=6.0 + pytest-cov + pytest-mock + pytest-timeout + requests + pytest-tornasync + pytest-console-scripts + ipykernel + # NOTE: we cannot auto install examples/simple here because of: + # https://github.com/pypa/pip/issues/6658 + +[options.entry_points] +console_scripts = + jupyter-server = jupyter_server.serverapp:main + +[options.packages.find] +exclude = ['docs*', 'examples*'] + +[flake8] +ignore = E, C, W, F403, F811, F841, E402, I100, I101, D400 +builtins = c, get_config +exclude = + .cache, + .github, + docs, + setup.py diff --git a/server/setup.py b/server/setup.py new file mode 100644 index 0000000..9a33dcc --- /dev/null +++ b/server/setup.py @@ -0,0 +1,11 @@ +from setuptools import setup + +try: + from jupyter_packaging import wrap_installers, npm_builder + + ensured_targets = ["jupyter_server/static/style/bootstrap.min.css"] + cmdclass = 
wrap_installers(pre_develop=npm_builder(), ensured_targets=ensured_targets) +except ImportError: + cmdclass = {} + +setup(cmdclass=cmdclass) diff --git a/server/yarn.lock b/server/yarn.lock new file mode 100644 index 0000000..95bc2a3 --- /dev/null +++ b/server/yarn.lock @@ -0,0 +1,295 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. +# yarn lockfile v1 + + +ansi-regex@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" + integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== + +ansi-styles@^4.0.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" + integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== + dependencies: + color-convert "^2.0.1" + +balanced-match@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" + integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== + +bootstrap@^3.4.0: + version "3.4.1" + resolved "https://registry.yarnpkg.com/bootstrap/-/bootstrap-3.4.1.tgz#c3a347d419e289ad11f4033e3c4132b87c081d72" + integrity sha512-yN5oZVmRCwe5aKwzRj6736nSmKDX7pLYwsXiCj/EYmo16hODaBiT4En5btW/jhBF/seV+XMx3aYwukYC3A49DA== + +brace-expansion@^1.1.7: + version "1.1.11" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" + integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== + dependencies: + balanced-match "^1.0.0" + concat-map "0.0.1" + +cliui@^7.0.2: + version "7.0.4" + resolved "https://registry.yarnpkg.com/cliui/-/cliui-7.0.4.tgz#a0265ee655476fc807aea9df3df8df7783808b4f" + integrity 
sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ== + dependencies: + string-width "^4.2.0" + strip-ansi "^6.0.0" + wrap-ansi "^7.0.0" + +color-convert@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" + integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== + dependencies: + color-name "~1.1.4" + +color-name@~1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" + integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== + +concat-map@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" + integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= + +copyfiles@^2.4.1: + version "2.4.1" + resolved "https://registry.yarnpkg.com/copyfiles/-/copyfiles-2.4.1.tgz#d2dcff60aaad1015f09d0b66e7f0f1c5cd3c5da5" + integrity sha512-fereAvAvxDrQDOXybk3Qu3dPbOoKoysFMWtkY3mv5BsL8//OSZVL5DCLYqgRfY5cWirgRzlC+WSrxp6Bo3eNZg== + dependencies: + glob "^7.0.5" + minimatch "^3.0.3" + mkdirp "^1.0.4" + noms "0.0.0" + through2 "^2.0.1" + untildify "^4.0.0" + yargs "^16.1.0" + +core-util-is@~1.0.0: + version "1.0.3" + resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85" + integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ== + +emoji-regex@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" + integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== + +escalade@^3.1.1: + version "3.1.1" + resolved 
"https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" + integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== + +fs.realpath@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" + integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= + +get-caller-file@^2.0.5: + version "2.0.5" + resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" + integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== + +glob@^7.0.5: + version "7.2.0" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.0.tgz#d15535af7732e02e948f4c41628bd910293f6023" + integrity sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.0.4" + once "^1.3.0" + path-is-absolute "^1.0.0" + +inflight@^1.0.4: + version "1.0.6" + resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" + integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk= + dependencies: + once "^1.3.0" + wrappy "1" + +inherits@2, inherits@^2.0.1, inherits@~2.0.1, inherits@~2.0.3: + version "2.0.4" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" + integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== + +is-fullwidth-code-point@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" + integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== + +isarray@0.0.1: + version "0.0.1" + resolved 
"https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf" + integrity sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8= + +isarray@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" + integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= + +minimatch@^3.0.3, minimatch@^3.0.4: + version "3.1.2" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" + integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== + dependencies: + brace-expansion "^1.1.7" + +mkdirp@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e" + integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw== + +noms@0.0.0: + version "0.0.0" + resolved "https://registry.yarnpkg.com/noms/-/noms-0.0.0.tgz#da8ebd9f3af9d6760919b27d9cdc8092a7332859" + integrity sha1-2o69nzr51nYJGbJ9nNyAkqczKFk= + dependencies: + inherits "^2.0.1" + readable-stream "~1.0.31" + +once@^1.3.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E= + dependencies: + wrappy "1" + +path-is-absolute@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" + integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18= + +process-nextick-args@~2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" + integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== + +readable-stream@~1.0.31: + version "1.0.34" + resolved 
"https://registry.yarnpkg.com/readable-stream/-/readable-stream-1.0.34.tgz#125820e34bc842d2f2aaafafe4c2916ee32c157c" + integrity sha1-Elgg40vIQtLyqq+v5MKRbuMsFXw= + dependencies: + core-util-is "~1.0.0" + inherits "~2.0.1" + isarray "0.0.1" + string_decoder "~0.10.x" + +readable-stream@~2.3.6: + version "2.3.7" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57" + integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw== + dependencies: + core-util-is "~1.0.0" + inherits "~2.0.3" + isarray "~1.0.0" + process-nextick-args "~2.0.0" + safe-buffer "~5.1.1" + string_decoder "~1.1.1" + util-deprecate "~1.0.1" + +require-directory@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" + integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I= + +safe-buffer@~5.1.0, safe-buffer@~5.1.1: + version "5.1.2" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" + integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== + +string-width@^4.1.0, string-width@^4.2.0: + version "4.2.3" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + +string_decoder@~0.10.x: + version "0.10.31" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-0.10.31.tgz#62e203bc41766c6c28c9fc84301dab1c5310fa94" + integrity sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ= + +string_decoder@~1.1.1: + version "1.1.1" + resolved 
"https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" + integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== + dependencies: + safe-buffer "~5.1.0" + +strip-ansi@^6.0.0, strip-ansi@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + +through2@^2.0.1: + version "2.0.5" + resolved "https://registry.yarnpkg.com/through2/-/through2-2.0.5.tgz#01c1e39eb31d07cb7d03a96a70823260b23132cd" + integrity sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ== + dependencies: + readable-stream "~2.3.6" + xtend "~4.0.1" + +untildify@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/untildify/-/untildify-4.0.0.tgz#2bc947b953652487e4600949fb091e3ae8cd919b" + integrity sha512-KK8xQ1mkzZeg9inewmFVDNkg3l5LUhoq9kN6iWYB/CC9YMG8HA+c1Q8HwDe6dEX7kErrEVNVBO3fWsVq5iDgtw== + +util-deprecate@~1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" + integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= + +wrap-ansi@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" + integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== + dependencies: + ansi-styles "^4.0.0" + string-width "^4.1.0" + strip-ansi "^6.0.0" + +wrappy@1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= + +xtend@~4.0.1: + version "4.0.2" + resolved 
"https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" + integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== + +y18n@^5.0.5: + version "5.0.8" + resolved "https://registry.yarnpkg.com/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55" + integrity sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA== + +yargs-parser@^20.2.2: + version "20.2.9" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.9.tgz#2eb7dc3b0289718fc295f362753845c41a0c94ee" + integrity sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w== + +yargs@^16.1.0: + version "16.2.0" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-16.2.0.tgz#1c82bf0f6b6a66eafce7ef30e376f49a12477f66" + integrity sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw== + dependencies: + cliui "^7.0.2" + escalade "^3.1.1" + get-caller-file "^2.0.5" + require-directory "^2.1.1" + string-width "^4.2.0" + y18n "^5.0.5" + yargs-parser "^20.2.2"