diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
new file mode 100644
index 0000000..c92d3e1
--- /dev/null
+++ b/.github/pull_request_template.md
@@ -0,0 +1,42 @@
+# Title: [Brief description of the change]
+
+## Description
+
+
+
+## Checklist
+
+- [ ] Code follows project style guidelines
+- [ ] Tests added/updated
+- [ ] Documentation updated
+- [ ] Changelog updated (if applicable)
+
+## Testing
+
+
+
+## Outcome/Results
+
+
+
+## Additional Context
+
+
+
+### Motivation
+
+
+
+### Related Issues/Dependencies
+
+
+
+### Additional Notes
+
+
diff --git a/.github/workflows/style-checks.yaml b/.github/workflows/style-checks.yaml
new file mode 100644
index 0000000..e5397ac
--- /dev/null
+++ b/.github/workflows/style-checks.yaml
@@ -0,0 +1,47 @@
+name: Style check
+
+on: push
+
+jobs:
+  style-check:
+    env:
+      UV_CACHE_DIR: ${{ github.workspace }}/.cache/uv
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout repo
+        uses: actions/checkout@v4.2.1
+
+      - name: Install uv
+        uses: astral-sh/setup-uv@v3.1.7
+        with:
+          enable-cache: true
+          cache-dependency-glob: "uv.lock"
+
+      - name: Set up Python
+        uses: actions/setup-python@v5.2.0
+        with:
+          python-version-file: "pyproject.toml"
+
+      - name: Restore uv cache
+        uses: actions/cache@v4.1.2
+        with:
+          path: ${{ github.workspace }}/.cache/uv
+          key: uv-${{ runner.os }}-${{ hashFiles('uv.lock') }}
+          restore-keys: |
+            uv-${{ runner.os }}-${{ hashFiles('uv.lock') }}
+            uv-${{ runner.os }}
+
+      - name: Install dependencies
+        run: uv sync --all-extras --dev
+
+      - name: Check linting with Ruff
+        run: uv run ruff check
+
+      - name: Check format with Ruff
+        run: uv run ruff format --check
+
+      - name: Check docstring coverage
+        run: uv run docstr-coverage ./**/*.py --fail-under 20 --skip-file-doc --verbose=2
+
+      - name: Minimize uv cache
+        run: uv cache prune --ci
diff --git a/.github/workflows/test-coverage.yaml b/.github/workflows/test-coverage.yaml
new file mode 100644
index 0000000..cc817e3
--- /dev/null
+++ b/.github/workflows/test-coverage.yaml
@@ -0,0 +1,49 @@
+name: Test Coverage
+
+on: push
+
+jobs:
+  test-coverage:
+    env:
+      UV_CACHE_DIR: ${{ github.workspace }}/.cache/uv
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout repo
+        uses: actions/checkout@v4.2.1
+
+      - name: Install uv
+        uses: astral-sh/setup-uv@v3.1.7
+        with:
+          enable-cache: true
+          cache-dependency-glob: "uv.lock"
+
+      - name: Set up Python
+        uses: actions/setup-python@v5.2.0
+        with:
+          python-version-file: "pyproject.toml"
+
+      - name: Restore uv cache
+        uses: actions/cache@v4.1.2
+        with:
+          path: ${{ github.workspace }}/.cache/uv
+          key: uv-${{ runner.os }}-${{ hashFiles('uv.lock') }}
+          restore-keys: |
+            uv-${{ runner.os }}-${{ hashFiles('uv.lock') }}
+            uv-${{ runner.os }}
+
+      - name: Install dependencies
+        run: uv sync --all-extras --dev
+
+      #- name: Pyright typechecking
+      #  uses: jakebailey/pyright-action@v2.3.2
+      #  with:
+      #    version: 1.1.370
+      #    skip-unannotated: true
+      #    level: error
+
+      - name: Run tests and check coverage
+        run: |
+          uv run pytest --cov=. --cov-fail-under=0
+
+      - name: Minimize uv cache
+        run: uv cache prune --ci
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 0000000..1a9c0d6
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,27 @@
+# If you make changes to this file, you should run `uv run pre-commit install` to update the hooks
+# Remember to update the version of the ruff-pre-commit repo when you update Ruff
+repos:
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: v0.5.7
+    hooks:
+      - id: ruff
+        args: [--fix]
+      - id: ruff-format
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v4.3.0
+    hooks:
+      - id: check-added-large-files
+        args: ["--maxkb=5120"]
+      - id: check-json
+      - id: check-toml
+        types: [toml]
+      - id: check-yaml
+        types: [yaml]
+  - repo: https://github.com/Yelp/detect-secrets
+    rev: v1.3.0
+    hooks:
+      - id: detect-secrets
+  - repo: https://github.com/astral-sh/uv-pre-commit
+    rev: 0.4.13
+    hooks:
+      - id: uv-lock
\ No newline at end of file
diff --git a/.python-version b/.python-version
new file mode 100644
index 0000000..e4fba21
--- /dev/null
+++ b/.python-version
@@ -0,0 +1 @@
+3.12
diff --git a/.secrets.baseline b/.secrets.baseline
new file mode 100644
index 0000000..aacbc00
--- /dev/null
+++ b/.secrets.baseline
@@ -0,0 +1,887 @@
+{
+  "version": "1.5.0",
+  "plugins_used": [
+    {
+      "name": "ArtifactoryDetector"
+    },
+    {
+      "name": "AWSKeyDetector"
+    },
+    {
+      "name": "AzureStorageKeyDetector"
+    },
+    {
+      "name": "Base64HighEntropyString",
+      "limit": 4.5
+    },
+    {
+      "name": "BasicAuthDetector"
+    },
+    {
+      "name": "CloudantDetector"
+    },
+    {
+      "name": "DiscordBotTokenDetector"
+    },
+    {
+      "name": "GitHubTokenDetector"
+    },
+    {
+      "name": "GitLabTokenDetector"
+    },
+    {
+      "name": "HexHighEntropyString",
+      "limit": 3.0
+    },
+    {
+      "name": "IbmCloudIamDetector"
+    },
+    {
+      "name": "IbmCosHmacDetector"
+    },
+    {
+      "name": "IPPublicDetector"
+    },
+    {
+      "name": "JwtTokenDetector"
+    },
+    {
+      "name": "KeywordDetector",
+      "keyword_exclude": ""
+    },
+    {
+      "name": "MailchimpDetector"
+    },
+    {
+      "name": "NpmDetector"
+    },
+    {
+      "name": "OpenAIDetector"
+    },
+    {
+      "name": "PrivateKeyDetector"
+    },
+    {
+      "name": "PypiTokenDetector"
+    },
+    {
+      "name": "SendGridDetector"
+    },
+    {
+      "name": "SlackDetector"
+    },
+    {
+      "name": "SoftlayerDetector"
+    },
+    {
+      "name": "SquareOAuthDetector"
+    },
+    {
+      "name": "StripeDetector"
+    },
+    {
+      "name": "TelegramBotTokenDetector"
+    },
+    {
+      "name": "TwilioKeyDetector"
+    }
+  ],
+  "filters_used": [
+    {
+      "path": "detect_secrets.filters.allowlist.is_line_allowlisted"
+    },
+    {
+      "path": "detect_secrets.filters.common.is_ignored_due_to_verification_policies",
+      "min_level": 2
+    },
+    {
+      "path": "detect_secrets.filters.heuristic.is_indirect_reference"
+    },
+    {
+      "path": "detect_secrets.filters.heuristic.is_likely_id_string"
+    },
+    {
+      "path": "detect_secrets.filters.heuristic.is_lock_file"
+    },
+    {
+      "path": "detect_secrets.filters.heuristic.is_not_alphanumeric_string"
+    },
+    {
+      "path": "detect_secrets.filters.heuristic.is_potential_uuid"
+    },
+    {
+      "path": "detect_secrets.filters.heuristic.is_prefixed_with_dollar_sign"
+    },
+    {
+      "path": "detect_secrets.filters.heuristic.is_sequential_string"
+    },
+    {
+      "path": "detect_secrets.filters.heuristic.is_swagger_file"
+    },
+    {
+      "path": "detect_secrets.filters.heuristic.is_templated_secret"
+    }
+  ],
+  "results": {
+    "alembic.ini": [
+      {
+        "type": "Basic Auth Credentials",
+        "filename": "alembic.ini",
+        "hashed_secret": "9d4e1e23bd5b727046a9e3b4b7db57bd8d6ee684",
+        "is_verified": false,
"line_number": 63 + } + ], + "config/defaults.py": [ + { + "type": "Secret Keyword", + "filename": "config/defaults.py", + "hashed_secret": "bc215ab14587d1f8aa7f3aa72bb6023f0677cd15", + "is_verified": false, + "line_number": 1 + } + ], + "migrations/versions/04eaff9bcc55_.py": [ + { + "type": "Hex High Entropy String", + "filename": "migrations/versions/04eaff9bcc55_.py", + "hashed_secret": "edcd50e46e8cd392a7816918fa727d3ee63a58ad", + "is_verified": false, + "line_number": 13 + } + ], + "migrations/versions/1d1b10e054af_add_timezones.py": [ + { + "type": "Hex High Entropy String", + "filename": "migrations/versions/1d1b10e054af_add_timezones.py", + "hashed_secret": "e94b87952eb8e21a72f4fe135c9bb110a55f5ecf", + "is_verified": false, + "line_number": 13 + } + ], + "migrations/versions/495235ece5f0_ondeckdata_unique.py": [ + { + "type": "Hex High Entropy String", + "filename": "migrations/versions/495235ece5f0_ondeckdata_unique.py", + "hashed_secret": "f02b02580675a6ee15d986ff51e92a9992d2f5ae", + "is_verified": false, + "line_number": 13 + }, + { + "type": "Hex High Entropy String", + "filename": "migrations/versions/495235ece5f0_ondeckdata_unique.py", + "hashed_secret": "9e588cfcced82ff3e58efd496efd2e50c469563e", + "is_verified": false, + "line_number": 14 + } + ], + "migrations/versions/58dd42108a22_new_vid_file_table.py": [ + { + "type": "Hex High Entropy String", + "filename": "migrations/versions/58dd42108a22_new_vid_file_table.py", + "hashed_secret": "21b59970bfb99e5db987f700206210e0d2cfc8ab", + "is_verified": false, + "line_number": 14 + } + ], + "migrations/versions/643148911953_deckhand_json_views.py": [ + { + "type": "Hex High Entropy String", + "filename": "migrations/versions/643148911953_deckhand_json_views.py", + "hashed_secret": "278170ebcf21a499ec11e92c899280300292cbde", + "is_verified": false, + "line_number": 14 + } + ], + "migrations/versions/677a2f2884e1_s3uploadstable.py": [ + { + "type": "Hex High Entropy String", + "filename": "migrations/versions/677a2f2884e1_s3uploadstable.py", + "hashed_secret": "278170ebcf21a499ec11e92c899280300292cbde", + "is_verified": false, + "line_number": 13 + }, + { + "type": "Hex High Entropy String", + "filename": "migrations/versions/677a2f2884e1_s3uploadstable.py", + "hashed_secret": "68944d242b0aa55394002cb6ac9c8961e0349df1", + "is_verified": false, + "line_number": 14 + } + ], + "migrations/versions/81b92a299311_gps_data_types.py": [ + { + "type": "Hex High Entropy String", + "filename": "migrations/versions/81b92a299311_gps_data_types.py", + "hashed_secret": "83850ea8bec2c1950939f2c9a9a261281721092e", + "is_verified": false, + "line_number": 14 + } + ], + "migrations/versions/8304966281aa_reencode_files.py": [ + { + "type": "Hex High Entropy String", + "filename": "migrations/versions/8304966281aa_reencode_files.py", + "hashed_secret": "da4fb25a5f7ef49f63f81d7e54cda4e09423069a", + "is_verified": false, + "line_number": 13 + }, + { + "type": "Hex High Entropy String", + "filename": "migrations/versions/8304966281aa_reencode_files.py", + "hashed_secret": "edb568a4ef4a885bba23fc128f2cdac1c9103606", + "is_verified": false, + "line_number": 14 + } + ], + "migrations/versions/97b633de0899_video_cam_name.py": [ + { + "type": "Hex High Entropy String", + "filename": "migrations/versions/97b633de0899_video_cam_name.py", + "hashed_secret": "68944d242b0aa55394002cb6ac9c8961e0349df1", + "is_verified": false, + "line_number": 13 + } + ], + "migrations/versions/b2f76c38a4a0_deckhand_gaps_score.py": [ + { + "type": "Hex High Entropy String", + 
"filename": "migrations/versions/b2f76c38a4a0_deckhand_gaps_score.py", + "hashed_secret": "3f19ea2cd8d3412a16597ab61b3cbab3cd8868cc", + "is_verified": false, + "line_number": 13 + }, + { + "type": "Hex High Entropy String", + "filename": "migrations/versions/b2f76c38a4a0_deckhand_gaps_score.py", + "hashed_secret": "bba098be25f53f8d7747e27bdfb40b6966faa586", + "is_verified": false, + "line_number": 14 + } + ], + "migrations/versions/b78dce0f5492_ondeck_json_columns.py": [ + { + "type": "Hex High Entropy String", + "filename": "migrations/versions/b78dce0f5492_ondeck_json_columns.py", + "hashed_secret": "30fcaef0489cae57a74bb2168980479eb33ce802", + "is_verified": false, + "line_number": 13 + } + ], + "migrations/versions/ba08d4e11cc7_ondeckdata_more_data_columns.py": [ + { + "type": "Hex High Entropy String", + "filename": "migrations/versions/ba08d4e11cc7_ondeckdata_more_data_columns.py", + "hashed_secret": "c4beb9b614ea7918fb259ceda3c135181abe124e", + "is_verified": false, + "line_number": 13 + }, + { + "type": "Hex High Entropy String", + "filename": "migrations/versions/ba08d4e11cc7_ondeckdata_more_data_columns.py", + "hashed_secret": "f02b02580675a6ee15d986ff51e92a9992d2f5ae", + "is_verified": false, + "line_number": 14 + } + ], + "migrations/versions/bbe04841c70d_port_departures_view.py": [ + { + "type": "Hex High Entropy String", + "filename": "migrations/versions/bbe04841c70d_port_departures_view.py", + "hashed_secret": "bba098be25f53f8d7747e27bdfb40b6966faa586", + "is_verified": false, + "line_number": 13 + }, + { + "type": "Hex High Entropy String", + "filename": "migrations/versions/bbe04841c70d_port_departures_view.py", + "hashed_secret": "30fcaef0489cae57a74bb2168980479eb33ce802", + "is_verified": false, + "line_number": 14 + } + ], + "migrations/versions/d974c1aea745_elog_gaps_score_update.py": [ + { + "type": "Hex High Entropy String", + "filename": "migrations/versions/d974c1aea745_elog_gaps_score_update.py", + "hashed_secret": "edb568a4ef4a885bba23fc128f2cdac1c9103606", + "is_verified": false, + "line_number": 13 + }, + { + "type": "Hex High Entropy String", + "filename": "migrations/versions/d974c1aea745_elog_gaps_score_update.py", + "hashed_secret": "3f19ea2cd8d3412a16597ab61b3cbab3cd8868cc", + "is_verified": false, + "line_number": 14 + } + ], + "migrations/versions/e718ddd7c0bd_add_track_table.py": [ + { + "type": "Hex High Entropy String", + "filename": "migrations/versions/e718ddd7c0bd_add_track_table.py", + "hashed_secret": "a8d27c445f98eeed69ca91d9302527347487b119", + "is_verified": false, + "line_number": 14 + } + ], + "migrations/versions/ecb326942445_starttime_on_videos.py": [ + { + "type": "Hex High Entropy String", + "filename": "migrations/versions/ecb326942445_starttime_on_videos.py", + "hashed_secret": "83850ea8bec2c1950939f2c9a9a261281721092e", + "is_verified": false, + "line_number": 16 + }, + { + "type": "Hex High Entropy String", + "filename": "migrations/versions/ecb326942445_starttime_on_videos.py", + "hashed_secret": "e94b87952eb8e21a72f4fe135c9bb110a55f5ecf", + "is_verified": false, + "line_number": 17 + } + ], + "migrations/versions/f48359cf7456_ondeckdata_status.py": [ + { + "type": "Hex High Entropy String", + "filename": "migrations/versions/f48359cf7456_ondeckdata_status.py", + "hashed_secret": "9e588cfcced82ff3e58efd496efd2e50c469563e", + "is_verified": false, + "line_number": 13 + }, + { + "type": "Hex High Entropy String", + "filename": "migrations/versions/f48359cf7456_ondeckdata_status.py", + "hashed_secret": 
"da4fb25a5f7ef49f63f81d7e54cda4e09423069a", + "is_verified": false, + "line_number": 14 + } + ], + "migrations/versions/f835aa8c569a_second.py": [ + { + "type": "Hex High Entropy String", + "filename": "migrations/versions/f835aa8c569a_second.py", + "hashed_secret": "edcd50e46e8cd392a7816918fa727d3ee63a58ad", + "is_verified": false, + "line_number": 14 + } + ], + "migrations/versions/f9dbf07180af_test_from_to_columns.py": [ + { + "type": "Hex High Entropy String", + "filename": "migrations/versions/f9dbf07180af_test_from_to_columns.py", + "hashed_secret": "21b59970bfb99e5db987f700206210e0d2cfc8ab", + "is_verified": false, + "line_number": 13 + } + ], + "migrations/versions/fdfd9e708602_add_elog_timegap_vector_row.py": [ + { + "type": "Hex High Entropy String", + "filename": "migrations/versions/fdfd9e708602_add_elog_timegap_vector_row.py", + "hashed_secret": "a8d27c445f98eeed69ca91d9302527347487b119", + "is_verified": false, + "line_number": 13 + }, + { + "type": "Hex High Entropy String", + "filename": "migrations/versions/fdfd9e708602_add_elog_timegap_vector_row.py", + "hashed_secret": "c4beb9b614ea7918fb259ceda3c135181abe124e", + "is_verified": false, + "line_number": 14 + } + ], + "notebooks/catchcount_vector.ipynb": [ + { + "type": "Base64 High Entropy String", + "filename": "notebooks/catchcount_vector.ipynb", + "hashed_secret": "6e72c8c8fc45f51c115938d7078c9d5e79830225", + "is_verified": false, + "line_number": 365 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/catchcount_vector.ipynb", + "hashed_secret": "095810202740402a8cea243d250efe887cc6dae9", + "is_verified": false, + "line_number": 556 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/catchcount_vector.ipynb", + "hashed_secret": "58e2dca616c5f17fb7cdf8d74b6d4c657608376b", + "is_verified": false, + "line_number": 577 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/catchcount_vector.ipynb", + "hashed_secret": "584e8f5539a1d74909480fbdaa988e3082d41ae5", + "is_verified": false, + "line_number": 598 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/catchcount_vector.ipynb", + "hashed_secret": "17148ae061c415257c8398fc38b7d7f8c49c584a", + "is_verified": false, + "line_number": 627 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/catchcount_vector.ipynb", + "hashed_secret": "1de9111783cd83b277d2034d24771eadc2d67404", + "is_verified": false, + "line_number": 648 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/catchcount_vector.ipynb", + "hashed_secret": "d57f65b451ee0371c35b8f9701867c7042d130fc", + "is_verified": false, + "line_number": 1230 + } + ], + "notebooks/edge_integration_charts.ipynb": [ + { + "type": "Base64 High Entropy String", + "filename": "notebooks/edge_integration_charts.ipynb", + "hashed_secret": "f7fbaf7011569a5a339688e1bfe2b95e3299e0c1", + "is_verified": false, + "line_number": 298 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/edge_integration_charts.ipynb", + "hashed_secret": "0907614fa75ec404b4bba40cd67022127215e7be", + "is_verified": false, + "line_number": 500 + } + ], + "notebooks/elog_analysis.ipynb": [ + { + "type": "Base64 High Entropy String", + "filename": "notebooks/elog_analysis.ipynb", + "hashed_secret": "87f5e22603f04b745a71889ba559d6f2d7f90830", + "is_verified": false, + "line_number": 463 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/elog_analysis.ipynb", + "hashed_secret": 
"919e229b6254ff72705b5e990e75c893f4ce54e5", + "is_verified": false, + "line_number": 502 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/elog_analysis.ipynb", + "hashed_secret": "d617009a36aa952c6002f5e3dca646783af9fa95", + "is_verified": false, + "line_number": 567 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/elog_analysis.ipynb", + "hashed_secret": "d4fed1888fc1dd55f045e82025cf88d10f61a918", + "is_verified": false, + "line_number": 882 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/elog_analysis.ipynb", + "hashed_secret": "4167c14d524731b8f71eb496bc4212702fa74609", + "is_verified": false, + "line_number": 998 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/elog_analysis.ipynb", + "hashed_secret": "846030697270c04c90ef85517fcdd4dd43102978", + "is_verified": false, + "line_number": 1177 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/elog_analysis.ipynb", + "hashed_secret": "5c1f08fe0f4f84ed2dfe235b522e22abe91d346a", + "is_verified": false, + "line_number": 1228 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/elog_analysis.ipynb", + "hashed_secret": "09870453d44a74a80a94495f5706c0f4e887bb5e", + "is_verified": false, + "line_number": 1335 + } + ], + "notebooks/key_event_detection.ipynb": [ + { + "type": "Base64 High Entropy String", + "filename": "notebooks/key_event_detection.ipynb", + "hashed_secret": "9b9910dfe333ad85f1c2a67e584b5de456909709", + "is_verified": false, + "line_number": 1097 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/key_event_detection.ipynb", + "hashed_secret": "077ac58eeb67e7642dbaa195ac3cea781cb77306", + "is_verified": false, + "line_number": 2003 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/key_event_detection.ipynb", + "hashed_secret": "449d7109d841ba5e65c6dbdb723f35f68781f4d7", + "is_verified": false, + "line_number": 2255 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/key_event_detection.ipynb", + "hashed_secret": "b9aef4410841aba3229cf98977d7bf4648c1515f", + "is_verified": false, + "line_number": 2358 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/key_event_detection.ipynb", + "hashed_secret": "be2630490eb242ded4f1d746049117d695b0b151", + "is_verified": false, + "line_number": 2768 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/key_event_detection.ipynb", + "hashed_secret": "6afdc1ab6ee99bf55783c9818f5e987811a10c7e", + "is_verified": false, + "line_number": 3420 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/key_event_detection.ipynb", + "hashed_secret": "e0773f843c3d474492e507e9e6a0de1a6f3c468a", + "is_verified": false, + "line_number": 4289 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/key_event_detection.ipynb", + "hashed_secret": "14f4fd2b70d0f0edfc6b1a794f61134c39d57d96", + "is_verified": false, + "line_number": 4388 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/key_event_detection.ipynb", + "hashed_secret": "460bad2fdcc37eeb77a89d90b8c7cb7a233e4b52", + "is_verified": false, + "line_number": 4461 + } + ], + "notebooks/timeseries_classifier_model.ipynb": [ + { + "type": "Base64 High Entropy String", + "filename": "notebooks/timeseries_classifier_model.ipynb", + "hashed_secret": "c0aeedb857dd962c1e8c6e51cf8d6c6fe977f929", + "is_verified": false, + "line_number": 413 + }, + { + "type": 
"Base64 High Entropy String", + "filename": "notebooks/timeseries_classifier_model.ipynb", + "hashed_secret": "9b9910dfe333ad85f1c2a67e584b5de456909709", + "is_verified": false, + "line_number": 849 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/timeseries_classifier_model.ipynb", + "hashed_secret": "cb3259f8449e9f8a6cf1b4ed1c960508ffc8ff7f", + "is_verified": false, + "line_number": 2041 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/timeseries_classifier_model.ipynb", + "hashed_secret": "11d965375d0f365fdce46ee82da03d537bf0384e", + "is_verified": false, + "line_number": 2441 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/timeseries_classifier_model.ipynb", + "hashed_secret": "c9ace057b37d97411e413860c17aa8b21c78f1ce", + "is_verified": false, + "line_number": 3441 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/timeseries_classifier_model.ipynb", + "hashed_secret": "a51bbe01cfd7d3e60dd4d7ba1412a8a1f1b1fff4", + "is_verified": false, + "line_number": 3767 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/timeseries_classifier_model.ipynb", + "hashed_secret": "f5e2fa6e718e913617e61dd2f268f82bc133087b", + "is_verified": false, + "line_number": 4114 + } + ], + "notebooks/tnc-edge-catch-plots.ipynb": [ + { + "type": "Secret Keyword", + "filename": "notebooks/tnc-edge-catch-plots.ipynb", + "hashed_secret": "804d7c63d224cbab0f382b5b1a62e7675f7a0934", + "is_verified": false, + "line_number": 31 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/tnc-edge-catch-plots.ipynb", + "hashed_secret": "d9944db0f05d647bcfc9d73cea130144904e8db2", + "is_verified": false, + "line_number": 2345 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/tnc-edge-catch-plots.ipynb", + "hashed_secret": "0579558fbf5dca1b09563144d134853f4cb58eaf", + "is_verified": false, + "line_number": 2516 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/tnc-edge-catch-plots.ipynb", + "hashed_secret": "d225366edf450026474e772c9aa0513ebf9d5a9b", + "is_verified": false, + "line_number": 2676 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/tnc-edge-catch-plots.ipynb", + "hashed_secret": "87c1e55893908ff55fa5956b6fac7b3bf57e4b65", + "is_verified": false, + "line_number": 2744 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/tnc-edge-catch-plots.ipynb", + "hashed_secret": "eb0ffee5ba5361a551bc09d3e0d18f2ed4dc7da8", + "is_verified": false, + "line_number": 3448 + } + ], + "notebooks/tnc-edge-data-integration.ipynb": [ + { + "type": "Secret Keyword", + "filename": "notebooks/tnc-edge-data-integration.ipynb", + "hashed_secret": "76a4acaf31b815aa2c41cc2a2176b11fa9edf00a", + "is_verified": false, + "line_number": 38 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/tnc-edge-data-integration.ipynb", + "hashed_secret": "22c276b7d6efcd5df7ea2306c9cead07cf703285", + "is_verified": false, + "line_number": 5392 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/tnc-edge-data-integration.ipynb", + "hashed_secret": "51cc78c114dc7e1d1d54b3c7950aa3c198879001", + "is_verified": false, + "line_number": 5565 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/tnc-edge-data-integration.ipynb", + "hashed_secret": "49a4fa0f41ebd7d7c88236a6ee37bbc81a545064", + "is_verified": false, + "line_number": 7389 + }, + { + "type": "Base64 High Entropy 
String", + "filename": "notebooks/tnc-edge-data-integration.ipynb", + "hashed_secret": "5e80b45cfc01eda35a76562cccaa51e7ddbe1e06", + "is_verified": false, + "line_number": 8122 + } + ], + "notebooks/tnc-edge-gps-speed.ipynb": [ + { + "type": "Base64 High Entropy String", + "filename": "notebooks/tnc-edge-gps-speed.ipynb", + "hashed_secret": "019921a0d5de0bff97ef09761e60c9340d218938", + "is_verified": false, + "line_number": 712 + } + ], + "notebooks/tnc-edge-network-uptime.ipynb": [ + { + "type": "Secret Keyword", + "filename": "notebooks/tnc-edge-network-uptime.ipynb", + "hashed_secret": "76a4acaf31b815aa2c41cc2a2176b11fa9edf00a", + "is_verified": false, + "line_number": 33 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/tnc-edge-network-uptime.ipynb", + "hashed_secret": "22e5539009c13fa6cee481a440fc764e31f811f9", + "is_verified": false, + "line_number": 113 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/tnc-edge-network-uptime.ipynb", + "hashed_secret": "133725c865648fcf63467f06f7da1eaeda776f64", + "is_verified": false, + "line_number": 1776 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/tnc-edge-network-uptime.ipynb", + "hashed_secret": "332082a0665d4e2373983b7b35276f8e41334792", + "is_verified": false, + "line_number": 2609 + } + ], + "notebooks/tnc-edge-system-uptime.ipynb": [ + { + "type": "Secret Keyword", + "filename": "notebooks/tnc-edge-system-uptime.ipynb", + "hashed_secret": "804d7c63d224cbab0f382b5b1a62e7675f7a0934", + "is_verified": false, + "line_number": 47 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/tnc-edge-system-uptime.ipynb", + "hashed_secret": "9c5e638b8af880492003fe2a121dc6c2b57d7709", + "is_verified": false, + "line_number": 1828 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/tnc-edge-system-uptime.ipynb", + "hashed_secret": "f2840e0b7c60253fe53c6e3822148057e986af5c", + "is_verified": false, + "line_number": 2347 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/tnc-edge-system-uptime.ipynb", + "hashed_secret": "b631b1dd6e18cd993d71ebf9b9ba40b202a1d755", + "is_verified": false, + "line_number": 2409 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/tnc-edge-system-uptime.ipynb", + "hashed_secret": "22276d25a132f5b3d039df99adb3b0a528477873", + "is_verified": false, + "line_number": 2474 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/tnc-edge-system-uptime.ipynb", + "hashed_secret": "c297050440b58e810a8c7c3936c7ba899692564d", + "is_verified": false, + "line_number": 2922 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/tnc-edge-system-uptime.ipynb", + "hashed_secret": "9ec376cb6ffc0ce1c0b3481376e055bdbfea6cd2", + "is_verified": false, + "line_number": 2994 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/tnc-edge-system-uptime.ipynb", + "hashed_secret": "5de64ac5d586ad10664f89dfa5d277cecf9b91a2", + "is_verified": false, + "line_number": 3063 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/tnc-edge-system-uptime.ipynb", + "hashed_secret": "7a7f65ee267fbb297df4e05a3b40d2110bf3b682", + "is_verified": false, + "line_number": 3135 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/tnc-edge-system-uptime.ipynb", + "hashed_secret": "154a48274bdf377b1c96dfe7b58597ba24cb3aab", + "is_verified": false, + "line_number": 3252 + } + ], + 
"notebooks/tnc-edge-vectorprocessing.ipynb": [ + { + "type": "Secret Keyword", + "filename": "notebooks/tnc-edge-vectorprocessing.ipynb", + "hashed_secret": "76a4acaf31b815aa2c41cc2a2176b11fa9edf00a", + "is_verified": false, + "line_number": 35 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/tnc-edge-vectorprocessing.ipynb", + "hashed_secret": "ba7d36da62b997e66b675ef7ca027a8e71850a1e", + "is_verified": false, + "line_number": 1596 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/tnc-edge-vectorprocessing.ipynb", + "hashed_secret": "f270307ddf425fe0b046ed8852edfa0acecf143a", + "is_verified": false, + "line_number": 1637 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/tnc-edge-vectorprocessing.ipynb", + "hashed_secret": "c09876d29194619d4db61f5de003fee556edfa1b", + "is_verified": false, + "line_number": 1686 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/tnc-edge-vectorprocessing.ipynb", + "hashed_secret": "fbf6acd40bcfa992a059f842d71369c3d9beeda5", + "is_verified": false, + "line_number": 5792 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/tnc-edge-vectorprocessing.ipynb", + "hashed_secret": "1b7aa01040516a5537daf12f9928346d38e5a6c1", + "is_verified": false, + "line_number": 9835 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/tnc-edge-vectorprocessing.ipynb", + "hashed_secret": "b386f6dd00d89c497446c1bef09a348fb92f77f4", + "is_verified": false, + "line_number": 13611 + }, + { + "type": "Base64 High Entropy String", + "filename": "notebooks/tnc-edge-vectorprocessing.ipynb", + "hashed_secret": "3100ac16f40b132e8b03f7df5eeab23487d863d8", + "is_verified": false, + "line_number": 17650 + } + ] + }, + "generated_at": "2024-10-23T12:29:55Z" +} diff --git a/api/README.md b/api/README.md index 59ec602..83b2bdd 100644 --- a/api/README.md +++ b/api/README.md @@ -4,7 +4,8 @@ This folder contains all HTTP-api code. 
 The Flask App `edge_http.py` will import
 
 Relevant code in `edge_http.py`:
-```
+```python
 from api import deckhand
+
 app.register_blueprint(deckhand, url_prefix='/deckhand')
 ```
diff --git a/api/__init__.py b/api/__init__.py
index 4e6663d..b151603 100644
--- a/api/__init__.py
+++ b/api/__init__.py
@@ -1,3 +1,3 @@
-
 from .deckhand import blueprint as deckhand
 
+__all__ = ["deckhand"]
diff --git a/api/deckhand.py b/api/deckhand.py
index 1ab5c4b..89a91a3 100644
--- a/api/deckhand.py
+++ b/api/deckhand.py
@@ -1,23 +1,15 @@
-from http.client import BAD_REQUEST
-from flask import Blueprint, make_response, request, g
-
-
-from sqlalchemy.orm import scoped_session, sessionmaker
-
-from model import DeckhandEventRaw
-
 import json
-from db import db
+from flask import Blueprint, Response, make_response, request
 
-blueprint = Blueprint('DeckhandApi', __name__)
+from db import db
+from model import DeckhandEventRaw
 
+blueprint = Blueprint("DeckhandApi", __name__)
 
-# ORM Session
-# orm_session = scoped_session(sessionmaker())
 
-@blueprint.route('/', methods=['PUT', 'POST'])
-def event():
+@blueprint.route("/", methods=["PUT", "POST"])
+def event() -> Response:
     d = request.get_json()
 
     event = DeckhandEventRaw()
@@ -25,7 +17,4 @@ def event():
     db.session.add(event)
     db.session.commit()
 
-    # for i in r:
-    #     print(i);
-
-    return make_response(('', 200))
+    return make_response(("", 200))
diff --git a/config/README.md b/config/README.md
index 7ed11d6..ffa86ba 100644
--- a/config/README.md
+++ b/config/README.md
@@ -1,31 +1,34 @@
 # About config
 
-This folder contains environment files. One environment per boat. 
+This folder contains environment files. One environment per boat.
 
-_Boat specific config files are not checked into git._ This is a precautionary procedure to prevent overwriting configs in the production environment. Boat specific config filenames are listed in `.gitignore`. 
+_Boat specific config files are not checked into git._ This is a precautionary procedure to prevent overwriting configs in the production environment. Boat specific config filenames are listed in `.gitignore`.
 
-# Using Configs
+## Using Configs
 
 Create a new config file. It is a good idea to copy the contents from `defaults.py`.
 
 Export the filename (with the `config/` prefix) into an environment variable named `ENVIRONMENT`
 
-```
-$ export ENVIRONMENT='config/queen_mary.py'
+```bash
+export ENVIRONMENT='config/queen_mary.py'
 ```
 
 All code in this repository should pick up both the defaults and the referenced `ENVIRONMENT` file for config values.
 
 Example python code:
 
-```
+```python
+import os
+
+import click
+
 from flask.config import Config as FlaskConfig
+
 flaskconfig = FlaskConfig(root_path='')
 flaskconfig.from_object('config.defaults')
 if 'ENVIRONMENT' in os.environ:
     flaskconfig.from_envvar('ENVIRONMENT')
 
-import click
-
 @click.command()
 @click.option('--dbname', default=flaskconfig.get('DBNAME'))
 @click.option('--dbuser', default=flaskconfig.get('DBUSER'))
@@ -33,11 +36,11 @@ def main(dbname, dbuser):
     pass
 ```
 
-# Boat specific examples:
+## Boat specific examples
 
 ### brancol.py
 
-```
+```python
 DEBUG=False
 SECRET_KEY=''
 THALOS_VIDEO_DIR="/thalos/brancol/videos"
@@ -50,7 +53,7 @@ BOAT_NAME='brancol'
 
 ### stpatrick.py
 
-```
+```python
 DEBUG=False
 THALOS_VIDEO_DIR="/thalos/saintpatrick/videos"
 THALOS_CAM_NAME='cam1'
diff --git a/config/defaults.py b/config/defaults.py
index 5477e5e..0fa33a7 100644
--- a/config/defaults.py
+++ b/config/defaults.py
@@ -1,11 +1,13 @@
-SECRET_KEY='not_so_secret'
-DEBUG=True
-DBUSER="edge"
-DBNAME="edge"
-THALOS_VIDEO_DIR="/thalos/videos"
-THALOS_CAM_NAME='cam1'
-VIDEO_OUTPUT_DIR='/videos'
-VIDEO_PASSPHRASE_FILE='/dev/null'
-THALOS_VIDEO_SUFFIX='.avi.done'
-BOAT_NAME=''
-DB_TABLES_VERSION='v1'
+import os
+
+SECRET_KEY = os.getenv("SECRET_KEY", "")
+DEBUG = os.getenv("DEBUG", "true")
+DBUSER = os.getenv("DBUSER", "edge")
+DBNAME = os.getenv("DBNAME", "edge")
+THALOS_VIDEO_DIR = os.getenv("THALOS_VIDEO_DIR", "/thalos/videos")
+THALOS_CAM_NAME = os.getenv("THALOS_CAM_NAME", "cam1")
+VIDEO_OUTPUT_DIR = os.getenv("VIDEO_OUTPUT_DIR", "/videos")
+VIDEO_PASSPHRASE_FILE = os.getenv("VIDEO_PASSPHRASE_FILE", "/dev/null")
+THALOS_VIDEO_SUFFIX = os.getenv("THALOS_VIDEO_SUFFIX", ".avi.done")
+BOAT_NAME = os.getenv("BOAT_NAME", "")
+DB_TABLES_VERSION = os.getenv("DB_TABLES_VERSION", "v1")
diff --git a/db.py b/db.py
index 21631f6..f0b13d6 100644
--- a/db.py
+++ b/db.py
@@ -1,4 +1,3 @@
-
 from flask_sqlalchemy import SQLAlchemy
 
 db = SQLAlchemy()
diff --git a/dbdumps/create_dump.sh b/dbdumps/create_dump.sh
index b718390..efab64c 100644
--- a/dbdumps/create_dump.sh
+++ b/dbdumps/create_dump.sh
@@ -1,32 +1,29 @@
+#!/bin/bash
 SCRIPTNAME="$0"
-SCRIPTDIR="$(dirname -- "$( readlink -f -- "$0")")"
+SCRIPTDIR="$(dirname -- "$(readlink -f -- "$0")")"
 
 function help {
-    echo "usage: $SCRIPTNAME [--dbuser USER] [--dbname NAME] "
-    exit 1
+	echo "usage: $SCRIPTNAME [--dbuser USER] [--dbname NAME] "
+	exit 1
 }
 
 DBNAME=edge
 DBUSER=edge
 
-while (( "$#" )); do
+while (("$#")); do
 	case $1 in
-	--dbuser)
-		shift && DBUSER="$1" || help
-		;;
-	--dbname)
-		shift && DBNAME="$1" || help
-		;;
-	*)
-		help
-		;;
+	--dbuser)
+		shift && DBUSER="$1" || help
+		;;
+	--dbname)
+		shift && DBNAME="$1" || help
+		;;
+	*)
+		help
+		;;
 	esac
 	shift
 done
 
-pg_dump --clean -U "$DBUSER" "$DBNAME" > "$SCRIPTDIR/$(date -u -Iseconds | cut -f1 -d + )Z.pgdump"
-
-
-
+pg_dump --clean -U "$DBUSER" "$DBNAME" >"$SCRIPTDIR/$(date -u -Iseconds | cut -f1 -d +)Z.pgdump"
diff --git a/dbdumps/reset_schema.sh b/dbdumps/reset_schema.sh
index 935142a..6839045 100644
--- a/dbdumps/reset_schema.sh
+++ b/dbdumps/reset_schema.sh
@@ -1,37 +1,37 @@
+#!/bin/bash
 
-SCRIPTNAME="$0"
-SCRIPTDIR="$(dirname -- "$( readlink -f -- "$0")")"
-
+SCRIPTDIR="$(dirname -- "$(readlink -f -- "$0")")"
 
 DBNAME=edge
 DBUSER=edge
 
-while (( "$#" )); do
+while (("$#")); do
 	case $1 in
-	--dbuser)
-		shift && DBUSER="$1" || help
-		;;
-	--dbname)
-		shift && DBNAME="$1" || help
-		;;
-	*)
-		help
-		;;
+	--dbuser)
+		shift && DBUSER="$1" || help
+		;;
+	--dbname)
+		shift && DBNAME="$1" || help
+		;;
+	*)
+		help
+		;;
 	esac
 	shift
 done
 
+cd "$SCRIPTDIR/.." || {
+	echo "Failed to cd to project root"
+	exit 1
+}
 
-cd "$SCRIPTDIR/.."
-
-if [ "$VIRTUAL_ENV" != "$(pwd)/venv" ] ; then
-    if [ "x$VIRTUAL_ENV" != "x" ] ; then
-        deactivate
-    fi
-    source venv/bin/activate
+if [ "$VIRTUAL_ENV" != "$(pwd)/venv" ]; then
+	if [ "x$VIRTUAL_ENV" != "x" ]; then
+		deactivate
+	fi
+	source venv/bin/activate
 fi
 
-
 python -c 'from sqlalchemy import create_engine; \
 from model import Base; \
 engine = create_engine("postgresql+psycopg2://'$DBUSER'@/'$DBNAME'", echo=True); \
diff --git a/dbdumps/restore_dump.sh b/dbdumps/restore_dump.sh
index 093b651..286546d 100644
--- a/dbdumps/restore_dump.sh
+++ b/dbdumps/restore_dump.sh
@@ -1,11 +1,12 @@
+#!/bin/bash
 SCRIPTNAME="$0"
-SCRIPTDIR="$(dirname -- "$( readlink -f -- "$0")")"
+SCRIPTDIR="$(dirname -- "$(readlink -f -- "$0")")"
 
 function help {
-    echo "usage: $SCRIPTNAME [--dbuser USER] [--dbname NAME] [DUMPFILE]"
-    echo "    DBDUMP_FILENAME defaults to the latest dumpfile, sorted by filename"
-    exit 1
+	echo "usage: $SCRIPTNAME [--dbuser USER] [--dbname NAME] [DUMPFILE]"
+	echo "    DBDUMP_FILENAME defaults to the latest dumpfile, sorted by filename"
+	exit 1
 }
 
 DBNAME=edge
@@ -13,28 +14,24 @@ DBUSER=edge
 
 DUMPFILE="$(ls $SCRIPTDIR/*.pgdump | sort | tail -n 1)"
 
-while (( "$#" )); do
+while (("$#")); do
 	case $1 in
-	--dbuser)
-		shift && DBUSER="$1" || help
-		;;
-	--dbname)
-		shift && DBNAME="$1" || help
-		;;
-	*)
-		if [ -e "$1" ] ; then
-			DUMPFILE="$1"
-		else
-			echo "file does not exist"
-			exit 1
-		fi
-		;;
+	--dbuser)
+		shift && DBUSER="$1" || help
+		;;
+	--dbname)
+		shift && DBNAME="$1" || help
+		;;
+	*)
+		if [ -e "$1" ]; then
+			DUMPFILE="$1"
+		else
+			echo "file does not exist"
+			exit 1
+		fi
+		;;
 	esac
 	shift
 done
 
-psql -U "$DBUSER" "$DBNAME" < $DUMPFILE
-
-
-
+psql -U "$DBUSER" "$DBNAME" <"$DUMPFILE"
diff --git a/dbdumps/seed_db.py b/dbdumps/seed_db.py
index 84a0e02..0db1944 100644
--- a/dbdumps/seed_db.py
+++ b/dbdumps/seed_db.py
@@ -1,27 +1,10 @@
-# from flask import Flask
-# from flask_admin import Admin
 import click
-
 from sqlalchemy import create_engine, select
 from sqlalchemy.orm import Session, sessionmaker
-import os
-
-from model import Base, RiskVector, Test, T
-
-import sqlite3
+from model import Base, FishAiData, GpsData, RiskVector, Test
 from model.internetdata import InternetData
-from model import FishAiData, InternetData, GpsData
-
-# app = Flask(__name__)
-# app.config.from_object('config.defaults')
-
-# if 'ENVIRONMENT' in os.environ:
-#     app.config.from_envvar('ENVIRONMENT')
-
-# set optional bootswatch theme
-# app.config['FLASK_ADMIN_SWATCH'] = 'cerulean'
 
 
 def clear_db(session: Session):
     result = session.execute(select(Test))
@@ -48,22 +31,24 @@ def clear_db(session: Session):
 
 
 @click.command()
-@click.option('--cleardb', default=False, is_flag=True)
-@click.option('--dbname', default="edge")
-@click.option('--dbuser', default="edge")
-@click.option('--force', default=False, is_flag=True)
+@click.option("--cleardb", default=False, is_flag=True)
+@click.option("--dbname", default="edge")
+@click.option("--dbuser", default="edge")
+@click.option("--force", default=False, is_flag=True)
 def cli(cleardb, dbname, dbuser, force):
-
-    if not force :
+    if not force:
         import sys
+
         print("This script is deprecated! run `venv/bin/alembic upgrade head` instead.")
         print("if you really want to run this script, rerun with --force")
         sys.exit(1)
-    
+
     # engine = create_engine("sqlite:///db.db", echo=True)
-    engine = create_engine("postgresql+psycopg2://%s@/%s"%(dbuser, dbname), echo=True)
+    engine = create_engine(f"postgresql+psycopg2://{dbuser}@/{dbname}", echo=True)
+
     SessionMaker = sessionmaker(engine)
     session = SessionMaker()
+
     if cleardb:
         clear_db(session)
         Base.metadata.drop_all(engine)
diff --git a/edge_http.py b/edge_http.py
index 9ca5f67..1e1dfff 100644
--- a/edge_http.py
+++ b/edge_http.py
@@ -1,58 +1,77 @@
-import click
+import os
 
-from flask import Flask, g
+import click
+from flask import Flask
 from flask_admin import Admin
 from flask_admin.contrib.sqla import ModelView
-
 from sqlalchemy import text
-from sqlalchemy.orm import scoped_session, sessionmaker
-import os
 
+from api import deckhand
 from db import db
+from model import (
+    AifishData,
+    BoatSchedule,
+    DeckhandEventRaw,
+    DeckhandEventView,
+    GpsData,
+    InternetDataView,
+    OndeckData,
+    RiskVectorModelView,
+    TestModelView,
+)
 
 app = Flask(__name__)
-app.config.from_object('config.defaults')
-if 'ENVIRONMENT' in os.environ:
-    app.config.from_envvar('ENVIRONMENT')
+app.config.from_object("config.defaults")
+if "ENVIRONMENT" in os.environ:
+    app.config.from_envvar("ENVIRONMENT")
 
-# set optional bootswatch theme
-app.config['FLASK_ADMIN_SWATCH'] = 'cerulean'
+# Set optional bootswatch theme
+app.config["FLASK_ADMIN_SWATCH"] = "cerulean"
 
-app.config["SQLALCHEMY_DATABASE_URI"] = "postgresql+psycopg2://%s@/%s"%(app.config['DBUSER'], app.config['DBNAME'])
+app.config["SQLALCHEMY_DATABASE_URI"] = "postgresql+psycopg2://{}@/{}".format(
+    app.config["DBUSER"],
+    app.config["DBNAME"],
+)
 
 # engine = create_engine("postgresql+psycopg2://%s@/%s"%(app.config['DBUSER'], app.config['DBNAME']), echo=True)
-
 # SessionMaker = scoped_session(sessionmaker(bind=engine))
 
 db.init_app(app)
-from model import *
-from vector import GpsVector
 
 with app.app_context():
     # Base.metadata.create_all(engine)
     db.metadata.create_all(db.engine)
-    
+
     from alembic import command, config
+
     cfg = config.Config("alembic.ini")
     command.upgrade(cfg, "head")
 
     with db.engine.begin() as connection:
-        connection.execute(text("SELECT setval('aifishdata_id_seq', (SELECT MAX(id) FROM aifishdata));"))
-        connection.execute(text("SELECT setval('boatschedules_id_seq', (SELECT MAX(id) FROM boatschedules));"))
-        connection.execute(text("SELECT setval('deckhandevents_id_seq', (SELECT MAX(id) FROM deckhandevents));"))
-        connection.execute(text("SELECT setval('internetdata_id_seq', (SELECT MAX(id) FROM internetdata));"))
-        connection.execute(text("SELECT setval('ondeckdata_id_seq', (SELECT MAX(id) FROM ondeckdata));"))
+        connection.execute(
+            text("SELECT setval('aifishdata_id_seq', (SELECT MAX(id) FROM aifishdata));")
+        )
+        connection.execute(
+            text("SELECT setval('boatschedules_id_seq', (SELECT MAX(id) FROM boatschedules));")
+        )
+        connection.execute(
+            text("SELECT setval('deckhandevents_id_seq', (SELECT MAX(id) FROM deckhandevents));")
+        )
+        connection.execute(
+            text("SELECT setval('internetdata_id_seq', (SELECT MAX(id) FROM internetdata));")
+        )
+        connection.execute(
+            text("SELECT setval('ondeckdata_id_seq', (SELECT MAX(id) FROM ondeckdata));")
+        )
         connection.execute(text("SELECT setval('tests_id_seq', (SELECT MAX(id) FROM tests));"))
        connection.execute(text("SELECT setval('tracks_id_seq', (SELECT MAX(id) FROM tracks));"))
         connection.execute(text("SELECT setval('vectors_id_seq', (SELECT MAX(id) FROM vectors));"))
 
-from api import deckhand
-app.register_blueprint(deckhand, url_prefix='/deckhand')
-
+app.register_blueprint(deckhand, url_prefix="/deckhand")
 
-admin = Admin(app, name='Risk Assesment', template_mode='bootstrap3')
+admin = Admin(app, name="Risk Assessment", template_mode="bootstrap3")
 
 # work with session
 admin.add_view(RiskVectorModelView(db.session))
@@ -63,14 +82,14 @@ admin.add_view(InternetDataView(db.session))
 admin.add_view(ModelView(DeckhandEventRaw, db.session))
 admin.add_view(ModelView(DeckhandEventView, db.session))
 
-
 admin.add_view(ModelView(BoatSchedule, db.session))
 
 
 @click.command()
-@click.option('--port', default=50000)
-def serve(port):
+@click.option("--port", default=50000)
+def serve(port: int | str) -> None:
     app.run(host="0.0.0.0", port=port)
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     serve()
diff --git a/gps_fetch.py b/gps_fetch.py
index 6ac9c1d..bb08a40 100644
--- a/gps_fetch.py
+++ b/gps_fetch.py
@@ -1,41 +1,39 @@
-
-from datetime import datetime,timezone
-from dateutil.parser import isoparse
-import click
-import codecs
 import os
+import re
+import time
+from datetime import UTC, datetime
 from pathlib import Path
+
+import click
 import psycopg2
-from psycopg2.pool import SimpleConnectionPool
-import re
 import schedule
-import subprocess
-import time
+from dateutil.parser import isoparse
+from flask.config import Config as FlaskConfig
+from psycopg2.pool import SimpleConnectionPool
 
+flaskconfig = FlaskConfig(root_path="")
 
-from flask.config import Config as FlaskConfig
-flaskconfig = FlaskConfig(root_path='')
+flaskconfig.from_object("config.defaults")
+if "ENVIRONMENT" in os.environ:
+    flaskconfig.from_envvar("ENVIRONMENT")
 
-flaskconfig.from_object('config.defaults')
-if 'ENVIRONMENT' in os.environ:
-    flaskconfig.from_envvar('ENVIRONMENT')
 
 def thalos_gps_filename_date(filename: str) -> datetime:
-    m = re.match('.*(\d{8}).?(\d{6})\.txt', filename)
+    m = re.match(r".*(\d{8}).?(\d{6})\.txt", filename)
     if not m:
         return None
     return isoparse(m[1] + " " + m[2] + "+00:00")
 
-def gps_fetch(cpool: SimpleConnectionPool, thalos_dir: Path):
 
+def gps_fetch(cpool: SimpleConnectionPool, thalos_dir: Path):
     conn: psycopg2.connection = cpool.getconn()
     gps_files = [x for x in thalos_dir.iterdir()]
     dt_index = {}
     for gps_file in gps_files:
-        m = re.match('.*(\d{8}).?(\d{6})\.txt', gps_file.name)
+        m = re.match(r".*(\d{8}).?(\d{6})\.txt", gps_file.name)
         if not m:
             continue
-        dt = datetime.strptime(m[1] + " " + m[2] + "Z", '%Y%m%d %H%M%S%z')
+        dt = datetime.strptime(m[1] + " " + m[2] + "Z", "%Y%m%d %H%M%S%z")
         dt_index[dt] = gps_file
 
     new_dts = []
@@ -44,37 +42,46 @@ def gps_fetch(cpool: SimpleConnectionPool, thalos_dir: Path):
     if len(dt_index.keys()) > 0:
         try:
             with conn.cursor() as cur:
-                args = ','.join(
-                    cur.mogrify("(%s)", [dt]).decode('utf-8')
-                    for dt in dt_index.keys()
-                )
-                cur.execute("""WITH t (file_dt) AS ( VALUES """ + args + """ )
-                    SELECT t.file_dt FROM t
+                args = ",".join(cur.mogrify("(%s)", [dt]).decode("utf-8") for dt in dt_index)
+                cur.execute(
+                    """WITH t (file_dt) AS ( VALUES """
+                    + args
+                    + """ )
+                    SELECT t.file_dt FROM t
                     LEFT JOIN gpsdata ON t.file_dt = gpsdata.gps_datetime
-                    WHERE gpsdata.gps_datetime IS NULL;""")
+                    WHERE gpsdata.gps_datetime IS NULL;"""
+                )
                 # print(cur.query)
                 # print(cur.description)
                 rows = cur.fetchall()
                 new_dts.extend(col for cols in rows for col in cols)
-            insert_tuples=[]
+            insert_tuples = []
             for new_dt in new_dts:
-                new_file: Path = dt_index[new_dt.astimezone(timezone.utc)]
+                new_file: Path = dt_index[new_dt.astimezone(UTC)]
                 with new_file.open() as data:
                     line = data.readline()
-                    m = re.match('([+-]?(\d+(\.\d*)?|\.\d+)).*,.*?([+-]?(\d+(\.\d*)?|\.\d+))', line)
+                    m = re.match(
+                        r"([+-]?(\d+(\.\d*)?|\.\d+)).*,.*?([+-]?(\d+(\.\d*)?|\.\d+))", line
+                    )
                     if m:
                         lat = m[1]
                         lon = m[4]
-                        insert_tuples.append((new_dt, lat, lon,))
+                        insert_tuples.append(
+                            (
+                                new_dt,
+                                lat,
+                                lon,
+                            )
+                        )
             if len(insert_tuples) > 0:
-                click.echo('inserting {} new gps coords'.format(len(insert_tuples)))
+                click.echo(f"inserting {len(insert_tuples)} new gps coords")
                 with conn.cursor() as cur:
                     cur.executemany(
                         "INSERT INTO gpsdata (gps_datetime, lat, lon) VALUES (%s, %s, %s);",
-                        insert_tuples
+                        insert_tuples,
                     )
                     # print(cur.query)
                 conn.commit()
@@ -83,22 +90,20 @@
 
 
 @click.command()
-@click.option('--dbname', default=flaskconfig.get('DBNAME'))
-@click.option('--dbuser', default=flaskconfig.get('DBUSER'))
-@click.option('--thalos_gps_dir', default=flaskconfig.get('THALOS_GPS_DIR'))
+@click.option("--dbname", default=flaskconfig.get("DBNAME"))
+@click.option("--dbuser", default=flaskconfig.get("DBUSER"))
+@click.option("--thalos_gps_dir", default=flaskconfig.get("THALOS_GPS_DIR"))
 def main(dbname, dbuser, thalos_gps_dir):
-
     thalos_gps_dir = Path(thalos_gps_dir)
 
     cpool = SimpleConnectionPool(1, 1, database=dbname, user=dbuser)
-
-    def runonce(cpool, thalos_gps_dir ):
+
+    def runonce(cpool, thalos_gps_dir):
         gps_fetch(cpool, thalos_gps_dir)
         return schedule.CancelJob
 
-    schedule.every(1).seconds.do(runonce, cpool, thalos_gps_dir )
-    schedule.every(15).minutes.do(gps_fetch, cpool, thalos_gps_dir )
-
+    schedule.every(1).seconds.do(runonce, cpool, thalos_gps_dir)
+    schedule.every(15).minutes.do(gps_fetch, cpool, thalos_gps_dir)
 
     while 1:
         n = schedule.idle_seconds()
@@ -108,9 +113,10 @@ def runonce(cpool, thalos_gps_dir ):
             break
         elif n > 0:
             # sleep exactly the right amount of time
-            click.echo("sleeping for: {}".format(n))
+            click.echo(f"sleeping for: {n}")
             time.sleep(n)
         schedule.run_pending()
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     main()
diff --git a/migrations/README.md b/migrations/README.md
index 35db6ca..6ee9830 100644
--- a/migrations/README.md
+++ b/migrations/README.md
@@ -2,15 +2,15 @@
 
 This folder is reserved for Alembic database migration scripts and configs.
 
-# Using Alembic
+## Using Alembic
 
 Alembic is installed as one of the pip requirements.txt packages. Using a venv is suggested. Exporting the `ENVIRONMENT` config is required.
 
-The `alembic` cli command provides a suite of tools to manage database migrations. 
+The `alembic` CLI command provides a suite of tools to manage database migrations.
 
 Alembic migration scripts can be run forwards or backwards with `upgrade` and `downgrade`
 
-```
+```bash
 (venv) $ export ENVIRONMENT="config/queen_mary.py"
 (venv) $ alembic downgrade ba08d4e11cc7
 INFO  [alembic.runtime.migration] Context impl PostgresqlImpl.
@@ -22,13 +22,12 @@
 INFO  [alembic.runtime.migration] Context impl PostgresqlImpl.
 INFO  [alembic.runtime.migration] Will assume transactional DDL.
 INFO  [alembic.runtime.migration] Running upgrade ba08d4e11cc7 -> fdfd9e708602, add_elog_timegap_vector_row
 INFO  [alembic.runtime.migration] Running upgrade fdfd9e708602 -> e718ddd7c0bd, add_track_table
-(venv) $ 
+(venv) $
 ```
 
 Alembic provides a tool that auto-generates new migration scripts from detected differences between the db schema and the python db model classes.
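An autogenerated revision is a small Python module whose `upgrade()` and `downgrade()` functions mirror each other with `op` calls; it should be reviewed by hand before committing, since autogenerate can miss or misread model changes. A minimal sketch of the shape, modeled on the revision files later in this diff (the revision id and column name below are illustrative placeholders, not real migrations from this repository):

```python
"""new_migration_filename

Revision ID: 0a1b2c3d4e5f
Revises: ba08d4e11cc7
Create Date: 2024-10-23 12:00:00.000000

"""

import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "0a1b2c3d4e5f"
down_revision = "ba08d4e11cc7"
branch_labels = None
depends_on = None


def upgrade() -> None:
    # Forward step: add a nullable column so existing rows remain valid.
    op.add_column("vectors", sa.Column("example_column", sa.String(), nullable=True))


def downgrade() -> None:
    # Reverse step: drop exactly what upgrade() added.
    op.drop_column("vectors", "example_column")
```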
- -``` +```bash (venv) $ export ENVIRONMENT="config/queen_mary.py" (venv) $ alembic revision --autogenerate -m new_migration_filename ``` diff --git a/migrations/env.py b/migrations/env.py index 96bc046..0335173 100644 --- a/migrations/env.py +++ b/migrations/env.py @@ -1,9 +1,11 @@ +import os from logging.config import fileConfig -from sqlalchemy import engine_from_config -from sqlalchemy import pool - from alembic import context +from flask.config import Config as FlaskConfig +from sqlalchemy import engine_from_config, pool + +from model import Base # this is the Alembic Config object, which provides # access to the values within the .ini file in use. @@ -18,7 +20,8 @@ # for 'autogenerate' support # from myapp import mymodel # target_metadata = mymodel.Base.metadata -from model import Base + + target_metadata = Base.metadata # other values from the config, defined by the needs of env.py, @@ -26,15 +29,14 @@ # my_important_option = config.get_main_option("my_important_option") # ... etc. -import os -from flask.config import Config as FlaskConfig -flaskconfig = FlaskConfig(root_path='') -flaskconfig.from_object('config.defaults') -if 'ENVIRONMENT' in os.environ: - flaskconfig.from_envvar('ENVIRONMENT') +flaskconfig = FlaskConfig(root_path="") + +flaskconfig.from_object("config.defaults") +if "ENVIRONMENT" in os.environ: + flaskconfig.from_envvar("ENVIRONMENT") -url = "postgresql+psycopg2://%s@/%s"%(flaskconfig['DBUSER'], flaskconfig['DBNAME']) +url = "postgresql+psycopg2://{}@/{}".format(flaskconfig["DBUSER"], flaskconfig["DBNAME"]) def run_migrations_offline() -> None: @@ -69,7 +71,7 @@ def run_migrations_online() -> None: """ alembicconfig = config.get_section(config.config_ini_section, {}) - alembicconfig['sqlalchemy.url'] = url + alembicconfig["sqlalchemy.url"] = url connectable = engine_from_config( alembicconfig, prefix="sqlalchemy.", @@ -77,9 +79,7 @@ def run_migrations_online() -> None: ) with connectable.connect() as connection: - context.configure( - connection=connection, target_metadata=target_metadata - ) + context.configure(connection=connection, target_metadata=target_metadata) with context.begin_transaction(): context.run_migrations() diff --git a/migrations/versions/04eaff9bcc55_.py b/migrations/versions/04eaff9bcc55_.py index d961078..c631e88 100644 --- a/migrations/versions/04eaff9bcc55_.py +++ b/migrations/versions/04eaff9bcc55_.py @@ -1,16 +1,13 @@ """first Revision ID: 04eaff9bcc55 -Revises: +Revises: Create Date: 2023-04-10 12:56:26.377798 """ -from alembic import op -import sqlalchemy as sa - # revision identifiers, used by Alembic. -revision = '04eaff9bcc55' +revision = "04eaff9bcc55" down_revision = None branch_labels = None depends_on = None diff --git a/migrations/versions/17911f3ffb3b_new_vector_rows_1.py b/migrations/versions/17911f3ffb3b_new_vector_rows_1.py index 2706e8f..257e39d 100644 --- a/migrations/versions/17911f3ffb3b_new_vector_rows_1.py +++ b/migrations/versions/17911f3ffb3b_new_vector_rows_1.py @@ -5,13 +5,13 @@ Create Date: 2023-06-02 14:22:38.910122 """ -from alembic import op -import sqlalchemy as sa +import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. 
-revision = '17911f3ffb3b' -down_revision = 'f835aa8c569a' +revision = "17911f3ffb3b" +down_revision = "f835aa8c569a" branch_labels = None depends_on = None @@ -22,18 +22,17 @@ def upgrade() -> None: for row in op.get_bind().execute("select id, name from vectors where name = 'InternetVector';"): if row: found_id = row[0] - + if found_id is None: - op.get_bind().execute('insert into vectors (name, configblob) values (\'InternetVector\', \'{"target_ips":["8.8.8.8","1.1.1.1","208.67.222.222","9.9.9.9"],"run_traceroute":false}\');') - - + op.get_bind().execute( + 'insert into vectors (name, configblob) values (\'InternetVector\', \'{"target_ips":["8.8.8.8","1.1.1.1","208.67.222.222","9.9.9.9"],"run_traceroute":false}\');' + ) def downgrade() -> None: - - op.get_bind().execute("delete from tests where vector_id = (select id from vectors where name = 'InternetVector');"); + op.get_bind().execute( + "delete from tests where vector_id = (select id from vectors where name = 'InternetVector');" + ) - t = sa.table('vectors') + t = sa.table("vectors") op.get_bind().execute("delete from vectors where name = 'InternetVector';") - - diff --git a/migrations/versions/1d1b10e054af_add_timezones.py b/migrations/versions/1d1b10e054af_add_timezones.py index f67e3eb..07fe932 100644 --- a/migrations/versions/1d1b10e054af_add_timezones.py +++ b/migrations/versions/1d1b10e054af_add_timezones.py @@ -5,26 +5,25 @@ Create Date: 2023-07-12 14:59:24.746444 """ -from alembic import op -import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. -revision = '1d1b10e054af' -down_revision = '58dd42108a22' +revision = "1d1b10e054af" +down_revision = "58dd42108a22" branch_labels = None depends_on = None def upgrade() -> None: # ### commands auto generated by Alembic - please adjust! ### - op.execute('ALTER TABLE boatschedules ALTER COLUMN datetime TYPE timestamp with time zone;') - op.execute('ALTER TABLE deckhandevents ALTER COLUMN datetime TYPE timestamp with time zone;') - op.execute('ALTER TABLE fishaidata ALTER COLUMN datetime TYPE timestamp with time zone;') - op.execute('ALTER TABLE gpsdata ALTER COLUMN datetime TYPE timestamp with time zone;') - op.execute('ALTER TABLE internetdata ALTER COLUMN datetime TYPE timestamp with time zone;') - op.execute('ALTER TABLE ondeckdata ALTER COLUMN datetime TYPE timestamp with time zone;') - op.execute('ALTER TABLE tests ALTER COLUMN datetime TYPE timestamp with time zone;') + op.execute("ALTER TABLE boatschedules ALTER COLUMN datetime TYPE timestamp with time zone;") + op.execute("ALTER TABLE deckhandevents ALTER COLUMN datetime TYPE timestamp with time zone;") + op.execute("ALTER TABLE fishaidata ALTER COLUMN datetime TYPE timestamp with time zone;") + op.execute("ALTER TABLE gpsdata ALTER COLUMN datetime TYPE timestamp with time zone;") + op.execute("ALTER TABLE internetdata ALTER COLUMN datetime TYPE timestamp with time zone;") + op.execute("ALTER TABLE ondeckdata ALTER COLUMN datetime TYPE timestamp with time zone;") + op.execute("ALTER TABLE tests ALTER COLUMN datetime TYPE timestamp with time zone;") # op.execute('ALTER TABLE tests ALTER COLUMN datetime_from TYPE timestamp with time zone;') # op.execute('ALTER TABLE tests ALTER COLUMN datetime_to TYPE timestamp with time zone;') # op.execute('ALTER TABLE video_files ALTER COLUMN last_modified TYPE timestamp with time zone;') @@ -35,11 +34,11 @@ def upgrade() -> None: def downgrade() -> None: # ### commands auto generated by Alembic - please adjust! 
### - op.execute('ALTER TABLE boatschedules ALTER COLUMN datetime TYPE timestamp without time zone;') - op.execute('ALTER TABLE deckhandevents ALTER COLUMN datetime TYPE timestamp without time zone;') - op.execute('ALTER TABLE fishaidata ALTER COLUMN datetime TYPE timestamp without time zone;') - op.execute('ALTER TABLE gpsdata ALTER COLUMN datetime TYPE timestamp without time zone;') - op.execute('ALTER TABLE internetdata ALTER COLUMN datetime TYPE timestamp without time zone;') - op.execute('ALTER TABLE ondeckdata ALTER COLUMN datetime TYPE timestamp without time zone;') - op.execute('ALTER TABLE tests ALTER COLUMN datetime TYPE timestamp without time zone;') + op.execute("ALTER TABLE boatschedules ALTER COLUMN datetime TYPE timestamp without time zone;") + op.execute("ALTER TABLE deckhandevents ALTER COLUMN datetime TYPE timestamp without time zone;") + op.execute("ALTER TABLE fishaidata ALTER COLUMN datetime TYPE timestamp without time zone;") + op.execute("ALTER TABLE gpsdata ALTER COLUMN datetime TYPE timestamp without time zone;") + op.execute("ALTER TABLE internetdata ALTER COLUMN datetime TYPE timestamp without time zone;") + op.execute("ALTER TABLE ondeckdata ALTER COLUMN datetime TYPE timestamp without time zone;") + op.execute("ALTER TABLE tests ALTER COLUMN datetime TYPE timestamp without time zone;") # ### end Alembic commands ### diff --git a/migrations/versions/47ff3fca73a4_vector_schedulestring.py b/migrations/versions/47ff3fca73a4_vector_schedulestring.py index 85c89ee..cf6679f 100644 --- a/migrations/versions/47ff3fca73a4_vector_schedulestring.py +++ b/migrations/versions/47ff3fca73a4_vector_schedulestring.py @@ -5,24 +5,24 @@ Create Date: 2023-06-06 13:07:09.704169 """ -from alembic import op -import sqlalchemy as sa +import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. -revision = '47ff3fca73a4' -down_revision = '5e4898954923' +revision = "47ff3fca73a4" +down_revision = "5e4898954923" branch_labels = None depends_on = None def upgrade() -> None: # ### commands auto generated by Alembic - please adjust! ### - op.add_column('vectors', sa.Column('schedule_string', sa.String(), nullable=True)) + op.add_column("vectors", sa.Column("schedule_string", sa.String(), nullable=True)) # ### end Alembic commands ### def downgrade() -> None: # ### commands auto generated by Alembic - please adjust! ### - op.drop_column('vectors', 'schedule_string') + op.drop_column("vectors", "schedule_string") # ### end Alembic commands ### diff --git a/migrations/versions/495235ece5f0_ondeckdata_unique.py b/migrations/versions/495235ece5f0_ondeckdata_unique.py index 73e11c3..2cb9fe4 100644 --- a/migrations/versions/495235ece5f0_ondeckdata_unique.py +++ b/migrations/versions/495235ece5f0_ondeckdata_unique.py @@ -5,19 +5,17 @@ Create Date: 2023-10-10 15:43:07.752816 """ + from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql # revision identifiers, used by Alembic. 
-revision = '495235ece5f0' -down_revision = 'f48359cf7456' +revision = "495235ece5f0" +down_revision = "f48359cf7456" branch_labels = None depends_on = None def upgrade() -> None: - op.execute(""" with duped as ( select distinct on (video_uri) video_uri, id @@ -28,9 +26,8 @@ def upgrade() -> None: where video_uri in (select video_uri from duped) and id not in (select id from duped) ;""") - op.create_unique_constraint(None, 'ondeckdata', ['video_uri']) - + op.create_unique_constraint(None, "ondeckdata", ["video_uri"]) def downgrade() -> None: - op.drop_constraint(None, 'ondeckdata', type_='unique') + # drop_constraint(None, ...) raises at runtime; "ondeckdata_video_uri_key" is Postgres's default name + # for the unnamed unique constraint created in upgrade() (assumes no custom naming convention) + op.drop_constraint("ondeckdata_video_uri_key", "ondeckdata", type_="unique") diff --git a/migrations/versions/58dd42108a22_new_vid_file_table.py b/migrations/versions/58dd42108a22_new_vid_file_table.py index 3fd7259..9c68cdb 100644 --- a/migrations/versions/58dd42108a22_new_vid_file_table.py +++ b/migrations/versions/58dd42108a22_new_vid_file_table.py @@ -5,29 +5,30 @@ Create Date: 2023-06-16 18:15:23.314916 """ -from alembic import op -import sqlalchemy as sa +import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. -revision = '58dd42108a22' -down_revision = 'f9dbf07180af' +revision = "58dd42108a22" +down_revision = "f9dbf07180af" branch_labels = None depends_on = None def upgrade() -> None: - op.create_table('video_files', - sa.Column('original_path', sa.String(), nullable=False), - sa.Column('last_modified', sa.DateTime(timezone=True), nullable=False), - sa.Column('decrypted_path', sa.String(), nullable=True), - sa.Column('decrypted_datetime', sa.DateTime(timezone=True), nullable=True), - sa.Column('stdout', sa.String(), nullable=True), - sa.Column('stderr', sa.String(), nullable=True), - sa.PrimaryKeyConstraint('original_path') + op.create_table( + "video_files", + sa.Column("original_path", sa.String(), nullable=False), + sa.Column("last_modified", sa.DateTime(timezone=True), nullable=False), + sa.Column("decrypted_path", sa.String(), nullable=True), + sa.Column("decrypted_datetime", sa.DateTime(timezone=True), nullable=True), + sa.Column("stdout", sa.String(), nullable=True), + sa.Column("stderr", sa.String(), nullable=True), + sa.PrimaryKeyConstraint("original_path"), ) pass def downgrade() -> None: - op.drop_table('video_files') + op.drop_table("video_files") diff --git a/migrations/versions/5e4898954923_ondeckdata_table.py b/migrations/versions/5e4898954923_ondeckdata_table.py index c65f96e..978cebb 100644 --- a/migrations/versions/5e4898954923_ondeckdata_table.py +++ b/migrations/versions/5e4898954923_ondeckdata_table.py @@ -5,30 +5,33 @@ Create Date: 2023-06-05 14:09:26.594081 """ -from alembic import op -import sqlalchemy as sa +import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. -revision = '5e4898954923' -down_revision = '17911f3ffb3b' +revision = "5e4898954923" +down_revision = "17911f3ffb3b" branch_labels = None depends_on = None def upgrade() -> None: # ### commands auto generated by Alembic - please adjust! 
### - op.create_table('ondeckdata', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('video_uri', sa.String(), nullable=True), - sa.Column('cocoannotations_uri', sa.String(), nullable=True), - sa.Column('datetime', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True), - sa.PrimaryKeyConstraint('id') + op.create_table( + "ondeckdata", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("video_uri", sa.String(), nullable=True), + sa.Column("cocoannotations_uri", sa.String(), nullable=True), + sa.Column( + "datetime", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=True + ), + sa.PrimaryKeyConstraint("id"), ) # ### end Alembic commands ### def downgrade() -> None: # ### commands auto generated by Alembic - please adjust! ### - op.drop_table('ondeckdata') + op.drop_table("ondeckdata") # ### end Alembic commands ### diff --git a/migrations/versions/5fdb864a1bbb_refactor_aifish.py b/migrations/versions/5fdb864a1bbb_refactor_aifish.py index eba3093..7d36048 100644 --- a/migrations/versions/5fdb864a1bbb_refactor_aifish.py +++ b/migrations/versions/5fdb864a1bbb_refactor_aifish.py @@ -5,43 +5,58 @@ Create Date: 2023-12-12 12:43:34.309532 """ -from alembic import op + import sqlalchemy as sa +from alembic import op from sqlalchemy.dialects import postgresql # revision identifiers, used by Alembic. -revision = '5fdb864a1bbb' -down_revision = 'e718ddd7c0bd' +revision = "5fdb864a1bbb" +down_revision = "e718ddd7c0bd" branch_labels = None depends_on = None def upgrade() -> None: - - op.create_table('aifishdata', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('video_uri', sa.String(), nullable=True), - sa.Column('processing_uri', sa.String(), nullable=True), - sa.Column('output_uri', sa.String(), nullable=True), - sa.Column('datetime', sa.DateTime(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True), - sa.Column('count', sa.Integer(), nullable=True), - sa.Column('runtimems', sa.REAL(), nullable=True), - sa.Column('detection_confidence', sa.REAL(), nullable=True), - sa.Column('status', sa.String(), nullable=True), - sa.ForeignKeyConstraint(['video_uri'], ['video_files.decrypted_path'], ), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('video_uri') + op.create_table( + "aifishdata", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("video_uri", sa.String(), nullable=True), + sa.Column("processing_uri", sa.String(), nullable=True), + sa.Column("output_uri", sa.String(), nullable=True), + sa.Column( + "datetime", + sa.DateTime(timezone=True), + server_default=sa.text("CURRENT_TIMESTAMP"), + nullable=True, + ), + sa.Column("count", sa.Integer(), nullable=True), + sa.Column("runtimems", sa.REAL(), nullable=True), + sa.Column("detection_confidence", sa.REAL(), nullable=True), + sa.Column("status", sa.String(), nullable=True), + sa.ForeignKeyConstraint( + ["video_uri"], + ["video_files.decrypted_path"], + ), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("video_uri"), ) - op.drop_table('fishaidata') - + op.drop_table("fishaidata") def downgrade() -> None: - op.create_table('fishaidata', - sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False), - sa.Column('video_uri', sa.VARCHAR(), autoincrement=False, nullable=True), - sa.Column('cocoannotations_uri', sa.VARCHAR(), autoincrement=False, nullable=True), - sa.Column('datetime', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', 
name='fishaidata_pkey') + op.create_table( + "fishaidata", + sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False), + sa.Column("video_uri", sa.VARCHAR(), autoincrement=False, nullable=True), + sa.Column("cocoannotations_uri", sa.VARCHAR(), autoincrement=False, nullable=True), + sa.Column( + "datetime", + postgresql.TIMESTAMP(timezone=True), + server_default=sa.text("CURRENT_TIMESTAMP"), + autoincrement=False, + nullable=True, + ), + sa.PrimaryKeyConstraint("id", name="fishaidata_pkey"), ) - op.drop_table('aifishdata') + op.drop_table("aifishdata") diff --git a/migrations/versions/643148911953_deckhand_json_views.py b/migrations/versions/643148911953_deckhand_json_views.py index d6bea48..7df0cfb 100644 --- a/migrations/versions/643148911953_deckhand_json_views.py +++ b/migrations/versions/643148911953_deckhand_json_views.py @@ -5,13 +5,12 @@ Create Date: 2023-08-16 11:38:18.120705 """ -from alembic import op -import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. -revision = '643148911953' -down_revision = '677a2f2884e1' +revision = "643148911953" +down_revision = "677a2f2884e1" branch_labels = None depends_on = None @@ -50,10 +49,9 @@ def upgrade() -> None: WHERE a.jsonblob->>'eventType' = 'longlineEvent' GROUP BY a.id, a.jsonblob, a.datetime, bycatchcount; """) - def downgrade() -> None: - op.get_bind().execute('DROP VIEW deckhandevents_mostrecentlonglineevent_jsonextracted;') - op.get_bind().execute('DROP VIEW deckhandevents_mostrecenteventid_nophoto;') + op.get_bind().execute("DROP VIEW deckhandevents_mostrecentlonglineevent_jsonextracted;") + op.get_bind().execute("DROP VIEW deckhandevents_mostrecenteventid_nophoto;") pass diff --git a/migrations/versions/677a2f2884e1_s3uploadstable.py b/migrations/versions/677a2f2884e1_s3uploadstable.py index d10c22b..02fab78 100644 --- a/migrations/versions/677a2f2884e1_s3uploadstable.py +++ b/migrations/versions/677a2f2884e1_s3uploadstable.py @@ -5,28 +5,29 @@ Create Date: 2023-08-02 17:08:34.590190 """ -from alembic import op -import sqlalchemy as sa +import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. -revision = '677a2f2884e1' -down_revision = '97b633de0899' +revision = "677a2f2884e1" +down_revision = "97b633de0899" branch_labels = None depends_on = None def upgrade() -> None: # ### commands auto generated by Alembic - please adjust! ### - - op.create_table('s3uploads', - sa.Column('datetime', sa.DateTime(timezone=True), nullable=False), - sa.Column('tablename', sa.String(), nullable=False), + + op.create_table( + "s3uploads", + sa.Column("datetime", sa.DateTime(timezone=True), nullable=False), + sa.Column("tablename", sa.String(), nullable=False), ) # ### end Alembic commands ### def downgrade() -> None: # ### commands auto generated by Alembic - please adjust! ### - op.drop_table('s3uploads') + op.drop_table("s3uploads") # ### end Alembic commands ### diff --git a/migrations/versions/81b92a299311_gps_data_types.py b/migrations/versions/81b92a299311_gps_data_types.py index ecec9e5..33f4c31 100644 --- a/migrations/versions/81b92a299311_gps_data_types.py +++ b/migrations/versions/81b92a299311_gps_data_types.py @@ -5,13 +5,13 @@ Create Date: 2023-07-26 16:51:17.527649 """ -from alembic import op + import sqlalchemy as sa -from sqlalchemy.dialects import postgresql +from alembic import op # revision identifiers, used by Alembic. 
-revision = '81b92a299311' -down_revision = 'ecb326942445' +revision = "81b92a299311" +down_revision = "ecb326942445" branch_labels = None depends_on = None @@ -19,20 +19,22 @@ def upgrade() -> None: # ### commands auto generated by Alembic - please adjust! ### op.execute("delete from gpsdata;") - op.add_column('gpsdata', sa.Column('gps_datetime', sa.DateTime(timezone=True), nullable=False)) - op.add_column('gpsdata', sa.Column('lat', sa.Float(), nullable=False)) - op.add_column('gpsdata', sa.Column('lon', sa.Float(), nullable=False)) - op.drop_column('gpsdata', 'sentence') - op.drop_column('gpsdata', 'id') + op.add_column("gpsdata", sa.Column("gps_datetime", sa.DateTime(timezone=True), nullable=False)) + op.add_column("gpsdata", sa.Column("lat", sa.Float(), nullable=False)) + op.add_column("gpsdata", sa.Column("lon", sa.Float(), nullable=False)) + op.drop_column("gpsdata", "sentence") + op.drop_column("gpsdata", "id") # ### end Alembic commands ### def downgrade() -> None: # ### commands auto generated by Alembic - please adjust! ### op.execute("delete from gpsdata;") - op.add_column('gpsdata', sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False)) - op.add_column('gpsdata', sa.Column('sentence', sa.VARCHAR(), autoincrement=False, nullable=True)) - op.drop_column('gpsdata', 'lon') - op.drop_column('gpsdata', 'lat') - op.drop_column('gpsdata', 'gps_datetime') + op.add_column("gpsdata", sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False)) + op.add_column( + "gpsdata", sa.Column("sentence", sa.VARCHAR(), autoincrement=False, nullable=True) + ) + op.drop_column("gpsdata", "lon") + op.drop_column("gpsdata", "lat") + op.drop_column("gpsdata", "gps_datetime") # ### end Alembic commands ### diff --git a/migrations/versions/8304966281aa_reencode_files.py b/migrations/versions/8304966281aa_reencode_files.py index bcba903..f979ade 100644 --- a/migrations/versions/8304966281aa_reencode_files.py +++ b/migrations/versions/8304966281aa_reencode_files.py @@ -5,26 +5,39 @@ Create Date: 2023-09-20 15:15:56.043600 """ -from alembic import op + import sqlalchemy as sa -from sqlalchemy.dialects import postgresql +from alembic import op # revision identifiers, used by Alembic. 
-revision = '8304966281aa' -down_revision = 'd974c1aea745' +revision = "8304966281aa" +down_revision = "d974c1aea745" branch_labels = None depends_on = None def upgrade() -> None: - op.add_column('video_files', sa.Column('reencoded_path', sa.VARCHAR(), autoincrement=False, nullable=True)) - op.add_column('video_files', sa.Column('reencoded_datetime', sa.DateTime(timezone=True), autoincrement=False, nullable=True)) - op.add_column('video_files', sa.Column('reencoded_stdout', sa.VARCHAR(), autoincrement=False, nullable=True)) - op.add_column('video_files', sa.Column('reencoded_stderr', sa.VARCHAR(), autoincrement=False, nullable=True)) + op.add_column( + "video_files", sa.Column("reencoded_path", sa.VARCHAR(), autoincrement=False, nullable=True) + ) + op.add_column( + "video_files", + sa.Column( + "reencoded_datetime", sa.DateTime(timezone=True), autoincrement=False, nullable=True + ), + ) + op.add_column( + "video_files", + sa.Column("reencoded_stdout", sa.VARCHAR(), autoincrement=False, nullable=True), + ) + op.add_column( + "video_files", + sa.Column("reencoded_stderr", sa.VARCHAR(), autoincrement=False, nullable=True), + ) def downgrade() -> None: - op.drop_column('video_files', 'reencoded_stderr') - op.drop_column('video_files', 'reencoded_stdout') - op.drop_column('video_files', 'reencoded_datetime') - op.drop_column('video_files', 'reencoded_path') + op.drop_column("video_files", "reencoded_stderr") + op.drop_column("video_files", "reencoded_stdout") + op.drop_column("video_files", "reencoded_datetime") + op.drop_column("video_files", "reencoded_path") diff --git a/migrations/versions/97b633de0899_video_cam_name.py b/migrations/versions/97b633de0899_video_cam_name.py index dbe576d..c2b5072 100644 --- a/migrations/versions/97b633de0899_video_cam_name.py +++ b/migrations/versions/97b633de0899_video_cam_name.py @@ -5,24 +5,24 @@ Create Date: 2023-07-27 15:50:13.450935 """ -from alembic import op -import sqlalchemy as sa +import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. -revision = '97b633de0899' -down_revision = '81b92a299311' +revision = "97b633de0899" +down_revision = "81b92a299311" branch_labels = None depends_on = None def upgrade() -> None: # ### commands auto generated by Alembic - please adjust! ### - op.add_column('video_files', sa.Column('cam_name', sa.VARCHAR(), nullable=True)) + op.add_column("video_files", sa.Column("cam_name", sa.VARCHAR(), nullable=True)) # ### end Alembic commands ### def downgrade() -> None: # ### commands auto generated by Alembic - please adjust! ### - op.drop_column('video_files', 'cam_name') + op.drop_column("video_files", "cam_name") # ### end Alembic commands ### diff --git a/migrations/versions/b2f76c38a4a0_deckhand_gaps_score.py b/migrations/versions/b2f76c38a4a0_deckhand_gaps_score.py index b1801df..0a9b2cb 100644 --- a/migrations/versions/b2f76c38a4a0_deckhand_gaps_score.py +++ b/migrations/versions/b2f76c38a4a0_deckhand_gaps_score.py @@ -5,13 +5,12 @@ Create Date: 2023-09-19 12:48:47.152161 """ -from alembic import op -import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. 
-revision = 'b2f76c38a4a0' -down_revision = 'bbe04841c70d' +revision = "b2f76c38a4a0" +down_revision = "bbe04841c70d" branch_labels = None depends_on = None @@ -32,7 +31,6 @@ def upgrade() -> None: end; $$;""") - op.get_bind().execute("""CREATE OR REPLACE VIEW elog_time_gap_score as ( with paired_seq_deckhandevents as ( with A as ( @@ -68,5 +66,5 @@ def upgrade() -> None: def downgrade() -> None: - op.get_bind().execute('drop view elog_time_gap_score'); - op.get_bind().execute('drop function elog_time_gap_sigmoid'); + op.get_bind().execute("drop view elog_time_gap_score") + op.get_bind().execute("drop function elog_time_gap_sigmoid") diff --git a/migrations/versions/b78dce0f5492_ondeck_json_columns.py b/migrations/versions/b78dce0f5492_ondeck_json_columns.py index 3fb7805..1ce2427 100644 --- a/migrations/versions/b78dce0f5492_ondeck_json_columns.py +++ b/migrations/versions/b78dce0f5492_ondeck_json_columns.py @@ -5,35 +5,33 @@ Create Date: 2023-08-16 14:16:31.080353 """ -from alembic import op + import sqlalchemy as sa -from sqlalchemy.dialects import postgresql +from alembic import op # revision identifiers, used by Alembic. -revision = 'b78dce0f5492' -down_revision = '643148911953' +revision = "b78dce0f5492" +down_revision = "643148911953" branch_labels = None depends_on = None def upgrade() -> None: + op.add_column("ondeckdata", sa.Column("overallcount", sa.Integer(), nullable=True)) + op.add_column("ondeckdata", sa.Column("overallruntimems", sa.REAL(), nullable=True)) + op.add_column("ondeckdata", sa.Column("tracked_confidence", sa.REAL(), nullable=True)) - op.add_column('ondeckdata', sa.Column('overallcount', sa.Integer(), nullable=True)) - op.add_column('ondeckdata', sa.Column('overallruntimems', sa.REAL(), nullable=True)) - op.add_column('ondeckdata', sa.Column('tracked_confidence', sa.REAL(), nullable=True)) - - - op.create_unique_constraint('uq_video_files_decrypted_path', 'video_files', ['decrypted_path']) - - op.get_bind().execute('delete from ondeckdata where id in (select ondeckdata.id from ondeckdata left join video_files on video_uri = decrypted_path where decrypted_path is null);') - op.create_foreign_key(None, 'ondeckdata', 'video_files', ['video_uri'], ['decrypted_path']) + op.create_unique_constraint("uq_video_files_decrypted_path", "video_files", ["decrypted_path"]) + op.get_bind().execute( + "delete from ondeckdata where id in (select ondeckdata.id from ondeckdata left join video_files on video_uri = decrypted_path where decrypted_path is null);" + ) + op.create_foreign_key(None, "ondeckdata", "video_files", ["video_uri"], ["decrypted_path"]) def downgrade() -> None: - op.drop_constraint(None, 'ondeckdata', type_='foreignkey') + # drop_constraint(None, ...) raises at runtime; "ondeckdata_video_uri_fkey" is Postgres's default name + # for the unnamed foreign key created in upgrade() (assumes no custom naming convention) + op.drop_constraint("ondeckdata_video_uri_fkey", "ondeckdata", type_="foreignkey") - op.drop_constraint('uq_video_files_decrypted_path', 'video_files', type_='unique') - op.drop_column('ondeckdata', 'tracked_confidence') - op.drop_column('ondeckdata', 'overallruntimems') - op.drop_column('ondeckdata', 'overallcount') - + op.drop_constraint("uq_video_files_decrypted_path", "video_files", type_="unique") + op.drop_column("ondeckdata", "tracked_confidence") + op.drop_column("ondeckdata", "overallruntimems") + op.drop_column("ondeckdata", "overallcount") diff --git a/migrations/versions/ba08d4e11cc7_ondeckdata_more_data_columns.py b/migrations/versions/ba08d4e11cc7_ondeckdata_more_data_columns.py index be4e0d6..e54d48a 100644 --- a/migrations/versions/ba08d4e11cc7_ondeckdata_more_data_columns.py +++ b/migrations/versions/ba08d4e11cc7_ondeckdata_more_data_columns.py @@ 
-5,27 +5,24 @@ Create Date: 2023-10-11 17:33:42.633350 """ -from alembic import op + import sqlalchemy as sa -from sqlalchemy.dialects import postgresql +from alembic import op # revision identifiers, used by Alembic. -revision = 'ba08d4e11cc7' -down_revision = '495235ece5f0' +revision = "ba08d4e11cc7" +down_revision = "495235ece5f0" branch_labels = None depends_on = None def upgrade() -> None: - op.add_column('ondeckdata', sa.Column('overallcatches', sa.Integer(), nullable=True)) - op.add_column('ondeckdata', sa.Column('overalldiscards', sa.Integer(), nullable=True)) - op.add_column('ondeckdata', sa.Column('detection_confidence', sa.REAL(), nullable=True)) - + op.add_column("ondeckdata", sa.Column("overallcatches", sa.Integer(), nullable=True)) + op.add_column("ondeckdata", sa.Column("overalldiscards", sa.Integer(), nullable=True)) + op.add_column("ondeckdata", sa.Column("detection_confidence", sa.REAL(), nullable=True)) def downgrade() -> None: - op.drop_column('ondeckdata', 'detection_confidence') - op.drop_column('ondeckdata', 'overalldiscards') - op.drop_column('ondeckdata', 'overallcatches') - - + op.drop_column("ondeckdata", "detection_confidence") + op.drop_column("ondeckdata", "overalldiscards") + op.drop_column("ondeckdata", "overallcatches") diff --git a/migrations/versions/bbe04841c70d_port_departures_view.py b/migrations/versions/bbe04841c70d_port_departures_view.py index 70e30c2..b7b5271 100644 --- a/migrations/versions/bbe04841c70d_port_departures_view.py +++ b/migrations/versions/bbe04841c70d_port_departures_view.py @@ -5,22 +5,22 @@ Create Date: 2023-09-19 11:59:42.945969 """ -from alembic import op -import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. -revision = 'bbe04841c70d' -down_revision = 'b78dce0f5492' +revision = "bbe04841c70d" +down_revision = "b78dce0f5492" branch_labels = None depends_on = None def upgrade() -> None: - - op.get_bind().execute('create table if not exists port_location (port_location point);') - op.get_bind().execute('truncate port_location;') - op.get_bind().execute('insert into port_location (port_location) values (point(9.4241879,-84.1833372));') + op.get_bind().execute("create table if not exists port_location (port_location point);") + op.get_bind().execute("truncate port_location;") + op.get_bind().execute( + "insert into port_location (port_location) values (point(9.4241879,-84.1833372));" + ) op.get_bind().execute("""CREATE OR REPLACE VIEW port_departures as ( with A as ( @@ -30,7 +30,7 @@ def upgrade() -> None: from gpsdata cross join port_location ) select B.gps_datetime as datetime, B.lat, B.lon from A join A B on a.row_number = b.row_number-1 where a.at_port = true and b.at_port = false);""") - + op.get_bind().execute("""CREATE OR REPLACE VIEW port_arrivals as ( with A as ( select *, @@ -42,6 +42,6 @@ def upgrade() -> None: def downgrade() -> None: - op.get_bind().execute('drop view port_arrivals;') - op.get_bind().execute('drop view port_departures;') - op.get_bind().execute('drop table port_location;') + op.get_bind().execute("drop view port_arrivals;") + op.get_bind().execute("drop view port_departures;") + op.get_bind().execute("drop table port_location;") diff --git a/migrations/versions/d974c1aea745_elog_gaps_score_update.py b/migrations/versions/d974c1aea745_elog_gaps_score_update.py index fc4e1ce..7985000 100644 --- a/migrations/versions/d974c1aea745_elog_gaps_score_update.py +++ b/migrations/versions/d974c1aea745_elog_gaps_score_update.py @@ -5,18 +5,16 @@ Create Date: 2023-09-19 
13:16:37.865465 """ -from alembic import op -import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. -revision = 'd974c1aea745' -down_revision = 'b2f76c38a4a0' +revision = "d974c1aea745" +down_revision = "b2f76c38a4a0" branch_labels = None depends_on = None - def upgrade() -> None: op.get_bind().execute("""CREATE OR REPLACE VIEW elog_time_gap_score as ( with paired_seq_deckhandevents as ( @@ -53,4 +51,4 @@ def upgrade() -> None: def downgrade() -> None: - pass \ No newline at end of file + pass diff --git a/migrations/versions/e718ddd7c0bd_add_track_table.py b/migrations/versions/e718ddd7c0bd_add_track_table.py index b0ecf59..62cb39e 100644 --- a/migrations/versions/e718ddd7c0bd_add_track_table.py +++ b/migrations/versions/e718ddd7c0bd_add_track_table.py @@ -5,32 +5,38 @@ Create Date: 2023-12-05 16:55:46.938879 """ -from alembic import op + import sqlalchemy as sa +from alembic import op from sqlalchemy.dialects import postgresql # revision identifiers, used by Alembic. -revision = 'e718ddd7c0bd' -down_revision = 'fdfd9e708602' +revision = "e718ddd7c0bd" +down_revision = "fdfd9e708602" branch_labels = None depends_on = None def upgrade() -> None: - - op.create_table('tracks', - sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False), - sa.Column('video_uri', sa.VARCHAR(), autoincrement=False, nullable=True), - sa.Column('cocoannotations_uri', sa.VARCHAR(), autoincrement=False, nullable=True), - sa.Column('track_id', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('first_framenum', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('last_framenum', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('confidences', postgresql.ARRAY(sa.REAL()), autoincrement=False, nullable=True), - sa.Column('datetime', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='tracks_pkey') + op.create_table( + "tracks", + sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False), + sa.Column("video_uri", sa.VARCHAR(), autoincrement=False, nullable=True), + sa.Column("cocoannotations_uri", sa.VARCHAR(), autoincrement=False, nullable=True), + sa.Column("track_id", sa.INTEGER(), autoincrement=False, nullable=True), + sa.Column("first_framenum", sa.INTEGER(), autoincrement=False, nullable=True), + sa.Column("last_framenum", sa.INTEGER(), autoincrement=False, nullable=True), + sa.Column("confidences", postgresql.ARRAY(sa.REAL()), autoincrement=False, nullable=True), + sa.Column( + "datetime", + postgresql.TIMESTAMP(timezone=True), + server_default=sa.text("CURRENT_TIMESTAMP"), + autoincrement=False, + nullable=True, + ), + sa.PrimaryKeyConstraint("id", name="tracks_pkey"), ) def downgrade() -> None: - op.drop_table('tracks') - + op.drop_table("tracks") diff --git a/migrations/versions/ecb326942445_starttime_on_videos.py b/migrations/versions/ecb326942445_starttime_on_videos.py index 6c00cff..8b0162e 100644 --- a/migrations/versions/ecb326942445_starttime_on_videos.py +++ b/migrations/versions/ecb326942445_starttime_on_videos.py @@ -5,16 +5,14 @@ Create Date: 2023-07-20 16:58:45.490762 """ -from alembic import op + import sqlalchemy as sa +from alembic import op from sqlalchemy.dialects import postgresql -from datetime import datetime, timezone - - # revision identifiers, used by Alembic. 
-revision = 'ecb326942445' -down_revision = '1d1b10e054af' +revision = "ecb326942445" +down_revision = "1d1b10e054af" branch_labels = None depends_on = None @@ -22,8 +20,9 @@ def upgrade() -> None: # ### commands auto generated by Alembic - please adjust! ### op.add_column( - 'video_files', - sa.Column('start_datetime', sa.DateTime(timezone=True), autoincrement=False, nullable=True)) + "video_files", + sa.Column("start_datetime", sa.DateTime(timezone=True), autoincrement=False, nullable=True), + ) op.execute(""" update video_files set start_datetime = to_timestamp( @@ -34,14 +33,17 @@ def upgrade() -> None: 'DD-MM-YYYY-HH24-MI TZH TZM' ) """) - op.alter_column('video_files', 'start_datetime', - existing_type=postgresql.TIMESTAMP(timezone=True), - nullable=False, - autoincrement=False) + op.alter_column( + "video_files", + "start_datetime", + existing_type=postgresql.TIMESTAMP(timezone=True), + nullable=False, + autoincrement=False, + ) # ### end Alembic commands ### def downgrade() -> None: # ### commands auto generated by Alembic - please adjust! ### - op.drop_column('video_files', 'start_datetime') + op.drop_column("video_files", "start_datetime") # ### end Alembic commands ### diff --git a/migrations/versions/f48359cf7456_ondeckdata_status.py b/migrations/versions/f48359cf7456_ondeckdata_status.py index 1944b0f..9197882 100644 --- a/migrations/versions/f48359cf7456_ondeckdata_status.py +++ b/migrations/versions/f48359cf7456_ondeckdata_status.py @@ -5,23 +5,22 @@ Create Date: 2023-10-09 17:35:01.581320 """ -from alembic import op + import sqlalchemy as sa -from sqlalchemy.dialects import postgresql +from alembic import op # revision identifiers, used by Alembic. -revision = 'f48359cf7456' -down_revision = '8304966281aa' +revision = "f48359cf7456" +down_revision = "8304966281aa" branch_labels = None depends_on = None def upgrade() -> None: - - op.add_column('ondeckdata', sa.Column('status', sa.String(), nullable=True)) + op.add_column("ondeckdata", sa.Column("status", sa.String(), nullable=True)) op.execute("update ondeckdata set status = 'done';") def downgrade() -> None: - op.drop_column('ondeckdata', 'status') + op.drop_column("ondeckdata", "status") diff --git a/migrations/versions/f835aa8c569a_second.py b/migrations/versions/f835aa8c569a_second.py index 5f54e0b..bb4e3bf 100644 --- a/migrations/versions/f835aa8c569a_second.py +++ b/migrations/versions/f835aa8c569a_second.py @@ -5,81 +5,103 @@ Create Date: 2023-05-16 11:44:58.986312 """ -from alembic import op -import sqlalchemy as sa +import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. -revision = 'f835aa8c569a' -down_revision = '04eaff9bcc55' +revision = "f835aa8c569a" +down_revision = "04eaff9bcc55" branch_labels = None depends_on = None def upgrade() -> None: # ### commands auto generated by Alembic - please adjust! 
### - op.create_table('boatschedules', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('sentence', sa.String(), nullable=True), - sa.Column('datetime', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True), - sa.PrimaryKeyConstraint('id') + op.create_table( + "boatschedules", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("sentence", sa.String(), nullable=True), + sa.Column( + "datetime", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=True + ), + sa.PrimaryKeyConstraint("id"), ) - op.create_table('deckhandevents', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('jsonblob', sa.String(), nullable=True), - sa.Column('datetime', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True), - sa.PrimaryKeyConstraint('id') + op.create_table( + "deckhandevents", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("jsonblob", sa.String(), nullable=True), + sa.Column( + "datetime", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=True + ), + sa.PrimaryKeyConstraint("id"), ) - op.create_table('fishaidata', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('video_uri', sa.String(), nullable=True), - sa.Column('cocoannotations_uri', sa.String(), nullable=True), - sa.Column('datetime', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True), - sa.PrimaryKeyConstraint('id') + op.create_table( + "fishaidata", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("video_uri", sa.String(), nullable=True), + sa.Column("cocoannotations_uri", sa.String(), nullable=True), + sa.Column( + "datetime", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=True + ), + sa.PrimaryKeyConstraint("id"), ) - op.create_table('gpsdata', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('sentence', sa.String(), nullable=True), - sa.Column('datetime', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True), - sa.PrimaryKeyConstraint('id') + op.create_table( + "gpsdata", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("sentence", sa.String(), nullable=True), + sa.Column( + "datetime", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=True + ), + sa.PrimaryKeyConstraint("id"), ) - op.create_table('internetdata', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('traceroute', sa.String(), nullable=True), - sa.Column('ping', sa.Float(), nullable=True), - sa.Column('packetloss', sa.Float(), nullable=True), - sa.Column('returncode', sa.Integer(), nullable=True), - sa.Column('datetime', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True), - sa.PrimaryKeyConstraint('id') + op.create_table( + "internetdata", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("traceroute", sa.String(), nullable=True), + sa.Column("ping", sa.Float(), nullable=True), + sa.Column("packetloss", sa.Float(), nullable=True), + sa.Column("returncode", sa.Integer(), nullable=True), + sa.Column( + "datetime", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=True + ), + sa.PrimaryKeyConstraint("id"), ) - op.create_table('vectors', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('name', sa.String(), nullable=True), - sa.Column('configblob', sa.String(), nullable=True), - sa.PrimaryKeyConstraint('id') + op.create_table( + "vectors", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("name", sa.String(), nullable=True), + 
sa.Column("configblob", sa.String(), nullable=True), + sa.PrimaryKeyConstraint("id"), ) - op.create_table('tests', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('name', sa.String(), nullable=True), - sa.Column('type', sa.Enum('one', 'two', 'three', name='t'), nullable=True), - sa.Column('vector_id', sa.Integer(), nullable=True), - sa.Column('score', sa.Float(), nullable=True), - sa.Column('detail', sa.String(), nullable=True), - sa.Column('datetime', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True), - sa.ForeignKeyConstraint(['vector_id'], ['vectors.id'], ), - sa.PrimaryKeyConstraint('id') + op.create_table( + "tests", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("name", sa.String(), nullable=True), + sa.Column("type", sa.Enum("one", "two", "three", name="t"), nullable=True), + sa.Column("vector_id", sa.Integer(), nullable=True), + sa.Column("score", sa.Float(), nullable=True), + sa.Column("detail", sa.String(), nullable=True), + sa.Column( + "datetime", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=True + ), + sa.ForeignKeyConstraint( + ["vector_id"], + ["vectors.id"], + ), + sa.PrimaryKeyConstraint("id"), ) # ### end Alembic commands ### def downgrade() -> None: # ### commands auto generated by Alembic - please adjust! ### - op.drop_table('tests') - op.drop_table('vectors') - op.drop_table('internetdata') - op.drop_table('gpsdata') - op.drop_table('fishaidata') - op.drop_table('deckhandevents') - op.drop_table('boatschedules') - op.execute('drop type t;') - # ### end Alembic commands ### \ No newline at end of file + op.drop_table("tests") + op.drop_table("vectors") + op.drop_table("internetdata") + op.drop_table("gpsdata") + op.drop_table("fishaidata") + op.drop_table("deckhandevents") + op.drop_table("boatschedules") + op.execute("drop type t;") + # ### end Alembic commands ### diff --git a/migrations/versions/f9dbf07180af_test_from_to_columns.py b/migrations/versions/f9dbf07180af_test_from_to_columns.py index 7756141..f59c83b 100644 --- a/migrations/versions/f9dbf07180af_test_from_to_columns.py +++ b/migrations/versions/f9dbf07180af_test_from_to_columns.py @@ -5,26 +5,26 @@ Create Date: 2023-06-06 13:12:18.789652 """ -from alembic import op -import sqlalchemy as sa +import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. -revision = 'f9dbf07180af' -down_revision = '47ff3fca73a4' +revision = "f9dbf07180af" +down_revision = "47ff3fca73a4" branch_labels = None depends_on = None def upgrade() -> None: # ### commands auto generated by Alembic - please adjust! ### - op.add_column('tests', sa.Column('datetime_from', sa.DateTime(timezone=True), nullable=True)) - op.add_column('tests', sa.Column('datetime_to', sa.DateTime(timezone=True), nullable=True)) + op.add_column("tests", sa.Column("datetime_from", sa.DateTime(timezone=True), nullable=True)) + op.add_column("tests", sa.Column("datetime_to", sa.DateTime(timezone=True), nullable=True)) # ### end Alembic commands ### def downgrade() -> None: # ### commands auto generated by Alembic - please adjust! 
### - op.drop_column('tests', 'datetime_to') - op.drop_column('tests', 'datetime_from') + op.drop_column("tests", "datetime_to") + op.drop_column("tests", "datetime_from") # ### end Alembic commands ### diff --git a/migrations/versions/fdfd9e708602_add_elog_timegap_vector_row.py b/migrations/versions/fdfd9e708602_add_elog_timegap_vector_row.py index 2776b15..55b41ed 100644 --- a/migrations/versions/fdfd9e708602_add_elog_timegap_vector_row.py +++ b/migrations/versions/fdfd9e708602_add_elog_timegap_vector_row.py @@ -5,33 +5,36 @@ Create Date: 2023-11-07 16:50:44.303059 """ -from alembic import op + import sqlalchemy as sa -from sqlalchemy.dialects import postgresql +from alembic import op # revision identifiers, used by Alembic. -revision = 'fdfd9e708602' -down_revision = 'ba08d4e11cc7' +revision = "fdfd9e708602" +down_revision = "ba08d4e11cc7" branch_labels = None depends_on = None - + def upgrade() -> None: # stmt = sa.select(sa.table('vectors')).where(name="ElogTimeGapsVector") found_id = None - for row in op.get_bind().execute("select id, name from vectors where name = 'ElogTimeGapsVector';"): + for row in op.get_bind().execute( + "select id, name from vectors where name = 'ElogTimeGapsVector';" + ): if row: found_id = row[0] - + if found_id is None: - op.get_bind().execute('insert into vectors (name, configblob, schedule_string) values (\'ElogTimeGapsVector\', \'{}\', \'every 4 hours\');') - - + op.get_bind().execute( + "insert into vectors (name, configblob, schedule_string) values ('ElogTimeGapsVector', '{}', 'every 4 hours');" + ) def downgrade() -> None: - op.get_bind().execute("delete from tests where vector_id = (select id from vectors where name = 'ElogTimeGapsVector');"); + op.get_bind().execute( + "delete from tests where vector_id = (select id from vectors where name = 'ElogTimeGapsVector');" + ) - t = sa.table('vectors') + t = sa.table("vectors") op.get_bind().execute("delete from vectors where name = 'ElogTimeGapsVector';") - diff --git a/misc/data/port_locations.sql b/misc/data/port_locations.sql index f1694da..f8b9379 100644 --- a/misc/data/port_locations.sql +++ b/misc/data/port_locations.sql @@ -1,13 +1,27 @@ - -- puntarenas -truncate port_location; -insert into port_location (port_location) values (point(9.8106338, -84.875245)); - +TRUNCATE port_location; +INSERT INTO + port_location (port_location) +VALUES + (point(9.8106338, -84.875245)); -- QUEPOS -truncate port_location; -insert into port_location (port_location) values (point(9.4241879,-84.1833372)); +TRUNCATE port_location; +INSERT INTO + port_location (port_location) +VALUES + (point(9.4241879, -84.1833372)); -select *, 'dep' from port_departures union select *, 'arr' from port_arrivals; +SELECT + *, + 'dep' +FROM + port_departures +UNION +SELECT + *, + 'arr' +FROM + port_arrivals; \ No newline at end of file diff --git a/misc/gpsdata_scratchpad.sql b/misc/gpsdata_scratchpad.sql index 3f3416d..51bd605 100644 --- a/misc/gpsdata_scratchpad.sql +++ b/misc/gpsdata_scratchpad.sql @@ -1,60 +1,130 @@ -with t as ( - select *, ROW_NUMBER() OVER (ORDER BY gps_datetime) from gpsdata -) select -t1.gps_datetime, t2.gps_datetime, -t1.lat, t1.lon, t2.lat, t2.lon, -(t1.lat - t2.lat)*110.574 as latkmdiff, -cos(t1.lat*3.14159265/180)*(t1.lon-t2.lon)*111.320 as lonkmdiff, -sqrt( - ((t1.lat - t2.lat)*110.574)^2 + - (cos(t1.lat*3.14159265/180)*(t1.lon-t2.lon)*111.320)^2 -) as distance, -sqrt( - ((t1.lat - t2.lat)*110.574)^2 + - (cos(t1.lat*3.14159265/180)*(t1.lon-t2.lon)*111.320)^2 -)/(extract(epoch from t1.gps_datetime - 
t2.gps_datetime)/3600) as kph -from t t1 -join t t2 on t2.row_number = t1.row_number-1 -where t1.row_number > 200 -limit 500; +WITH t AS ( + SELECT + *, + ROW_NUMBER() OVER ( + ORDER BY + gps_datetime + ) + FROM + gpsdata +) +SELECT + t1.gps_datetime, + t2.gps_datetime, + t1.lat, + t1.lon, + t2.lat, + t2.lon, + (t1.lat - t2.lat) * 110.574 AS latkmdiff, + cos(t1.lat * 3.14159265 / 180) *(t1.lon - t2.lon) * 111.320 AS lonkmdiff, + sqrt( + ((t1.lat - t2.lat) * 110.574) ^ 2 + ( + cos(t1.lat * 3.14159265 / 180) *(t1.lon - t2.lon) * 111.320 + ) ^ 2 + ) AS distance, + sqrt( + ((t1.lat - t2.lat) * 110.574) ^ 2 + ( + cos(t1.lat * 3.14159265 / 180) *(t1.lon - t2.lon) * 111.320 + ) ^ 2 + ) /( + extract( + epoch + FROM + t1.gps_datetime - t2.gps_datetime + ) / 3600 + ) AS kph +FROM + t t1 + JOIN t t2 ON t2.row_number = t1.row_number -1 +WHERE + t1.row_number > 200 +LIMIT + 500; +WITH t AS ( + SELECT + *, + ROW_NUMBER() OVER ( + ORDER BY + gps_datetime + ) + FROM + gpsdata +) +SELECT + -- (t1.lat - t2.lat)*100000 as latdiff, + -- (t1.lon - t2.lon)*100000 as londiff, + t1.gps_datetime, + sqrt( + ((t1.lat - t2.lat) * 110.574) ^ 2 + ( + cos(t1.lat * 3.14159265 / 180) *(t1.lon - t2.lon) * 111.320 + ) ^ 2 + ) /( + extract( + epoch + FROM + t1.gps_datetime - t2.gps_datetime + ) / 3600 + ) AS kph, + CASE + WHEN t1.lon - t2.lon = 0 THEN 0 + ELSE ( + 6 - sign(t1.lon - t2.lon) *( + sign(t1.lon - t2.lon) * atan( + (t1.lat - t2.lat) /( + cos(t1.lat * 3.14159265 / 180) *(t1.lon - t2.lon) + ) + ) / 3.14159 + 0.5 + ) * 6 + ) + END AS clockheading +FROM + t t1 + JOIN t t2 ON t2.row_number = t1.row_number -1 +LIMIT + 10; -with t as ( - select *, ROW_NUMBER() OVER (ORDER BY gps_datetime) from gpsdata -) select --- (t1.lat - t2.lat)*100000 as latdiff, --- (t1.lon - t2.lon)*100000 as londiff, -t1.gps_datetime, -sqrt( - ((t1.lat - t2.lat)*110.574)^2 + - (cos(t1.lat*3.14159265/180)*(t1.lon-t2.lon)*111.320)^2 -)/(extract(epoch from t1.gps_datetime - t2.gps_datetime)/3600) as kph, - case - when t1.lon-t2.lon = 0 then 0 - else (6 - sign(t1.lon-t2.lon)*(sign(t1.lon-t2.lon)*atan((t1.lat - t2.lat)/(cos(t1.lat*3.14159265/180)*(t1.lon-t2.lon)))/3.14159 + 0.5 ) * 6 ) - end as clockheading -from t t1 -join t t2 on t2.row_number = t1.row_number-1 -limit 10; ; copy ( - with t as ( - select *, ROW_NUMBER() OVER (ORDER BY gps_datetime) from gpsdata - ) select - -- (t1.lat - t2.lat)*100000 as latdiff, - -- (t1.lon - t2.lon)*100000 as londiff, - t1.gps_datetime, - sqrt( - ((t1.lat - t2.lat)*110.574)^2 + - (cos(t1.lat*3.14159265/180)*(t1.lon-t2.lon)*111.320)^2 - )/(extract(epoch from t1.gps_datetime - t2.gps_datetime)/3600) as kph, - case - when t1.lon-t2.lon = 0 then 0 - else (6 - sign(t1.lon-t2.lon)*(sign(t1.lon-t2.lon)*atan((t1.lat - t2.lat)/(cos(t1.lat*3.14159265/180)*(t1.lon-t2.lon)))/3.14159 + 0.5 ) * 6 ) - end as clockheading - from t t1 - join t t2 on t2.row_number = t1.row_number-1 - -- limit 10 -) to stdout csv header -; + WITH t AS ( + SELECT + *, + ROW_NUMBER() OVER ( + ORDER BY + gps_datetime + ) + FROM + gpsdata + ) + SELECT + -- (t1.lat - t2.lat)*100000 as latdiff, + -- (t1.lon - t2.lon)*100000 as londiff, + t1.gps_datetime, + sqrt( + ((t1.lat - t2.lat) * 110.574) ^ 2 + ( + cos(t1.lat * 3.14159265 / 180) *(t1.lon - t2.lon) * 111.320 + ) ^ 2 + ) /( + extract( + epoch + FROM + t1.gps_datetime - t2.gps_datetime + ) / 3600 + ) AS kph, + CASE + WHEN t1.lon - t2.lon = 0 THEN 0 + ELSE ( + 6 - sign(t1.lon - t2.lon) *( + sign(t1.lon - t2.lon) * atan( + (t1.lat - t2.lat) /( + cos(t1.lat * 3.14159265 / 180) *(t1.lon - t2.lon) + ) + ) / 
3.14159 + 0.5 + ) * 6 + ) + END AS clockheading + FROM + t t1 + JOIN t t2 ON t2.row_number = t1.row_number -1 -- limit 10 +) TO stdout csv header; \ No newline at end of file diff --git a/misc/scratchpad_gst-launch_transcoding.sh b/misc/scratchpad_gst-launch_transcoding.sh index ba9349a..10ff606 100644 --- a/misc/scratchpad_gst-launch_transcoding.sh +++ b/misc/scratchpad_gst-launch_transcoding.sh @@ -1,3 +1,4 @@ +#!/bin/bash # see these documentation websites for each filter: # https://gstreamer.freedesktop.org/documentation/avi/avidemux.html @@ -56,7 +57,7 @@ gst-launch-1.0 filesrc location="/videos/20240308T124000Z_cam1_reenc.mkv" ! matr 20230912T133500Z_cam2_ondeck -mkdir ./frames || rm ./frames/* +mkdir ./frames || rm ./frames/* gst-launch-1.0 filesrc location="$V" ! avidemux ! multifilesink index=1 location=./frames/%d.jpg @@ -108,10 +109,8 @@ python3 -m http.server -for i in 12-09-2023-15-05.avi.done 12-09-2023-15-10.avi.done 12-09-2023-15-15.avi.done 12-09-2023-15-20.avi.done 12-09-2023-15-25.avi.done 12-09-2023-15-30.avi.done 12-09-2023-15-35.avi.done 12-09-2023-15-40.avi.done 12-09-2023-15-55.avi.done ; do +for i in 12-09-2023-15-05.avi.done 12-09-2023-15-10.avi.done 12-09-2023-15-15.avi.done 12-09-2023-15-20.avi.done 12-09-2023-15-25.avi.done 12-09-2023-15-30.avi.done 12-09-2023-15-35.avi.done 12-09-2023-15-40.avi.done 12-09-2023-15-55.avi.done ; do rm /videos/frames/* gst-launch-1.0 filesrc location="/thalos/brancol/videos/cam2/12-09-2023/15/$i" ! avidemux ! multifilesink index=1 location=/videos/frames/%d.jpg tar czf ~/$i.tar.gz -C /videos/frames {1,480,960,1440,1920,2400,2880,3360,3840,4320}.jpg done - - diff --git a/misc/vector_data_as_cloudwatch_metrics_experiment.py b/misc/vector_data_as_cloudwatch_metrics_experiment.py index 3125c70..83fa4ca 100644 --- a/misc/vector_data_as_cloudwatch_metrics_experiment.py +++ b/misc/vector_data_as_cloudwatch_metrics_experiment.py @@ -1,11 +1,11 @@ -import boto3 import time -from datetime import datetime + +import boto3 from dateutil.parser import parse as dateparse -athena = boto3.client('athena') +athena = boto3.client("athena") -custommetrics = boto3.client('cloudwatch') +custommetrics = boto3.client("cloudwatch") def has_query_succeeded(execution_id): @@ -28,19 +28,20 @@ def has_query_succeeded(execution_id): return False + def gen_put_metric_requests(vector_id, value_timestamp_pairs): request = None - for (value, timestamp) in value_timestamp_pairs: - - + for value, timestamp in value_timestamp_pairs: if request is None: - request = {'Namespace': 'tnc_edge_brancol_v1', 'MetricData': []} - request['MetricData'].append({ - 'MetricName': 'vector_{}'.format(vector_id), - 'Value': value, - 'Timestamp': timestamp, - }) - if len(request['MetricData']) >= 1000: + request = {"Namespace": "tnc_edge_brancol_v1", "MetricData": []} + request["MetricData"].append( + { + "MetricName": f"vector_{vector_id}", + "Value": value, + "Timestamp": timestamp, + } + ) + if len(request["MetricData"]) >= 1000: yield request request = None if request: @@ -49,10 +50,9 @@ def gen_put_metric_requests(vector_id, value_timestamp_pairs): def main(): # 5. 
Query Athena table - query = f"SELECT vector_id, score, datetime from tnc_edge.brancol_v1_tests" + query = "SELECT vector_id, score, datetime from tnc_edge.brancol_v1_tests" response = athena.start_query_execution( - QueryString=query, - ResultConfiguration={"OutputLocation": "s3://51-gema-dev-athena/"} + QueryString=query, ResultConfiguration={"OutputLocation": "s3://51-gema-dev-athena/"} ) execution_id = response["QueryExecutionId"] @@ -61,46 +61,45 @@ def main(): query_status = has_query_succeeded(execution_id=execution_id) print(f"Query state: {query_status}") - paginator = athena.get_paginator('get_query_results') - page_iterator = paginator.paginate( - QueryExecutionId=execution_id - ) + paginator = athena.get_paginator("get_query_results") + page_iterator = paginator.paginate(QueryExecutionId=execution_id) def gen_results(): for page in page_iterator: - if len(page['ResultSet']['Rows']) > 1: - for row in page['ResultSet']['Rows'][1:]: + if len(page["ResultSet"]["Rows"]) > 1: + for row in page["ResultSet"]["Rows"][1:]: yield row - + grouped = {} for row in gen_results(): - vector_id = row['Data'][0]['VarCharValue'] - if vector_id not in grouped.keys(): + vector_id = row["Data"][0]["VarCharValue"] + if vector_id not in grouped: grouped[vector_id] = [] - value = row['Data'][1].get('VarCharValue') + value = row["Data"][1].get("VarCharValue") try: value = float(value) except: continue - timestamp = row['Data'][2].get('VarCharValue') + timestamp = row["Data"][2].get("VarCharValue") if timestamp is None: continue timestamp = dateparse(timestamp) - if timestamp <= dateparse('2023-10-20 23:00:00Z'): + if timestamp <= dateparse("2023-10-20 23:00:00Z"): continue - grouped[vector_id].append( (value, timestamp) ) - - for (vector_id, value_timestamp_pairs) in grouped.items(): + grouped[vector_id].append((value, timestamp)) + + for vector_id, value_timestamp_pairs in grouped.items(): if int(vector_id) == 3: continue # metric_name = 'tnc_edge_brancol_v1_vector_{}'.format(vector_id) - for request in gen_put_metric_requests(vector_id=vector_id, value_timestamp_pairs=value_timestamp_pairs): - print('putting {} values on ') + for request in gen_put_metric_requests( + vector_id=vector_id, value_timestamp_pairs=value_timestamp_pairs + ): + # fill in the values the old placeholder string left empty + print(f"putting {len(request['MetricData'])} values on vector_{vector_id}") response = custommetrics.put_metric_data(**request) print(response) - if __name__ == "__main__": main() diff --git a/model/__init__.py b/model/__init__.py index b4c02fb..011c1b4 100644 --- a/model/__init__.py +++ b/model/__init__.py @@ -1,20 +1,31 @@ - +from .aifishdata import AifishData from .base import Base - -from .riskvector import RiskVector, RiskVectorModelView - -from .test import Test, T, TestModelView - -from .gpsdata import GpsData - +from .boatschedule import BoatSchedule from .deckhandeventraw import DeckhandEventRaw from .deckhandeventview import DeckhandEventView - -from .aifishdata import AifishData -from .ondeckdata import OndeckData - +from .gpsdata import GpsData from .internetdata import InternetData, InternetDataView - -from .boatschedule import BoatSchedule -from .videofiles import VideoFile +from .ondeckdata import OndeckData +from .riskvector import RiskVector, RiskVectorModelView +from .test import T, Test, TestModelView from .track import Track +from .videofiles import VideoFile + +__all__ = [ + "AifishData", + "Base", + "BoatSchedule", + "DeckhandEventRaw", + "DeckhandEventView", + "GpsData", + "InternetData", + "InternetDataView", + "OndeckData", + "RiskVector", + "RiskVectorModelView", + "T", + "Test", 
"TestModelView", + "Track", + "VideoFile", +] diff --git a/model/aifishdata.py b/model/aifishdata.py index c6ad7cb..fdddb6d 100644 --- a/model/aifishdata.py +++ b/model/aifishdata.py @@ -1,11 +1,12 @@ +from sqlalchemy import REAL, Column, DateTime, ForeignKey, Integer, String, text +from sqlalchemy.orm import relationship + from .base import Base from .videofiles import VideoFile -from sqlalchemy import Column, ForeignKey, Integer, String, DateTime, text, REAL -from sqlalchemy.orm import relationship class AifishData(Base): - __tablename__ = 'aifishdata' + __tablename__ = "aifishdata" id = Column(Integer, primary_key=True) video_uri = Column(String, ForeignKey("video_files.decrypted_path"), unique=True) @@ -19,18 +20,24 @@ class AifishData(Base): status = Column(String) def __str__(self) -> str: - return 'AifishData(' + ', '.join( - [n + '='+ str(self.__getattribute__(n)) for n in [ - 'id', - 'video_uri', - # 'video_file', - 'processing_uri', - 'output_uri', - 'datetime', - 'count', - 'runtimems', - 'detection_confidence', - 'status', - - ]]) + ')' - + return ( + "AifishData(" + + ", ".join( + [ + n + "=" + str(self.__getattribute__(n)) + for n in [ + "id", + "video_uri", + # 'video_file', + "processing_uri", + "output_uri", + "datetime", + "count", + "runtimems", + "detection_confidence", + "status", + ] + ] + ) + + ")" + ) diff --git a/model/boatschedule.py b/model/boatschedule.py index 85557ea..1357671 100644 --- a/model/boatschedule.py +++ b/model/boatschedule.py @@ -1,20 +1,26 @@ +from sqlalchemy import Column, DateTime, Integer, String, text + from .base import Base -from sqlalchemy import Column, Integer, String, DateTime, text class BoatSchedule(Base): - __tablename__ = 'boatschedules' + __tablename__ = "boatschedules" id = Column(Integer, primary_key=True) sentence = Column(String) datetime = Column(DateTime(timezone=True), server_default=text("CURRENT_TIMESTAMP")) def __str__(self) -> str: - return 'BoatSchedule(' + ', '.join( - [n + '='+ str(self.__getattribute__(n)) for n in [ - 'id', - 'sentence', - ]]) + ')' - - - + return ( + "BoatSchedule(" + + ", ".join( + [ + n + "=" + str(self.__getattribute__(n)) + for n in [ + "id", + "sentence", + ] + ] + ) + + ")" + ) diff --git a/model/deckhandeventraw.py b/model/deckhandeventraw.py index 46dd4fd..af9787c 100644 --- a/model/deckhandeventraw.py +++ b/model/deckhandeventraw.py @@ -1,15 +1,11 @@ +from sqlalchemy import Column, DateTime, Integer, String, text from .base import Base -from sqlalchemy import Column, Integer, String, DateTime, text - class DeckhandEventRaw(Base): - - __tablename__ = 'deckhandevents' + __tablename__ = "deckhandevents" id = Column(Integer, primary_key=True) jsonblob = Column(String) datetime = Column(DateTime(timezone=True), server_default=text("CURRENT_TIMESTAMP")) - - \ No newline at end of file diff --git a/model/deckhandeventview.py b/model/deckhandeventview.py index 8c4b0d4..a76f649 100644 --- a/model/deckhandeventview.py +++ b/model/deckhandeventview.py @@ -1,17 +1,15 @@ +from sqlalchemy import Column, DateTime, Integer, text from .base import Base -from sqlalchemy import Column, Integer, String, DateTime, text - class DeckhandEventView(Base): - - __tablename__ = 'deckhandevents_mostrecentlonglineevent_jsonextracted' + __tablename__ = "deckhandevents_mostrecentlonglineevent_jsonextracted" id = Column(Integer, primary_key=True) # jsonblob = Column(String) datetime = Column(DateTime(timezone=True), server_default=text("CURRENT_TIMESTAMP")) - + bycatchcount = Column(Integer) catchcount = Column(Integer) 
systemstartsetdatetime = Column(DateTime(timezone=True)) @@ -26,7 +24,7 @@ class DeckhandEventView(Base): systemendhauldatetime = Column(DateTime(timezone=True)) systemendhaullatitude = Column(Integer) systemendhaullongitude = Column(Integer) - - -if __name__ == '__main__': - pass \ No newline at end of file + + +if __name__ == "__main__": + pass diff --git a/model/gpsdata.py b/model/gpsdata.py index 9675be0..99b3223 100644 --- a/model/gpsdata.py +++ b/model/gpsdata.py @@ -1,9 +1,10 @@ +from sqlalchemy import Column, DateTime, Float, text + from .base import Base -from sqlalchemy import Column, Integer, String, DateTime, text, Float class GpsData(Base): - __tablename__ = 'gpsdata' + __tablename__ = "gpsdata" gps_datetime = Column(DateTime(timezone=True), primary_key=True) lat = Column(Float(), nullable=False) @@ -11,17 +12,27 @@ class GpsData(Base): datetime = Column(DateTime(timezone=True), server_default=text("CURRENT_TIMESTAMP")) def __str__(self) -> str: - return 'GpsData(' + ', '.join( - [n + '='+ str(self.__getattribute__(n)) for n in [ - 'gps_datetime', - 'lat', - 'lon', - 'datetime', - ]]) + ')' -example_gps_data = ''' -$ cat /mnt/thalos/brancol/export_gps/brancol_20230601_145918.txt -+47.7411535°,-3.4073535° edge@edge1:~$ + return ( + "GpsData(" + + ", ".join( + [ + n + "=" + str(self.__getattribute__(n)) + for n in [ + "gps_datetime", + "lat", + "lon", + "datetime", + ] + ] + ) + + ")" + ) + + +example_gps_data = """ +$ cat /mnt/thalos/brancol/export_gps/brancol_20230601_145918.txt ++47.7411535°,-3.4073535° edge@edge1:~$ edge@edge1:~$ cat /mnt/thalos/brancol/export_gps/brancol_20230601_145918.txt | xxd 00000000: 2b34 372e 3734 3131 3533 35c2 b02c 2d33 +47.7411535..,-3 -00000010: 2e34 3037 3335 3335 c2b0 20 .4073535.. -''' +00000010: 2e34 3037 3335 3335 c2b0 20 .4073535.. 
+""" diff --git a/model/internetdata.py b/model/internetdata.py index a6db90d..63976d8 100644 --- a/model/internetdata.py +++ b/model/internetdata.py @@ -1,10 +1,10 @@ -from .base import Base +from sqlalchemy import Column, DateTime, Float, Integer, String, text -from sqlalchemy import Column, Integer, String, Float, DateTime, text +from .base import Base class InternetData(Base): - __tablename__ = 'internetdata' + __tablename__ = "internetdata" id = Column(Integer, primary_key=True) traceroute = Column(String) @@ -15,27 +15,57 @@ class InternetData(Base): # fk = ForeignKeyConstraint(['id'], [RiskVector.id]) def __str__(self) -> str: - return 'InternetData(' + ', '.join( - [n + '='+ str(self.__getattribute__(n)) for n in [ - 'id', - 'traceroute', - 'ping', - 'packetloss', - 'returncode', - 'datetime', - ]]) + ')' + return ( + "InternetData(" + + ", ".join( + [ + n + "=" + str(self.__getattribute__(n)) + for n in [ + "id", + "traceroute", + "ping", + "packetloss", + "returncode", + "datetime", + ] + ] + ) + + ")" + ) from flask_admin.contrib.sqla import ModelView + class InternetDataView(ModelView): - def __init__(self, session, name=None, category=None, endpoint=None, url=None, static_folder=None, menu_class_name=None, menu_icon_type=None, menu_icon_value=None): - super().__init__(InternetData, session, name, category, endpoint, url, static_folder, menu_class_name, menu_icon_type, menu_icon_value) + def __init__( + self, + session, + name=None, + category=None, + endpoint=None, + url=None, + static_folder=None, + menu_class_name=None, + menu_icon_type=None, + menu_icon_value=None, + ): + super().__init__( + InternetData, + session, + name, + category, + endpoint, + url, + static_folder, + menu_class_name, + menu_icon_type, + menu_icon_value, + ) + can_delete = True column_display_pk = True column_hide_backrefs = False - column_list = ["id","traceroute", "ping", "packetloss", "returncode", "datetime"] + column_list = ["id", "traceroute", "ping", "packetloss", "returncode", "datetime"] # column_searchable_list = ["name"] # inline_models = (RiskVector,) - - diff --git a/model/ondeckdata.py b/model/ondeckdata.py index 457c43c..61d8908 100644 --- a/model/ondeckdata.py +++ b/model/ondeckdata.py @@ -1,11 +1,12 @@ +from sqlalchemy import REAL, Column, DateTime, ForeignKey, Integer, String, text +from sqlalchemy.orm import relationship + from .base import Base from .videofiles import VideoFile -from sqlalchemy import Column, ForeignKey, Integer, String, DateTime, text, REAL -from sqlalchemy.orm import relationship class OndeckData(Base): - __tablename__ = 'ondeckdata' + __tablename__ = "ondeckdata" id = Column(Integer, primary_key=True) video_uri = Column(String, ForeignKey("video_files.decrypted_path"), unique=True) @@ -16,22 +17,28 @@ class OndeckData(Base): overallruntimems = Column(REAL) tracked_confidence = Column(REAL) status = Column(String) - overallcatches = Column(Integer) - overalldiscards = Column(Integer) + overallcatches = Column(Integer) + overalldiscards = Column(Integer) detection_confidence = Column(REAL) def __str__(self) -> str: - return 'OndeckData(' + ', '.join( - [n + '='+ str(self.__getattribute__(n)) for n in [ - 'id', - 'video_uri', - # 'video_file', - 'cocoannotations_uri', - 'datetime', - 'overallcount', - 'overallruntimems', - 'tracked_confidence', - 'status', - - ]]) + ')' - + return ( + "OndeckData(" + + ", ".join( + [ + n + "=" + str(self.__getattribute__(n)) + for n in [ + "id", + "video_uri", + # 'video_file', + "cocoannotations_uri", + "datetime", + 
"overallcount", + "overallruntimems", + "tracked_confidence", + "status", + ] + ] + ) + + ")" + ) diff --git a/model/riskvector.py b/model/riskvector.py index c76c78c..0817f15 100644 --- a/model/riskvector.py +++ b/model/riskvector.py @@ -1,12 +1,12 @@ -from .base import Base - -from sqlalchemy.orm import relationship +from flask_admin.model.template import EndpointLinkRowAction from sqlalchemy import Column, Integer, String +from sqlalchemy.orm import relationship + +from .base import Base -from flask_admin.model.template import EndpointLinkRowAction, LinkRowAction class RiskVector(Base): - __tablename__ = 'vectors' + __tablename__ = "vectors" id = Column(Integer, primary_key=True) name = Column(String) @@ -14,23 +14,48 @@ class RiskVector(Base): configblob = Column(String) tests = relationship("Test", back_populates="vector") - def __str__(self) -> str: - return 'RiskVector(' + ', '.join( - [n + '='+ str(self.__getattribute__(n)) for n in [ - 'id', - 'name', - 'schedule_string', - 'configblob' - ]]) + ')' - + return ( + "RiskVector(" + + ", ".join( + [ + n + "=" + str(self.__getattribute__(n)) + for n in ["id", "name", "schedule_string", "configblob"] + ] + ) + + ")" + ) from flask_admin.contrib.sqla import ModelView + class RiskVectorModelView(ModelView): - def __init__(self, session, name=None, category=None, endpoint=None, url=None, static_folder=None, menu_class_name=None, menu_icon_type=None, menu_icon_value=None): - super().__init__(RiskVector, session, name, category, endpoint, url, static_folder, menu_class_name, menu_icon_type, menu_icon_value) + def __init__( + self, + session, + name=None, + category=None, + endpoint=None, + url=None, + static_folder=None, + menu_class_name=None, + menu_icon_type=None, + menu_icon_value=None, + ): + super().__init__( + RiskVector, + session, + name, + category, + endpoint, + url, + static_folder, + menu_class_name, + menu_icon_type, + menu_icon_value, + ) + can_delete = True column_display_pk = True column_hide_backrefs = False @@ -39,6 +64,10 @@ def __init__(self, session, name=None, category=None, endpoint=None, url=None, s # inline_models = (RiskVector,) # column_select_related_list = ["tests.vector_id"] column_extra_row_actions = [ - EndpointLinkRowAction('glyphicon glyphicon-arrow-right', 'test.index_view', title="Go to Tests ➡️", id_arg="flt1_0") + EndpointLinkRowAction( + "glyphicon glyphicon-arrow-right", + "test.index_view", + title="Go to Tests ➡️", + id_arg="flt1_0", + ) ] - diff --git a/model/test.py b/model/test.py index 068478b..fe16085 100644 --- a/model/test.py +++ b/model/test.py @@ -1,11 +1,10 @@ -from .base import Base - -from .riskvector import RiskVector - from enum import Enum as PyEnum +from sqlalchemy import Column, DateTime, Enum, Float, ForeignKey, Integer, String, text from sqlalchemy.orm import relationship -from sqlalchemy import Column, Integer, String, Enum, ForeignKey, Float, DateTime, text + +from .base import Base +from .riskvector import RiskVector class T(PyEnum): @@ -15,7 +14,7 @@ class T(PyEnum): class Test(Base): - __tablename__ = 'tests' + __tablename__ = "tests" id = Column(Integer, primary_key=True) name = Column(String) @@ -30,28 +29,68 @@ class Test(Base): # fk = ForeignKeyConstraint(['id'], [RiskVector.id]) def __str__(self) -> str: - return 'Test(' + ', '.join( - [n + '='+ str(self.__getattribute__(n)) for n in [ - 'id', - 'name', - 'type', - 'vector_id', - 'datetime', - ]]) + ')' + return ( + "Test(" + + ", ".join( + [ + n + "=" + str(self.__getattribute__(n)) + for n in [ + "id", + "name", + 
"type", + "vector_id", + "datetime", + ] + ] + ) + + ")" + ) from flask_admin.contrib.sqla import ModelView + class TestModelView(ModelView): - def __init__(self, session, name=None, category=None, endpoint=None, url=None, static_folder=None, menu_class_name=None, menu_icon_type=None, menu_icon_value=None): - super().__init__(Test, session, name, category, endpoint, url, static_folder, menu_class_name, menu_icon_type, menu_icon_value) + def __init__( + self, + session, + name=None, + category=None, + endpoint=None, + url=None, + static_folder=None, + menu_class_name=None, + menu_icon_type=None, + menu_icon_value=None, + ): + super().__init__( + Test, + session, + name, + category, + endpoint, + url, + static_folder, + menu_class_name, + menu_icon_type, + menu_icon_value, + ) + can_delete = True column_display_pk = True column_hide_backrefs = False - column_list = ["id","name","type","vector", "score", "detail", "datetime_from", "datetime_to", "datetime"] + column_list = [ + "id", + "name", + "type", + "vector", + "score", + "detail", + "datetime_from", + "datetime_to", + "datetime", + ] column_searchable_list = ["name"] column_filters = ["vector_id", "datetime"] # column_select_related_list=['vector'] # inline_models = (RiskVector,) - - diff --git a/model/track.py b/model/track.py index 0f23929..006acad 100644 --- a/model/track.py +++ b/model/track.py @@ -1,11 +1,10 @@ +from sqlalchemy import ARRAY, REAL, Column, DateTime, Integer, String, text + from .base import Base -from .videofiles import VideoFile -from sqlalchemy import Column, ForeignKey, Integer, String, DateTime, text, REAL, ARRAY -from sqlalchemy.orm import relationship class Track(Base): - __tablename__ = 'tracks' + __tablename__ = "tracks" id = Column(Integer, primary_key=True) video_uri = Column(String) @@ -19,13 +18,19 @@ class Track(Base): last_framenum = Column(Integer) confidences = Column(ARRAY(REAL)) - datetime = Column(DateTime(timezone=True), server_default=text("CURRENT_TIMESTAMP")) # detection_confidence = Column(REAL) def __str__(self) -> str: - return 'Track(' + ', '.join( - [n + '='+ str(self.__getattribute__(n)) for n in [ - 'id', - - ]]) + ')' + return ( + "Track(" + + ", ".join( + [ + n + "=" + str(self.__getattribute__(n)) + for n in [ + "id", + ] + ] + ) + + ")" + ) diff --git a/model/videofiles.py b/model/videofiles.py index 6ba483d..09de3c6 100644 --- a/model/videofiles.py +++ b/model/videofiles.py @@ -1,11 +1,11 @@ +from sqlalchemy import VARCHAR, Column, DateTime + from .base import Base -from sqlalchemy import Column, Integer, String, DateTime, VARCHAR, text, PrimaryKeyConstraint -# from sqlalchemy.orm import relationship class VideoFile(Base): - __tablename__ = 'video_files' - + __tablename__ = "video_files" + original_path = Column(VARCHAR(), primary_key=True, autoincrement=False, nullable=False) last_modified = Column(DateTime(timezone=True), autoincrement=False, nullable=False) start_datetime = Column(DateTime(timezone=True), autoincrement=False, nullable=False) @@ -18,22 +18,30 @@ class VideoFile(Base): reencoded_stdout = Column(VARCHAR(), autoincrement=False, nullable=True) reencoded_stderr = Column(VARCHAR(), autoincrement=False, nullable=True) cam_name = Column(VARCHAR(), nullable=True) - + # ondeckdata = relationship("OndeckData", back_populates="video_file") def __str__(self) -> str: - return 'VideoFile(' + ', '.join( - [n + '='+ str(self.__getattribute__(n)) for n in [ - "original_path", - "last_modified", - "start_datetime", - "decrypted_path", - "decrypted_datetime", - "stdout", - "stderr", 
- "reencoded_path", - "reencoded_datetime", - "reencoded_stdout", - "reencoded_stderr", - "cam_name", - ]]) + ')' + return ( + "VideoFile(" + + ", ".join( + [ + n + "=" + str(self.__getattribute__(n)) + for n in [ + "original_path", + "last_modified", + "start_datetime", + "decrypted_path", + "decrypted_datetime", + "stdout", + "stderr", + "reencoded_path", + "reencoded_datetime", + "reencoded_stdout", + "reencoded_stderr", + "cam_name", + ] + ] + ) + + ")" + ) diff --git a/notebooks/catchcount_vector.ipynb b/notebooks/catchcount_vector.ipynb index acb81de..a6db3f2 100644 --- a/notebooks/catchcount_vector.ipynb +++ b/notebooks/catchcount_vector.ipynb @@ -27,42 +27,28 @@ "metadata": {}, "outputs": [], "source": [ - "import awswrangler as wr\n", - "import pandas as pd\n", "import sqlite3\n", - "import seaborn as sns\n", - "import matplotlib.pyplot as plt\n", - "import numpy as np\n", - "import scipy.stats as stats\n", - "from sklearn.metrics import mean_squared_error\n", + "import warnings\n", "\n", - "from sklearn.linear_model import LinearRegression\n", + "import matplotlib.pyplot as plt\n", + "import pandas as pd\n", + "import seaborn as sns\n", "\n", - "import warnings\n", - "warnings.filterwarnings('ignore')\n", + "warnings.filterwarnings(\"ignore\")\n", "\n", "sns.set_theme()\n", "\n", - "import itertools \n", - "import matplotlib.gridspec as gridspec\n", - "\n", - "from matplotlib.dates import DayLocator, HourLocator, DateFormatter, drange, AutoDateLocator\n", "\n", "import warnings\n", - "warnings.filterwarnings('ignore')\n", - "import json\n", - "from tsai.all import *\n", - "from IPython.display import display, Markdown\n", - "import json\n", - "# from pandas.io.json import json_normalize\n", "\n", - "from matplotlib.patches import Patch\n", + "from matplotlib.dates import DayLocator\n", "\n", + "warnings.filterwarnings(\"ignore\")\n", + "# from pandas.io.json import json_normalize\n", "import matplotlib.dates as mdates\n", - "from matplotlib.lines import Line2D\n", + "from helper_functions.aggregations import *\n", "from helper_functions.data_readers import *\n", - "\n", - "from helper_functions.aggregations import *" + "from tsai.all import *" ] }, { @@ -95,13 +81,13 @@ ], "source": [ "# Brancol Trips\n", - "Brancol1data = get_data(boat = 'brancol', trip_no = 0)\n", - "Brancol2data = get_data(boat = 'brancol', trip_no = 1)\n", - "Brancol3data = get_data(boat = 'brancol', trip_no = 2)\n", + "Brancol1data = get_data(boat=\"brancol\", trip_no=0)\n", + "Brancol2data = get_data(boat=\"brancol\", trip_no=1)\n", + "Brancol3data = get_data(boat=\"brancol\", trip_no=2)\n", "\n", "# ST Patrick Trips\n", - "StPatrick1data = get_data(boat = 'stpatrick', trip_no = 0)\n", - "StPatrick2data = get_data(boat = 'stpatrick', trip_no = 1)" + "StPatrick1data = get_data(boat=\"stpatrick\", trip_no=0)\n", + "StPatrick2data = get_data(boat=\"stpatrick\", trip_no=1)" ] }, { @@ -111,30 +97,29 @@ "metadata": {}, "outputs": [], "source": [ + "counts_Brancol1 = Brancol1data[\"all_counts\"]\n", + "counts_Brancol2 = Brancol2data[\"all_counts\"]\n", + "counts_Brancol3 = Brancol3data[\"all_counts\"]\n", "\n", - "counts_Brancol1 = Brancol1data['all_counts']\n", - "counts_Brancol2 = Brancol2data['all_counts']\n", - "counts_Brancol3 = Brancol3data['all_counts']\n", + "brancol1_elog = Brancol1data[\"elogs\"]\n", + "brancol2_elog = Brancol2data[\"elogs\"]\n", + "brancol3_elog = Brancol3data[\"elogs\"]\n", "\n", - "brancol1_elog = Brancol1data['elogs']\n", - "brancol2_elog = Brancol2data['elogs']\n", - "brancol3_elog 
= Brancol3data['elogs']\n", + "brancol1_bv_sets = Brancol1data[\"bv_sets\"]\n", + "brancol2_bv_sets = Brancol2data[\"bv_sets\"]\n", + "brancol3_bv_sets = Brancol3data[\"bv_sets\"]\n", "\n", - "brancol1_bv_sets = Brancol1data['bv_sets']\n", - "brancol2_bv_sets = Brancol2data['bv_sets']\n", - "brancol3_bv_sets = Brancol3data['bv_sets']\n", + "brancol1_bv_set_counts = Brancol1data[\"bv_set_counts\"]\n", + "brancol2_bv_set_counts = Brancol2data[\"bv_set_counts\"]\n", + "brancol3_bv_set_counts = Brancol3data[\"bv_set_counts\"]\n", "\n", - "brancol1_bv_set_counts = Brancol1data['bv_set_counts']\n", - "brancol2_bv_set_counts = Brancol2data['bv_set_counts']\n", - "brancol3_bv_set_counts = Brancol3data['bv_set_counts']\n", + "aiCounts_Brancol1 = Brancol1data[\"ai_sets\"]\n", + "aiCounts_Brancol2 = Brancol2data[\"ai_sets\"]\n", + "aiCounts_Brancol3 = Brancol3data[\"ai_sets\"]\n", "\n", - "aiCounts_Brancol1 = Brancol1data['ai_sets']\n", - "aiCounts_Brancol2 = Brancol2data['ai_sets']\n", - "aiCounts_Brancol3 = Brancol3data['ai_sets']\n", - "\n", - "brancol1trip = Brancol1data['trip_info']\n", - "brancol2trip = Brancol2data['trip_info']\n", - "brancol3trip = Brancol3data['trip_info']" + "brancol1trip = Brancol1data[\"trip_info\"]\n", + "brancol2trip = Brancol2data[\"trip_info\"]\n", + "brancol3trip = Brancol3data[\"trip_info\"]" ] }, { @@ -144,22 +129,20 @@ "metadata": {}, "outputs": [], "source": [ + "counts_StPatrick1 = StPatrick1data[\"all_counts\"]\n", + "counts_StPatrick2 = StPatrick2data[\"all_counts\"]\n", "\n", + "stpatrick1_elog = StPatrick1data[\"elogs\"]\n", + "stpatrick2_elog = StPatrick2data[\"elogs\"]\n", "\n", - "counts_StPatrick1 = StPatrick1data['all_counts']\n", - "counts_StPatrick2 = StPatrick2data['all_counts']\n", - "\n", - "stpatrick1_elog = StPatrick1data['elogs']\n", - "stpatrick2_elog = StPatrick2data['elogs']\n", - "\n", - "stpatrick1_bv_set_counts = StPatrick1data['bv_set_counts']\n", - "stpatrick2_bv_set_counts = StPatrick2data['bv_set_counts']\n", + "stpatrick1_bv_set_counts = StPatrick1data[\"bv_set_counts\"]\n", + "stpatrick2_bv_set_counts = StPatrick2data[\"bv_set_counts\"]\n", "\n", - "aiCounts_StPatrick1 = StPatrick1data['ai_sets']\n", - "aiCounts_StPatrick2 = StPatrick2data['ai_sets']\n", + "aiCounts_StPatrick1 = StPatrick1data[\"ai_sets\"]\n", + "aiCounts_StPatrick2 = StPatrick2data[\"ai_sets\"]\n", "\n", - "stpatrick2trip = StPatrick2data['trip_info']\n", - "stpatrick1trip = StPatrick1data['trip_info']" + "stpatrick2trip = StPatrick2data[\"trip_info\"]\n", + "stpatrick1trip = StPatrick1data[\"trip_info\"]" ] }, { @@ -179,13 +162,12 @@ "vectorData_StPatrick2 = {}\n", "\n", "for vector in vectors:\n", - " \n", - " vectorData_Brancol1[f'vector_{vector}'] = get_vector_data('brancol',vector, brancol1trip)\n", - " vectorData_Brancol2[f'vector_{vector}'] = get_vector_data('brancol',vector, brancol2trip)\n", - " vectorData_Brancol3[f'vector_{vector}'] = get_vector_data('brancol',vector, brancol3trip)\n", - " \n", - " vectorData_StPatrick1[f'vector_{vector}'] = get_vector_data('stpatrick',vector, stpatrick1trip)\n", - " vectorData_StPatrick2[f'vector_{vector}'] = get_vector_data('stpatrick',vector, stpatrick2trip)" + " vectorData_Brancol1[f\"vector_{vector}\"] = get_vector_data(\"brancol\", vector, brancol1trip)\n", + " vectorData_Brancol2[f\"vector_{vector}\"] = get_vector_data(\"brancol\", vector, brancol2trip)\n", + " vectorData_Brancol3[f\"vector_{vector}\"] = get_vector_data(\"brancol\", vector, brancol3trip)\n", + "\n", + " vectorData_StPatrick1[f\"vector_{vector}\"] = 
get_vector_data(\"stpatrick\", vector, stpatrick1trip)\n", + " vectorData_StPatrick2[f\"vector_{vector}\"] = get_vector_data(\"stpatrick\", vector, stpatrick2trip)" ] }, { @@ -195,20 +177,20 @@ "metadata": {}, "outputs": [], "source": [ - "vector7_brancol1 = vectorData_Brancol1['vector_7']\n", - "vector7_brancol1['datetime'] = pd.to_datetime(vector7_brancol1['datetime'])\n", + "vector7_brancol1 = vectorData_Brancol1[\"vector_7\"]\n", + "vector7_brancol1[\"datetime\"] = pd.to_datetime(vector7_brancol1[\"datetime\"])\n", "\n", - "vector7_brancol2 = vectorData_Brancol2['vector_7']\n", - "vector7_brancol2['datetime'] = pd.to_datetime(vector7_brancol2['datetime'])\n", + "vector7_brancol2 = vectorData_Brancol2[\"vector_7\"]\n", + "vector7_brancol2[\"datetime\"] = pd.to_datetime(vector7_brancol2[\"datetime\"])\n", "\n", - "vector7_brancol3 = vectorData_Brancol3['vector_7']\n", - "vector7_brancol3['datetime'] = pd.to_datetime(vector7_brancol3['datetime'])\n", + "vector7_brancol3 = vectorData_Brancol3[\"vector_7\"]\n", + "vector7_brancol3[\"datetime\"] = pd.to_datetime(vector7_brancol3[\"datetime\"])\n", "\n", - "vector7_stpatrick1 = vectorData_StPatrick1['vector_7']\n", - "vector7_stpatrick1['datetime'] = pd.to_datetime(vector7_stpatrick1['datetime'])\n", + "vector7_stpatrick1 = vectorData_StPatrick1[\"vector_7\"]\n", + "vector7_stpatrick1[\"datetime\"] = pd.to_datetime(vector7_stpatrick1[\"datetime\"])\n", "\n", - "vector7_stpatrick2 = vectorData_StPatrick2['vector_7']\n", - "vector7_stpatrick2['datetime'] = pd.to_datetime(vector7_stpatrick2['datetime'])" + "vector7_stpatrick2 = vectorData_StPatrick2[\"vector_7\"]\n", + "vector7_stpatrick2[\"datetime\"] = pd.to_datetime(vector7_stpatrick2[\"datetime\"])" ] }, { @@ -237,13 +219,12 @@ "outputs": [], "source": [ "def get_ai_counts_elog(ai_counts, elog_hauls):\n", - " conn = sqlite3.connect(':memory:')\n", - " \n", + " conn = sqlite3.connect(\":memory:\")\n", + "\n", + " # write the tables to add set_number\n", + " ai_counts.to_sql(\"ai_counts\", conn, index=False)\n", + " elog_hauls.to_sql(\"hauls\", conn, index=False)\n", "\n", - " #write the tables to add set_number\n", - " ai_counts.to_sql('ai_counts', conn, index=False)\n", - " elog_hauls.to_sql('hauls', conn, index=False)\n", - " \n", " query = \"\"\"\n", " select\n", " distinct\n", @@ -252,17 +233,15 @@ " hauls.id is not null as elog_haul\n", " from ai_counts\n", " left join hauls on ai_counts.utc_start_datetime between hauls.systemstarthauldatetime and hauls.systemendhauldatetime and hauls.id!= '140'\n", - " \n", + "\n", " \"\"\"\n", " ai_counts_elog = pd.read_sql_query(query, conn)\n", "\n", - " ai_counts_elog['utc_end_datetime'] = pd.to_datetime(ai_counts_elog['utc_end_datetime'])\n", - " ai_counts_elog['utc_start_datetime'] = pd.to_datetime(ai_counts_elog['utc_start_datetime'])\n", + " ai_counts_elog[\"utc_end_datetime\"] = pd.to_datetime(ai_counts_elog[\"utc_end_datetime\"])\n", + " ai_counts_elog[\"utc_start_datetime\"] = pd.to_datetime(ai_counts_elog[\"utc_start_datetime\"])\n", "\n", - " elog_hauls['network_delay'] =elog_hauls['datetime']-elog_hauls['systemendhauldatetime']\n", - " elog_hauls['large_delay'] =elog_hauls['network_delay']> pd.Timedelta('4 hours')\n", - "\n", - " \n", + " elog_hauls[\"network_delay\"] = elog_hauls[\"datetime\"] - elog_hauls[\"systemendhauldatetime\"]\n", + " elog_hauls[\"large_delay\"] = elog_hauls[\"network_delay\"] > pd.Timedelta(\"4 hours\")\n", "\n", " return ai_counts_elog, elog_hauls" ] @@ -275,25 +254,28 @@ "outputs": [], "source": [ "def 
identify_excluded_elogs(elog_hauls, df_vector, ai_counts_elog):\n", - "    elog_hauls.sort_values(by = 'systemendhauldatetime', inplace = True)\n", - "    df_vector['window_start'] = df_vector['datetime']-pd.Timedelta('12 hours')\n", + "    elog_hauls.sort_values(by=\"systemendhauldatetime\", inplace=True)\n", + "    df_vector[\"window_start\"] = df_vector[\"datetime\"] - pd.Timedelta(\"12 hours\")\n", "\n", "    elog_vectors = {}\n", "    for idx, haul in elog_hauls.iterrows():\n", "        considered_vectors = []\n", - "        received = haul['datetime']\n", - "        haul_end = haul['systemendhauldatetime']\n", - "        df_relevant_vectors = df_vector.loc[(haul_end < df_vector['datetime']) & (haul_end > df_vector['window_start'])]\n", + "        received = haul[\"datetime\"]\n", + "        haul_end = haul[\"systemendhauldatetime\"]\n", + "        df_relevant_vectors = df_vector.loc[\n", + "            (haul_end < df_vector[\"datetime\"]) & (haul_end > df_vector[\"window_start\"])\n", + "        ]\n", "        for idx, vector in df_relevant_vectors.iterrows():\n", - "            vector_time = vector['datetime']\n", + "            vector_time = vector[\"datetime\"]\n", "            if received < vector_time:\n", - "                considered_vectors.append(vector['id'])\n", - "        elog_vectors[haul['id']] = considered_vectors\n", - "        \n", - "        \n", - "    excluded_elogs = [key for key, value in elog_vectors.items() if len(value)==0]\n", - "    \n", - "    ai_counts_elog['unused_elog'] = ai_counts_elog['elog_id'].apply(lambda x: True if x in excluded_elogs else False)\n", + "                considered_vectors.append(vector[\"id\"])\n", + "        elog_vectors[haul[\"id\"]] = considered_vectors\n", + "\n", + "    excluded_elogs = [key for key, value in elog_vectors.items() if len(value) == 0]\n", + "\n", + "    ai_counts_elog[\"unused_elog\"] = ai_counts_elog[\"elog_id\"].apply(\n", + "        lambda x: True if x in excluded_elogs else False\n", + "    )\n", "\n", "    return ai_counts_elog, elog_vectors" ] @@ -313,43 +295,43 @@ "metadata": {}, "outputs": [], "source": [ - "def annotate_notes(ax, df, text_col = 'network_delay', text_xy = (4, 4), var = 4):\n", - "    arrowprops=dict(arrowstyle=\"->\",connectionstyle=\"arc3,rad=0\", color = 'black')\n", + "def annotate_notes(ax, df, text_col=\"network_delay\", text_xy=(4, 4), var=4):\n", + "    arrowprops = dict(arrowstyle=\"->\", connectionstyle=\"arc3,rad=0\", color=\"black\")\n", "\n", "    annots = []\n", - "    bbox_args = dict(boxstyle='round', facecolor='black', alpha=0.35)\n", + "    bbox_args = dict(boxstyle=\"round\", facecolor=\"black\", alpha=0.35)\n", "\n", "    text_x, text_y = text_xy\n", "\n", - "\n", - "    for idx, row in df.loc[df['large_delay']].iterrows():\n", + "    for idx, row in df.loc[df[\"large_delay\"]].iterrows():\n", "        # y_var = x_vars[n]\n", "        components = row[text_col].components\n", "        if components.days > 0:\n", - "            days = f'{components.days}d '\n", + "            days = f\"{components.days}d \"\n", "        else:\n", - "            days = ''\n", - "        \n", - "        text = f'{days}{components.hours}h delay'\n", - "        data_xy = (row['systemendhauldatetime'], 1)\n", + "            days = \"\"\n", + "\n", + "        text = f\"{days}{components.hours}h delay\"\n", + "        data_xy = (row[\"systemendhauldatetime\"], 1)\n", "\n", - "        \n", "        an = ax.annotate(\n", "            text,\n", - "            xy=data_xy, xycoords='data',\n", - "            xytext=(text_x, text_y), textcoords='offset points',\n", + "            xy=data_xy,\n", + "            xycoords=\"data\",\n", + "            xytext=(text_x, text_y),\n", + "            textcoords=\"offset points\",\n", "            arrowprops=arrowprops,\n", - "            size = 9,\n", - "            horizontalalignment='left',\n", - "            verticalalignment='bottom',\n", + "            size=9,\n", + "            horizontalalignment=\"left\",\n", + "            verticalalignment=\"bottom\",\n", "            # bbox = bbox_args,\n", - "            
color = 'black'\n", + " color=\"black\",\n", " )\n", - " \n", + "\n", " annots.append(an)\n", - " \n", - " text_y+=var\n", - " var = var*-1\n", + "\n", + " text_y += var\n", + " var = var * -1\n", "\n", " return annots" ] @@ -372,106 +354,147 @@ } ], "source": [ - "sns.set_style(\"whitegrid\", {'axes.grid' : False})\n", - "plt.rc('xtick',labelsize=8)\n", - "plt.rc('ytick',labelsize=8)\n", - "plt.rc('axes', labelsize = 9)\n", - "\n", - "def vector_score_plot(elog_hauls, ai_counts_elog, catch_col, df_vector, figsize, hpad, savefig = None):\n", - " bv_color = '#a2c662'\n", - " ai_color = '#184EAD'\n", - " elog_color = '#117347'\n", - " vector_color = 'red'\n", - " \n", + "sns.set_style(\"whitegrid\", {\"axes.grid\": False})\n", + "plt.rc(\"xtick\", labelsize=8)\n", + "plt.rc(\"ytick\", labelsize=8)\n", + "plt.rc(\"axes\", labelsize=9)\n", + "\n", + "\n", + "def vector_score_plot(\n", + " elog_hauls, ai_counts_elog, catch_col, df_vector, figsize, hpad, savefig=None\n", + "):\n", + " bv_color = \"#a2c662\"\n", + " ai_color = \"#184EAD\"\n", + " elog_color = \"#117347\"\n", + " vector_color = \"red\"\n", + "\n", " # predictions_color = '#43aa99'\n", - " \n", - " \n", - " \n", - " fig, axes = plt.subplots(3, 1, figsize = figsize, sharex = True)\n", + "\n", + " fig, axes = plt.subplots(3, 1, figsize=figsize, sharex=True)\n", " plt.tight_layout()\n", - " plt.subplots_adjust(hspace = hpad)\n", - " \n", + " plt.subplots_adjust(hspace=hpad)\n", + "\n", " ax = axes[0]\n", " ax2 = axes[1]\n", " ax3 = axes[2]\n", " # twin1 = ax.twinx()\n", " # twin2 = ax.twinx()\n", - " \n", + "\n", " # Offset the right spine of twin2. The ticks and label have already been\n", " # placed on the right by twinx above.\n", " # twin2.spines.right.set_position((\"axes\", 1.1))\n", " # twin2.set_ylim(-0.1,2)\n", " # twin2.set_yticks([0,1])\n", - " \n", - " annots = annotate_notes(ax ,elog_hauls, text_xy = (10, 10), var = 10 )\n", - " \n", - " # TOP ax = Elog plot \n", - " sns.lineplot(x = 'utc_start_datetime', y = 'elog_haul', data = ai_counts_elog.loc[ai_counts_elog['unused_elog']==False], ax = ax, label = 'Elog Included', color =elog_color, lw = 1, )\n", - " sns.lineplot(x = 'utc_start_datetime', y = 'elog_haul', data = ai_counts_elog.loc[(ai_counts_elog['unused_elog']) | (ai_counts_elog['elog_haul']==0)], ax = ax, label = 'Elog Excluded', linestyle = '--', color =elog_color, lw = 1)\n", + "\n", + " annots = annotate_notes(ax, elog_hauls, text_xy=(10, 10), var=10)\n", + "\n", + " # TOP ax = Elog plot\n", + " sns.lineplot(\n", + " x=\"utc_start_datetime\",\n", + " y=\"elog_haul\",\n", + " data=ai_counts_elog.loc[ai_counts_elog[\"unused_elog\"] == False],\n", + " ax=ax,\n", + " label=\"Elog Included\",\n", + " color=elog_color,\n", + " lw=1,\n", + " )\n", + " sns.lineplot(\n", + " x=\"utc_start_datetime\",\n", + " y=\"elog_haul\",\n", + " data=ai_counts_elog.loc[\n", + " (ai_counts_elog[\"unused_elog\"]) | (ai_counts_elog[\"elog_haul\"] == 0)\n", + " ],\n", + " ax=ax,\n", + " label=\"Elog Excluded\",\n", + " linestyle=\"--\",\n", + " color=elog_color,\n", + " lw=1,\n", + " )\n", " # sns.lineplot(x = 'utc_start_datetime', y = 'elog_haul', data = ai_counts_elog, ax = ax, label = 'Elog Excluded', linestyle = '--', color =elog_color, lw = 1)\n", - " ax.set_ylabel('Elog Haul')\n", + " ax.set_ylabel(\"Elog Haul\")\n", " ax.set_ylim(-0.1, 2.5)\n", - " ax.set_yticks([0,1])\n", - " \n", - " \n", + " ax.set_yticks([0, 1])\n", + "\n", " # MIDDLE ax2 = AI catches\n", - " sns.lineplot(x = 'utc_start_datetime', y = catch_col, data = 
ai_counts_elog, ax = ax2, label = 'AI Counts', color =ai_color, lw = 1)\n", - " ax2.set_ylabel('AI Count')\n", - " \n", + " sns.lineplot(\n", + " x=\"utc_start_datetime\",\n", + " y=catch_col,\n", + " data=ai_counts_elog,\n", + " ax=ax2,\n", + " label=\"AI Counts\",\n", + " color=ai_color,\n", + " lw=1,\n", + " )\n", + " ax2.set_ylabel(\"AI Count\")\n", + "\n", " # BOTTOM ax3 = vector schore\n", " # sns.lineplot(x = 'datetime', y= 'score', data = df_vector, ax = ax3, label = 'vector_score', color = vector_color, marker = 'o', markersize = 5, lw = 1)\n", - " ax3.set_ylabel('Vector Score')\n", - "\n", - " df_vector_nulls = df_vector.loc[df_vector['score'].isna()]\n", - " \n", - " markerline, stemlines, baseline = ax3.stem(df_vector['datetime'], df_vector['score'], linefmt = vector_color, label = 'Vector Score')\n", - " stemlines.set_linewidths(.5)\n", - " baseline.set_linewidth(.2)\n", - " markerline.set_markersize(.3)\n", - "\n", - " markerline2, stemlines2, baseline2 = ax3.stem(df_vector_nulls['datetime'], [.2]*len(df_vector_nulls) , linefmt = 'grey', label = 'Null Score')\n", - " stemlines2.set_linewidths(.5)\n", - " baseline2.set_linewidth(.2)\n", + " ax3.set_ylabel(\"Vector Score\")\n", + "\n", + " df_vector_nulls = df_vector.loc[df_vector[\"score\"].isna()]\n", + "\n", + " markerline, stemlines, baseline = ax3.stem(\n", + " df_vector[\"datetime\"], df_vector[\"score\"], linefmt=vector_color, label=\"Vector Score\"\n", + " )\n", + " stemlines.set_linewidths(0.5)\n", + " baseline.set_linewidth(0.2)\n", + " markerline.set_markersize(0.3)\n", + "\n", + " markerline2, stemlines2, baseline2 = ax3.stem(\n", + " df_vector_nulls[\"datetime\"],\n", + " [0.2] * len(df_vector_nulls),\n", + " linefmt=\"grey\",\n", + " label=\"Null Score\",\n", + " )\n", + " stemlines2.set_linewidths(0.5)\n", + " baseline2.set_linewidth(0.2)\n", " markerline2.set_markersize(0)\n", - " \n", + "\n", " # ax3.vlines(df_vector_nulls['datetime'], ymin=-.01, ymax=.2, color = 'grey', linewidth = .3)\n", - " ax3.set_ylabel('Vector Score')\n", - " \n", - " \n", + " ax3.set_ylabel(\"Vector Score\")\n", + "\n", " # formatting x axis dates\n", - " locator = DayLocator(interval = 2)\n", + " locator = DayLocator(interval=2)\n", " # locator = AutoDateLocator(minticks = 14)\n", " formatter = mdates.ConciseDateFormatter(locator)\n", " ax3.xaxis.set_major_locator(locator)\n", " ax3.xaxis.set_major_formatter(formatter)\n", - " ax3.set_xlabel('Datetime (UTC)')\n", - " \n", - " \n", - " \n", + " ax3.set_xlabel(\"Datetime (UTC)\")\n", + "\n", " # creating legend\n", " h1, l1 = ax.get_legend_handles_labels()\n", " h2, l2 = ax2.get_legend_handles_labels()\n", " h3, l3 = ax3.get_legend_handles_labels()\n", - " # line = Line2D([0], [0], color='grey', marker='|', \n", + " # line = Line2D([0], [0], color='grey', marker='|',\n", " # markersize=10, markeredgewidth=1.5, label='Vertical line')\n", "\n", " # h3.extend([line])\n", - " \n", + "\n", " # ax.legend(h1+h2+ h3 , l1+l2+ l3, fontsize = 9,loc='upper center', bbox_to_anchor=(1.15, .5), frameon = False)\n", - " ax.legend(frameon = False, fontsize = 9)\n", - " ax2.legend(frameon = False, fontsize = 9, loc = 'upper left')\n", - " ax3.legend(frameon = False, fontsize = 9)\n", + " ax.legend(frameon=False, fontsize=9)\n", + " ax2.legend(frameon=False, fontsize=9, loc=\"upper left\")\n", + " ax3.legend(frameon=False, fontsize=9)\n", " # ax3.get_legend().remove()\n", " # ax2.get_legend().remove()\n", - " \n", - " sns.despine(trim=True, \n", - " right = False,\n", - " # left=True\n", - " )\n", + "\n", 
+ " sns.despine(\n", + " trim=True,\n", + " right=False,\n", + " # left=True\n", + " )\n", " if savefig:\n", - " plt.savefig(savefig,bbox_inches='tight', dpi = 150)\n", + " plt.savefig(savefig, bbox_inches=\"tight\", dpi=150)\n", "\n", - "prep_and_plot(ai_counts = aiCounts_StPatrick2,elog_hauls = stpatrick2_elog, df_vector = vector7_stpatrick2.copy(), count_col = \"overallcatches\", figsize = (7, 3),hpad = 0, savefig = 'vector7_stpatrick2.png', )" + "\n", + "prep_and_plot(\n", + " ai_counts=aiCounts_StPatrick2,\n", + " elog_hauls=stpatrick2_elog,\n", + " df_vector=vector7_stpatrick2.copy(),\n", + " count_col=\"overallcatches\",\n", + " figsize=(7, 3),\n", + " hpad=0,\n", + " savefig=\"vector7_stpatrick2.png\",\n", + ")" ] }, { @@ -492,7 +515,7 @@ } ], "source": [ - "1040/150" + "1040 / 150" ] }, { @@ -513,7 +536,7 @@ } ], "source": [ - "446/150" + "446 / 150" ] }, { @@ -531,10 +554,20 @@ "metadata": {}, "outputs": [], "source": [ - "def prep_and_plot(ai_counts,elog_hauls, df_vector, count_col, figsize = (7, 3), hpad = .5, savefig = None):\n", - " ai_counts_elog, elog_hauls = get_ai_counts_elog(ai_counts,elog_hauls)\n", + "def prep_and_plot(\n", + " ai_counts, elog_hauls, df_vector, count_col, figsize=(7, 3), hpad=0.5, savefig=None\n", + "):\n", + " ai_counts_elog, elog_hauls = get_ai_counts_elog(ai_counts, elog_hauls)\n", " ai_counts_elog, elog_vectors = identify_excluded_elogs(elog_hauls, df_vector, ai_counts_elog)\n", - " vector_score_plot(elog_hauls, ai_counts_elog, count_col, df_vector, figsize = figsize, hpad = hpad, savefig = savefig)" + " vector_score_plot(\n", + " elog_hauls,\n", + " ai_counts_elog,\n", + " count_col,\n", + " df_vector,\n", + " figsize=figsize,\n", + " hpad=hpad,\n", + " savefig=savefig,\n", + " )" ] }, { @@ -563,7 +596,15 @@ } ], "source": [ - "prep_and_plot(ai_counts = aiCounts_StPatrick2,elog_hauls = stpatrick2_elog, df_vector = vector7_stpatrick2.copy(), count_col = \"overallcatches\", figsize = (7, 3),hpad = 0, savefig = 'vector7_stpatrick2.png', )" + "prep_and_plot(\n", + " ai_counts=aiCounts_StPatrick2,\n", + " elog_hauls=stpatrick2_elog,\n", + " df_vector=vector7_stpatrick2.copy(),\n", + " count_col=\"overallcatches\",\n", + " figsize=(7, 3),\n", + " hpad=0,\n", + " savefig=\"vector7_stpatrick2.png\",\n", + ")" ] }, { @@ -584,7 +625,13 @@ } ], "source": [ - "prep_and_plot(ai_counts = aiCounts_StPatrick1,elog_hauls = stpatrick1_elog, df_vector = vector7_stpatrick1.copy(), count_col = \"overallcatches\", figsize = (7, 3))" + "prep_and_plot(\n", + " ai_counts=aiCounts_StPatrick1,\n", + " elog_hauls=stpatrick1_elog,\n", + " df_vector=vector7_stpatrick1.copy(),\n", + " count_col=\"overallcatches\",\n", + " figsize=(7, 3),\n", + ")" ] }, { @@ -605,7 +652,13 @@ } ], "source": [ - "prep_and_plot(ai_counts = aiCounts_Brancol1.copy(),elog_hauls = brancol1_elog.copy(), df_vector = vector7_brancol1.copy(), count_col = \"count\", figsize = (7, 3))" + "prep_and_plot(\n", + " ai_counts=aiCounts_Brancol1.copy(),\n", + " elog_hauls=brancol1_elog.copy(),\n", + " df_vector=vector7_brancol1.copy(),\n", + " count_col=\"count\",\n", + " figsize=(7, 3),\n", + ")" ] }, { @@ -634,7 +687,13 @@ } ], "source": [ - "prep_and_plot(ai_counts = aiCounts_Brancol2,elog_hauls = brancol2_elog, df_vector = vector7_brancol2.copy(), count_col = \"count\", figsize = (7, 3))" + "prep_and_plot(\n", + " ai_counts=aiCounts_Brancol2,\n", + " elog_hauls=brancol2_elog,\n", + " df_vector=vector7_brancol2.copy(),\n", + " count_col=\"count\",\n", + " figsize=(7, 3),\n", + ")" ] }, { @@ -655,7 +714,13 @@ } 
], "source": [ - "prep_and_plot(ai_counts = aiCounts_Brancol3,elog_hauls = brancol3_elog, df_vector = vector7_brancol3.copy(), count_col = \"count\", figsize = (7, 3))" + "prep_and_plot(\n", + " ai_counts=aiCounts_Brancol3,\n", + " elog_hauls=brancol3_elog,\n", + " df_vector=vector7_brancol3.copy(),\n", + " count_col=\"count\",\n", + " figsize=(7, 3),\n", + ")" ] }, { @@ -678,7 +743,7 @@ } ], "source": [ - "df_vector_nulls['detail'].value_counts()" + "df_vector_nulls[\"detail\"].value_counts()" ] }, { @@ -691,7 +756,7 @@ "ai_counts = aiCounts_Brancol2.copy()\n", "elog_hauls = brancol2_elog.copy()\n", "df_vector = vector7_brancol2.copy()\n", - "ai_counts_elog, elog_hauls = get_ai_counts_elog(ai_counts,elog_hauls)\n", + "ai_counts_elog, elog_hauls = get_ai_counts_elog(ai_counts, elog_hauls)\n", "ai_counts_elog, elog_vectors = identify_excluded_elogs(elog_hauls, df_vector, ai_counts_elog)" ] }, @@ -850,7 +915,7 @@ } ], "source": [ - "elog_hauls.set_index('id').drop(index = '140').head()" + "elog_hauls.set_index(\"id\").drop(index=\"140\").head()" ] }, { @@ -860,7 +925,7 @@ "metadata": {}, "outputs": [], "source": [ - "df_vector.sort_values('datetime', inplace = True)" + "df_vector.sort_values(\"datetime\", inplace=True)" ] }, { @@ -1206,7 +1271,7 @@ } ], "source": [ - "df_vector.set_index('datetime').loc['2024-02-07':'2024-02-08']" + "df_vector.set_index(\"datetime\").loc[\"2024-02-07\":\"2024-02-08\"]" ] }, { @@ -1237,16 +1302,15 @@ } ], "source": [ - "bv_color = '#a2c662'\n", - "ai_color = '#184EAD'\n", - "elog_color = '#117347'\n", - "vector_color = 'red'\n", + "bv_color = \"#a2c662\"\n", + "ai_color = \"#184EAD\"\n", + "elog_color = \"#117347\"\n", + "vector_color = \"red\"\n", "\n", "# predictions_color = '#43aa99'\n", "\n", "\n", - "\n", - "fig, axes = plt.subplots(3, 1, figsize = (7,3), sharex = True)\n", + "fig, axes = plt.subplots(3, 1, figsize=(7, 3), sharex=True)\n", "\n", "ax = axes[0]\n", "ax2 = axes[1]\n", @@ -1260,9 +1324,9 @@ "# twin2.set_ylim(-0.1,2)\n", "# twin2.set_yticks([0,1])\n", "\n", - "annots = annotate_notes(ax ,elog_hauls, text_xy = (-40, 7), var = 10 )\n", + "annots = annotate_notes(ax, elog_hauls, text_xy=(-40, 7), var=10)\n", "\n", - "# # TOP ax = Elog plot \n", + "# # TOP ax = Elog plot\n", "# sns.lineplot(x = 'utc_start_datetime', y = 'elog_haul', data = ai_counts_elog.loc[ai_counts_elog['unused_elog']==False], ax = ax, label = 'Elog Included', color =elog_color, lw = 1, )\n", "# sns.lineplot(x = 'utc_start_datetime', y = 'elog_haul', data = ai_counts_elog.loc[(ai_counts_elog['unused_elog']) | (ai_counts_elog['elog_haul']==0)], ax = ax, label = 'Elog Excluded', linestyle = '--', color =elog_color, lw = 1)\n", "# # sns.lineplot(x = 'utc_start_datetime', y = 'elog_haul', data = ai_counts_elog, ax = ax, label = 'Elog Excluded', linestyle = '--', color =elog_color, lw = 1)\n", @@ -1277,12 +1341,21 @@ "\n", "# BOTTOM ax3 = vector schore\n", "# sns.lineplot(x = 'datetime', y= 'score', data = df_vector, ax = ax3, label = 'vector_score', color = vector_color, marker = 'o', markersize = 5, lw = 1)\n", - "ax3.vlines(df_vector_nulls['datetime'], ymin=-.01, ymax=.2, color = 'grey', linewidth = .3, label = 'Null Score')\n", - "markerline, stemlines, baseline = ax3.stem(df_vector['datetime'], df_vector['score'], linefmt = vector_color, label = 'Vector Score')\n", - "stemlines.set_linewidths(.5)\n", - "baseline.set_linewidth(.2)\n", - "markerline.set_markersize(.3)\n", - "ax3.set_ylabel('Vector Score')" + "ax3.vlines(\n", + " df_vector_nulls[\"datetime\"],\n", + " 
ymin=-0.01,\n", + " ymax=0.2,\n", + " color=\"grey\",\n", + " linewidth=0.3,\n", + " label=\"Null Score\",\n", + ")\n", + "markerline, stemlines, baseline = ax3.stem(\n", + " df_vector[\"datetime\"], df_vector[\"score\"], linefmt=vector_color, label=\"Vector Score\"\n", + ")\n", + "stemlines.set_linewidths(0.5)\n", + "baseline.set_linewidth(0.2)\n", + "markerline.set_markersize(0.3)\n", + "ax3.set_ylabel(\"Vector Score\")" ] }, { @@ -1292,7 +1365,7 @@ "metadata": {}, "outputs": [], "source": [ - "df_vector_nulls = df_vector.loc[df_vector['score'].isna()]" + "df_vector_nulls = df_vector.loc[df_vector[\"score\"].isna()]" ] }, { @@ -1324,7 +1397,7 @@ } ], "source": [ - "df_vector['score'].isna()" + "df_vector[\"score\"].isna()" ] } ], diff --git a/notebooks/edge_integration_charts.ipynb b/notebooks/edge_integration_charts.ipynb index 5d212cf..f2b9eb6 100644 --- a/notebooks/edge_integration_charts.ipynb +++ b/notebooks/edge_integration_charts.ipynb @@ -27,35 +27,24 @@ "metadata": {}, "outputs": [], "source": [ - "import pandas as pd\n", + "import warnings\n", "\n", - "import awswrangler as wr\n", + "import matplotlib.pyplot as plt\n", "import pandas as pd\n", - "import sqlite3\n", "import seaborn as sns\n", - "import matplotlib.pyplot as plt\n", - "import numpy as np\n", - "import scipy.stats as stats\n", - "from sklearn.metrics import mean_squared_error\n", "\n", - "from sklearn.linear_model import LinearRegression\n", - "\n", - "import warnings\n", - "warnings.filterwarnings('ignore')\n", + "warnings.filterwarnings(\"ignore\")\n", "\n", "sns.set_theme()\n", "\n", - "import itertools \n", - "import matplotlib.gridspec as gridspec\n", "\n", - "from matplotlib.dates import DayLocator, HourLocator, DateFormatter, drange\n", "\n", "import warnings\n", - "warnings.filterwarnings('ignore')\n", "\n", - "from tsai.all import *\n", + "warnings.filterwarnings(\"ignore\")\n", "\n", - "from pywaffle import Waffle" + "from pywaffle import Waffle\n", + "from tsai.all import *" ] }, { @@ -66,7 +55,7 @@ "outputs": [], "source": [ "# read in ai & video status csv\n", - "df = pd.read_pickle('../data/integration_state_evaluations_videocopy_ai.pickle')" + "df = pd.read_pickle(\"../data/integration_state_evaluations_videocopy_ai.pickle\")" ] }, { @@ -270,20 +259,84 @@ "metadata": {}, "outputs": [], "source": [ - "report_colors = sns.color_palette(['#184EAD','#648fff','#88ccee','#ae9ef7','#6844d5','#332288','#c52dac','#ef4341','#84164c','#cb6577','#ff6100','#90550f','#c78d1b','#ffb003','#ddcc77','#a2c662','#40a018','#117347','#43aa99'])\n", + "report_colors = sns.color_palette(\n", + " [\n", + " \"#184EAD\",\n", + " \"#648fff\",\n", + " \"#88ccee\",\n", + " \"#ae9ef7\",\n", + " \"#6844d5\",\n", + " \"#332288\",\n", + " \"#c52dac\",\n", + " \"#ef4341\",\n", + " \"#84164c\",\n", + " \"#cb6577\",\n", + " \"#ff6100\",\n", + " \"#90550f\",\n", + " \"#c78d1b\",\n", + " \"#ffb003\",\n", + " \"#ddcc77\",\n", + " \"#a2c662\",\n", + " \"#40a018\",\n", + " \"#117347\",\n", + " \"#43aa99\",\n", + " ]\n", + ")\n", "\n", - "color_list = ['#184EAD','#648fff','#88ccee','#ae9ef7','#6844d5','#332288','#c52dac','#ef4341','#84164c','#cb6577','#ff6100','#90550f','#c78d1b','#ffb003','#ddcc77','#a2c662','#40a018','#117347','#43aa99']\n", + "color_list = [\n", + " \"#184EAD\",\n", + " \"#648fff\",\n", + " \"#88ccee\",\n", + " \"#ae9ef7\",\n", + " \"#6844d5\",\n", + " \"#332288\",\n", + " \"#c52dac\",\n", + " \"#ef4341\",\n", + " \"#84164c\",\n", + " \"#cb6577\",\n", + " \"#ff6100\",\n", + " \"#90550f\",\n", + " \"#c78d1b\",\n", + " 
\"#ffb003\",\n", + " \"#ddcc77\",\n", + " \"#a2c662\",\n", + " \"#40a018\",\n", + " \"#117347\",\n", + " \"#43aa99\",\n", + "]\n", "\n", "%matplotlib inline\n", - "color_list = ['#184EAD','#648fff','#88ccee','#ae9ef7','#6844d5','#332288','#c52dac','#ef4341','#84164c','#cb6577','#ff6100','#90550f','#c78d1b','#ffb003','#ddcc77','#a2c662','#40a018','#117347','#43aa99']\n", + "color_list = [\n", + " \"#184EAD\",\n", + " \"#648fff\",\n", + " \"#88ccee\",\n", + " \"#ae9ef7\",\n", + " \"#6844d5\",\n", + " \"#332288\",\n", + " \"#c52dac\",\n", + " \"#ef4341\",\n", + " \"#84164c\",\n", + " \"#cb6577\",\n", + " \"#ff6100\",\n", + " \"#90550f\",\n", + " \"#c78d1b\",\n", + " \"#ffb003\",\n", + " \"#ddcc77\",\n", + " \"#a2c662\",\n", + " \"#40a018\",\n", + " \"#117347\",\n", + " \"#43aa99\",\n", + "]\n", + "\n", + "\n", "def show_color_pallete(pallete):\n", - "# fig, ax = plt.subplots()\n", - " \n", + " # fig, ax = plt.subplots()\n", + "\n", " sns.palplot(pallete, size=2)\n", " ax = plt.gca()\n", " for i, name in enumerate(pallete):\n", - " label = f'[{i}]'\n", - " ax.text(i, -.57, label,horizontalalignment='center', fontsize = 10) \n", + " label = f\"[{i}]\"\n", + " ax.text(i, -0.57, label, horizontalalignment=\"center\", fontsize=10)\n", " plt.show()" ] }, @@ -325,30 +378,44 @@ } ], "source": [ - "#create a dictionary of statuses and values\n", - "video_cols = ['videocopy_no_video', 'videocopy_found_unable_to_copy','videocopy_late', 'videocopy_ok']\n", - "ai_cols = ['ai_vidok_but_did_not_try','ai_crash_no_output', 'ai_outputed_but_cant_parse','ai_crash_output_too_fast', 'ai_ok']\n", + "# create a dictionary of statuses and values\n", + "video_cols = [\n", + " \"videocopy_no_video\",\n", + " \"videocopy_found_unable_to_copy\",\n", + " \"videocopy_late\",\n", + " \"videocopy_ok\",\n", + "]\n", + "ai_cols = [\n", + " \"ai_vidok_but_did_not_try\",\n", + " \"ai_crash_no_output\",\n", + " \"ai_outputed_but_cant_parse\",\n", + " \"ai_crash_output_too_fast\",\n", + " \"ai_ok\",\n", + "]\n", "video_status = {}\n", "ai_status = {}\n", "for video_col in video_cols:\n", " value_counts = df[video_col].value_counts().to_dict()\n", " if True in value_counts.keys():\n", - " video_status[video_col] = {'value': value_counts[True]}\n", + " video_status[video_col] = {\"value\": value_counts[True]}\n", "\n", "\n", "for ai_col in ai_cols:\n", " value_counts = df[ai_col].value_counts().to_dict()\n", " # print(value_counts)\n", " if True in value_counts.keys():\n", - " ai_status[ai_col] = {'value': value_counts[True]}\n", + " ai_status[ai_col] = {\"value\": value_counts[True]}\n", "\n", "\n", - "ai_status['no_video'] = {'value': video_status['videocopy_no_video']['value']+ video_status['videocopy_found_unable_to_copy']['value']}\n", + "ai_status[\"no_video\"] = {\n", + " \"value\": video_status[\"videocopy_no_video\"][\"value\"]\n", + " + video_status[\"videocopy_found_unable_to_copy\"][\"value\"]\n", + "}\n", "\n", "print(video_status)\n", "print(ai_status)\n", "# print(statusDF['status'].value_counts().to_dict())\n", - "print(' ')\n" + "print(\" \")\n" ] }, { @@ -360,49 +427,22 @@ "source": [ "# dictionaries of column name and desired display label and color for ai status\n", "ai_dict = {\n", - " 'no_video': {\n", - " 'label': 'No Video',\n", - " 'color':'#949494' \n", - " },\n", - " 'ai_vidok_but_did_not_try': {\n", - " 'label': 'Video ok but did not try',\n", - " 'color':report_colors[1]\n", - " } ,\n", - " 'ai_crash_no_output': {\n", - " 'label': 'Crash, no output',\n", - " 'color': report_colors[10]\n", - " }, \n", - 
" 'ai_outputed_but_cant_parse': {\n", - " 'label': 'Outputted, cannot parse',\n", - " 'color':report_colors[13]\n", - " },\n", - " 'ai_crash_output_too_fast': {\n", - " 'label': 'Empty Output',\n", - " 'color':report_colors[6]\n", - " },\n", - " 'ai_ok': {\n", - " 'label': 'Ok',\n", - " 'color': report_colors[15]\n", - " }\n", + " \"no_video\": {\"label\": \"No Video\", \"color\": \"#949494\"},\n", + " \"ai_vidok_but_did_not_try\": {\"label\": \"Video ok but did not try\", \"color\": report_colors[1]},\n", + " \"ai_crash_no_output\": {\"label\": \"Crash, no output\", \"color\": report_colors[10]},\n", + " \"ai_outputed_but_cant_parse\": {\"label\": \"Outputted, cannot parse\", \"color\": report_colors[13]},\n", + " \"ai_crash_output_too_fast\": {\"label\": \"Empty Output\", \"color\": report_colors[6]},\n", + " \"ai_ok\": {\"label\": \"Ok\", \"color\": report_colors[15]},\n", "}\n", "\n", "video_dict = {\n", - " 'videocopy_no_video': {\n", - " 'label': 'No Video',\n", - " 'color': '#949494'\n", - " },\n", - " 'videocopy_found_unable_to_copy': {\n", - " 'label': 'Video found, unable to copy',\n", - " 'color': report_colors[10]\n", - " } ,\n", - " 'videocopy_late': {\n", - " 'label': 'Late',\n", - " 'color':report_colors[13]\n", - " }, \n", - " 'videocopy_ok': {\n", - " 'label': 'Ok',\n", - " 'color': report_colors[15]\n", + " \"videocopy_no_video\": {\"label\": \"No Video\", \"color\": \"#949494\"},\n", + " \"videocopy_found_unable_to_copy\": {\n", + " \"label\": \"Video found, unable to copy\",\n", + " \"color\": report_colors[10],\n", " },\n", + " \"videocopy_late\": {\"label\": \"Late\", \"color\": report_colors[13]},\n", + " \"videocopy_ok\": {\"label\": \"Ok\", \"color\": report_colors[15]},\n", "}" ] }, @@ -458,26 +498,22 @@ "outputs": [], "source": [ "gps_status = {\n", - " 'integration_errors': {\n", - " 'value':int_errs,\n", - " 'label': 'Integration error',\n", - " 'color': report_colors[10]\n", + " \"integration_errors\": {\n", + " \"value\": int_errs,\n", + " \"label\": \"Integration error\",\n", + " \"color\": report_colors[10],\n", + " },\n", + " \"int_errs_pwr\": {\n", + " \"value\": int_errs_pwr,\n", + " \"label\": \"Integration error at power boundary\",\n", + " \"color\": report_colors[13],\n", " },\n", - " 'int_errs_pwr': {\n", - " 'value':int_errs_pwr,\n", - " 'label': 'Integration error at power boundary',\n", - " 'color': report_colors[13]\n", - " } ,\n", - " 'out_of_disk_space': {\n", - " 'value':out_of_disk_space,\n", - " 'label': 'Out of disk space',\n", - " 'color': report_colors[5]\n", - " }, \n", - " 'gps_ok': {\n", - " 'value':ok,\n", - " 'label': 'Ok',\n", - " 'color': report_colors[15]\n", - " }\n", + " \"out_of_disk_space\": {\n", + " \"value\": out_of_disk_space,\n", + " \"label\": \"Out of disk space\",\n", + " \"color\": report_colors[5],\n", + " },\n", + " \"gps_ok\": {\"value\": ok, \"label\": \"Ok\", \"color\": report_colors[15]},\n", "}" ] }, @@ -507,9 +543,21 @@ } ], "source": [ - "video_col_order = ['videocopy_no_video', 'videocopy_found_unable_to_copy','videocopy_late', 'videocopy_ok']\n", - "ai_col_order = ['no_video', 'ai_vidok_but_did_not_try','ai_crash_no_output', 'ai_outputed_but_cant_parse','ai_crash_output_too_fast', 'ai_ok']\n", - "gps_order = ['integration_errors', 'int_errs_pwr','out_of_disk_space', 'gps_ok']\n", + "video_col_order = [\n", + " \"videocopy_no_video\",\n", + " \"videocopy_found_unable_to_copy\",\n", + " \"videocopy_late\",\n", + " \"videocopy_ok\",\n", + "]\n", + "ai_col_order = [\n", + " \"no_video\",\n", + " 
\"ai_vidok_but_did_not_try\",\n", + " \"ai_crash_no_output\",\n", + " \"ai_outputed_but_cant_parse\",\n", + " \"ai_crash_output_too_fast\",\n", + " \"ai_ok\",\n", + "]\n", + "gps_order = [\"integration_errors\", \"int_errs_pwr\", \"out_of_disk_space\", \"gps_ok\"]\n", "\n", "waffle_dict = {}\n", "\n", @@ -521,29 +569,33 @@ "plot_dict = {}\n", "\n", "\n", - "for col in video_col_order: \n", - " \n", - " value = video_status[col]['value']\n", - " color = video_status[col]['color']\n", - " label = video_status[col]['label'] \n", + "for col in video_col_order:\n", + " value = video_status[col][\"value\"]\n", + " color = video_status[col][\"color\"]\n", + " label = video_status[col][\"label\"]\n", " n = 1\n", "\n", - " percent = (value/len(df))*100\n", + " percent = (value / len(df)) * 100\n", " percent_formatted = f\"{percent:.1f}\\\\%\"\n", - " formatted_label = label + r' $\\bf{{{}}}$'.format(percent_formatted.replace(' ', r'\\;'))\n", - " \n", - " values.append(value/scale)\n", + " formatted_label = label + r\" $\\bf{{{}}}$\".format(percent_formatted.replace(\" \", r\"\\;\"))\n", + "\n", + " values.append(value / scale)\n", " labels.append(formatted_label)\n", " colors.append(color)\n", "\n", - "plot_dict['values'] = values\n", - "plot_dict['labels'] = labels\n", - "plot_dict['legend'] = {'loc': 'upper left', 'bbox_to_anchor': (1.05, 1.1), 'fontsize': 8, 'frameon':False}\n", - "plot_dict['title'] = {'label': \"Video Integration Status\", 'loc': 'left', 'fontsize': 9}\n", - "plot_dict['colors'] = colors\n", + "plot_dict[\"values\"] = values\n", + "plot_dict[\"labels\"] = labels\n", + "plot_dict[\"legend\"] = {\n", + " \"loc\": \"upper left\",\n", + " \"bbox_to_anchor\": (1.05, 1.1),\n", + " \"fontsize\": 8,\n", + " \"frameon\": False,\n", + "}\n", + "plot_dict[\"title\"] = {\"label\": \"Video Integration Status\", \"loc\": \"left\", \"fontsize\": 9}\n", + "plot_dict[\"colors\"] = colors\n", + "\n", + "waffle_dict[310 + n] = plot_dict\n", "\n", - "waffle_dict[310+n] = plot_dict\n", - " \n", "# ai status waffle\n", "values = []\n", "labels = []\n", @@ -551,27 +603,31 @@ "plot_dict = {}\n", "\n", "for col in ai_col_order:\n", - " \n", - " label = ai_status[col]['label']\n", - " color = ai_status[col]['color']\n", - " value = ai_status[col]['value']\n", + " label = ai_status[col][\"label\"]\n", + " color = ai_status[col][\"color\"]\n", + " value = ai_status[col][\"value\"]\n", " n = 2\n", - " \n", - " percent = (value/len(df))*100\n", + "\n", + " percent = (value / len(df)) * 100\n", " percent_formatted = f\"{percent:.1f}\\\\%\"\n", - " formatted_label = label + r' $\\bf{{{}}}$'.format(percent_formatted.replace(' ', r'\\;'))\n", - " \n", - " values.append(value/scale)\n", + " formatted_label = label + r\" $\\bf{{{}}}$\".format(percent_formatted.replace(\" \", r\"\\;\"))\n", + "\n", + " values.append(value / scale)\n", " labels.append(formatted_label)\n", " colors.append(color)\n", "\n", - "plot_dict['values'] = values\n", - "plot_dict['labels'] = labels\n", - "plot_dict['legend'] = {'loc': 'upper left', 'bbox_to_anchor': (1.05, 1.1), 'fontsize': 8, 'frameon':False}\n", - "plot_dict['title'] = {'label': \"AI Integration Status\", 'loc': 'left', 'fontsize': 9}\n", - "plot_dict['colors'] = colors\n", + "plot_dict[\"values\"] = values\n", + "plot_dict[\"labels\"] = labels\n", + "plot_dict[\"legend\"] = {\n", + " \"loc\": \"upper left\",\n", + " \"bbox_to_anchor\": (1.05, 1.1),\n", + " \"fontsize\": 8,\n", + " \"frameon\": False,\n", + "}\n", + "plot_dict[\"title\"] = {\"label\": \"AI Integration 
Status\", \"loc\": \"left\", \"fontsize\": 9}\n", + "plot_dict[\"colors\"] = colors\n", "\n", - "waffle_dict[310+n] = plot_dict\n", + "waffle_dict[310 + n] = plot_dict\n", "\n", "\n", "# gps status waffle\n", @@ -580,39 +636,43 @@ "values = []\n", "labels = []\n", "plot_dict = {}\n", - "colors =[]\n", + "colors = []\n", "\n", - "for col in gps_order: \n", - " value = gps_status[col]['value']\n", - " label = gps_status[col]['label']\n", - " color = gps_status[col]['color']\n", + "for col in gps_order:\n", + " value = gps_status[col][\"value\"]\n", + " label = gps_status[col][\"label\"]\n", + " color = gps_status[col][\"color\"]\n", " n = 1\n", "\n", - " percent = (value/gps_len)*100\n", + " percent = (value / gps_len) * 100\n", " percent_formatted = f\"{percent:.1f}\\\\%\"\n", - " formatted_label = label + r' $\\bf{{{}}}$'.format(percent_formatted.replace(' ', r'\\;'))\n", - " \n", - " values.append(value/scale)\n", + " formatted_label = label + r\" $\\bf{{{}}}$\".format(percent_formatted.replace(\" \", r\"\\;\"))\n", + "\n", + " values.append(value / scale)\n", " labels.append(formatted_label)\n", " colors.append(color)\n", "\n", - "plot_dict['values'] = values\n", - "plot_dict['labels'] = labels\n", - "plot_dict['legend'] = {'loc': 'upper left', 'bbox_to_anchor': (1.05, 1.1), 'fontsize': 8, 'frameon':False}\n", - "plot_dict['title'] = {'label': \"GPS Integration Status\", 'loc': 'left', 'fontsize': 9}\n", - "plot_dict['colors'] = colors\n", + "plot_dict[\"values\"] = values\n", + "plot_dict[\"labels\"] = labels\n", + "plot_dict[\"legend\"] = {\n", + " \"loc\": \"upper left\",\n", + " \"bbox_to_anchor\": (1.05, 1.1),\n", + " \"fontsize\": 8,\n", + " \"frameon\": False,\n", + "}\n", + "plot_dict[\"title\"] = {\"label\": \"GPS Integration Status\", \"loc\": \"left\", \"fontsize\": 9}\n", + "plot_dict[\"colors\"] = colors\n", "\n", "waffle_dict[313] = plot_dict\n", "\n", "\n", - "\n", "fig = plt.figure(\n", " FigureClass=Waffle,\n", " plots=waffle_dict,\n", - " rows=10, # Outside parameter\n", + " rows=10, # Outside parameter\n", " # cmap_name=\"\", # Change color with cmap\n", - " rounding_rule='ceil', # Change rounding rule, so value less than 1000 will still have at least 1 block\n", - " figsize=(8, 4.89)\n", + " rounding_rule=\"ceil\", # Change rounding rule, so value less than 1000 will still have at least 1 block\n", + " figsize=(8, 4.89),\n", ")\n", "# fig.supxlabel(f'1 block = {scale} instances, Each instance occurs at a 5 minute interval',\n", "# fontsize=10,\n", @@ -621,7 +681,7 @@ "# )\n", "# Display the chart\n", "\n", - "plt.savefig('ai_video_status.png', bbox_inches='tight')" + "plt.savefig(\"ai_video_status.png\", bbox_inches=\"tight\")" ] } ], diff --git a/notebooks/elog_analysis.ipynb b/notebooks/elog_analysis.ipynb index a3eabb3..679d14d 100644 --- a/notebooks/elog_analysis.ipynb +++ b/notebooks/elog_analysis.ipynb @@ -17,7 +17,6 @@ "metadata": {}, "outputs": [], "source": [ - " \n", "%autoreload 2" ] }, @@ -36,42 +35,33 @@ "metadata": {}, "outputs": [], "source": [ - "import awswrangler as wr\n", - "import pandas as pd\n", - "import sqlite3\n", - "import seaborn as sns\n", + "import warnings\n", + "\n", "import matplotlib.pyplot as plt\n", "import numpy as np\n", - "import scipy.stats as stats\n", - "from sklearn.metrics import mean_squared_error\n", - "\n", - "from sklearn.linear_model import LinearRegression\n", + "import pandas as pd\n", + "import seaborn as sns\n", + "from scipy import stats\n", "\n", - "import warnings\n", - "warnings.filterwarnings('ignore')\n", + 
"warnings.filterwarnings(\"ignore\")\n", "\n", "sns.set_theme()\n", "\n", - "import itertools \n", "# import matplotlib.gridspec as gridspec\n", "\n", - "from matplotlib.dates import DayLocator, HourLocator, DateFormatter, drange\n", - "\n", "import warnings\n", - "warnings.filterwarnings('ignore')\n", - "import json\n", + "\n", + "from matplotlib.dates import DayLocator\n", + "\n", + "warnings.filterwarnings(\"ignore\")\n", + "\n", "# from tsai.all import *\n", - "from IPython.display import display, Markdown\n", - "import json\n", "# from pandas.io.json import json_normalize\n", "\n", - "from matplotlib.patches import Patch\n", - "\n", "import matplotlib.dates as mdates\n", - "\n", + "from helper_functions.aggregations import *\n", "from helper_functions.data_readers import *\n", - "\n", - "from helper_functions.aggregations import *" + "from matplotlib.patches import Patch" ] }, { @@ -101,7 +91,6 @@ "metadata": {}, "outputs": [], "source": [ - "\n", "# import matplotlib.ticker as ticker" ] }, @@ -120,7 +109,29 @@ "metadata": {}, "outputs": [], "source": [ - "colors = sns.color_palette(['#184EAD','#648fff','#88ccee','#ae9ef7','#6844d5','#332288','#c52dac','#ef4341','#84164c','#cb6577','#ff6100','#90550f','#c78d1b','#ffb003','#ddcc77','#a2c662','#40a018','#117347','#43aa99'])" + "colors = sns.color_palette(\n", + " [\n", + " \"#184EAD\",\n", + " \"#648fff\",\n", + " \"#88ccee\",\n", + " \"#ae9ef7\",\n", + " \"#6844d5\",\n", + " \"#332288\",\n", + " \"#c52dac\",\n", + " \"#ef4341\",\n", + " \"#84164c\",\n", + " \"#cb6577\",\n", + " \"#ff6100\",\n", + " \"#90550f\",\n", + " \"#c78d1b\",\n", + " \"#ffb003\",\n", + " \"#ddcc77\",\n", + " \"#a2c662\",\n", + " \"#40a018\",\n", + " \"#117347\",\n", + " \"#43aa99\",\n", + " ]\n", + ")" ] }, { @@ -141,14 +152,36 @@ "outputs": [], "source": [ "%matplotlib inline\n", + "\n", + "\n", "def show_color_pallete():\n", - "# fig, ax = plt.subplots()\n", - " color_list = ['#184EAD','#648fff','#88ccee','#ae9ef7','#6844d5','#332288','#c52dac','#ef4341','#84164c','#cb6577','#ff6100','#90550f','#c78d1b','#ffb003','#ddcc77','#a2c662','#40a018','#117347','#43aa99']\n", + " # fig, ax = plt.subplots()\n", + " color_list = [\n", + " \"#184EAD\",\n", + " \"#648fff\",\n", + " \"#88ccee\",\n", + " \"#ae9ef7\",\n", + " \"#6844d5\",\n", + " \"#332288\",\n", + " \"#c52dac\",\n", + " \"#ef4341\",\n", + " \"#84164c\",\n", + " \"#cb6577\",\n", + " \"#ff6100\",\n", + " \"#90550f\",\n", + " \"#c78d1b\",\n", + " \"#ffb003\",\n", + " \"#ddcc77\",\n", + " \"#a2c662\",\n", + " \"#40a018\",\n", + " \"#117347\",\n", + " \"#43aa99\",\n", + " ]\n", " sns.palplot(color_list, size=2)\n", " ax = plt.gca()\n", " for i, name in enumerate(color_list):\n", - " label = f'[{i}] {name}'\n", - " ax.text(i, -.57, label,horizontalalignment='center', fontsize = 10) \n", + " label = f\"[{i}] {name}\"\n", + " ax.text(i, -0.57, label, horizontalalignment=\"center\", fontsize=10)\n", " plt.show()" ] }, @@ -178,22 +211,22 @@ ], "source": [ "# ST Patrick Trips\n", - "StPatrick1data = get_data(boat = 'stpatrick', trip_no = 0)\n", - "StPatrick2data = get_data(boat = 'stpatrick', trip_no = 1)\n", - "StPatrick3data = get_data(boat = 'stpatrick', trip_no = 2)\n", + "StPatrick1data = get_data(boat=\"stpatrick\", trip_no=0)\n", + "StPatrick2data = get_data(boat=\"stpatrick\", trip_no=1)\n", + "StPatrick3data = get_data(boat=\"stpatrick\", trip_no=2)\n", "\n", - "counts_StPatrick1 = StPatrick1data['all_counts']\n", - "counts_StPatrick2 = StPatrick2data['all_counts']\n", - "counts_StPatrick3 = 
StPatrick3data['all_counts']\n", + "counts_StPatrick1 = StPatrick1data[\"all_counts\"]\n", + "counts_StPatrick2 = StPatrick2data[\"all_counts\"]\n", + "counts_StPatrick3 = StPatrick3data[\"all_counts\"]\n", "\n", - "stpatrick1_elog = StPatrick1data['elogs']\n", - "stpatrick2_elog = StPatrick2data['elogs']\n", - "stpatrick3_elog = StPatrick3data['elogs']\n", + "stpatrick1_elog = StPatrick1data[\"elogs\"]\n", + "stpatrick2_elog = StPatrick2data[\"elogs\"]\n", + "stpatrick3_elog = StPatrick3data[\"elogs\"]\n", "\n", "\n", - "stpatrick1_bv_sets = StPatrick1data['bv_set_counts']\n", - "stpatrick2_bv_sets = StPatrick2data['bv_set_counts']\n", - "stpatrick3_bv_sets = StPatrick3data['bv_set_counts']" + "stpatrick1_bv_sets = StPatrick1data[\"bv_set_counts\"]\n", + "stpatrick2_bv_sets = StPatrick2data[\"bv_set_counts\"]\n", + "stpatrick3_bv_sets = StPatrick3data[\"bv_set_counts\"]" ] }, { @@ -214,27 +247,27 @@ ], "source": [ "# Brancol Trips\n", - "Brancol1data = get_data(boat = 'brancol', trip_no = 0)\n", - "Brancol2data = get_data(boat = 'brancol', trip_no = 1)\n", - "Brancol3data = get_data(boat = 'brancol', trip_no = 2)\n", + "Brancol1data = get_data(boat=\"brancol\", trip_no=0)\n", + "Brancol2data = get_data(boat=\"brancol\", trip_no=1)\n", + "Brancol3data = get_data(boat=\"brancol\", trip_no=2)\n", "\n", "\n", - "counts_Brancol1 = Brancol1data['all_counts']\n", - "counts_Brancol2 = Brancol2data['all_counts']\n", - "counts_Brancol3 = Brancol3data['all_counts']\n", + "counts_Brancol1 = Brancol1data[\"all_counts\"]\n", + "counts_Brancol2 = Brancol2data[\"all_counts\"]\n", + "counts_Brancol3 = Brancol3data[\"all_counts\"]\n", "\n", - "brancol1_elog = Brancol1data['elogs']\n", - "brancol2_elog = Brancol2data['elogs']\n", - "brancol3_elog = Brancol3data['elogs']\n", + "brancol1_elog = Brancol1data[\"elogs\"]\n", + "brancol2_elog = Brancol2data[\"elogs\"]\n", + "brancol3_elog = Brancol3data[\"elogs\"]\n", "\n", - "brancol1_bv_sets = Brancol1data['bv_set_counts']\n", - "brancol2_bv_sets = Brancol2data['bv_set_counts']\n", - "brancol3_bv_sets = Brancol3data['bv_set_counts']\n", + "brancol1_bv_sets = Brancol1data[\"bv_set_counts\"]\n", + "brancol2_bv_sets = Brancol2data[\"bv_set_counts\"]\n", + "brancol3_bv_sets = Brancol3data[\"bv_set_counts\"]\n", "\n", "\n", - "brancol1trip = Brancol1data['trip_info']\n", - "brancol2trip = Brancol2data['trip_info']\n", - "brancol3trip = Brancol3data['trip_info']" + "brancol1trip = Brancol1data[\"trip_info\"]\n", + "brancol2trip = Brancol2data[\"trip_info\"]\n", + "brancol3trip = Brancol3data[\"trip_info\"]" ] }, { @@ -307,10 +340,10 @@ "# haul_start_col = 'haul_start_datetime'\n", "# haul_end_col = 'haul_end_datetime'\n", "# y_val = 0\n", - " \n", - "# colors = color_dict[source] \n", - " \n", - " \n", + "\n", + "# colors = color_dict[source]\n", + "\n", + "\n", "# #plot_hauling\n", "# plot_hlines(ax, df,y_val, haul_start_col, haul_end_col, 12, colors['haul'], 'haul')\n", "\n", @@ -327,9 +360,9 @@ "# text = f'count: {row[count_col]}'\n", "# x_value = row[x_col]\n", "# ax.text( x_value,y_value , text, fontsize=10, horizontalalignment='right', bbox=props)\n", - " \n", - " \n", - "# # ax.text(.02, .9, f'r2={rvalue ** 2:.2f}, p={pvalue:.2g}, rmse={rmse:.2f}', transform=ax.transAxes) " + "\n", + "\n", + "# # ax.text(.02, .9, f'r2={rvalue ** 2:.2f}, p={pvalue:.2g}, rmse={rmse:.2f}', transform=ax.transAxes)" ] }, { @@ -340,35 +373,34 @@ "outputs": [], "source": [ "def plot_set_bars(ax, df, source, color_dict):\n", - " if source == 'elog':\n", - " set_start_col = 
'systemstartsetdatetime'\n", - " set_end_col = 'systemendsetdatetime'\n", - " haul_start_col = 'systemstarthauldatetime'\n", - " haul_end_col = 'systemendhauldatetime'\n", + " if source == \"elog\":\n", + " set_start_col = \"systemstartsetdatetime\"\n", + " set_end_col = \"systemendsetdatetime\"\n", + " haul_start_col = \"systemstarthauldatetime\"\n", + " haul_end_col = \"systemendhauldatetime\"\n", " y_val = 1.7\n", - " \n", - " elif source == 'bv':\n", - " set_start_col = 'set_start_datetime'\n", - " set_end_col = 'set_end_datetime'\n", - " haul_start_col = 'haul_start_datetime'\n", - " haul_end_col = 'haul_end_datetime'\n", - " y_val = .7\n", - "\n", - " df['set_duration'] = df[set_end_col] - df[set_start_col]\n", - " df['haul_duration'] = df[haul_end_col] - df[haul_start_col]\n", - " df['mid_duration'] = df[haul_start_col] - df[set_end_col]\n", - " \n", - " set_x = list(zip(df[set_start_col], df['set_duration']))\n", - " haul_x = list(zip(df[haul_start_col], df['haul_duration']))\n", - " mid_x = list(zip(df[set_end_col], df['mid_duration']))\n", - " \n", - " y = (y_val, .6)\n", + "\n", + " elif source == \"bv\":\n", + " set_start_col = \"set_start_datetime\"\n", + " set_end_col = \"set_end_datetime\"\n", + " haul_start_col = \"haul_start_datetime\"\n", + " haul_end_col = \"haul_end_datetime\"\n", + " y_val = 0.7\n", + "\n", + " df[\"set_duration\"] = df[set_end_col] - df[set_start_col]\n", + " df[\"haul_duration\"] = df[haul_end_col] - df[haul_start_col]\n", + " df[\"mid_duration\"] = df[haul_start_col] - df[set_end_col]\n", + "\n", + " set_x = list(zip(df[set_start_col], df[\"set_duration\"], strict=False))\n", + " haul_x = list(zip(df[haul_start_col], df[\"haul_duration\"], strict=False))\n", + " mid_x = list(zip(df[set_end_col], df[\"mid_duration\"], strict=False))\n", + "\n", + " y = (y_val, 0.6)\n", "\n", " colors = color_dict[source]\n", - " ax.broken_barh(mid_x, y, facecolors = colors['mid'], edgecolor = 'face')\n", - " ax.broken_barh(haul_x, y, facecolors = colors['haul'], edgecolor = 'face')\n", - " ax.broken_barh(set_x, y, facecolors = colors['set'], edgecolor = 'face')\n", - " " + " ax.broken_barh(mid_x, y, facecolors=colors[\"mid\"], edgecolor=\"face\")\n", + " ax.broken_barh(haul_x, y, facecolors=colors[\"haul\"], edgecolor=\"face\")\n", + " ax.broken_barh(set_x, y, facecolors=colors[\"set\"], edgecolor=\"face\")\n" ] }, { @@ -378,48 +410,42 @@ "metadata": {}, "outputs": [], "source": [ - "def plot_elog_comparisons(ax, dfElog, dfBV, title, legend = True, annotate_counts = False, display_axis= True):\n", - "\n", + "def plot_elog_comparisons(\n", + " ax, dfElog, dfBV, title, legend=True, annotate_counts=False, display_axis=True\n", + "):\n", " # ax[0].autofmt_xdate()\n", "\n", - " \n", - " ax.set_yticks([1,2],('actual','elogs'))\n", + " ax.set_yticks([1, 2], (\"actual\", \"elogs\"))\n", " # ax.set_yticks([0,1,2],('bv','elogs',' '))\n", - " fig.suptitle(titles['main'], fontsize = 20)\n", - " \n", + " fig.suptitle(titles[\"main\"], fontsize=20)\n", + "\n", " # df1 = brancol1_elog\n", " # df1sets =brancol1_bv_sets\n", "\n", - " \n", - "\n", " if annotate_counts:\n", - " dfElog['totalcount'] = dfElog['bycatchcount'].astype(int) + dfElog['catchcount'].astype(int)\n", - " dfBV['retained_count'] = dfBV['retained_count'].astype('Int64')\n", - " annotate_counts(ax, dfElog, 'totalcount', 'systemstarthauldatetime', 1.2)\n", - " annotate_counts(ax, dfBV, 'retained_count', 'haul_start_datetime', 0.2)\n", - " \n", + " dfElog[\"totalcount\"] = dfElog[\"bycatchcount\"].astype(int) + 
dfElog[\"catchcount\"].astype(int)\n", + " dfBV[\"retained_count\"] = dfBV[\"retained_count\"].astype(\"Int64\")\n", + " annotate_counts(ax, dfElog, \"totalcount\", \"systemstarthauldatetime\", 1.2)\n", + " annotate_counts(ax, dfBV, \"retained_count\", \"haul_start_datetime\", 0.2)\n", "\n", - " plot_set_bars(ax, dfElog, 'elog', color_dict)\n", - " plot_set_bars(ax, dfBV, 'bv', color_dict)\n", + " plot_set_bars(ax, dfElog, \"elog\", color_dict)\n", + " plot_set_bars(ax, dfBV, \"bv\", color_dict)\n", "\n", - " \n", + " ax.set_title(title, x=0.1, y=1, fontsize=9)\n", "\n", - " ax.set_title(title,x = .1, y = 1, fontsize = 9)\n", - " \n", " # ax.autoscale()\n", " # ax[0].set_ylim(-.5,1.5)\n", " # ax[0].tick_params(axis='x', labelrotation=45)\n", "\n", " if legend:\n", " legend_elements = []\n", - " for label, color in color_dict['elog'].items():\n", - " \n", - " legend_elements.append(Patch(facecolor=color, edgecolor=color,\n", - " label=label))\n", - " ax.legend(handles = legend_elements, loc='center', bbox_to_anchor=(1.08, 1.1), ncol = 1, fontsize = 12)\n", + " for label, color in color_dict[\"elog\"].items():\n", + " legend_elements.append(Patch(facecolor=color, edgecolor=color, label=label))\n", + " ax.legend(\n", + " handles=legend_elements, loc=\"center\", bbox_to_anchor=(1.08, 1.1), ncol=1, fontsize=12\n", + " )\n", "\n", - " \n", - " #use consise date formater\n", + " # use consise date formater\n", "\n", " if display_axis:\n", " locator = DayLocator()\n", @@ -446,8 +472,12 @@ "outputs": [], "source": [ "color_dict = {\n", - " 'bv': {'set':'#40a018', 'mid':'#a2c662', 'haul':'#117347', },\n", - " 'elog': {'set':'#40a018', 'mid':'#a2c662', 'haul':'#117347'},\n", + " \"bv\": {\n", + " \"set\": \"#40a018\",\n", + " \"mid\": \"#a2c662\",\n", + " \"haul\": \"#117347\",\n", + " },\n", + " \"elog\": {\"set\": \"#40a018\", \"mid\": \"#a2c662\", \"haul\": \"#117347\"},\n", " # 'elog':{'set':'#648fff', 'haul':'#184EAD', 'mid':'#88ccee'}\n", "}" ] @@ -471,24 +501,24 @@ ], "source": [ "df1 = brancol1_elog\n", - "df1sets =brancol1_bv_sets\n", - "trip1 = Brancol1data['trip_info']\n", + "df1sets = brancol1_bv_sets\n", + "trip1 = Brancol1data[\"trip_info\"]\n", "\n", "df2 = brancol2_elog\n", "df2sets = brancol2_bv_sets\n", - "trip2 = Brancol2data['trip_info']\n", + "trip2 = Brancol2data[\"trip_info\"]\n", "\n", "trip1title = f'Brancol Trip 1: {trip1['trip_start_date']} to {trip1['trip_end_date']}'\n", "trip2title = f'Brancol Trip 2: {trip2['trip_start_date']} to {trip2['trip_end_date']}'\n", "\n", - "titles = {'main':'', 'plot1':trip1title, 'plot2':trip2title}\n", + "titles = {\"main\": \"\", \"plot1\": trip1title, \"plot2\": trip2title}\n", "\n", - "fig, ax = plt.subplots(2,1, figsize=(10,3))\n", + "fig, ax = plt.subplots(2, 1, figsize=(10, 3))\n", "# fig.tight_layout(pad=4.0)\n", - "plot_elog_comparisons(ax[0], df1, df1sets, trip1title, legend = False)\n", - "plot_elog_comparisons(ax[1], df2, df2sets, trip2title, legend = True)\n", - "plt.subplots_adjust(hspace=.7)\n", - "plt.savefig('elog_comparisons_brancol.png', bbox_inches='tight')" + "plot_elog_comparisons(ax[0], df1, df1sets, trip1title, legend=False)\n", + "plot_elog_comparisons(ax[1], df2, df2sets, trip2title, legend=True)\n", + "plt.subplots_adjust(hspace=0.7)\n", + "plt.savefig(\"elog_comparisons_brancol.png\", bbox_inches=\"tight\")" ] }, { @@ -510,24 +540,24 @@ ], "source": [ "df1 = stpatrick1_elog\n", - "df1sets =stpatrick1_bv_sets\n", - "trip1 = StPatrick1data['trip_info']\n", + "df1sets = stpatrick1_bv_sets\n", + "trip1 = 
StPatrick1data[\"trip_info\"]\n", "\n", "df2 = stpatrick2_elog\n", "df2sets = stpatrick2_bv_sets\n", - "trip2 = StPatrick2data['trip_info']\n", + "trip2 = StPatrick2data[\"trip_info\"]\n", "\n", "trip1title = f'St. Patrick Trip 1: {trip1['trip_start_date']} to {trip1['trip_end_date']}'\n", "trip2title = f'St. Patrick Trip 2: {trip2['trip_start_date']} to {trip2['trip_end_date']}'\n", "\n", - "titles = {'main':'', 'plot1':trip1title, 'plot2':trip2title}\n", + "titles = {\"main\": \"\", \"plot1\": trip1title, \"plot2\": trip2title}\n", "\n", - "fig, ax = plt.subplots(2,1, figsize=(10,3))\n", + "fig, ax = plt.subplots(2, 1, figsize=(10, 3))\n", "# fig.tight_layout(pad=4.0)\n", - "plot_elog_comparisons(ax[0], df1, df1sets, trip1title, legend = False)\n", - "plot_elog_comparisons(ax[1], df2, df2sets, trip2title, legend = True)\n", - "plt.subplots_adjust(hspace=.7)\n", - "plt.savefig('elog_comparisons_stpatrick.png',bbox_inches='tight')" + "plot_elog_comparisons(ax[0], df1, df1sets, trip1title, legend=False)\n", + "plot_elog_comparisons(ax[1], df2, df2sets, trip2title, legend=True)\n", + "plt.subplots_adjust(hspace=0.7)\n", + "plt.savefig(\"elog_comparisons_stpatrick.png\", bbox_inches=\"tight\")" ] }, { @@ -547,11 +577,11 @@ "metadata": {}, "outputs": [], "source": [ - "stpatrick2_elog.sort_values(by = 'systemstartsetdatetime', inplace = True)\n", + "stpatrick2_elog.sort_values(by=\"systemstartsetdatetime\", inplace=True)\n", "\n", - "missing_set = stpatrick2_elog.sort_values(by = 'systemstartsetdatetime').iloc[[0]]\n", + "missing_set = stpatrick2_elog.sort_values(by=\"systemstartsetdatetime\").iloc[[0]]\n", "\n", - "stpatrick2_elog.drop(stpatrick2_elog.index[0], axis = 0, inplace = True)\n", + "stpatrick2_elog.drop(stpatrick2_elog.index[0], axis=0, inplace=True)\n", "\n", "stpatrick1_elog = pd.concat([stpatrick1_elog, missing_set])" ] @@ -575,23 +605,23 @@ ], "source": [ "df1 = brancol3_elog\n", - "df1sets =brancol3_bv_sets\n", - "trip1 = Brancol3data['trip_info']\n", + "df1sets = brancol3_bv_sets\n", + "trip1 = Brancol3data[\"trip_info\"]\n", "\n", "df2 = stpatrick3_elog\n", "df2sets = stpatrick3_bv_sets\n", - "trip2 = StPatrick3data['trip_info']\n", + "trip2 = StPatrick3data[\"trip_info\"]\n", "\n", "trip1title = f'St. Patrick Trip 1: {trip1['trip_start_date']} to {trip1['trip_end_date']}'\n", "trip2title = f'St. 
Patrick Trip 2: {trip2['trip_start_date']} to {trip2['trip_end_date']}'\n", "\n", - "titles = {'main':'', 'plot1':trip1title, 'plot2':trip2title}\n", + "titles = {\"main\": \"\", \"plot1\": trip1title, \"plot2\": trip2title}\n", "\n", - "fig, ax = plt.subplots(2,1, figsize=(8,4))\n", + "fig, ax = plt.subplots(2, 1, figsize=(8, 4))\n", "fig.tight_layout(pad=4.0)\n", - "plot_elog_comparisons(ax[0], df1, df1sets, trip1title, legend = False)\n", - "plot_elog_comparisons(ax[1], df2, df2sets, trip2title, legend = True)\n", - "plt.subplots_adjust(wspace=0, hspace=.1)" + "plot_elog_comparisons(ax[0], df1, df1sets, trip1title, legend=False)\n", + "plot_elog_comparisons(ax[1], df2, df2sets, trip2title, legend=True)\n", + "plt.subplots_adjust(wspace=0, hspace=0.1)" ] }, { @@ -604,9 +634,11 @@ "# a typo discovered in BV notes, a mix-up of AM and PM, changing to the intended value\n", "brancol2_bv_sets_adjusted = brancol2_bv_sets.copy()\n", "\n", - "brancol2_bv_sets_adjusted.loc[brancol2_bv_sets_adjusted['set_number'] == '2','haul_end_datetime'] = brancol2_bv_sets_adjusted.loc[brancol2_bv_sets_adjusted['set_number'] == '2']['haul_end_datetime'] - pd.to_timedelta('12 hours')\n", - "\n", - "\n" + "brancol2_bv_sets_adjusted.loc[\n", + " brancol2_bv_sets_adjusted[\"set_number\"] == \"2\", \"haul_end_datetime\"\n", + "] = brancol2_bv_sets_adjusted.loc[brancol2_bv_sets_adjusted[\"set_number\"] == \"2\"][\n", + " \"haul_end_datetime\"\n", + "] - pd.to_timedelta(\"12 hours\")\n" ] }, { @@ -645,15 +677,20 @@ "outputs": [], "source": [ "def get_elog_bv_deltas(dfMerged):\n", - " dfMerged['set_start_delta_minutes'] = (dfMerged['systemstartsetdatetime'] - dfMerged['set_start_datetime']).dt.total_seconds()/60\n", - " dfMerged['set_end_delta_minutes'] = (dfMerged['systemendsetdatetime'] - dfMerged['set_end_datetime']).dt.total_seconds()/60\n", - " dfMerged['haul_start_delta_minutes'] = (dfMerged['systemstarthauldatetime'] - dfMerged['haul_start_datetime']).dt.total_seconds()/60\n", - " dfMerged['haul_end_delta_minutes'] = (dfMerged['systemendhauldatetime'] - dfMerged['haul_end_datetime']).dt.total_seconds()/60\n", - " dfMerged['catch_count_delta'] = dfMerged['elog_total_count'] - dfMerged['bv_retained_count'] \n", + " dfMerged[\"set_start_delta_minutes\"] = (\n", + " dfMerged[\"systemstartsetdatetime\"] - dfMerged[\"set_start_datetime\"]\n", + " ).dt.total_seconds() / 60\n", + " dfMerged[\"set_end_delta_minutes\"] = (\n", + " dfMerged[\"systemendsetdatetime\"] - dfMerged[\"set_end_datetime\"]\n", + " ).dt.total_seconds() / 60\n", + " dfMerged[\"haul_start_delta_minutes\"] = (\n", + " dfMerged[\"systemstarthauldatetime\"] - dfMerged[\"haul_start_datetime\"]\n", + " ).dt.total_seconds() / 60\n", + " dfMerged[\"haul_end_delta_minutes\"] = (\n", + " dfMerged[\"systemendhauldatetime\"] - dfMerged[\"haul_end_datetime\"]\n", + " ).dt.total_seconds() / 60\n", + " dfMerged[\"catch_count_delta\"] = dfMerged[\"elog_total_count\"] - dfMerged[\"bv_retained_count\"]\n", "\n", - "\n", - "\n", - " \n", " # dfMerged['set_start_delta'].\n" ] }, { @@ -664,30 +701,67 @@ "metadata": {}, "outputs": [], "source": [ - "def merge_bv_elog_sets(dfElog, dfBV, delta = '1 hour'):\n", - " dfElog.sort_values(by = 'systemstartsetdatetime', inplace = True)\n", - " dfBV.sort_values(by = 'set_start_datetime', inplace = True)\n", - " elog_columns = ['id','bycatchcount', 'catchcount','systemstartsetdatetime','systemendsetdatetime','systemstarthauldatetime','systemendhauldatetime']\n", - " bv_columns = ['set_number','set_start_datetime','set_end_datetime', 
'haul_start_datetime','haul_end_datetime', 'retained_count']\n", - " dfMerged = pd.merge_asof(dfElog[elog_columns], dfBV[bv_columns], left_on = 'systemstartsetdatetime', right_on = 'set_start_datetime', tolerance=pd.Timedelta(delta), direction = 'nearest')\n", - "\n", - " col_order = ['id','set_number', \n", - " 'bycatchcount', 'catchcount', \n", - " 'retained_count',\n", - " 'systemstartsetdatetime','set_start_datetime',\n", - " 'systemendsetdatetime', 'set_end_datetime',\n", - " 'systemstarthauldatetime', 'haul_start_datetime',\n", - " 'systemendhauldatetime', 'haul_end_datetime']\n", + "def merge_bv_elog_sets(dfElog, dfBV, delta=\"1 hour\"):\n", + " dfElog.sort_values(by=\"systemstartsetdatetime\", inplace=True)\n", + " dfBV.sort_values(by=\"set_start_datetime\", inplace=True)\n", + " elog_columns = [\n", + " \"id\",\n", + " \"bycatchcount\",\n", + " \"catchcount\",\n", + " \"systemstartsetdatetime\",\n", + " \"systemendsetdatetime\",\n", + " \"systemstarthauldatetime\",\n", + " \"systemendhauldatetime\",\n", + " ]\n", + " bv_columns = [\n", + " \"set_number\",\n", + " \"set_start_datetime\",\n", + " \"set_end_datetime\",\n", + " \"haul_start_datetime\",\n", + " \"haul_end_datetime\",\n", + " \"retained_count\",\n", + " ]\n", + " dfMerged = pd.merge_asof(\n", + " dfElog[elog_columns],\n", + " dfBV[bv_columns],\n", + " left_on=\"systemstartsetdatetime\",\n", + " right_on=\"set_start_datetime\",\n", + " tolerance=pd.Timedelta(delta),\n", + " direction=\"nearest\",\n", + " )\n", + "\n", + " col_order = [\n", + " \"id\",\n", + " \"set_number\",\n", + " \"bycatchcount\",\n", + " \"catchcount\",\n", + " \"retained_count\",\n", + " \"systemstartsetdatetime\",\n", + " \"set_start_datetime\",\n", + " \"systemendsetdatetime\",\n", + " \"set_end_datetime\",\n", + " \"systemstarthauldatetime\",\n", + " \"haul_start_datetime\",\n", + " \"systemendhauldatetime\",\n", + " \"haul_end_datetime\",\n", + " ]\n", " dfMerged = dfMerged[col_order]\n", - " \n", - " \n", "\n", - " dfMerged.rename(columns = {'retained_count':'bv_retained_count','catchcount':'elog_target_count','bycatchcount':'elog_bycatch_count'}, inplace = True)\n", + " dfMerged.rename(\n", + " columns={\n", + " \"retained_count\": \"bv_retained_count\",\n", + " \"catchcount\": \"elog_target_count\",\n", + " \"bycatchcount\": \"elog_bycatch_count\",\n", + " },\n", + " inplace=True,\n", + " )\n", "\n", - " dfMerged['elog_total_count'] = dfMerged['elog_target_count'].astype(int) + dfMerged['elog_bycatch_count'].astype(int)\n", + " dfMerged[\"elog_total_count\"] = dfMerged[\"elog_target_count\"].astype(int) + dfMerged[\n", + " \"elog_bycatch_count\"\n", + " ].astype(int)\n", "\n", " get_elog_bv_deltas(dfMerged)\n", - " \n", + "\n", " return dfMerged" ] }, @@ -699,7 +773,12 @@ "outputs": [], "source": [ "def get_combined_delta_means(dfs):\n", - " delta_cols = ['set_start_delta_minutes','set_end_delta_minutes','haul_start_delta_minutes','haul_end_delta_minutes']\n", + " delta_cols = [\n", + " \"set_start_delta_minutes\",\n", + " \"set_end_delta_minutes\",\n", + " \"haul_start_delta_minutes\",\n", + " \"haul_end_delta_minutes\",\n", + " ]\n", " for col in delta_cols:\n", " combined_list = []\n", " for df in dfs:\n", @@ -708,16 +787,17 @@ " # print(combined_list)\n", " avg = np.mean(combined_list)\n", " # print(stats.describe(combined_list))\n", - " \n", - " \n", - " print(f'{col} Mean: {avg} min | Min: {np.min(combined_list)} | Max: {np.max(combined_list)}')\n", - " q = [10,25,50,75,80,90]\n", - " percentile_str = ''\n", - " percentiles = 
np.percentile(combined_list, q = q)\n", - " for p, percentile in zip(q, percentiles):\n", - " percentile_str += f'\\033[1m{p}%:\\033[0m {percentile} '\n", - " print(percentile_str) \n", - " print(' ')\n", + "\n", + " print(\n", + " f\"{col} Mean: {avg} min | Min: {np.min(combined_list)} | Max: {np.max(combined_list)}\"\n", + " )\n", + " q = [10, 25, 50, 75, 80, 90]\n", + " percentile_str = \"\"\n", + " percentiles = np.percentile(combined_list, q=q)\n", + " for p, percentile in zip(q, percentiles, strict=False):\n", + " percentile_str += f\"\\033[1m{p}%:\\033[0m {percentile} \"\n", + " print(percentile_str)\n", + " print(\" \")\n", " # print(f'number of logged sets: {len(combined_list)}')" ] }, @@ -729,10 +809,15 @@ "outputs": [], "source": [ "def get_delta_means(dfMerged):\n", - " delta_cols = ['set_start_delta_minutes','set_end_delta_minutes','haul_start_delta_minutes','haul_end_delta_minutes']\n", + " delta_cols = [\n", + " \"set_start_delta_minutes\",\n", + " \"set_end_delta_minutes\",\n", + " \"haul_start_delta_minutes\",\n", + " \"haul_end_delta_minutes\",\n", + " ]\n", " for col in delta_cols:\n", " avg = np.mean(dfMerged[col].abs())\n", - " print(f'{col}: {avg} min')" + " print(f\"{col}: {avg} min\")" ] }, { @@ -744,7 +829,12 @@ "source": [ "def get_combined_deltas(dfs):\n", " deltas = {}\n", - " delta_cols = ['set_start_delta_minutes','set_end_delta_minutes','haul_start_delta_minutes','haul_end_delta_minutes']\n", + " delta_cols = [\n", + " \"set_start_delta_minutes\",\n", + " \"set_end_delta_minutes\",\n", + " \"haul_start_delta_minutes\",\n", + " \"haul_end_delta_minutes\",\n", + " ]\n", " for col in delta_cols:\n", " combined_list = []\n", " for df in dfs:\n", @@ -765,7 +855,7 @@ "outputs": [], "source": [ "def get_combined_catch_deltas(dfs):\n", - " col = 'catch_count_delta'\n", + " col = \"catch_count_delta\"\n", " delta_list = []\n", " for df in dfs:\n", " df_list = df[col].to_list()\n", @@ -789,12 +879,12 @@ "outputs": [], "source": [ "dfBrancol1_merged = merge_bv_elog_sets(brancol1_elog, brancol1_bv_sets)\n", - "dfBrancol2_merged = merge_bv_elog_sets(brancol2_elog, brancol2_bv_sets_adjusted, '2 hours')\n", - "dfBrancol3_merged = merge_bv_elog_sets(brancol3_elog, brancol3_bv_sets, '2 hours')\n", + "dfBrancol2_merged = merge_bv_elog_sets(brancol2_elog, brancol2_bv_sets_adjusted, \"2 hours\")\n", + "dfBrancol3_merged = merge_bv_elog_sets(brancol3_elog, brancol3_bv_sets, \"2 hours\")\n", "\n", - "dfStPatrick1_merged = merge_bv_elog_sets(stpatrick1_elog, stpatrick1_bv_sets, '2 hours')\n", - "dfStPatrick2_merged = merge_bv_elog_sets(stpatrick2_elog, stpatrick2_bv_sets, '5 hours')\n", - "dfStPatrick3_merged = merge_bv_elog_sets(stpatrick3_elog, stpatrick3_bv_sets, '2 hours')" + "dfStPatrick1_merged = merge_bv_elog_sets(stpatrick1_elog, stpatrick1_bv_sets, \"2 hours\")\n", + "dfStPatrick2_merged = merge_bv_elog_sets(stpatrick2_elog, stpatrick2_bv_sets, \"5 hours\")\n", + "dfStPatrick3_merged = merge_bv_elog_sets(stpatrick3_elog, stpatrick3_bv_sets, \"2 hours\")" ] }, { @@ -812,7 +902,14 @@ "metadata": {}, "outputs": [], "source": [ - "all_merged_dfs = [dfBrancol1_merged, dfBrancol2_merged,dfBrancol3_merged, dfStPatrick1_merged, dfStPatrick2_merged, dfStPatrick3_merged]" + "all_merged_dfs = [\n", + " dfBrancol1_merged,\n", + " dfBrancol2_merged,\n", + " dfBrancol3_merged,\n", + " dfStPatrick1_merged,\n", + " dfStPatrick2_merged,\n", + " dfStPatrick3_merged,\n", + "]" ] }, { @@ -841,8 +938,16 @@ } ], "source": [ - "\n", - "get_combined_delta_means([dfBrancol1_merged, 
dfBrancol2_merged,dfBrancol3_merged, dfStPatrick1_merged, dfStPatrick2_merged, dfStPatrick3_merged])" + "get_combined_delta_means(\n", + " [\n", + " dfBrancol1_merged,\n", + " dfBrancol2_merged,\n", + " dfBrancol3_merged,\n", + " dfStPatrick1_merged,\n", + " dfStPatrick2_merged,\n", + " dfStPatrick3_merged,\n", + " ]\n", + ")" ] }, { @@ -864,11 +969,17 @@ "source": [ "df = pd.DataFrame()\n", "for col, values in deltas.items():\n", - " df_col = pd.DataFrame({'column': np.repeat(col, len(values)), 'value': values})\n", + " df_col = pd.DataFrame({\"column\": np.repeat(col, len(values)), \"value\": values})\n", " df = pd.concat([df, df_col])\n", "\n", - "label_dict = {'set_start_delta_minutes': 'Set Start', 'set_end_delta_minutes':'Set End', 'haul_start_delta_minutes':'Haul Start', 'haul_end_delta_minutes':'Haul End','catch_count_delta': 'Catch Count' }\n", - "df['label'] = df['column'].map(label_dict)" + "label_dict = {\n", + " \"set_start_delta_minutes\": \"Set Start\",\n", + " \"set_end_delta_minutes\": \"Set End\",\n", + " \"haul_start_delta_minutes\": \"Haul Start\",\n", + " \"haul_end_delta_minutes\": \"Haul End\",\n", + " \"catch_count_delta\": \"Catch Count\",\n", + "}\n", + "df[\"label\"] = df[\"column\"].map(label_dict)" ] }, { @@ -892,38 +1003,36 @@ "# Create the plot\n", "sns.set_theme(style=\"ticks\")\n", "fig, ax = plt.subplots(figsize=(8, 2.5)) # Increased the height to 4 for better spacing\n", - "sns.boxplot(y='label', x='value', data=df, hue='label', ax=ax, palette=colors4, width=0.5)\n", + "sns.boxplot(y=\"label\", x=\"value\", data=df, hue=\"label\", ax=ax, palette=colors4, width=0.5)\n", "ax.yaxis.label.set_visible(False)\n", "ax.set_xlim(-1, 40)\n", - "ax.set_xlabel('Time Delta (Absolute Value in Minutes)', size = 11)\n", + "ax.set_xlabel(\"Time Delta (Absolute Value in Minutes)\", size=11)\n", "\n", "# Calculate medians\n", - "medians = df.groupby('label')['value'].median()\n", + "medians = df.groupby(\"label\")[\"value\"].median()\n", "\n", "# Annotate medians above the boxplots with arrows pointing to the median line\n", "for label in medians.index:\n", " median_value = medians[label]\n", - " y_pos = df['label'].unique().tolist().index(label)\n", + " y_pos = df[\"label\"].unique().tolist().index(label)\n", " ax.annotate(\n", - " f'{median_value:.2f}', \n", - " xy=(median_value, y_pos), \n", + " f\"{median_value:.2f}\",\n", + " xy=(median_value, y_pos),\n", " xytext=(median_value, y_pos - 0.25), # Position the text above the boxplot\n", - " va='bottom', \n", - " ha='center', \n", - " color='black', \n", - " fontsize=8, \n", + " va=\"bottom\",\n", + " ha=\"center\",\n", + " color=\"black\",\n", + " fontsize=8,\n", " # fontweight='bold',\n", " # arrowprops=dict(facecolor='black', edgecolor='black', shrink=0.05, headwidth=5, headlength=5, width=.7), # Arrow properties\n", " # bbox=dict(facecolor='white', edgecolor='none', boxstyle='round,pad=0.3')\n", " )\n", "ax.xaxis.grid(True)\n", - "ax.tick_params(axis='x', labelsize=10) # Change x ticks font size to 12\n", - "ax.tick_params(axis='y', labelsize=10) # Change y ticks font size to 12\n", + "ax.tick_params(axis=\"x\", labelsize=10) # Change x ticks font size to 12\n", + "ax.tick_params(axis=\"y\", labelsize=10) # Change y ticks font size to 12\n", "# ax.set(ylabel=\"\")\n", - "sns.despine(trim=True, \n", - " left=True\n", - " )\n", - "plt.savefig('elog_timedeltas.png', bbox_inches = 'tight', dpi = 150)\n", + "sns.despine(trim=True, left=True)\n", + "plt.savefig(\"elog_timedeltas.png\", bbox_inches=\"tight\", dpi=150)\n", 
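# The set matching above rides on pd.merge_asof: each elog set is paired with
# the BV set whose start time is nearest, within a per-trip tolerance. A
# minimal self-contained sketch of that pattern follows; the toy timestamps
# and the 1-hour tolerance are illustrative assumptions, not trip values.
import pandas as pd

elog = pd.DataFrame(
    {"systemstartsetdatetime": pd.to_datetime(["2023-01-01 06:05", "2023-01-02 05:40"])}
)
bv = pd.DataFrame(
    {"set_start_datetime": pd.to_datetime(["2023-01-01 06:00", "2023-01-02 06:00"])}
)

# Both sides must be sorted on their join keys. An elog row with no BV set
# inside the tolerance is kept but receives NaT for the BV columns.
merged = pd.merge_asof(
    elog.sort_values("systemstartsetdatetime"),
    bv.sort_values("set_start_datetime"),
    left_on="systemstartsetdatetime",
    right_on="set_start_datetime",
    tolerance=pd.Timedelta("1 hour"),
    direction="nearest",
)
print(merged)  # offsets of 5 and 20 minutes both fall inside the tolerance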
"plt.tight_layout()\n", "plt.show()" ] @@ -955,7 +1064,7 @@ "metadata": {}, "outputs": [], "source": [ - "catch_deltas_inverted = [x*-1 for x in catch_deltas]" + "catch_deltas_inverted = [x * -1 for x in catch_deltas]" ] }, { @@ -977,14 +1086,16 @@ "source": [ "avg = np.mean(catch_deltas)\n", "\n", - "print(f'Mean: {avg} min | Median: {np.median(catch_deltas)} | Min: {np.min(catch_deltas)} | Max: {np.max(catch_deltas)}')\n", - "q = [10,25,50,75,80,90]\n", - "percentile_str = ''\n", - "percentiles = np.percentile(catch_deltas, q = q)\n", - "for p, percentile in zip(q, percentiles):\n", - " percentile_str += f'\\033[1m{p}%:\\033[0m {percentile} '\n", - "print(percentile_str) \n", - "print(' ')" + "print(\n", + " f\"Mean: {avg} min | Median: {np.median(catch_deltas)} | Min: {np.min(catch_deltas)} | Max: {np.max(catch_deltas)}\"\n", + ")\n", + "q = [10, 25, 50, 75, 80, 90]\n", + "percentile_str = \"\"\n", + "percentiles = np.percentile(catch_deltas, q=q)\n", + "for p, percentile in zip(q, percentiles, strict=False):\n", + " percentile_str += f\"\\033[1m{p}%:\\033[0m {percentile} \"\n", + "print(percentile_str)\n", + "print(\" \")" ] }, { @@ -1010,17 +1121,17 @@ "\n", "colors4 = [colors[15], colors[16], colors[17], colors[18]]\n", "\n", - "sns.set_style(\"whitegrid\", {'axes.grid' : False})\n", - "plt.rc('xtick',labelsize=8)\n", - "plt.rc('ytick',labelsize=8)\n", - "plt.rc('axes', labelsize = 8)\n", + "sns.set_style(\"whitegrid\", {\"axes.grid\": False})\n", + "plt.rc(\"xtick\", labelsize=8)\n", + "plt.rc(\"ytick\", labelsize=8)\n", + "plt.rc(\"axes\", labelsize=8)\n", "\n", "sns.set_theme(style=\"ticks\")\n", - "fig, ax = plt.subplots(figsize=(3.5, .8)) # Increased the height to 4 for better spacing\n", - "sns.boxplot( x=catch_deltas, ax=ax, palette=colors4)\n", + "fig, ax = plt.subplots(figsize=(3.5, 0.8)) # Increased the height to 4 for better spacing\n", + "sns.boxplot(x=catch_deltas, ax=ax, palette=colors4)\n", "ax.yaxis.label.set_visible(False)\n", "# ax.set_xlim(-30, 30)\n", - "ax.set_xlabel('Catch Count Delta')\n", + "ax.set_xlabel(\"Catch Count Delta\")\n", "\n", "# Calculate medians\n", "# medians = df.loc[df['column']=='catch_count_delta'].groupby('column')['value'].median()\n", @@ -1030,25 +1141,23 @@ "# median_value = medians[label]\n", "# y_pos = df['column'].unique().tolist().index(label)\n", "# ax.annotate(\n", - "# f'Median: {median_value:.0f}', \n", - "# xy=(0, median_value), \n", + "# f'Median: {median_value:.0f}',\n", + "# xy=(0, median_value),\n", "# xytext=(0, median_value), # Position the text above the boxplot\n", - "# va='bottom', \n", - "# ha='center', \n", - "# color='black', \n", - "# fontsize=9, \n", + "# va='bottom',\n", + "# ha='center',\n", + "# color='black',\n", + "# fontsize=9,\n", "\n", "# )\n", "\n", "\n", "ax.xaxis.grid(True)\n", - "ax.tick_params(axis='x', labelsize=9) # Change x ticks font size to 12\n", - "ax.tick_params(axis='y', labelsize=9, left = False) # Change y ticks font size to 12\n", + "ax.tick_params(axis=\"x\", labelsize=9) # Change x ticks font size to 12\n", + "ax.tick_params(axis=\"y\", labelsize=9, left=False) # Change y ticks font size to 12\n", "\n", - "sns.despine(trim=True, \n", - " left=True\n", - " )\n", - "plt.savefig('elog_countdeltas2.png', bbox_inches = 'tight', dpi = 150)\n", + "sns.despine(trim=True, left=True)\n", + "plt.savefig(\"elog_countdeltas2.png\", bbox_inches=\"tight\", dpi=150)\n", "plt.tight_layout()\n" ] }, @@ -1075,19 +1184,49 @@ "metadata": {}, "outputs": [], "source": [ - "def compare_elog_vector(vectors, df, 
dfsets, triptitle, rerun = False, figsize = (10, 4), titlesize = 12, lw = .5, ylim = None, markersize = 3):\n", + "def compare_elog_vector(\n", + " vectors,\n", + " df,\n", + " dfsets,\n", + " triptitle,\n", + " rerun=False,\n", + " figsize=(10, 4),\n", + " titlesize=12,\n", + " lw=0.5,\n", + " ylim=None,\n", + " markersize=3,\n", + "):\n", + " fig, ax = plt.subplots(\n", + " 2, 1, figsize=figsize, sharex=True, gridspec_kw={\"height_ratios\": [2, 1]}\n", + " )\n", "\n", - " fig, ax = plt.subplots(2,1,figsize=figsize, sharex = True, gridspec_kw={'height_ratios': [2, 1]})\n", - " \n", - " sns.lineplot(x = 'datetime', y = 'score', data = vectors, ax = ax[0], marker = \"o\", markersize = markersize, label = 'Original Vector \\n Score', lw = lw)\n", + " sns.lineplot(\n", + " x=\"datetime\",\n", + " y=\"score\",\n", + " data=vectors,\n", + " ax=ax[0],\n", + " marker=\"o\",\n", + " markersize=markersize,\n", + " label=\"Original Vector \\n Score\",\n", + " lw=lw,\n", + " )\n", " if rerun:\n", - " sns.lineplot(x = 'datetime', y = 'rerunscore', data = vectors, ax = ax[0], marker = \"o\",markersize = markersize, label = 'Point-in-time \\n Score', lw = lw)\n", - " plot_elog_comparisons(ax[1], df, dfsets, '', legend = False, display_axis = False)\n", + " sns.lineplot(\n", + " x=\"datetime\",\n", + " y=\"rerunscore\",\n", + " data=vectors,\n", + " ax=ax[0],\n", + " marker=\"o\",\n", + " markersize=markersize,\n", + " label=\"Point-in-time \\n Score\",\n", + " lw=lw,\n", + " )\n", + " plot_elog_comparisons(ax[1], df, dfsets, \"\", legend=False, display_axis=False)\n", " locator = DayLocator()\n", " formatter = mdates.ConciseDateFormatter(locator)\n", " ax[0].xaxis.set_major_locator(locator)\n", " ax[0].xaxis.set_major_formatter(formatter)\n", - " ax[0].set_title(triptitle,x = .5, y = 1, fontsize = titlesize)\n", + " ax[0].set_title(triptitle, x=0.5, y=1, fontsize=titlesize)\n", " # ax[0].set_xticks(locator)\n", " # plt.tick_params(axis='both', which='both')\n", " return fig, ax" @@ -1127,23 +1266,30 @@ } ], "source": [ - "set_dfs = {'brancol1': brancol1_bv_sets, 'brancol2': brancol2_bv_sets, 'brancol3': brancol3_bv_sets,\n", - " 'stpatrick1':stpatrick1_bv_sets, 'stpatrick2': stpatrick2_bv_sets, 'stpatrick3': stpatrick3_bv_sets\n", - " }\n", + "set_dfs = {\n", + " \"brancol1\": brancol1_bv_sets,\n", + " \"brancol2\": brancol2_bv_sets,\n", + " \"brancol3\": brancol3_bv_sets,\n", + " \"stpatrick1\": stpatrick1_bv_sets,\n", + " \"stpatrick2\": stpatrick2_bv_sets,\n", + " \"stpatrick3\": stpatrick3_bv_sets,\n", + "}\n", "\n", "set_delta_dfs = []\n", "\n", "for trip, df in set_dfs.items():\n", - " df.sort_values(by = 'set_start_datetime', inplace = True)\n", - " df['trip'] = trip\n", - " df['last_haul_end'] = df['haul_end_datetime'].shift(1)\n", - " df['last_haul_delta'] = df['haul_end_datetime']-df['last_haul_end']\n", - " set_delta_dfs.append(df[['trip','set_start_datetime','haul_end_datetime', 'last_haul_end', 'last_haul_delta']])\n", + " df.sort_values(by=\"set_start_datetime\", inplace=True)\n", + " df[\"trip\"] = trip\n", + " df[\"last_haul_end\"] = df[\"haul_end_datetime\"].shift(1)\n", + " df[\"last_haul_delta\"] = df[\"haul_end_datetime\"] - df[\"last_haul_end\"]\n", + " set_delta_dfs.append(\n", + " df[[\"trip\", \"set_start_datetime\", \"haul_end_datetime\", \"last_haul_end\", \"last_haul_delta\"]]\n", + " )\n", "\n", "\n", "set_deltas = pd.concat(set_delta_dfs)\n", "\n", - "set_deltas['last_haul_delta'].describe()" + "set_deltas[\"last_haul_delta\"].describe()" ] }, { @@ -1184,7 +1330,7 @@ } ], 
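The per-trip gap computation just above is a sort-then-shift pattern: order the sets chronologically, pull the previous haul end forward with shift(1), and subtract. A small sketch with made-up timestamps (only the column names match the notebook):

import pandas as pd

df = pd.DataFrame(
    {
        "set_start_datetime": pd.to_datetime(
            ["2023-01-01 06:00", "2023-01-02 07:00", "2023-01-03 06:30"]
        ),
        "haul_end_datetime": pd.to_datetime(
            ["2023-01-01 18:00", "2023-01-02 19:30", "2023-01-03 20:00"]
        ),
    }
).sort_values("set_start_datetime")

# shift(1) aligns each row with its predecessor; the first set has no
# previous haul, so its delta is NaT and drops out of any later fit.
df["last_haul_end"] = df["haul_end_datetime"].shift(1)
df["last_haul_delta"] = df["haul_end_datetime"] - df["last_haul_end"]
print(df["last_haul_delta"] / pd.Timedelta("1 hour"))  # gaps in hours: NaN, 25.5, 24.5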
"source": [ - "sns.displot(set_deltas[\"last_haul_delta\"]/pd.Timedelta(\"1 hour\"),kind = \"ecdf\")" + "sns.displot(set_deltas[\"last_haul_delta\"] / pd.Timedelta(\"1 hour\"), kind=\"ecdf\")" ] }, { @@ -1194,7 +1340,7 @@ "metadata": {}, "outputs": [], "source": [ - "x = (set_deltas[\"last_haul_delta\"]/pd.Timedelta(\"1 hour\")).dropna().sort_values()\n", + "x = (set_deltas[\"last_haul_delta\"] / pd.Timedelta(\"1 hour\")).dropna().sort_values()\n", "\n", "params = st.lognorm.fit(x)\n", "# Separate parts of parameters\n", @@ -1212,9 +1358,9 @@ "source": [ "# sigmoid function used by Vector 6, time gap vector\n", "def vector_6_algorithim(x):\n", - " k = -0.15;\n", - " b = 60.0;\n", - " return 1.0/(1.0+math.exp(k*(x-b)))" + " k = -0.15\n", + " b = 60.0\n", + " return 1.0 / (1.0 + math.exp(k * (x - b)))" ] }, { @@ -1236,10 +1382,10 @@ ], "source": [ "x_data = np.linspace(0, max(x), 100)\n", - "cdf = stats.lognorm.cdf(x_data,loc=loc, scale=scale, *arg)\n", + "cdf = stats.lognorm.cdf(x_data, loc=loc, scale=scale, *arg)\n", "x_vector_6 = [vector_6_algorithim(x_val) for x_val in x_data]\n", - "plt.plot(x_data, cdf, label = 'cdf')\n", - "plt.plot(x_data,x_vector_6, label = 'vector 6')\n", + "plt.plot(x_data, cdf, label=\"cdf\")\n", + "plt.plot(x_data, x_vector_6, label=\"vector 6\")\n", "plt.legend()\n", "plt.show()\n" ] @@ -1259,9 +1405,9 @@ "metadata": {}, "outputs": [], "source": [ - "vector_rerun_brancol = pd.read_csv('../data/vector_id_6_rerun_brancol.csv')\n", - "vector_rerun_brancol['datetime'] = pd.to_datetime(vector_rerun_brancol['datetime'], utc=True)\n", - "vector_rerun_brancol['datetime'] = vector_rerun_brancol['datetime'].dt.tz_convert(None)" + "vector_rerun_brancol = pd.read_csv(\"../data/vector_id_6_rerun_brancol.csv\")\n", + "vector_rerun_brancol[\"datetime\"] = pd.to_datetime(vector_rerun_brancol[\"datetime\"], utc=True)\n", + "vector_rerun_brancol[\"datetime\"] = vector_rerun_brancol[\"datetime\"].dt.tz_convert(None)" ] }, { @@ -1272,10 +1418,9 @@ "outputs": [], "source": [ "def get_last_haul_delta(df, set_start_col, haul_end_col):\n", - " df.sort_values(by = set_start_col, inplace = True)\n", - " df['last_haul_end'] = df[haul_end_col].shift(1)\n", - " df['last_haul_delta'] = df[haul_end_col]-df['last_haul_end']\n", - "\n" + " df.sort_values(by=set_start_col, inplace=True)\n", + " df[\"last_haul_end\"] = df[haul_end_col].shift(1)\n", + " df[\"last_haul_delta\"] = df[haul_end_col] - df[\"last_haul_end\"]\n" ] }, { @@ -1286,9 +1431,9 @@ "outputs": [], "source": [ "# define trip details\n", - "trip = Brancol2data['trip_info']\n", - "trip_start_date =pd.to_datetime(trip['trip_start_date'])\n", - "trip_end_date = pd.to_datetime(trip['trip_end_date'])" + "trip = Brancol2data[\"trip_info\"]\n", + "trip_start_date = pd.to_datetime(trip[\"trip_start_date\"])\n", + "trip_end_date = pd.to_datetime(trip[\"trip_end_date\"])" ] }, { @@ -1300,27 +1445,28 @@ "source": [ "# create dataset of results using cdf function\n", "\n", + "\n", "def most_recent(haul_endings, test):\n", - " return max(haul for haul in haul_endings if haul<=test)\n", - " \n", + " return max(haul for haul in haul_endings if haul <= test)\n", + "\n", + "\n", "# create array of datetime at 4 hr interval\n", - "test_intervals= pd.date_range(trip_start_date, trip_end_date, freq = '4h')\n", - "haul_endings = brancol2_elog['systemendhauldatetime'].tolist()\n", + "test_intervals = pd.date_range(trip_start_date, trip_end_date, freq=\"4h\")\n", + "haul_endings = brancol2_elog[\"systemendhauldatetime\"].tolist()\n", 
"haul_endings.append(trip_start_date)\n", "\n", "tests = []\n", "for test in test_intervals:\n", " test_haul = {}\n", " last_haul = most_recent(haul_endings, test)\n", - " test_haul['test_datetime'] = test\n", - " test_haul['last_haul'] = last_haul\n", - " test_haul['last_haul_delta'] = test - last_haul\n", + " test_haul[\"test_datetime\"] = test\n", + " test_haul[\"last_haul\"] = last_haul\n", + " test_haul[\"last_haul_delta\"] = test - last_haul\n", "\n", - " \n", " tests.append(test_haul)\n", - "testsDF = pd.DataFrame(tests) \n", + "testsDF = pd.DataFrame(tests)\n", "\n", - "x_deltas = testsDF[\"last_haul_delta\"]/pd.Timedelta(\"1 hour\")\n", + "x_deltas = testsDF[\"last_haul_delta\"] / pd.Timedelta(\"1 hour\")\n", "cdf = stats.lognorm.cdf(x_deltas, loc=loc, scale=scale, *arg)" ] }, @@ -1342,50 +1488,60 @@ } ], "source": [ + "plt.rc(\"xtick\", labelsize=9)\n", + "plt.rc(\"ytick\", labelsize=9)\n", "\n", - "\n", - "plt.rc('xtick',labelsize=9)\n", - "plt.rc('ytick',labelsize=9)\n", - "\n", - "plt.rc('axes', labelsize = 9)\n", - "sns.set_style(\"whitegrid\", {'axes.grid' : False})\n", + "plt.rc(\"axes\", labelsize=9)\n", + "sns.set_style(\"whitegrid\", {\"axes.grid\": False})\n", "\n", "df = brancol2_elog\n", "\n", - "get_last_haul_delta(df, 'systemstartsetdatetime', 'systemendhauldatetime')\n", + "get_last_haul_delta(df, \"systemstartsetdatetime\", \"systemendhauldatetime\")\n", "\n", - "dfsets =brancol2_bv_sets\n", + "dfsets = brancol2_bv_sets\n", "\n", "\n", - "mask = (vector_rerun_brancol['datetime'] > trip_start_date) & (vector_rerun_brancol['datetime'] <= trip_end_date)\n", - "vectors =vector_rerun_brancol.loc[mask]\n", - "triptitle = r\"$\\bf{Elog\\ Time\\ Gap\\ Vectors}$ | Brancol Trip 1: \" +f'{trip['trip_start_date']} to {trip['trip_end_date']}'\n", + "mask = (vector_rerun_brancol[\"datetime\"] > trip_start_date) & (\n", + " vector_rerun_brancol[\"datetime\"] <= trip_end_date\n", + ")\n", + "vectors = vector_rerun_brancol.loc[mask]\n", + "triptitle = (\n", + " r\"$\\bf{Elog\\ Time\\ Gap\\ Vectors}$ | Brancol Trip 1: \"\n", + " + f'{trip['trip_start_date']} to {trip['trip_end_date']}'\n", + ")\n", "\n", "\n", - "brancol1_fig, brancol1_ax = compare_elog_vector(vectors, df, dfsets, None, rerun = True, figsize = (7.5,3.5),titlesize = 10, lw = .8)\n", - "sns.lineplot(x = testsDF['test_datetime'], y = cdf, ax = brancol1_ax[0], label = 'Recalculated Vector \\n Score (CDF)', lw = .8)\n", + "brancol1_fig, brancol1_ax = compare_elog_vector(\n", + " vectors, df, dfsets, None, rerun=True, figsize=(7.5, 3.5), titlesize=10, lw=0.8\n", + ")\n", + "sns.lineplot(\n", + " x=testsDF[\"test_datetime\"],\n", + " y=cdf,\n", + " ax=brancol1_ax[0],\n", + " label=\"Recalculated Vector \\n Score (CDF)\",\n", + " lw=0.8,\n", + ")\n", "# brancol1_ax[0].legend().remove()\n", - "brancol1_ax[1].hlines([.25, .75],.01,.99, transform=brancol1_ax[1].transAxes, colors = 'grey', lw = .2, zorder = 0)\n", + "brancol1_ax[1].hlines(\n", + " [0.25, 0.75], 0.01, 0.99, transform=brancol1_ax[1].transAxes, colors=\"grey\", lw=0.2, zorder=0\n", + ")\n", "\n", "\n", "# plt.subplots_adjust(wspace=0, hspace=.1)\n", "legend_elements = []\n", - "for label, color in color_dict['elog'].items():\n", - " \n", - " legend_elements.append(Patch(facecolor=color, edgecolor=color,\n", - " label=label))\n", + "for label, color in color_dict[\"elog\"].items():\n", + " legend_elements.append(Patch(facecolor=color, edgecolor=color, label=label))\n", "\n", "# labels = ['Original vector \\n score', 'Point-in-time \\n score', 'Recalculated Vector 
\\n Score (CDF)']\n", - "brancol1_ax[0].legend( bbox_to_anchor=(.95, .9), ncol = 1, \n", - " loc = 'upper left', fontsize = 9, frameon = False)\n", - "brancol1_ax[1].legend(handles = legend_elements, bbox_to_anchor=(1, .9), \n", - " loc = 'upper left', fontsize = 9, frameon = False\n", - " )\n", - "sns.despine(trim=True, \n", - " left=True\n", - " )\n", + "brancol1_ax[0].legend(\n", + " bbox_to_anchor=(0.95, 0.9), ncol=1, loc=\"upper left\", fontsize=9, frameon=False\n", + ")\n", + "brancol1_ax[1].legend(\n", + " handles=legend_elements, bbox_to_anchor=(1, 0.9), loc=\"upper left\", fontsize=9, frameon=False\n", + ")\n", + "sns.despine(trim=True, left=True)\n", "brancol1_fig.show()\n", - "brancol1_fig.savefig('brancol1_elog_vector.png', bbox_inches='tight')\n", + "brancol1_fig.savefig(\"brancol1_elog_vector.png\", bbox_inches=\"tight\")\n", "\n", "# testsDF.head()" ] @@ -1414,7 +1570,7 @@ "outputs": [], "source": [ "# def plot_event_bars(df, ax,label, datetime_col,duration=None,end_col = None, duration_col = None, y_val=.7, y_height = .6, color= '#43aa99' ):\n", - " \n", + "\n", "# if duration:\n", "# x_duration = np.full(len(df), pd.Timedelta(duration))\n", "# elif end_col:\n", @@ -1422,13 +1578,11 @@ "# x_duration = df['duration']\n", "# elif duration_col:\n", "# x_duration = df[duration_col]\n", - " \n", + "\n", "# x = list(zip(df[datetime_col], x_duration))\n", "# y = (y_val, y_height)\n", "\n", - "# ax.broken_barh(x, y, facecolors = color, edgecolor = 'face', label = label, clip_on=False)\n", - "\n", - " " + "# ax.broken_barh(x, y, facecolors = color, edgecolor = 'face', label = label, clip_on=False)\n" ] }, { @@ -1441,7 +1595,7 @@ "# def plot_event_vspan(df_events, ax, color_dict):\n", "# for category, color in color_dict.items():\n", "# df_category = df_events.loc[df_events['category']==category]\n", - " \n", + "\n", "# for idx, row in df_category.iterrows():\n", "# ax.axvspan(*mdates.date2num([row['start_time'], row['end_time']]), color=color, edgecolor = 'face',alpha=0.5)" ] @@ -1462,11 +1616,11 @@ "# trip_end_date = trip_info['trip_end_date']\n", "\n", "# sql = f\"\"\"\n", - "# SELECT \n", - "# v.start_datetime, \n", + "# SELECT\n", + "# v.start_datetime,\n", "# v.cam_name\n", - " \n", - "# from {vessel}_v1_video_files v \n", + "\n", + "# from {vessel}_v1_video_files v\n", "# where start_datetime > '{trip_start_date}' and start_datetime < '{trip_end_date}'\n", "# \"\"\"\n", "# video_df = wr.athena.read_sql_query(sql, database=\"tnc_edge\")\n", @@ -1492,7 +1646,7 @@ "# # y_var = x_vars[n]\n", "# text = row[text_col]\n", "# data_xy = (row['start_time'], 1.7)\n", - " \n", + "\n", "# an = ax.annotate(\n", "# text,\n", "# xy=data_xy, xycoords='data',\n", @@ -1501,14 +1655,14 @@ "# bbox = bbox_args,\n", "# color = 'white'\n", "# )\n", - " \n", + "\n", "# annots.append(an)\n", - " \n", + "\n", "# x, y = text_xy\n", - " \n", + "\n", "# y = y+y_var\n", "# y_var = y_var * -1\n", - " \n", + "\n", "# text_xy = (x,y)\n", "\n", "# return annots" @@ -1548,8 +1702,8 @@ "# for idx, (category, color) in enumerate(category_color_dict.items()):\n", "# df_category = df_events.loc[df_events['category']==category].copy()\n", "# y_val = y_vals[idx]\n", - " \n", - " \n", + "\n", + "\n", "# plot_event_bars(df_category, ax,category, 'start_time',end_col = 'end_time', y_val=y_val, y_height = y_height, color= color )" ] }, @@ -1561,7 +1715,7 @@ "outputs": [], "source": [ "# def plot_event_bars(df, ax,label, datetime_col,duration=None,end_col = None, duration_col = None, y_val=.7, y_height = .6, color= 
'#43aa99' ):\n", - " \n", + "\n", "# if duration:\n", "# x_duration = np.full(len(df), pd.Timedelta(duration))\n", "# elif end_col:\n", @@ -1571,13 +1725,11 @@ "# x_duration = df[duration_col]\n", "# else:\n", "# x_duration = np.full(len(df), 2)\n", - " \n", + "\n", "# x = list(zip(df[datetime_col], x_duration))\n", "# y = (y_val, y_height)\n", "\n", - "# ax.broken_barh(x, y, facecolors = color, edgecolor = 'face', label = label, clip_on=False)\n", - "\n", - " " + "# ax.broken_barh(x, y, facecolors = color, edgecolor = 'face', label = label, clip_on=False)\n" ] }, { diff --git a/notebooks/helper_functions/aggregations.py b/notebooks/helper_functions/aggregations.py index fa61f63..fc6d325 100644 --- a/notebooks/helper_functions/aggregations.py +++ b/notebooks/helper_functions/aggregations.py @@ -1,94 +1,101 @@ -import scipy.stats as stats -import itertools -import pandas as pd +import itertools + import numpy as np +import pandas as pd +from scipy import stats from sklearn.linear_model import LinearRegression + def aggregate_by_interval(df, freq, agg_list, agg_cols): agg_dict = {} for col in agg_cols: agg_dict[col] = agg_list - - data = df.groupby(pd.Grouper(key = 'utc_start_datetime', freq = f'{freq}min')).agg(agg_dict).reset_index() - data.columns = ['_'.join(col).strip() for col in data.columns.values] + + data = ( + df.groupby(pd.Grouper(key="utc_start_datetime", freq=f"{freq}min")) + .agg(agg_dict) + .reset_index() + ) + data.columns = ["_".join(col).strip() for col in data.columns.values] return data + def aggregate_concat(dfs, freq, agg_list, agg_cols): datas = [] for df in dfs: - data = aggregate_by_interval(df = df, freq = freq, agg_list = agg_list, agg_cols= agg_cols) - data.fillna(0, inplace = True) + data = aggregate_by_interval(df=df, freq=freq, agg_list=agg_list, agg_cols=agg_cols) + data.fillna(0, inplace=True) datas.append(data) concat_data = pd.concat(datas) return concat_data + def compare_aggregation_correlations(dfs, intervals, agg_list, x_col, y_col): results = {} for x_agg in agg_list: # print(f'x_agg: {x_agg}') x_results = [] for i in intervals: - data = aggregate_concat(dfs, freq = i, agg_list= agg_list, agg_cols=[x_col, y_col]) + data = aggregate_concat(dfs, freq=i, agg_list=agg_list, agg_cols=[x_col, y_col]) for y_agg in agg_list: y_results = {} - slope, intercept, rvalue, pvalue, stderr = stats.linregress(x=data[f'{x_col}_{x_agg}'], y=data[f'{y_col}_{y_agg}']) - r2 = rvalue ** 2 - y_results['interval'] = i - y_results['y_agg'] = y_agg - y_results['r2'] = r2 - + slope, intercept, rvalue, pvalue, stderr = stats.linregress( + x=data[f"{x_col}_{x_agg}"], y=data[f"{y_col}_{y_agg}"] + ) + r2 = rvalue**2 + y_results["interval"] = i + y_results["y_agg"] = y_agg + y_results["r2"] = r2 + x_results.append(y_results) - - + df_Xresults = pd.DataFrame(x_results) # print(df_Xresults.head()) results[x_agg] = df_Xresults return results + def compare_aggregation_correlation_columns(dfs, intervals, agg_list, x_col, y_cols): results = [] - + for i in intervals: - - data = aggregate_concat(dfs, freq = i, agg_list= agg_list, agg_cols=[x_col]+ y_cols) + data = aggregate_concat(dfs, freq=i, agg_list=agg_list, agg_cols=[x_col] + y_cols) # result['interval'] = i for y_col, x_agg, y_agg in itertools.product(y_cols, agg_list, agg_list): result = {} - result['interval'] = i - result['x_agg'] = x_agg - result['y_agg'] = y_agg - result['y_col'] = y_col + result["interval"] = i + result["x_agg"] = x_agg + result["y_agg"] = y_agg + result["y_col"] = y_col # get r2 value - x = 
np.array(data[f'{x_col}_{x_agg}']).reshape((-1, 1)) - y = np.array(data[f'{y_col}_{y_agg}']).reshape((-1, 1)) + x = np.array(data[f"{x_col}_{x_agg}"]).reshape((-1, 1)) + y = np.array(data[f"{y_col}_{y_agg}"]).reshape((-1, 1)) model = LinearRegression() - model.fit(x,y) - r2 = model.score(x,y) + model.fit(x, y) + r2 = model.score(x, y) - result['r2'] = r2 + result["r2"] = r2 results.append(result) - - - + df_results = pd.DataFrame(results) - return df_results + def add_rolling_aggregates(df, win, agg_dict, keep_cols): - rolling_df = df.rolling(win, center = True).agg(agg_dict) + rolling_df = df.rolling(win, center=True).agg(agg_dict) new_cols = {} for col in agg_dict.keys(): - new_cols[col]= f'rolling_{col}' - - rolling_df.rename(columns = new_cols, inplace = True) + new_cols[col] = f"rolling_{col}" + + rolling_df.rename(columns=new_cols, inplace=True) _df = pd.merge(df[keep_cols], rolling_df, left_index=True, right_index=True) - return _df.reset_index() \ No newline at end of file + return _df.reset_index() diff --git a/notebooks/helper_functions/data_readers.py b/notebooks/helper_functions/data_readers.py index c5c35d2..d499280 100644 --- a/notebooks/helper_functions/data_readers.py +++ b/notebooks/helper_functions/data_readers.py @@ -1,209 +1,235 @@ +import sqlite3 + import awswrangler as wr import pandas as pd -import sqlite3 -import numpy as np -def get_data(boat, trip_no): +def get_data(boat, trip_no): trip_data = {} - - trip_info = get_trip_info(boat = boat, trip_no=trip_no) - trip_data['trip_info'] = trip_info + + trip_info = get_trip_info(boat=boat, trip_no=trip_no) + trip_data["trip_info"] = trip_info print(trip_info) - bv_sets = get_bv_sets(boat=boat, trip_id = trip_info['trip_id']) - trip_data['bv_sets'] = bv_sets + bv_sets = get_bv_sets(boat=boat, trip_id=trip_info["trip_id"]) + trip_data["bv_sets"] = bv_sets - bv_fish = get_bv_fish(boat = boat, trip_id = trip_info['trip_id']) - trip_data['bv_fish'] = bv_fish - + bv_fish = get_bv_fish(boat=boat, trip_id=trip_info["trip_id"]) + trip_data["bv_fish"] = bv_fish - ai_df= get_ai_counts(boat=boat, trip_info=trip_info) - trip_data['ai_df'] = ai_df + ai_df = get_ai_counts(boat=boat, trip_info=trip_info) + trip_data["ai_df"] = ai_df all_counts = get_bv_counts(ai_df, bv_fish) - trip_data['all_counts'] = all_counts + trip_data["all_counts"] = all_counts ai_sets = join_bv_sets(bv_sets, ai_df) - trip_data['ai_sets'] = ai_sets + trip_data["ai_sets"] = ai_sets - df_elog = get_elog_data(boat, trip_info['trip_start_date'], trip_info['trip_end_date']) - trip_data['elogs'] = df_elog + df_elog = get_elog_data(boat, trip_info["trip_start_date"], trip_info["trip_end_date"]) + trip_data["elogs"] = df_elog bv_set_counts = get_bv_set_counts(bv_fish, bv_sets) - trip_data['bv_set_counts'] = bv_set_counts + trip_data["bv_set_counts"] = bv_set_counts - return trip_data - def get_trip_info(boat, trip_no): trip_df = wr.athena.read_sql_query(f"SELECT * FROM {boat}_v1_bv_trips", database="tnc_edge") - - trip_df.sort_values(by = 'trip_end_date', ascending= True, inplace= True) - - trip_id = trip_df['trip_id'].values[trip_no] - - trip_start_date = trip_df['trip_start_date'].values[trip_no] - trip_end_date = trip_df['trip_end_date'].values[trip_no] + + trip_df.sort_values(by="trip_end_date", ascending=True, inplace=True) + + trip_id = trip_df["trip_id"].values[trip_no] + + trip_start_date = trip_df["trip_start_date"].values[trip_no] + trip_end_date = trip_df["trip_end_date"].values[trip_no] trip_info = {} - trip_info['trip_id'] = trip_id - 
trip_info['trip_start_date'] = trip_start_date - trip_info['trip_end_date'] = trip_end_date + trip_info["trip_id"] = trip_id + trip_info["trip_start_date"] = trip_start_date + trip_info["trip_end_date"] = trip_end_date return trip_info - + + def get_bv_sets(boat, trip_id): - - bv_sets = wr.athena.read_sql_query(f"SELECT * FROM {boat}_v1_bv_sets where trip_id = '{trip_id}'", database = "tnc_edge") + bv_sets = wr.athena.read_sql_query( + f"SELECT * FROM {boat}_v1_bv_sets where trip_id = '{trip_id}'", database="tnc_edge" + ) return bv_sets -def get_bv_fish(boat, trip_id): - bv_fish = wr.athena.read_sql_query(f"SELECT fish.* FROM {boat}_v1_bv_fish fish left join {boat}_v1_bv_sets sets on sets.set_id = fish.set_id where sets.trip_id = '{trip_id}'", database = "tnc_edge") +def get_bv_fish(boat, trip_id): + bv_fish = wr.athena.read_sql_query( + f"SELECT fish.* FROM {boat}_v1_bv_fish fish left join {boat}_v1_bv_sets sets on sets.set_id = fish.set_id where sets.trip_id = '{trip_id}'", + database="tnc_edge", + ) return bv_fish + def get_bv_set_counts(bv_fish, bv_sets): - datetime_cols = ['set_start_datetime','set_end_datetime','haul_start_datetime','haul_end_datetime'] + datetime_cols = [ + "set_start_datetime", + "set_end_datetime", + "haul_start_datetime", + "haul_end_datetime", + ] for col in datetime_cols: - bv_sets[col] = pd.to_datetime(bv_sets[col], utc = True) + bv_sets[col] = pd.to_datetime(bv_sets[col], utc=True) bv_sets[col] = bv_sets[col].dt.tz_convert(None) - - retained_bv_counts = bv_fish[bv_fish['future'] == 'retained'].groupby('set_id').agg({'fish_id':'count'}).reset_index().rename(columns = {'fish_id':'retained_count'}) - joined_bv_sets = pd.merge(bv_sets, retained_bv_counts, how = 'left', on = 'set_id') - + + retained_bv_counts = ( + bv_fish[bv_fish["future"] == "retained"] + .groupby("set_id") + .agg({"fish_id": "count"}) + .reset_index() + .rename(columns={"fish_id": "retained_count"}) + ) + joined_bv_sets = pd.merge(bv_sets, retained_bv_counts, how="left", on="set_id") + return joined_bv_sets + # read in catch countst def get_ai_counts(boat, trip_info): - - model = 'ondeck' if boat == 'stpatrick' else 'aifish' if boat == 'brancol' else None - if model == 'ondeck': + model = "ondeck" if boat == "stpatrick" else "aifish" if boat == "brancol" else None + if model == "ondeck": count_column = None - number_columns = ['overallcatches', 'overallcount','overalldiscards', 'detection_confidence','count'] - elif model == 'aifish': - count_column = 'count' - number_columns = ['count', 'detection_confidence'] - - trip_start_date = trip_info['trip_start_date'] - trip_end_date = trip_info['trip_end_date'] - + number_columns = [ + "overallcatches", + "overallcount", + "overalldiscards", + "detection_confidence", + "count", + ] + elif model == "aifish": + count_column = "count" + number_columns = ["count", "detection_confidence"] + + trip_start_date = trip_info["trip_start_date"] + trip_end_date = trip_info["trip_end_date"] + aifish_sql = f""" - SELECT + SELECT aifd.id, aifd.{count_column} as count, aifd.detection_confidence, - v.start_datetime - FROM {boat}_v1_{model}data aifd - join {boat}_v1_video_files v on aifd.video_uri = v.decrypted_path + v.start_datetime + FROM {boat}_v1_{model}data aifd + join {boat}_v1_video_files v on aifd.video_uri = v.decrypted_path where start_datetime > '{trip_start_date}' and start_datetime < '{trip_end_date}' """ ondeck_sql = f""" - SELECT + SELECT aifd.id, aifd.overallcount, aifd.overallcatches, aifd.overalldiscards, (cast(aifd.overallcatches as DOUBLE) - 
cast(aifd.overalldiscards as DOUBLE)) as count, aifd.detection_confidence, - v.start_datetime - FROM {boat}_v1_{model}data aifd - join {boat}_v1_video_files v on aifd.video_uri = v.decrypted_path + v.start_datetime + FROM {boat}_v1_{model}data aifd + join {boat}_v1_video_files v on aifd.video_uri = v.decrypted_path where start_datetime > '{trip_start_date}' and start_datetime < '{trip_end_date}' - + """ - sql = ondeck_sql if model == 'ondeck' else aifish_sql if model == 'aifish' else None + sql = ondeck_sql if model == "ondeck" else aifish_sql if model == "aifish" else None ai_df = wr.athena.read_sql_query(sql, database="tnc_edge") - ai_df.start_datetime = pd.to_datetime(ai_df.start_datetime, utc = True) - ai_df['utc_start_datetime'] = ai_df['start_datetime'].dt.tz_convert(None) - ai_df['utc_end_datetime'] = ai_df['utc_start_datetime'] + pd.Timedelta(minutes = 5) + ai_df.start_datetime = pd.to_datetime(ai_df.start_datetime, utc=True) + ai_df["utc_start_datetime"] = ai_df["start_datetime"].dt.tz_convert(None) + ai_df["utc_end_datetime"] = ai_df["utc_start_datetime"] + pd.Timedelta(minutes=5) for col in number_columns: - ai_df[col] = pd.to_numeric(ai_df[col], errors='coerce') - - ai_df['weighted_count'] = ai_df['detection_confidence'] * ai_df['count'] + ai_df[col] = pd.to_numeric(ai_df[col], errors="coerce") + + ai_df["weighted_count"] = ai_df["detection_confidence"] * ai_df["count"] return ai_df -def join_bv_sets(bv_sets, ai_df): +def join_bv_sets(bv_sets, ai_df): # join aif_df to sets - df_hauls = bv_sets.loc[:,['set_id','set_number','haul_start_datetime','haul_end_datetime']] - df_hauls['haul_start_datetime'] = pd.to_datetime(df_hauls['haul_start_datetime']) - df_hauls['haul_end_datetime'] = pd.to_datetime(df_hauls['haul_end_datetime']) - df_hauls['haul_start_datetime'] = df_hauls['haul_start_datetime'].dt.tz_convert(None) - df_hauls['haul_end_datetime'] = df_hauls['haul_end_datetime'].dt.tz_convert(None) - - #Make the db in memory - conn = sqlite3.connect(':memory:') - - #write the tables - ai_df.to_sql('ai_counts', conn, index=False) - df_hauls.to_sql('hauls', conn, index=False) - + df_hauls = bv_sets.loc[:, ["set_id", "set_number", "haul_start_datetime", "haul_end_datetime"]] + df_hauls["haul_start_datetime"] = pd.to_datetime(df_hauls["haul_start_datetime"]) + df_hauls["haul_end_datetime"] = pd.to_datetime(df_hauls["haul_end_datetime"]) + df_hauls["haul_start_datetime"] = df_hauls["haul_start_datetime"].dt.tz_convert(None) + df_hauls["haul_end_datetime"] = df_hauls["haul_end_datetime"].dt.tz_convert(None) + + # Make the db in memory + conn = sqlite3.connect(":memory:") + + # write the tables + ai_df.to_sql("ai_counts", conn, index=False) + df_hauls.to_sql("hauls", conn, index=False) + query = """ select * - + from ai_counts left join hauls on ai_counts.utc_start_datetime between hauls.haul_start_datetime and hauls.haul_end_datetime - + """ df = pd.read_sql_query(query, conn) # convert datatypes - df['count'] = pd.to_numeric(df['count']) - df['haul_start_datetime'] = pd.to_datetime(df['haul_start_datetime']) - df['haul_end_datetime'] = pd.to_datetime(df['haul_end_datetime']) + df["count"] = pd.to_numeric(df["count"]) + df["haul_start_datetime"] = pd.to_datetime(df["haul_start_datetime"]) + df["haul_end_datetime"] = pd.to_datetime(df["haul_end_datetime"]) df.start_datetime = pd.to_datetime(df.start_datetime) - df['utc_start_datetime'] = pd.to_datetime(df['utc_start_datetime']) + df["utc_start_datetime"] = pd.to_datetime(df["utc_start_datetime"]) # get flags for haul not haul - 
df['is_haul_bool'] = df['set_number'].notnull() - df['is_haul'] = df['is_haul_bool'].apply(lambda x: 1 if x else 0) + df["is_haul_bool"] = df["set_number"].notnull() + df["is_haul"] = df["is_haul_bool"].apply(lambda x: 1 if x else 0) + + df.sort_values(by="utc_start_datetime", inplace=True) - df.sort_values(by = 'utc_start_datetime', inplace= True) - return df + def get_bv_counts(ai_df, bv_fish): - #join bv counts to ai counts - #Make the db in memory - conn = sqlite3.connect(':memory:') - - #write the tables - ai_df.to_sql('ai_counts', conn, index=False) - bv_fish.to_sql('bv_fish', conn, index=False) - + # join bv counts to ai counts + # Make the db in memory + conn = sqlite3.connect(":memory:") + + # write the tables + ai_df.to_sql("ai_counts", conn, index=False) + bv_fish.to_sql("bv_fish", conn, index=False) + query = """ select ai_counts.id, ai_counts.utc_start_datetime, bv_fish.* from ai_counts - join bv_fish on bv_fish.catch_datetime >= ai_counts.utc_start_datetime + join bv_fish on bv_fish.catch_datetime >= ai_counts.utc_start_datetime and bv_fish.catch_datetime < ai_counts.utc_end_datetime """ bv_ai_df = pd.read_sql_query(query, conn) - bv_counts = bv_ai_df.groupby('id').fish_id.agg('count').reset_index().rename(columns = {'fish_id':'bv_count'}) - - df_all_counts = pd.merge(ai_df, bv_counts, how = 'left', on = 'id') - df_all_counts.sort_values(by = 'utc_start_datetime', inplace = True) - df_all_counts['bv_count'].fillna(0, inplace= True) + bv_counts = ( + bv_ai_df.groupby("id") + .fish_id.agg("count") + .reset_index() + .rename(columns={"fish_id": "bv_count"}) + ) + + df_all_counts = pd.merge(ai_df, bv_counts, how="left", on="id") + df_all_counts.sort_values(by="utc_start_datetime", inplace=True) + df_all_counts["bv_count"].fillna(0, inplace=True) return df_all_counts def get_elog_data(vessel, trip_start_date, trip_end_date): - #elog data - + # elog data + sql = f""" select elogs.id, @@ -214,34 +240,39 @@ def get_elog_data(vessel, trip_start_date, trip_end_date): elogs.systemendsetdatetime, elogs.systemstarthauldatetime, elogs.systemendhauldatetime - + from {vessel}_v1_deckhandevents_mostrecentlonglineevent_jsonextracted elogs where datetime > '{trip_start_date}' and datetime < '{trip_end_date}' """ - df_elog = wr.athena.read_sql_query( - sql, - database="tnc_edge") - - datetime_cols = ['datetime','systemstartsetdatetime','systemendsetdatetime','systemstarthauldatetime','systemendhauldatetime'] + df_elog = wr.athena.read_sql_query(sql, database="tnc_edge") + + datetime_cols = [ + "datetime", + "systemstartsetdatetime", + "systemendsetdatetime", + "systemstarthauldatetime", + "systemendhauldatetime", + ] for col in datetime_cols: - df_elog[col] = pd.to_datetime(df_elog[col], utc = True) + df_elog[col] = pd.to_datetime(df_elog[col], utc=True) df_elog[col] = df_elog[col].dt.tz_convert(None) return df_elog + def get_vector_data(vessel, vector, trip_info): - trip_start_date = trip_info['trip_start_date'] - trip_end_date = trip_info['trip_end_date'] - + trip_start_date = trip_info["trip_start_date"] + trip_end_date = trip_info["trip_end_date"] + sql = f""" select id, score, datetime, detail, name from {vessel}_v1_tests - where vector_id = '{vector}' and datetime > '{trip_start_date}' and datetime < '{trip_end_date}' + where vector_id = '{vector}' and datetime > '{trip_start_date}' and datetime < '{trip_end_date}' """ df_vector = wr.athena.read_sql_query(sql, database="tnc_edge") - df_vector['datetime'] = pd.to_datetime(df_vector['datetime'], utc=True) - df_vector['datetime'] = 
df_vector['datetime'].dt.tz_convert(None) - df_vector['score'] = pd.to_numeric(df_vector['score']) - - return df_vector \ No newline at end of file + df_vector["datetime"] = pd.to_datetime(df_vector["datetime"], utc=True) + df_vector["datetime"] = df_vector["datetime"].dt.tz_convert(None) + df_vector["score"] = pd.to_numeric(df_vector["score"]) + + return df_vector diff --git a/notebooks/key_event_detection.ipynb b/notebooks/key_event_detection.ipynb index b960c46..8e5304c 100644 --- a/notebooks/key_event_detection.ipynb +++ b/notebooks/key_event_detection.ipynb @@ -17,7 +17,6 @@ "metadata": {}, "outputs": [], "source": [ - " \n", "%autoreload 2" ] }, @@ -62,42 +61,33 @@ "metadata": {}, "outputs": [], "source": [ - "import awswrangler as wr\n", - "import pandas as pd\n", "import sqlite3\n", - "import seaborn as sns\n", + "import warnings\n", + "\n", + "import awswrangler as wr\n", "import matplotlib.pyplot as plt\n", "import numpy as np\n", - "import scipy.stats as stats\n", - "from sklearn.metrics import mean_squared_error\n", - "\n", - "from sklearn.linear_model import LinearRegression\n", + "import pandas as pd\n", + "import seaborn as sns\n", "\n", - "import warnings\n", - "warnings.filterwarnings('ignore')\n", + "warnings.filterwarnings(\"ignore\")\n", "\n", "sns.set_theme()\n", "\n", - "import itertools \n", - "import matplotlib.gridspec as gridspec\n", - "from scipy.stats import chi2_contingency\n", - "from matplotlib.dates import DayLocator, HourLocator, DateFormatter, drange\n", - "\n", + "import itertools\n", "import warnings\n", - "warnings.filterwarnings('ignore')\n", - "import json\n", - "from tsai.all import *\n", - "from IPython.display import display, Markdown\n", - "import json\n", - "# from pandas.io.json import json_normalize\n", "\n", - "from matplotlib.patches import Patch\n", + "from matplotlib.dates import DayLocator, HourLocator\n", + "from scipy.stats import chi2_contingency\n", "\n", + "warnings.filterwarnings(\"ignore\")\n", "import matplotlib.dates as mdates\n", - "\n", + "from helper_functions.aggregations import *\n", "from helper_functions.data_readers import *\n", "\n", - "from helper_functions.aggregations import *" + "# from pandas.io.json import json_normalize\n", + "from matplotlib.patches import Patch\n", + "from tsai.all import *" ] }, { @@ -106,10 +96,7 @@ "id": "ef050a6e-ceab-4bad-a643-e7cf9f6e0be0", "metadata": {}, "outputs": [], - "source": [ - "\n", - "import matplotlib.ticker as ticker" - ] + "source": [] }, { "cell_type": "markdown", @@ -126,20 +113,84 @@ "metadata": {}, "outputs": [], "source": [ - "colors = sns.color_palette(['#184EAD','#648fff','#88ccee','#ae9ef7','#6844d5','#332288','#c52dac','#ef4341','#84164c','#cb6577','#ff6100','#90550f','#c78d1b','#ffb003','#ddcc77','#a2c662','#40a018','#117347','#43aa99'])\n", - "\n", - "color_list = ['#184EAD','#648fff','#88ccee','#ae9ef7','#6844d5','#332288','#c52dac','#ef4341','#84164c','#cb6577','#ff6100','#90550f','#c78d1b','#ffb003','#ddcc77','#a2c662','#40a018','#117347','#43aa99']\n", + "colors = sns.color_palette(\n", + " [\n", + " \"#184EAD\",\n", + " \"#648fff\",\n", + " \"#88ccee\",\n", + " \"#ae9ef7\",\n", + " \"#6844d5\",\n", + " \"#332288\",\n", + " \"#c52dac\",\n", + " \"#ef4341\",\n", + " \"#84164c\",\n", + " \"#cb6577\",\n", + " \"#ff6100\",\n", + " \"#90550f\",\n", + " \"#c78d1b\",\n", + " \"#ffb003\",\n", + " \"#ddcc77\",\n", + " \"#a2c662\",\n", + " \"#40a018\",\n", + " \"#117347\",\n", + " \"#43aa99\",\n", + " ]\n", + ")\n", + "\n", + "color_list = [\n", + " \"#184EAD\",\n", + " 
\"#648fff\",\n", + " \"#88ccee\",\n", + " \"#ae9ef7\",\n", + " \"#6844d5\",\n", + " \"#332288\",\n", + " \"#c52dac\",\n", + " \"#ef4341\",\n", + " \"#84164c\",\n", + " \"#cb6577\",\n", + " \"#ff6100\",\n", + " \"#90550f\",\n", + " \"#c78d1b\",\n", + " \"#ffb003\",\n", + " \"#ddcc77\",\n", + " \"#a2c662\",\n", + " \"#40a018\",\n", + " \"#117347\",\n", + " \"#43aa99\",\n", + "]\n", "\n", "%matplotlib inline\n", - "color_list = ['#184EAD','#648fff','#88ccee','#ae9ef7','#6844d5','#332288','#c52dac','#ef4341','#84164c','#cb6577','#ff6100','#90550f','#c78d1b','#ffb003','#ddcc77','#a2c662','#40a018','#117347','#43aa99']\n", + "color_list = [\n", + " \"#184EAD\",\n", + " \"#648fff\",\n", + " \"#88ccee\",\n", + " \"#ae9ef7\",\n", + " \"#6844d5\",\n", + " \"#332288\",\n", + " \"#c52dac\",\n", + " \"#ef4341\",\n", + " \"#84164c\",\n", + " \"#cb6577\",\n", + " \"#ff6100\",\n", + " \"#90550f\",\n", + " \"#c78d1b\",\n", + " \"#ffb003\",\n", + " \"#ddcc77\",\n", + " \"#a2c662\",\n", + " \"#40a018\",\n", + " \"#117347\",\n", + " \"#43aa99\",\n", + "]\n", + "\n", + "\n", "def show_color_pallete(pallete):\n", - "# fig, ax = plt.subplots()\n", - " \n", + " # fig, ax = plt.subplots()\n", + "\n", " sns.palplot(pallete, size=2)\n", " ax = plt.gca()\n", " for i, name in enumerate(pallete):\n", - " label = f'[{i}]'\n", - " ax.text(i, -.57, label,horizontalalignment='center', fontsize = 10) \n", + " label = f\"[{i}]\"\n", + " ax.text(i, -0.57, label, horizontalalignment=\"center\", fontsize=10)\n", " plt.show()" ] }, @@ -229,12 +280,12 @@ "# r2 = model.score(x,y)\n", "# coefficients = model.coef_\n", "# intercept = model.intercept_\n", - " \n", + "\n", "\n", "# beta0 = r'$intercept = \\hat\\beta_0 =$' + str(round(intercept[0],2))\n", - " \n", + "\n", "# beta1 = r'$slope = \\hat\\beta_1 =$' + str(round(coefficients[0][0],2))\n", - " \n", + "\n", "# r_squared = r'$R^2 =$' + str(round(r2,2))\n", "\n", "# textstr = '\\n'.join((\n", @@ -279,17 +330,17 @@ ], "source": [ "# ST Patrick Trips\n", - "StPatrick1data = get_data(boat = 'stpatrick', trip_no = 0)\n", - "StPatrick2data = get_data(boat = 'stpatrick', trip_no = 1)\n", + "StPatrick1data = get_data(boat=\"stpatrick\", trip_no=0)\n", + "StPatrick2data = get_data(boat=\"stpatrick\", trip_no=1)\n", "\n", - "counts_StPatrick1 = StPatrick1data['all_counts']\n", - "counts_StPatrick2 = StPatrick2data['all_counts']\n", + "counts_StPatrick1 = StPatrick1data[\"all_counts\"]\n", + "counts_StPatrick2 = StPatrick2data[\"all_counts\"]\n", "\n", - "stpatrick1_elog = StPatrick1data['elogs']\n", - "stpatrick2_elog = StPatrick2data['elogs']\n", + "stpatrick1_elog = StPatrick1data[\"elogs\"]\n", + "stpatrick2_elog = StPatrick2data[\"elogs\"]\n", "\n", - "stpatrick1_bv_set_counts = StPatrick1data['bv_set_counts']\n", - "stpatrick1_bv_set_counts = StPatrick2data['bv_set_counts']" + "stpatrick1_bv_set_counts = StPatrick1data[\"bv_set_counts\"]\n", + "stpatrick1_bv_set_counts = StPatrick2data[\"bv_set_counts\"]" ] }, { @@ -299,8 +350,8 @@ "metadata": {}, "outputs": [], "source": [ - "stpatrick1_bv_sets = StPatrick1data['bv_sets']\n", - "stpatrick2_bv_sets = StPatrick2data['bv_sets']" + "stpatrick1_bv_sets = StPatrick1data[\"bv_sets\"]\n", + "stpatrick2_bv_sets = StPatrick2data[\"bv_sets\"]" ] }, { @@ -478,30 +529,30 @@ ], "source": [ "# Brancol Trips\n", - "Brancol1data = get_data(boat = 'brancol', trip_no = 0)\n", - "Brancol2data = get_data(boat = 'brancol', trip_no = 1)\n", - "Brancol3data = get_data(boat = 'brancol', trip_no = 2)\n", + "Brancol1data = get_data(boat=\"brancol\", 
trip_no=0)\n", + "Brancol2data = get_data(boat=\"brancol\", trip_no=1)\n", + "Brancol3data = get_data(boat=\"brancol\", trip_no=2)\n", "\n", "\n", - "counts_Brancol1 = Brancol1data['all_counts']\n", - "counts_Brancol2 = Brancol2data['all_counts']\n", - "counts_Brancol3 = Brancol3data['all_counts']\n", + "counts_Brancol1 = Brancol1data[\"all_counts\"]\n", + "counts_Brancol2 = Brancol2data[\"all_counts\"]\n", + "counts_Brancol3 = Brancol3data[\"all_counts\"]\n", "\n", - "brancol1_elog = Brancol1data['elogs']\n", - "brancol2_elog = Brancol2data['elogs']\n", - "brancol3_elog = Brancol3data['elogs']\n", + "brancol1_elog = Brancol1data[\"elogs\"]\n", + "brancol2_elog = Brancol2data[\"elogs\"]\n", + "brancol3_elog = Brancol3data[\"elogs\"]\n", "\n", - "brancol2_bv_sets = Brancol2data['bv_sets']\n", - "brancol1_bv_sets = Brancol1data['bv_sets']\n", - "brancol3_bv_sets = Brancol3data['bv_sets']\n", + "brancol2_bv_sets = Brancol2data[\"bv_sets\"]\n", + "brancol1_bv_sets = Brancol1data[\"bv_sets\"]\n", + "brancol3_bv_sets = Brancol3data[\"bv_sets\"]\n", "\n", - "brancol1_bv_set_counts = Brancol1data['bv_set_counts']\n", - "brancol2_bv_set_counts = Brancol2data['bv_set_counts']\n", - "brancol3_bv_set_counts = Brancol3data['bv_set_counts']\n", + "brancol1_bv_set_counts = Brancol1data[\"bv_set_counts\"]\n", + "brancol2_bv_set_counts = Brancol2data[\"bv_set_counts\"]\n", + "brancol3_bv_set_counts = Brancol3data[\"bv_set_counts\"]\n", "\n", - "brancol1trip = Brancol1data['trip_info']\n", - "brancol2trip = Brancol2data['trip_info']\n", - "brancol3trip = Brancol3data['trip_info']" + "brancol1trip = Brancol1data[\"trip_info\"]\n", + "brancol2trip = Brancol2data[\"trip_info\"]\n", + "brancol3trip = Brancol3data[\"trip_info\"]" ] }, { @@ -560,10 +611,10 @@ "# haul_start_col = 'haul_start_datetime'\n", "# haul_end_col = 'haul_end_datetime'\n", "# y_val = 0\n", - " \n", - "# colors = color_dict[source] \n", - " \n", - " \n", + "\n", + "# colors = color_dict[source]\n", + "\n", + "\n", "# #plot_hauling\n", "# plot_hlines(ax, df,y_val, haul_start_col, haul_end_col, 12, colors['haul'], 'haul')\n", "\n", @@ -580,9 +631,9 @@ "# text = f'count: {row[count_col]}'\n", "# x_value = row[x_col]\n", "# ax.text( x_value,y_value , text, fontsize=10, horizontalalignment='right', bbox=props)\n", - " \n", - " \n", - "# # ax.text(.02, .9, f'r2={rvalue ** 2:.2f}, p={pvalue:.2g}, rmse={rmse:.2f}', transform=ax.transAxes) " + "\n", + "\n", + "# # ax.text(.02, .9, f'r2={rvalue ** 2:.2f}, p={pvalue:.2g}, rmse={rmse:.2f}', transform=ax.transAxes)" ] }, { @@ -599,7 +650,7 @@ "# haul_start_col = 'systemstarthauldatetime'\n", "# haul_end_col = 'systemendhauldatetime'\n", "# y_val = 1.7\n", - " \n", + "\n", "# elif source == 'bv':\n", "# set_start_col = 'set_start_datetime'\n", "# set_end_col = 'set_end_datetime'\n", @@ -610,18 +661,17 @@ "# df['set_duration'] = df[set_end_col] - df[set_start_col]\n", "# df['haul_duration'] = df[haul_end_col] - df[haul_start_col]\n", "# df['mid_duration'] = df[haul_start_col] - df[set_end_col]\n", - " \n", + "\n", "# set_x = list(zip(df[set_start_col], df['set_duration']))\n", "# haul_x = list(zip(df[haul_start_col], df['haul_duration']))\n", "# mid_x = list(zip(df[set_end_col], df['mid_duration']))\n", - " \n", + "\n", "# y = (y_val, .6)\n", "\n", "# colors = color_dict[source]\n", "# ax.broken_barh(mid_x, y, facecolors = colors['mid'], edgecolor = 'face')\n", "# ax.broken_barh(haul_x, y, facecolors = colors['haul'], edgecolor = 'face')\n", - "# ax.broken_barh(set_x, y, facecolors = colors['set'], 
edgecolor = 'face')\n", - " " + "# ax.broken_barh(set_x, y, facecolors = colors['set'], edgecolor = 'face')\n" ] }, { @@ -635,30 +685,28 @@ "\n", "# # ax[0].autofmt_xdate()\n", "\n", - " \n", + "\n", "# ax.set_yticks([1,2],('bv','elogs'))\n", "# # ax.set_yticks([0,1,2],('bv','elogs',' '))\n", "# fig.suptitle(titles['main'], fontsize = 20)\n", - " \n", + "\n", "# # df1 = brancol1_elog\n", "# # df1sets =brancol1_bv_sets\n", "\n", - " \n", "\n", "# if annotate_counts:\n", "# dfElog['totalcount'] = dfElog['bycatchcount'].astype(int) + dfElog['catchcount'].astype(int)\n", "# dfBV['retained_count'] = dfBV['retained_count'].astype('Int64')\n", "# annotate_counts(ax, dfElog, 'totalcount', 'systemstarthauldatetime', 1.2)\n", "# annotate_counts(ax, dfBV, 'retained_count', 'haul_start_datetime', 0.2)\n", - " \n", + "\n", "\n", "# plot_set_bars(ax, dfElog, 'elog', color_dict)\n", "# plot_set_bars(ax, dfBV, 'bv', color_dict)\n", "\n", - " \n", "\n", "# ax.set_title(title,x = .1, y = 1, fontsize = 9)\n", - " \n", + "\n", "# # ax.autoscale()\n", "# # ax[0].set_ylim(-.5,1.5)\n", "# # ax[0].tick_params(axis='x', labelrotation=45)\n", @@ -666,12 +714,12 @@ "# if legend:\n", "# legend_elements = []\n", "# for label, color in color_dict['elog'].items():\n", - " \n", + "\n", "# legend_elements.append(Patch(facecolor=color, edgecolor=color,\n", "# label=label))\n", "# ax.legend(handles = legend_elements, loc='center', bbox_to_anchor=(.5, -1), ncol = 3, fontsize = 8)\n", "\n", - " \n", + "\n", "# #use consise date formater\n", "\n", "# if display_axis:\n", @@ -691,8 +739,8 @@ "outputs": [], "source": [ "color_dict = {\n", - " 'bv': {'set':'#40a018', 'haul':'#117347', 'mid':'#a2c662'},\n", - " 'elog': {'set':'#40a018', 'haul':'#117347', 'mid':'#a2c662'},\n", + " \"bv\": {\"set\": \"#40a018\", \"haul\": \"#117347\", \"mid\": \"#a2c662\"},\n", + " \"elog\": {\"set\": \"#40a018\", \"haul\": \"#117347\", \"mid\": \"#a2c662\"},\n", " # 'elog':{'set':'#648fff', 'haul':'#184EAD', 'mid':'#88ccee'}\n", "}" ] @@ -724,27 +772,31 @@ "source": [ "# metrics.ConfusionMatrixDisplay(cm).plot(cmap = 'Blues',ax = ax)\n", "def plot_confusion_matrix(cm, ax, interp, title):\n", - "\n", - " ax.imshow(cm, interpolation='nearest', cmap = 'Blues')\n", + " ax.imshow(cm, interpolation=\"nearest\", cmap=\"Blues\")\n", " tick_marks = np.arange(len(interp.vocab))\n", - " alt_labels = ['haul', 'no haul']\n", + " alt_labels = [\"haul\", \"no haul\"]\n", " # ax.set_xticks(tick_marks, interp.vocab, rotation=0)\n", " ax.set_xticks(tick_marks, alt_labels, rotation=0)\n", " # ax.set_yticks(tick_marks, interp.vocab, va = 'center', rotation=90)\n", - " ax.tick_params(axis='both', which='both', length=0, pad = 3)\n", - " ax.set_yticks(tick_marks, alt_labels, va = 'center', rotation=90)\n", - " ax.set_xlabel('Predicted', fontweight='bold')\n", - " ax.set_ylabel('Actual', fontweight='bold')\n", - " ax.set_ylim(len(interp.vocab)-.5,-.5)\n", + " ax.tick_params(axis=\"both\", which=\"both\", length=0, pad=3)\n", + " ax.set_yticks(tick_marks, alt_labels, va=\"center\", rotation=90)\n", + " ax.set_xlabel(\"Predicted\", fontweight=\"bold\")\n", + " ax.set_ylabel(\"Actual\", fontweight=\"bold\")\n", + " ax.set_ylim(len(interp.vocab) - 0.5, -0.5)\n", " ax.grid(False)\n", - " \n", - " thresh = cm.max() / 2.\n", + "\n", + " thresh = cm.max() / 2.0\n", " for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):\n", - " coeff = f'{cm[i, j]}'\n", - " ax.text(j, i, coeff, \n", - " fontsize = 10,\n", - " horizontalalignment=\"center\", 
verticalalignment=\"center\", color=\"white\"\n", - " if cm[i, j] > thresh else \"black\")" + " coeff = f\"{cm[i, j]}\"\n", + " ax.text(\n", + " j,\n", + " i,\n", + " coeff,\n", + " fontsize=10,\n", + " horizontalalignment=\"center\",\n", + " verticalalignment=\"center\",\n", + " color=\"white\" if cm[i, j] > thresh else \"black\",\n", + " )" ] }, { @@ -755,14 +807,14 @@ "outputs": [], "source": [ "def prep_data(df):\n", - " df.sort_values(by = 'utc_start_datetime', inplace = True)\n", - " X = df.drop(columns = 'is_haul')\n", - " y = df['is_haul'].astype('int').to_numpy()\n", + " df.sort_values(by=\"utc_start_datetime\", inplace=True)\n", + " X = df.drop(columns=\"is_haul\")\n", + " y = df[\"is_haul\"].astype(\"int\").to_numpy()\n", "\n", - " X.loc[:,'utc_start_datetime'] = X.loc[:,'utc_start_datetime'].astype('int64')\n", - " X = np.atleast_3d(X).transpose(0,2,1)\n", + " X.loc[:, \"utc_start_datetime\"] = X.loc[:, \"utc_start_datetime\"].astype(\"int64\")\n", + " X = np.atleast_3d(X).transpose(0, 2, 1)\n", "\n", - " haul_map = {1:'haul', 0:'no_haul'}\n", + " haul_map = {1: \"haul\", 0: \"no_haul\"}\n", " labeler = ReLabeler(haul_map)\n", " y = labeler(y)\n", " return X, y" @@ -809,7 +861,7 @@ } ], "source": [ - "Brancol1data['ai_sets'].dtypes" + "Brancol1data[\"ai_sets\"].dtypes" ] }, { @@ -821,15 +873,15 @@ "source": [ "# create copies of training and testing dataframes, set utc_start_datetime as index\n", "# training set\n", - "dfAiSets_Brancol1 = Brancol1data['ai_sets'].copy()\n", - "dfAiSets_Brancol1.set_index('utc_start_datetime', inplace = True)\n", + "dfAiSets_Brancol1 = Brancol1data[\"ai_sets\"].copy()\n", + "dfAiSets_Brancol1.set_index(\"utc_start_datetime\", inplace=True)\n", "\n", "# testing sets\n", - "dfAiSets_Brancol2 = Brancol2data['ai_sets'].copy()\n", - "dfAiSets_Brancol2.set_index('utc_start_datetime', inplace = True)\n", + "dfAiSets_Brancol2 = Brancol2data[\"ai_sets\"].copy()\n", + "dfAiSets_Brancol2.set_index(\"utc_start_datetime\", inplace=True)\n", "\n", - "dfAiSets_Brancol3 = Brancol3data['ai_sets'].copy()\n", - "dfAiSets_Brancol3.set_index('utc_start_datetime', inplace = True)" + "dfAiSets_Brancol3 = Brancol3data[\"ai_sets\"].copy()\n", + "dfAiSets_Brancol3.set_index(\"utc_start_datetime\", inplace=True)" ] }, { @@ -988,7 +1040,7 @@ } ], "source": [ - "Brancol2data['ai_sets'].head()" + "Brancol2data[\"ai_sets\"].head()" ] }, { @@ -998,14 +1050,14 @@ "metadata": {}, "outputs": [], "source": [ - "win = '2h'\n", - "agg_dict = {'weighted_count':'sum','count':'sum'}\n", - "keep_cols = ['weighted_count','detection_confidence','count','is_haul', 'id']\n", + "win = \"2h\"\n", + "agg_dict = {\"weighted_count\": \"sum\", \"count\": \"sum\"}\n", + "keep_cols = [\"weighted_count\", \"detection_confidence\", \"count\", \"is_haul\", \"id\"]\n", "\n", "# add_rolling aggregates creates columns using rolling window functions to aggregate the columns in the agg_dict\n", - "df_train = add_rolling_aggregates(dfAiSets_Brancol1, '2h', agg_dict, keep_cols)\n", - "df_test = add_rolling_aggregates(dfAiSets_Brancol2, '2h', agg_dict, keep_cols)\n", - "df_test3 = add_rolling_aggregates(dfAiSets_Brancol3, '2h', agg_dict, keep_cols)" + "df_train = add_rolling_aggregates(dfAiSets_Brancol1, \"2h\", agg_dict, keep_cols)\n", + "df_test = add_rolling_aggregates(dfAiSets_Brancol2, \"2h\", agg_dict, keep_cols)\n", + "df_test3 = add_rolling_aggregates(dfAiSets_Brancol3, \"2h\", agg_dict, keep_cols)" ] }, { @@ -1016,10 +1068,10 @@ "outputs": [], "source": [ "# set id to integer\n", - "df_train['id'] = 
df_train['id'].astype(int)\n", + "df_train[\"id\"] = df_train[\"id\"].astype(int)\n", "\n", - "df_test['id'] = df_test['id'].astype(int)\n", - "df_test3['id'] = df_test['id'].astype(int)" + "df_test[\"id\"] = df_test[\"id\"].astype(int)\n", + "df_test3[\"id\"] = df_test[\"id\"].astype(int)" ] }, { @@ -1030,9 +1082,9 @@ "outputs": [], "source": [ "# drop nan values\n", - "df_train.dropna(inplace = True)\n", - "df_test.dropna(inplace = True)\n", - "df_test3.dropna(inplace = True)" + "df_train.dropna(inplace=True)\n", + "df_test.dropna(inplace=True)\n", + "df_test3.dropna(inplace=True)" ] }, { @@ -1071,7 +1123,7 @@ "metadata": {}, "outputs": [], "source": [ - "X,y = prep_data(df_train)\n", + "X, y = prep_data(df_train)\n", "X_test, y_test = prep_data(df_test)\n", "X_test3, y_test3 = prep_data(df_test3)" ] @@ -1117,16 +1169,18 @@ ], "source": [ "## train, test, validation splits to load into the model\n", - "splits = get_splits(y, \n", - " n_splits=1, \n", - " valid_size=0.3, \n", - " test_size=0.1, \n", - " shuffle=True, \n", - " balance=True, \n", - " stratify=True,\n", - " random_state=42, \n", - " show_plot=True, \n", - " verbose=True)\n", + "splits = get_splits(\n", + " y,\n", + " n_splits=1,\n", + " valid_size=0.3,\n", + " test_size=0.1,\n", + " shuffle=True,\n", + " balance=True,\n", + " stratify=True,\n", + " random_state=42,\n", + " show_plot=True,\n", + " verbose=True,\n", + ")\n", "splits" ] }, @@ -1138,11 +1192,11 @@ "outputs": [], "source": [ "## dataset and loaders\n", - "tfms = [None, [Categorize()]]\n", + "tfms = [None, [Categorize()]]\n", "dsets = TSDatasets(X, y, tfms=tfms, splits=splits)\n", - " \n", + "\n", "bs = 10\n", - "dls = TSDataLoaders.from_dsets(dsets.train, dsets.valid, bs=[bs, bs*2])" + "dls = TSDataLoaders.from_dsets(dsets.train, dsets.valid, bs=[bs, bs * 2])" ] }, { @@ -1153,7 +1207,7 @@ "outputs": [], "source": [ "# set up architecture for model\n", - "arch, k = (RNNPlus, {'n_layers':4, 'bidirectional': True})\n", + "arch, k = (RNNPlus, {\"n_layers\": 4, \"bidirectional\": True})\n", "model = create_model(arch, dls=dls, **k)" ] }, @@ -1165,7 +1219,7 @@ "outputs": [], "source": [ "# load pre-trained model into architecture\n", - "load_model('models/rnn_plus_haul_classifier.pth', model, opt = None, with_opt = False)" + "load_model(\"models/rnn_plus_haul_classifier.pth\", model, opt=None, with_opt=False)" ] }, { @@ -1211,7 +1265,7 @@ "outputs": [], "source": [ "# use training data to create the learner\n", - "learner = Learner(dls, model, metrics=accuracy)" + "learner = Learner(dls, model, metrics=accuracy)" ] }, { @@ -1232,7 +1286,7 @@ } ], "source": [ - "learner.load('rnn_plus_haul_classifier')" + "learner.load(\"rnn_plus_haul_classifier\")" ] }, { @@ -1250,7 +1304,6 @@ "metadata": {}, "outputs": [], "source": [ - "\n", "# adding new test dataset\n", "valid_dl = dls.valid\n", "test_ds = valid_dl.dataset.add_test(X_test, y_test)\n", @@ -1314,7 +1367,9 @@ ], "source": [ "# get predictions for trip 2\n", - "_, temp_targets, temp_preds = learner.get_preds(dl=test_dl, with_decoded=True, save_preds=None, save_targs=None)\n", + "_, temp_targets, temp_preds = learner.get_preds(\n", + " dl=test_dl, with_decoded=True, save_preds=None, save_targs=None\n", + ")\n", "\n", "# decode predictions\n", "vocab = learner.dls.vocab\n", @@ -1367,7 +1422,9 @@ ], "source": [ "# get predictions for trip 3\n", - "_, temp_targets3, temp_preds3 = learner.get_preds(dl=test3_dl, with_decoded=True, save_preds=None, save_targs=None)\n", + "_, temp_targets3, temp_preds3 = learner.get_preds(\n", 
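prep_data, applied above, hands tsai a 3-D array in (samples, variables, steps) order; np.atleast_3d plus the transpose does all the work, and the shape arithmetic is easiest to see on a toy array:

import numpy as np

X2d = np.zeros((8, 3))          # 8 windows, 3 feature columns
X3d = np.atleast_3d(X2d)        # appends an axis -> (8, 3, 1)
X3d = X3d.transpose(0, 2, 1)    # -> (8, 1, 3): one channel, 3 steps per sample
print(X3d.shape)                # (8, 1, 3)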
+ " dl=test3_dl, with_decoded=True, save_preds=None, save_targs=None\n", + ")\n", "\n", "# decode predictions\n", "vocab = learner.dls.vocab\n", @@ -1525,14 +1582,14 @@ ], "source": [ "# creating a df out of trip 2 predictions to join to the original trip 2 test data set\n", - "pre_dict = {\"predictions\":decoded_preds}\n", + "pre_dict = {\"predictions\": decoded_preds}\n", "df_predict = pd.DataFrame(pre_dict)\n", "\n", - "df_results = pd.concat([df_test.reset_index(drop = True), df_predict.reset_index(drop = True)], axis = 1)\n", - "df_results['utc_start_datetime'] = pd.to_datetime(df_results['utc_start_datetime'])\n", - "df_results['utc_end_datetime'] = df_results['utc_start_datetime'] + pd.Timedelta(minutes = 5)\n", - "df_results['haul'] = df_results['is_haul'].map({1:'haul',0:'no_haul'})\n", - "df_results['predict_haul'] = df_results['predictions'].map({'haul':1,'no_haul':0})\n", + "df_results = pd.concat([df_test.reset_index(drop=True), df_predict.reset_index(drop=True)], axis=1)\n", + "df_results[\"utc_start_datetime\"] = pd.to_datetime(df_results[\"utc_start_datetime\"])\n", + "df_results[\"utc_end_datetime\"] = df_results[\"utc_start_datetime\"] + pd.Timedelta(minutes=5)\n", + "df_results[\"haul\"] = df_results[\"is_haul\"].map({1: \"haul\", 0: \"no_haul\"})\n", + "df_results[\"predict_haul\"] = df_results[\"predictions\"].map({\"haul\": 1, \"no_haul\": 0})\n", "\n", "df_results.head()" ] @@ -1688,14 +1745,16 @@ ], "source": [ "# creating a df out of trip 3 predictions to join to the original trip 3 test data set\n", - "pre_dict3 = {\"predictions\":decoded_preds3}\n", + "pre_dict3 = {\"predictions\": decoded_preds3}\n", "df_predict3 = pd.DataFrame(pre_dict3)\n", "\n", - "df_results3 = pd.concat([df_test3.reset_index(drop = True), df_predict3.reset_index(drop = True)], axis = 1)\n", - "df_results3['utc_start_datetime'] = pd.to_datetime(df_results3['utc_start_datetime'])\n", - "df_results3['utc_end_datetime'] = df_results3['utc_start_datetime'] + pd.Timedelta(minutes = 5)\n", - "df_results3['haul'] = df_results3['is_haul'].map({1:'haul',0:'no_haul'})\n", - "df_results3['predict_haul'] = df_results3['predictions'].map({'haul':1,'no_haul':0})\n", + "df_results3 = pd.concat(\n", + " [df_test3.reset_index(drop=True), df_predict3.reset_index(drop=True)], axis=1\n", + ")\n", + "df_results3[\"utc_start_datetime\"] = pd.to_datetime(df_results3[\"utc_start_datetime\"])\n", + "df_results3[\"utc_end_datetime\"] = df_results3[\"utc_start_datetime\"] + pd.Timedelta(minutes=5)\n", + "df_results3[\"haul\"] = df_results3[\"is_haul\"].map({1: \"haul\", 0: \"no_haul\"})\n", + "df_results3[\"predict_haul\"] = df_results3[\"predictions\"].map({\"haul\": 1, \"no_haul\": 0})\n", "\n", "df_results3.head()" ] @@ -1925,15 +1984,15 @@ "outputs": [], "source": [ "def get_metrics(cm):\n", - " TP = cm[0][0] # true positives\n", - " FN = cm[0][1] # false negatives\n", - " FP = cm[1][0] # false positives\n", - " TN = cm[1][1] # true negatives\n", - " recall = TP/sum(cm[0])\n", - " precision = TP/(TP + FP)\n", - " accuracy = (TP + TN)/sum(sum(cm))\n", + " TP = cm[0][0] # true positives\n", + " FN = cm[0][1] # false negatives\n", + " FP = cm[1][0] # false positives\n", + " TN = cm[1][1] # true negatives\n", + " recall = TP / sum(cm[0])\n", + " precision = TP / (TP + FP)\n", + " accuracy = (TP + TN) / sum(sum(cm))\n", "\n", - " print(f'recall: {recall} | precision: {precision} | accuracy: {accuracy}')\n", + " print(f\"recall: {recall} | precision: {precision} | accuracy: {accuracy}\")\n", "\n", " return recall, 
precision, accuracy" ] @@ -1989,7 +2048,7 @@ } ], "source": [ - "recall, precision, accuracy = get_metrics(cm2+cm3)" + "recall, precision, accuracy = get_metrics(cm2 + cm3)" ] }, { @@ -2010,32 +2069,37 @@ } ], "source": [ - "plt.rc('axes', labelsize = 9)\n", - "fig, ax = plt.subplots(1, 2, figsize = (3.6,1.8), dpi = 150)\n", + "plt.rc(\"axes\", labelsize=9)\n", + "fig, ax = plt.subplots(1, 2, figsize=(3.6, 1.8), dpi=150)\n", "plt.tight_layout()\n", "\n", - "plt.subplots_adjust(wspace=.6) # Increase the width padding between subplots\n", - "cm_dict = {'a': {'cm':cm2, 'interp':interp2}, 'b': {'cm':cm3, 'interp':interp3}}\n", + "plt.subplots_adjust(wspace=0.6) # Increase the width padding between subplots\n", + "cm_dict = {\"a\": {\"cm\": cm2, \"interp\": interp2}, \"b\": {\"cm\": cm3, \"interp\": interp3}}\n", "\n", "for i, (label, cm) in enumerate(cm_dict.items()):\n", - " plot_confusion_matrix(cm['cm'], ax[i], cm['interp'], 'confusion matrix')\n", + " plot_confusion_matrix(cm[\"cm\"], ax[i], cm[\"interp\"], \"confusion matrix\")\n", " # plot_confusion_matrix(cm3, ax[i], interp3, 'confusion matrix')\n", - " ax[i].tick_params(axis='x', labelsize=9) # Change x ticks font size to 12\n", - " ax[i].tick_params(axis='y', labelsize=9, left = False) # Change y ticks font size to 12\n", + " ax[i].tick_params(axis=\"x\", labelsize=9) # Change x ticks font size to 12\n", + " ax[i].tick_params(axis=\"y\", labelsize=9, left=False) # Change y ticks font size to 12\n", "\n", - " ax[i].text(-0.3, 1.2, f'({label})', transform=ax[i].transAxes, fontsize=9, fontweight='bold', va='top', ha='left')\n", + " ax[i].text(\n", + " -0.3,\n", + " 1.2,\n", + " f\"({label})\",\n", + " transform=ax[i].transAxes,\n", + " fontsize=9,\n", + " fontweight=\"bold\",\n", + " va=\"top\",\n", + " ha=\"left\",\n", + " )\n", "\n", "\n", "# ax[1].tick_params(axis='x', labelsize=12) # Change x ticks font size to 12\n", "# ax[1].tick_params(axis='y', labelsize=12, left = False) # Change y ticks font size to 12\n", - "sns.despine(trim=True, \n", - " left=True,\n", - " bottom = True\n", - " )\n", - "\n", + "sns.despine(trim=True, left=True, bottom=True)\n", "\n", "\n", - "plt.savefig('chart_pngs/confusion_matrix2.png',bbox_inches = 'tight')" + "plt.savefig(\"chart_pngs/confusion_matrix2.png\", bbox_inches=\"tight\")" ] }, { @@ -2061,24 +2125,32 @@ "metadata": {}, "outputs": [], "source": [ - "def plot_event_bars(df, ax,label, datetime_col,duration=None,end_col = None, duration_col = None, y_val=.7, y_height = .6, color= '#43aa99' ):\n", - " \n", + "def plot_event_bars(\n", + " df,\n", + " ax,\n", + " label,\n", + " datetime_col,\n", + " duration=None,\n", + " end_col=None,\n", + " duration_col=None,\n", + " y_val=0.7,\n", + " y_height=0.6,\n", + " color=\"#43aa99\",\n", + "):\n", " if duration:\n", " x_duration = np.full(len(df), pd.Timedelta(duration))\n", " elif end_col:\n", - " df['duration'] = df[end_col]- df[datetime_col]\n", - " x_duration = df['duration']\n", + " df[\"duration\"] = df[end_col] - df[datetime_col]\n", + " x_duration = df[\"duration\"]\n", " elif duration_col:\n", " x_duration = df[duration_col]\n", - " \n", - " x = list(zip(df[datetime_col], x_duration))\n", - " y = (y_val, y_height)\n", "\n", - " plot = ax.broken_barh(x, y, facecolors = color, edgecolor = 'face', label = label, clip_on=False)\n", + " x = list(zip(df[datetime_col], x_duration, strict=False))\n", + " y = (y_val, y_height)\n", "\n", - " return plot\n", + " plot = ax.broken_barh(x, y, facecolors=color, edgecolor=\"face\", label=label, 
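plot_event_bars leans on Axes.broken_barh, which takes a list of (left, width) pairs plus a single (y, height) band; matplotlib's unit machinery accepts datetime lefts with Timedelta widths, which is what makes the zip of start times and durations work. A stand-alone sketch:

import matplotlib.pyplot as plt
import numpy as np
import pandas as pd

starts = pd.to_datetime(["2023-01-01 00:10", "2023-01-01 01:00"])
widths = np.full(len(starts), pd.Timedelta("20min"))

fig, ax = plt.subplots(figsize=(6, 1))
# one rectangle per (start, width) pair, drawn in the y band from 0.4 with height 0.2
ax.broken_barh(list(zip(starts, widths, strict=False)), (0.4, 0.2), facecolors="#43aa99")
ax.set_yticks([])
plt.show()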
clip_on=False)\n", "\n", - " " + " return plot\n" ] }, { @@ -2091,7 +2163,7 @@ "# def plot_event_vspan(df_events, ax, color_dict):\n", "# for category, color in color_dict.items():\n", "# df_category = df_events.loc[df_events['category']==category]\n", - " \n", + "\n", "# for idx, row in df_category.iterrows():\n", "# ax.axvspan(*mdates.date2num([row['start_time'], row['end_time']]), color=color, edgecolor = 'face',alpha=0.5)" ] @@ -2103,25 +2175,31 @@ "metadata": {}, "outputs": [], "source": [ - "def plot_set_vspan(df_sets, ax, color = '#a2c662'):\n", + "def plot_set_vspan(df_sets, ax, color=\"#a2c662\"):\n", " for idx, row in df_sets.iterrows():\n", - " ax.axvspan(*mdates.date2num([row['haul_start_datetime'], row['haul_end_datetime']]), color=color, edgecolor = '#40a018',alpha=0.5)\n", + " ax.axvspan(\n", + " *mdates.date2num([row[\"haul_start_datetime\"], row[\"haul_end_datetime\"]]),\n", + " color=color,\n", + " edgecolor=\"#40a018\",\n", + " alpha=0.5,\n", + " )\n", + "\n", "\n", "def get_video_times(vessel, trip_info):\n", - " trip_start_date = trip_info['trip_start_date']\n", - " trip_end_date = trip_info['trip_end_date']\n", + " trip_start_date = trip_info[\"trip_start_date\"]\n", + " trip_end_date = trip_info[\"trip_end_date\"]\n", "\n", " sql = f\"\"\"\n", - " SELECT \n", - " v.start_datetime, \n", + " SELECT\n", + " v.start_datetime,\n", " v.cam_name\n", - " \n", - " from {vessel}_v1_video_files v \n", + "\n", + " from {vessel}_v1_video_files v\n", " where start_datetime > '{trip_start_date}' and start_datetime < '{trip_end_date}'\n", " \"\"\"\n", " video_df = wr.athena.read_sql_query(sql, database=\"tnc_edge\")\n", " video_df.start_datetime = pd.to_datetime(video_df.start_datetime)\n", - " video_df['utc_start_datetime'] = video_df['start_datetime'].dt.tz_convert(None)\n", + " video_df[\"utc_start_datetime\"] = video_df[\"start_datetime\"].dt.tz_convert(None)\n", " # video_df['utc_end_datetime'] = video_df['utc_start_datetime'] + pd.Timedelta(minutes = 5)\n", " return video_df" ] @@ -2142,7 +2220,7 @@ "# # y_var = x_vars[n]\n", "# text = row[text_col]\n", "# data_xy = (row['start_time'], 1.7)\n", - " \n", + "\n", "# an = ax.annotate(\n", "# text,\n", "# xy=data_xy, xycoords='data',\n", @@ -2151,14 +2229,14 @@ "# bbox = bbox_args,\n", "# color = 'white'\n", "# )\n", - " \n", + "\n", "# annots.append(an)\n", - " \n", + "\n", "# x, y = text_xy\n", - " \n", + "\n", "# y = y+y_var\n", "# y_var = y_var * -1\n", - " \n", + "\n", "# text_xy = (x,y)\n", "\n", "# return annots" @@ -2179,7 +2257,7 @@ "metadata": {}, "outputs": [], "source": [ - "sns.set_style(\"whitegrid\", {'axes.grid' : False})" + "sns.set_style(\"whitegrid\", {\"axes.grid\": False})" ] }, { @@ -2189,7 +2267,7 @@ "metadata": {}, "outputs": [], "source": [ - "ai_countsBrancol2 = Brancol2data['ai_sets']" + "ai_countsBrancol2 = Brancol2data[\"ai_sets\"]" ] }, { @@ -2199,7 +2277,7 @@ "metadata": {}, "outputs": [], "source": [ - "bvCounts_Brancol2 = Brancol2data['all_counts']" + "bvCounts_Brancol2 = Brancol2data[\"all_counts\"]" ] }, { @@ -2265,13 +2343,13 @@ "# Trip 2 predictions plot\n", "\n", "# setting style, font sizes, and fig size\n", - "sns.set_style(\"whitegrid\", {'axes.grid' : False})\n", + "sns.set_style(\"whitegrid\", {\"axes.grid\": False})\n", "sns.set_theme(style=\"ticks\")\n", - "figsize = (7.5,2.5)\n", + "figsize = (7.5, 2.5)\n", "\n", - "plt.rc('xtick',labelsize=8)\n", - "plt.rc('ytick',labelsize=8)\n", - "plt.rc('axes', labelsize = 8)\n", + "plt.rc(\"xtick\", labelsize=8)\n", + "plt.rc(\"ytick\", 
labelsize=8)\n", + "plt.rc(\"axes\", labelsize=8)\n", "\n", "# set up datasets to be used\n", "bv_sets = brancol2_bv_sets\n", @@ -2280,35 +2358,71 @@ "\n", "\n", "# define hex codes for colors used\n", - "bv_color = '#a2c662'\n", - "ai_color = '#184EAD'\n", - "elog_color = '#117347'\n", - "predictions_color = '#43aa99'\n", + "bv_color = \"#a2c662\"\n", + "ai_color = \"#184EAD\"\n", + "elog_color = \"#117347\"\n", + "predictions_color = \"#43aa99\"\n", "\n", "\n", - "\n", - "fig, ax = plt.subplots(2,1,figsize=figsize, gridspec_kw={'height_ratios': [2, 1]},sharex = True)\n", + "fig, ax = plt.subplots(2, 1, figsize=figsize, gridspec_kw={\"height_ratios\": [2, 1]}, sharex=True)\n", "\n", "# hlines for the event bars (the horizonal lines)\n", - "yticks = [.175, .5, .825]\n", - "yheight = .24\n", - "ypos = [tick - (yheight/2) for tick in yticks]\n", - "ax[0].hlines([.175, .5, .825],.01,.99, transform=ax[0].transAxes, colors = 'grey', lw = .2, zorder = 0)\n", + "yticks = [0.175, 0.5, 0.825]\n", + "yheight = 0.24\n", + "ypos = [tick - (yheight / 2) for tick in yticks]\n", + "ax[0].hlines(\n", + " [0.175, 0.5, 0.825], 0.01, 0.99, transform=ax[0].transAxes, colors=\"grey\", lw=0.2, zorder=0\n", + ")\n", "\n", "# ticks for the event bar positions and labels\n", - "ax[0].set_yticks([.175,.5,.825],('Predicted Hauls','Elog Hauls','Analyst Hauls'))\n", + "ax[0].set_yticks([0.175, 0.5, 0.825], (\"Predicted Hauls\", \"Elog Hauls\", \"Analyst Hauls\"))\n", "ax[0].set_ylim([0, 1])\n", "\n", "\n", "# plotting event bars\n", - "plot_event_bars(bv_sets, ax[0], 'Analyst Hauls', 'haul_start_datetime', end_col = 'haul_end_datetime', y_val = ypos[2], y_height = yheight, color = bv_color) #y_val = .675,\n", - "plot_event_bars(elog, ax[0], 'elog_hauls','systemstarthauldatetime', end_col = 'systemendhauldatetime', color = elog_color, y_val = ypos[1], y_height = yheight) # y_val = .35\n", - "plot_event_bars(results.loc[results.predict_haul ==1], ax[0], 'predicted_hauls', 'utc_start_datetime', duration = '5m', y_val = ypos[0] , y_height = yheight, color = predictions_color) #y_val = .025\n", + "plot_event_bars(\n", + " bv_sets,\n", + " ax[0],\n", + " \"Analyst Hauls\",\n", + " \"haul_start_datetime\",\n", + " end_col=\"haul_end_datetime\",\n", + " y_val=ypos[2],\n", + " y_height=yheight,\n", + " color=bv_color,\n", + ") # y_val = .675,\n", + "plot_event_bars(\n", + " elog,\n", + " ax[0],\n", + " \"elog_hauls\",\n", + " \"systemstarthauldatetime\",\n", + " end_col=\"systemendhauldatetime\",\n", + " color=elog_color,\n", + " y_val=ypos[1],\n", + " y_height=yheight,\n", + ") # y_val = .35\n", + "plot_event_bars(\n", + " results.loc[results.predict_haul == 1],\n", + " ax[0],\n", + " \"predicted_hauls\",\n", + " \"utc_start_datetime\",\n", + " duration=\"5m\",\n", + " y_val=ypos[0],\n", + " y_height=yheight,\n", + " color=predictions_color,\n", + ") # y_val = .025\n", "\n", "\n", "# plotting the ai counts\n", - "ln1 = sns.lineplot(x = 'utc_start_datetime', y = 'count', data = results, ax = ax[1], label = 'AI Counts', color =ai_color, clip_on=False, lw = .5)\n", - "\n", + "ln1 = sns.lineplot(\n", + " x=\"utc_start_datetime\",\n", + " y=\"count\",\n", + " data=results,\n", + " ax=ax[1],\n", + " label=\"AI Counts\",\n", + " color=ai_color,\n", + " clip_on=False,\n", + " lw=0.5,\n", + ")\n", "\n", "\n", "# formatting x axis dates\n", @@ -2317,20 +2431,18 @@ "\n", "ax[1].xaxis.set_major_locator(locator)\n", "ax[1].xaxis.set_major_formatter(formatter)\n", - "ax[1].set_xlabel('Datetime (UTC)')\n", + "ax[1].set_xlabel(\"Datetime 
(UTC)\")\n", "\n", "# labeling y axis for fish counts\n", - "ax[1].set_ylabel('AI Fish Count')\n", - "\n", + "ax[1].set_ylabel(\"AI Fish Count\")\n", "\n", "\n", "# creating custom legend\n", "from matplotlib.lines import Line2D\n", - "from matplotlib.patches import Patch\n", "\n", "# set xlimits for the plot based on the top plot\n", - "x0,x1 = ax[0].get_xlim()\n", - "ax[0].set_xlim(x0, x1) \n", + "x0, x1 = ax[0].get_xlim()\n", + "ax[0].set_xlim(x0, x1)\n", "\n", "# remove legend\n", "ax[1].get_legend().remove()\n", @@ -2339,10 +2451,8 @@ "plt.subplots_adjust(wspace=0, hspace=0)\n", "\n", "# remove spines\n", - "sns.despine(trim=True, \n", - " left=True\n", - " )\n", - "plt.savefig('../chart_pngs/haul_detection2_report.png', bbox_inches='tight', dpi = 150)\n", + "sns.despine(trim=True, left=True)\n", + "plt.savefig(\"../chart_pngs/haul_detection2_report.png\", bbox_inches=\"tight\", dpi=150)\n", "\n", "plt.show()" ] @@ -2368,13 +2478,13 @@ "# Trip 3 predictions plot\n", "\n", "# setting style, font sizes, and fig size\n", - "sns.set_style(\"whitegrid\", {'axes.grid' : False})\n", + "sns.set_style(\"whitegrid\", {\"axes.grid\": False})\n", "sns.set_theme(style=\"ticks\")\n", - "figsize = (7.5,2.5)\n", + "figsize = (7.5, 2.5)\n", "\n", - "plt.rc('xtick',labelsize=8)\n", - "plt.rc('ytick',labelsize=8)\n", - "plt.rc('axes', labelsize = 8)\n", + "plt.rc(\"xtick\", labelsize=8)\n", + "plt.rc(\"ytick\", labelsize=8)\n", + "plt.rc(\"axes\", labelsize=8)\n", "\n", "# set up datasets to be used\n", "bv_sets = brancol3_bv_sets\n", @@ -2383,35 +2493,71 @@ "\n", "\n", "# define hex codes for colors used\n", - "bv_color = '#a2c662'\n", - "ai_color = '#184EAD'\n", - "elog_color = '#117347'\n", - "predictions_color = '#43aa99'\n", - "\n", + "bv_color = \"#a2c662\"\n", + "ai_color = \"#184EAD\"\n", + "elog_color = \"#117347\"\n", + "predictions_color = \"#43aa99\"\n", "\n", "\n", - "fig, ax = plt.subplots(2,1,figsize=figsize, gridspec_kw={'height_ratios': [2, 1]},sharex = True)\n", + "fig, ax = plt.subplots(2, 1, figsize=figsize, gridspec_kw={\"height_ratios\": [2, 1]}, sharex=True)\n", "\n", "# hlines for the event bars (the horizonal lines)\n", - "yticks = [.175, .5, .825]\n", - "yheight = .24\n", - "ypos = [tick - (yheight/2) for tick in yticks]\n", - "ax[0].hlines([.175, .5, .825],.01,.99, transform=ax[0].transAxes, colors = 'grey', lw = .2, zorder = 0)\n", + "yticks = [0.175, 0.5, 0.825]\n", + "yheight = 0.24\n", + "ypos = [tick - (yheight / 2) for tick in yticks]\n", + "ax[0].hlines(\n", + " [0.175, 0.5, 0.825], 0.01, 0.99, transform=ax[0].transAxes, colors=\"grey\", lw=0.2, zorder=0\n", + ")\n", "\n", "# ticks for the event bar positions and labels\n", - "ax[0].set_yticks([.175,.5,.825],('Predicted Hauls','Elog Hauls','Analyst Hauls'))\n", + "ax[0].set_yticks([0.175, 0.5, 0.825], (\"Predicted Hauls\", \"Elog Hauls\", \"Analyst Hauls\"))\n", "ax[0].set_ylim([0, 1])\n", "\n", "\n", "# plotting event bars\n", - "plot_event_bars(bv_sets, ax[0], 'Analyst Hauls', 'haul_start_datetime', end_col = 'haul_end_datetime', y_val = ypos[2], y_height = yheight, color = bv_color) #y_val = .675,\n", - "plot_event_bars(elog, ax[0], 'elog_hauls','systemstarthauldatetime', end_col = 'systemendhauldatetime', color = elog_color, y_val = ypos[1], y_height = yheight) # y_val = .35\n", - "plot_event_bars(results.loc[results.predict_haul ==1], ax[0], 'predicted_hauls', 'utc_start_datetime', duration = '5m', y_val = ypos[0] , y_height = yheight, color = predictions_color) #y_val = .025\n", + "plot_event_bars(\n", + " 
bv_sets,\n", + " ax[0],\n", + " \"Analyst Hauls\",\n", + " \"haul_start_datetime\",\n", + " end_col=\"haul_end_datetime\",\n", + " y_val=ypos[2],\n", + " y_height=yheight,\n", + " color=bv_color,\n", + ") # y_val = .675,\n", + "plot_event_bars(\n", + " elog,\n", + " ax[0],\n", + " \"elog_hauls\",\n", + " \"systemstarthauldatetime\",\n", + " end_col=\"systemendhauldatetime\",\n", + " color=elog_color,\n", + " y_val=ypos[1],\n", + " y_height=yheight,\n", + ") # y_val = .35\n", + "plot_event_bars(\n", + " results.loc[results.predict_haul == 1],\n", + " ax[0],\n", + " \"predicted_hauls\",\n", + " \"utc_start_datetime\",\n", + " duration=\"5m\",\n", + " y_val=ypos[0],\n", + " y_height=yheight,\n", + " color=predictions_color,\n", + ") # y_val = .025\n", "\n", "\n", "# plotting the ai counts\n", - "ln1 = sns.lineplot(x = 'utc_start_datetime', y = 'count', data = results, ax = ax[1], label = 'AI Counts', color =ai_color, clip_on=False, lw = .5)\n", - "\n", + "ln1 = sns.lineplot(\n", + " x=\"utc_start_datetime\",\n", + " y=\"count\",\n", + " data=results,\n", + " ax=ax[1],\n", + " label=\"AI Counts\",\n", + " color=ai_color,\n", + " clip_on=False,\n", + " lw=0.5,\n", + ")\n", "\n", "\n", "# formatting x axis dates\n", @@ -2420,20 +2566,17 @@ "\n", "ax[1].xaxis.set_major_locator(locator)\n", "ax[1].xaxis.set_major_formatter(formatter)\n", - "ax[1].set_xlabel('Datetime (UTC)')\n", + "ax[1].set_xlabel(\"Datetime (UTC)\")\n", "\n", "# labeling y axis for fish counts\n", - "ax[1].set_ylabel('AI Fish Count')\n", - "\n", + "ax[1].set_ylabel(\"AI Fish Count\")\n", "\n", "\n", "# creating custom legend\n", - "from matplotlib.lines import Line2D\n", - "from matplotlib.patches import Patch\n", "\n", "# set xlimits for the plot based on the top plot\n", - "x0,x1 = ax[0].get_xlim()\n", - "ax[0].set_xlim(x0, x1) \n", + "x0, x1 = ax[0].get_xlim()\n", + "ax[0].set_xlim(x0, x1)\n", "\n", "# remove legend\n", "ax[1].get_legend().remove()\n", @@ -2442,10 +2585,8 @@ "plt.subplots_adjust(wspace=0, hspace=0)\n", "\n", "# remove spines\n", - "sns.despine(trim=True, \n", - " left=True\n", - " )\n", - "plt.savefig('../chart_pngs/haul_detection3_report.png', bbox_inches='tight', dpi = 150)\n", + "sns.despine(trim=True, left=True)\n", + "plt.savefig(\"../chart_pngs/haul_detection3_report.png\", bbox_inches=\"tight\", dpi=150)\n", "\n", "plt.show()" ] @@ -2474,7 +2615,7 @@ "outputs": [], "source": [ "# def plot_event_bars(df, ax,label, datetime_col,duration=None,end_col = None, duration_col = None, y_val=.7, y_height = .6, color= '#43aa99' , alpha = 1):\n", - " \n", + "\n", "# if duration:\n", "# x_duration = np.full(len(df), pd.Timedelta(duration))\n", "# elif end_col:\n", @@ -2484,14 +2625,13 @@ "# x_duration = df[duration_col]\n", "# else:\n", "# x_duration = np.full(len(df), 2)\n", - " \n", + "\n", "# x = list(zip(df[datetime_col], x_duration))\n", "# y = (y_val, y_height)\n", "\n", "# plot = ax.broken_barh(x, y, facecolors = color, edgecolor = 'face', label = label, clip_on=True, alpha = alpha)\n", "\n", - "# return plot\n", - " " + "# return plot\n" ] }, { @@ -2501,15 +2641,23 @@ "metadata": {}, "outputs": [], "source": [ - "def plot_event_category_bars(df_events, ax, category_color_dict, y_val_start, y_height ):\n", + "def plot_event_category_bars(df_events, ax, category_color_dict, y_val_start, y_height):\n", " n = len(category_color_dict)\n", " y_vals = create_array(n, y_val_start, y_height)\n", " for idx, (category, color) in enumerate(category_color_dict.items()):\n", - " df_category = 
df_events.loc[df_events['category']==category].copy()\n", - " y_val = y_vals[idx] - (y_height/2)\n", - " \n", - " \n", - " plot_event_bars(df_category, ax,category, 'start_time',end_col = 'end_time', y_val=y_val, y_height = y_height, color= color )" + " df_category = df_events.loc[df_events[\"category\"] == category].copy()\n", + " y_val = y_vals[idx] - (y_height / 2)\n", + "\n", + " plot_event_bars(\n", + " df_category,\n", + " ax,\n", + " category,\n", + " \"start_time\",\n", + " end_col=\"end_time\",\n", + " y_val=y_val,\n", + " y_height=y_height,\n", + " color=color,\n", + " )" ] }, { @@ -2548,13 +2696,13 @@ "metadata": {}, "outputs": [], "source": [ - "df_notes = pd.read_csv('../data/reviewer_notes_6-20.csv')\n", + "df_notes = pd.read_csv(\"../data/reviewer_notes_6-20.csv\")\n", "# df_notes = pd.read_csv('../data/reviewer_notes.csv')\n", - "df_notes['start_time'] = pd.to_datetime(df_notes['start_time'], format = 'mixed')\n", + "df_notes[\"start_time\"] = pd.to_datetime(df_notes[\"start_time\"], format=\"mixed\")\n", "\n", - "df_notes['end_time'] = pd.to_datetime(df_notes['end_time'], format = 'mixed')\n", + "df_notes[\"end_time\"] = pd.to_datetime(df_notes[\"end_time\"], format=\"mixed\")\n", "\n", - "df_notes['category'].value_counts()" + "df_notes[\"category\"].value_counts()" ] }, { @@ -2564,8 +2712,8 @@ "metadata": {}, "outputs": [], "source": [ - "eventsBrancol2 = df_notes.loc[(df_notes['vessel'] == 'Brancol') & (df_notes['trip_number']==2)]\n", - "eventsBrancol3 = df_notes.loc[(df_notes['vessel'] == 'Brancol') & (df_notes['trip_number']==3)]" + "eventsBrancol2 = df_notes.loc[(df_notes[\"vessel\"] == \"Brancol\") & (df_notes[\"trip_number\"] == 2)]\n", + "eventsBrancol3 = df_notes.loc[(df_notes[\"vessel\"] == \"Brancol\") & (df_notes[\"trip_number\"] == 3)]" ] }, { @@ -2594,7 +2742,7 @@ } ], "source": [ - "eventsBrancol2['category'].value_counts()" + "eventsBrancol2[\"category\"].value_counts()" ] }, { @@ -2754,7 +2902,12 @@ "outputs": [], "source": [ "# create dictionary of colors for each category\n", - "category_dict = {'Haul Stop':colors[13],'Camera Blocked':colors[10],'No Video': colors[6], 'Abnormal Catch':colors[8]}" + "category_dict = {\n", + " \"Haul Stop\": colors[13],\n", + " \"Camera Blocked\": colors[10],\n", + " \"No Video\": colors[6],\n", + " \"Abnormal Catch\": colors[8],\n", + "}" ] }, { @@ -2775,62 +2928,71 @@ } ], "source": [ - "eventsBrancol2_notna = eventsBrancol2[eventsBrancol2['end_time'].notna()]\n", + "eventsBrancol2_notna = eventsBrancol2[eventsBrancol2[\"end_time\"].notna()]\n", "\n", "\n", "# df_test.reset_index(inplace = True)\n", - "bbox_args = dict(boxstyle='round', facecolor='black', alpha=0.35)\n", + "bbox_args = dict(boxstyle=\"round\", facecolor=\"black\", alpha=0.35)\n", "# plt.subplots_adjust(wspace=0, hspace=-.2)\n", "# bbox_args = dict(boxstyle=\"round\", fc=\"0.8\")\n", "arrow_args = dict(arrowstyle=\"->\")\n", "# plt.tight_layout()\n", - "fig, ax = plt.subplots(1,1,figsize=(11,2), sharex = True\n", - " # , gridspec_kw={'height_ratios': [2, 1]}, \n", - " )\n", + "fig, ax = plt.subplots(\n", + " 1,\n", + " 1,\n", + " figsize=(11, 2),\n", + " sharex=True,\n", + " # , gridspec_kw={'height_ratios': [2, 1]},\n", + ")\n", "\n", "# trip2 = Brancol2data['trip_info']\n", "plot_set_vspan(brancol2_bv_sets, ax)\n", "\n", - "plot_event_bars(df_results.loc[df_results.predict_haul ==1], ax, 'predicted_hauls', 'utc_start_datetime', duration = '5m', y_val = .95, y_height = .1)\n", - "plot_event_category_bars(eventsBrancol2_notna, ax, category_dict, 1.2, 
.1)\n", + "plot_event_bars(\n", + " df_results.loc[df_results.predict_haul == 1],\n", + " ax,\n", + " \"predicted_hauls\",\n", + " \"utc_start_datetime\",\n", + " duration=\"5m\",\n", + " y_val=0.95,\n", + " y_height=0.1,\n", + ")\n", + "plot_event_category_bars(eventsBrancol2_notna, ax, category_dict, 1.2, 0.1)\n", "\n", "\n", "n = len(category_dict)\n", - "y_vals = [0,1.0] + list(create_array(n, 1.2, .1))\n", - "y_labels = ['','predicted_hauls'] + list(category_dict.keys())\n", + "y_vals = [0, 1.0] + list(create_array(n, 1.2, 0.1))\n", + "y_labels = [\"\", \"predicted_hauls\"] + list(category_dict.keys())\n", "\n", - "ax.set_yticks(y_vals,y_labels)\n", - "ax.set_ylim([.85, max(y_vals)+.2])\n", + "ax.set_yticks(y_vals, y_labels)\n", + "ax.set_ylim([0.85, max(y_vals) + 0.2])\n", "# ax[1].set_ylim([0, 50])\n", "locator = DayLocator()\n", "formatter = mdates.ConciseDateFormatter(locator)\n", - "ax.tick_params(axis = 'both', labelsize = 9)\n", + "ax.tick_params(axis=\"both\", labelsize=9)\n", "ax.xaxis.set_major_locator(locator)\n", "ax.xaxis.set_major_formatter(formatter)\n", "\n", - "ax.spines['bottom'].set_visible(False)\n", + "ax.spines[\"bottom\"].set_visible(False)\n", "# ax[1].spines['top'].set_visible(False)\n", "ax.legend()\n", "\n", - "from matplotlib.lines import Line2D\n", - "from matplotlib.patches import Patch\n", "\n", - "legend_elements = [Patch(facecolor='#a2c662', edgecolor='#40a018', alpha = .4,\n", - " label='BV Hauls'),\n", - " Patch(facecolor='#117347', edgecolor='#117347',\n", - " label='Elog Hauls'),\n", - " Patch(facecolor='#43aa99', edgecolor='#43aa99',\n", - " label='Predicted Hauls'),\n", - " Line2D([0], [0], color='#184EAD', lw=2, label='AI Counts')]\n", + "legend_elements = [\n", + " Patch(facecolor=\"#a2c662\", edgecolor=\"#40a018\", alpha=0.4, label=\"BV Hauls\"),\n", + " Patch(facecolor=\"#117347\", edgecolor=\"#117347\", label=\"Elog Hauls\"),\n", + " Patch(facecolor=\"#43aa99\", edgecolor=\"#43aa99\", label=\"Predicted Hauls\"),\n", + " Line2D([0], [0], color=\"#184EAD\", lw=2, label=\"AI Counts\"),\n", + "]\n", "\n", "# ax[0].legend(handles=legend_elements, loc='upper left', fontsize = 9)\n", "# ax[1].get_legend().remove()\n", "\n", - "plt.legend( bbox_to_anchor=(1.1, 1))\n", + "plt.legend(bbox_to_anchor=(1.1, 1))\n", "\n", - "plt.subplots_adjust(wspace=0, hspace=-.2)\n", + "plt.subplots_adjust(wspace=0, hspace=-0.2)\n", "\n", - "plt.savefig('../chart_pngs/reivewer_events_haul_detection.png')\n", + "plt.savefig(\"../chart_pngs/reivewer_events_haul_detection.png\")\n", "plt.show()" ] }, @@ -2859,9 +3021,13 @@ "metadata": {}, "outputs": [], "source": [ - "video_events = df_notes.loc[(df_notes['category'] == 'No Video') |(df_notes['category'] == 'Camera Blocked')]\n", + "video_events = df_notes.loc[\n", + " (df_notes[\"category\"] == \"No Video\") | (df_notes[\"category\"] == \"Camera Blocked\")\n", + "]\n", "\n", - "video_eventsBrancol2 = video_events.loc[(video_events['vessel'] == 'Brancol') & (video_events['trip_number']==2)]" + "video_eventsBrancol2 = video_events.loc[\n", + " (video_events[\"vessel\"] == \"Brancol\") & (video_events[\"trip_number\"] == 2)\n", + "]" ] }, { @@ -2879,11 +3045,15 @@ "metadata": {}, "outputs": [], "source": [ - "haul_stops = df_notes.loc[(df_notes['category'] == 'Haul Stop') |(df_notes['category'] == 'Gear Issue')]\n", + "haul_stops = df_notes.loc[\n", + " (df_notes[\"category\"] == \"Haul Stop\") | (df_notes[\"category\"] == \"Gear Issue\")\n", + "]\n", "\n", "haul_stops.dtypes\n", "\n", - "haul_stopsBrancol2 = 
haul_stops.loc[(haul_stops['vessel'] == 'Brancol') & (haul_stops['trip_number']==2)]" + "haul_stopsBrancol2 = haul_stops.loc[\n", + " (haul_stops[\"vessel\"] == \"Brancol\") & (haul_stops[\"trip_number\"] == 2)\n", + "]" ] }, { @@ -3194,13 +3364,13 @@ "outputs": [], "source": [ "# create dataframe that joins the hauls from bv sets to the predicted hauls (df_results) to get the set number for trip 2\n", - "conn = sqlite3.connect(':memory:')\n", - "bv_sets = Brancol2data['bv_sets']\n", - "df_hauls = bv_sets.loc[:,['set_number','haul_start_datetime','haul_end_datetime']] \n", + "conn = sqlite3.connect(\":memory:\")\n", + "bv_sets = Brancol2data[\"bv_sets\"]\n", + "df_hauls = bv_sets.loc[:, [\"set_number\", \"haul_start_datetime\", \"haul_end_datetime\"]]\n", "\n", - "#write the tables\n", - "df_results.to_sql('results', conn, index=False)\n", - "df_hauls.to_sql('hauls', conn, index=False)\n", + "# write the tables\n", + "df_results.to_sql(\"results\", conn, index=False)\n", + "df_hauls.to_sql(\"hauls\", conn, index=False)\n", "\n", "query = \"\"\"\n", "select\n", @@ -3212,8 +3382,10 @@ "\n", "\"\"\"\n", "df_results_setnumber = pd.read_sql_query(query, conn)\n", - "df_results_setnumber['utc_end_datetime'] = pd.to_datetime(df_results_setnumber['utc_end_datetime'])\n", - "df_results_setnumber['utc_start_datetime'] = pd.to_datetime(df_results_setnumber['utc_start_datetime'])" + "df_results_setnumber[\"utc_end_datetime\"] = pd.to_datetime(df_results_setnumber[\"utc_end_datetime\"])\n", + "df_results_setnumber[\"utc_start_datetime\"] = pd.to_datetime(\n", + " df_results_setnumber[\"utc_start_datetime\"]\n", + ")" ] }, { @@ -3385,13 +3557,13 @@ "outputs": [], "source": [ "# create dataframe that joins the hauls from bv sets to the predicted hauls (df_results) to get the set number for trip 3\n", - "conn = sqlite3.connect(':memory:')\n", - "bv_sets3 = Brancol3data['bv_sets']\n", - "df_hauls3 = bv_sets3.loc[:,['set_number','haul_start_datetime','haul_end_datetime']] \n", + "conn = sqlite3.connect(\":memory:\")\n", + "bv_sets3 = Brancol3data[\"bv_sets\"]\n", + "df_hauls3 = bv_sets3.loc[:, [\"set_number\", \"haul_start_datetime\", \"haul_end_datetime\"]]\n", "\n", - "#write the tables\n", - "df_results3.to_sql('results', conn, index=False)\n", - "df_hauls3.to_sql('hauls', conn, index=False)\n", + "# write the tables\n", + "df_results3.to_sql(\"results\", conn, index=False)\n", + "df_hauls3.to_sql(\"hauls\", conn, index=False)\n", "\n", "query = \"\"\"\n", "select\n", @@ -3403,8 +3575,12 @@ "\n", "\"\"\"\n", "df_results3_setnumber = pd.read_sql_query(query, conn)\n", - "df_results3_setnumber['utc_end_datetime'] = pd.to_datetime(df_results3_setnumber['utc_end_datetime'])\n", - "df_results3_setnumber['utc_start_datetime'] = pd.to_datetime(df_results3_setnumber['utc_start_datetime'])" + "df_results3_setnumber[\"utc_end_datetime\"] = pd.to_datetime(\n", + " df_results3_setnumber[\"utc_end_datetime\"]\n", + ")\n", + "df_results3_setnumber[\"utc_start_datetime\"] = pd.to_datetime(\n", + " df_results3_setnumber[\"utc_start_datetime\"]\n", + ")" ] }, { @@ -3434,58 +3610,106 @@ "\n", "%matplotlib inline\n", "# %matplotlib widget\n", - "fig, axes = plt.subplots(1, 4, figsize = (8,1), sharey = True)\n", - "label = 'haul stops'\n", - "chart_labels = ['a', 'b', 'c', 'd']\n", + "fig, axes = plt.subplots(1, 4, figsize=(8, 1), sharey=True)\n", + "label = \"haul stops\"\n", + "chart_labels = [\"a\", \"b\", \"c\", \"d\"]\n", "\n", - "df_hauls = results.loc[results.predict_haul ==1]\n", + "df_hauls = 
results.loc[results.predict_haul == 1]\n", "\n", "# hlines for the event bars\n", - "yticks = [.175, .5, .825]\n", - "yticks = [.3, .7, 1.825]\n", - "yheight = .4\n", - "ypos = [tick - (yheight/2) for tick in yticks]\n", + "yticks = [0.175, 0.5, 0.825]\n", + "yticks = [0.3, 0.7, 1.825]\n", + "yheight = 0.4\n", + "ypos = [tick - (yheight / 2) for tick in yticks]\n", "\n", "for i, set_n in enumerate(interest_sets):\n", " ax = axes[i]\n", - " ax.hlines(yticks ,.01,.99, transform=ax.transAxes, colors = 'grey', lw = .2, zorder = 0)\n", + " ax.hlines(yticks, 0.01, 0.99, transform=ax.transAxes, colors=\"grey\", lw=0.2, zorder=0)\n", " # ticks for the event bar positions and labels\n", - " ytick_labels = ['Predicted Hauls','Haul Stops','Analyst Hauls']\n", - " ax.set_yticks(yticks,ytick_labels)\n", + " ytick_labels = [\"Predicted Hauls\", \"Haul Stops\", \"Analyst Hauls\"]\n", + " ax.set_yticks(yticks, ytick_labels)\n", " ax.set_yticklabels([])\n", " ax.set_ylim([0, 1])\n", - " \n", + "\n", " # plotting event bars\n", - " p1 = plot_event_bars(brancol2_bv_sets.loc[brancol2_bv_sets['set_number'].astype(int) == set_n].copy(), ax, ytick_labels[2], 'haul_start_datetime', end_col = 'haul_end_datetime', y_val = ypos[2], y_height = yheight, color = bv_color)\n", - " p2 = plot_event_bars(haul_stopsBrancol2.loc[haul_stopsBrancol2['set_number'] == set_n].copy(), ax,ytick_labels[1], 'start_time',end_col = 'end_time', y_val=ypos[1], y_height = yheight, color= colors[13] )\n", + " p1 = plot_event_bars(\n", + " brancol2_bv_sets.loc[brancol2_bv_sets[\"set_number\"].astype(int) == set_n].copy(),\n", + " ax,\n", + " ytick_labels[2],\n", + " \"haul_start_datetime\",\n", + " end_col=\"haul_end_datetime\",\n", + " y_val=ypos[2],\n", + " y_height=yheight,\n", + " color=bv_color,\n", + " )\n", + " p2 = plot_event_bars(\n", + " haul_stopsBrancol2.loc[haul_stopsBrancol2[\"set_number\"] == set_n].copy(),\n", + " ax,\n", + " ytick_labels[1],\n", + " \"start_time\",\n", + " end_col=\"end_time\",\n", + " y_val=ypos[1],\n", + " y_height=yheight,\n", + " color=colors[13],\n", + " )\n", " xlim = p1.axes.get_xlim()\n", - " p3 = plot_event_bars(df_hauls, ax, ytick_labels[0], 'utc_start_datetime', duration = '5m', y_val = ypos[0], y_height = yheight)\n", - " \n", - " xmin = pd.Timestamp(xlim[0],unit = 'D')- pd.Timedelta('1h')\n", - " xmax = pd.Timestamp(xlim[1],unit = 'D')+ pd.Timedelta('1h')\n", + " p3 = plot_event_bars(\n", + " df_hauls,\n", + " ax,\n", + " ytick_labels[0],\n", + " \"utc_start_datetime\",\n", + " duration=\"5m\",\n", + " y_val=ypos[0],\n", + " y_height=yheight,\n", + " )\n", + "\n", + " xmin = pd.Timestamp(xlim[0], unit=\"D\") - pd.Timedelta(\"1h\")\n", + " xmax = pd.Timestamp(xlim[1], unit=\"D\") + pd.Timedelta(\"1h\")\n", " ax.set_xlim(xmin, xmax)\n", " # p3.axes.set_xlim(xlim)\n", "\n", - " locator = DayLocator(bymonthday = 1)\n", - " hour_locator = HourLocator(interval = 2)\n", + " locator = DayLocator(bymonthday=1)\n", + " hour_locator = HourLocator(interval=2)\n", " # formatter = mdates.ConciseDateFormatter(locator)\n", - " hour_formatter = mdates.ConciseDateFormatter(hour_locator, formats = ['%Y', '%b', '%d', '%H h', '%H:%M', '%S.%f'], offset_formats = ['', '%Y', '%Y-%b', '%Y-%b-%d', '%Y-%b-%d', '%Y-%b-%d %H:%M'], usetex = True)\n", - " fmt = '%H h'\n", + " hour_formatter = mdates.ConciseDateFormatter(\n", + " hour_locator,\n", + " formats=[\"%Y\", \"%b\", \"%d\", \"%H h\", \"%H:%M\", \"%S.%f\"],\n", + " offset_formats=[\"\", \"%Y\", \"%Y-%b\", \"%Y-%b-%d\", \"%Y-%b-%d\", \"%Y-%b-%d %H:%M\"],\n", + " 
usetex=True,\n", + " )\n", + " fmt = \"%H h\"\n", "\n", " ax.xaxis.set_major_locator(hour_locator)\n", " ax.xaxis.set_major_formatter(hour_formatter)\n", - " ax.xaxis.get_offset_text().set_fontweight('bold')\n", - " ax.tick_params(axis='y', length=0)\n", - " sns.despine(trim=True, left=True )\n", - "\n", - " ax.text(0.02, 1.05, f'({chart_labels[i]})', transform=ax.transAxes, fontsize=9, fontweight='bold', va='top', ha='left')\n", + " ax.xaxis.get_offset_text().set_fontweight(\"bold\")\n", + " ax.tick_params(axis=\"y\", length=0)\n", + " sns.despine(trim=True, left=True)\n", + "\n", + " ax.text(\n", + " 0.02,\n", + " 1.05,\n", + " f\"({chart_labels[i]})\",\n", + " transform=ax.transAxes,\n", + " fontsize=9,\n", + " fontweight=\"bold\",\n", + " va=\"top\",\n", + " ha=\"left\",\n", + " )\n", "\n", "handles, labels = ax.get_legend_handles_labels()\n", - "fig.legend(handles[1:], labels[1:], loc='upper center', fontsize = 9, bbox_to_anchor=(0.5, 1.25), ncol=2, frameon = False)\n", + "fig.legend(\n", + " handles[1:],\n", + " labels[1:],\n", + " loc=\"upper center\",\n", + " fontsize=9,\n", + " bbox_to_anchor=(0.5, 1.25),\n", + " ncol=2,\n", + " frameon=False,\n", + ")\n", "\n", "# Adding bolded labels to the top left corner of each subplot\n", "\n", - "plt.savefig('haulstop_subsets.png', bbox_inches='tight')\n" + "plt.savefig(\"haulstop_subsets.png\", bbox_inches=\"tight\")\n" ] }, { @@ -3664,7 +3888,7 @@ "metadata": {}, "outputs": [], "source": [ - "categories = eventsBrancol2['category'].unique()\n", + "categories = eventsBrancol2[\"category\"].unique()\n", "\n", "# categories = np.delete(categories, -1)\n" ] @@ -3698,7 +3922,7 @@ "metadata": {}, "outputs": [], "source": [ - "categories = [category for category in categories if str(category) != 'nan']" + "categories = [category for category in categories if str(category) != \"nan\"]" ] }, { @@ -3708,7 +3932,6 @@ "metadata": {}, "outputs": [], "source": [ - "\n", "df_results = df_results.loc[:, ~df_results.columns.duplicated()]\n", "# df_results.head()" ] @@ -3725,57 +3948,54 @@ "df_category_results = df_results_setnumber.copy()\n", "# iterate through each category\n", "select_columns = df_results.columns.to_list()\n", - "select_columns = ['results.'+x for x in select_columns]\n", + "select_columns = [\"results.\" + x for x in select_columns]\n", "for category in categories:\n", " # create a category df\n", - " df_category = df_events.loc[df_events['category']==category].copy()\n", - " df_category = df_category.loc[:,['start_time','end_time','category','set_number']]\n", - "\n", - " column_label = 'is_'+ '_'.join(category.lower().split(' '))\n", - " \n", - " #Make the db in memory\n", - " conn = sqlite3.connect(':memory:')\n", - " #write the tables\n", - " df_category_results.to_sql('results', conn, index=False)\n", - " df_category.to_sql('category', conn, index=False)\n", - "\n", - " \n", + " df_category = df_events.loc[df_events[\"category\"] == category].copy()\n", + " df_category = df_category.loc[:, [\"start_time\", \"end_time\", \"category\", \"set_number\"]]\n", + "\n", + " column_label = \"is_\" + \"_\".join(category.lower().split(\" \"))\n", + "\n", + " # Make the db in memory\n", + " conn = sqlite3.connect(\":memory:\")\n", + " # write the tables\n", + " df_category_results.to_sql(\"results\", conn, index=False)\n", + " df_category.to_sql(\"category\", conn, index=False)\n", + "\n", " query = f\"\"\"\n", " select\n", " {', '.join(select_columns)},\n", " category.category is not null as {column_label}\n", - " \n", + "\n", " from 
results\n", - " left join category on \n", - " results.utc_start_datetime between category.start_time and category.end_time or \n", - " category.start_time between results.utc_start_datetime and results.utc_end_datetime \n", + " left join category on\n", + " results.utc_start_datetime between category.start_time and category.end_time or\n", + " category.start_time between results.utc_start_datetime and results.utc_end_datetime\n", " \"\"\"\n", "\n", " df_category_results = pd.read_sql_query(query, conn)\n", " select_columns.append(column_label)\n", "\n", "\n", - "if 'set_number' not in df_category_results.columns.to_list():\n", - " conn = sqlite3.connect(':memory:')\n", - " bv_sets = Brancol2data['bv_sets']\n", - " df_hauls = bv_sets.loc[:,['set_number','haul_start_datetime','haul_end_datetime']] \n", - " \n", - " #write the tables to add set_number\n", - " df_category_results.to_sql('results', conn, index=False)\n", - " df_hauls.to_sql('hauls', conn, index=False)\n", - " \n", + "if \"set_number\" not in df_category_results.columns.to_list():\n", + " conn = sqlite3.connect(\":memory:\")\n", + " bv_sets = Brancol2data[\"bv_sets\"]\n", + " df_hauls = bv_sets.loc[:, [\"set_number\", \"haul_start_datetime\", \"haul_end_datetime\"]]\n", + "\n", + " # write the tables to add set_number\n", + " df_category_results.to_sql(\"results\", conn, index=False)\n", + " df_hauls.to_sql(\"hauls\", conn, index=False)\n", + "\n", " query = \"\"\"\n", " select\n", " results.*,\n", " hauls.set_number\n", - " \n", + "\n", " from results\n", " left join hauls on results.utc_start_datetime between hauls.haul_start_datetime and hauls.haul_end_datetime\n", - " \n", - " \"\"\"\n", - " df_category_results = pd.read_sql_query(query, conn)\n", "\n", - "\n" + " \"\"\"\n", + " df_category_results = pd.read_sql_query(query, conn)\n" ] }, { @@ -3807,10 +4027,10 @@ ], "source": [ "# looking for correlations with the no_haul predictions\n", - "df_category_only_hauls['predict_no_haul'] = df_category_only_hauls['predict_haul'].map({0:1, 1:0})\n", + "df_category_only_hauls[\"predict_no_haul\"] = df_category_only_hauls[\"predict_haul\"].map({0: 1, 1: 0})\n", "\n", "\n", - "df_category_only_hauls.corr()['predict_no_haul']" + "df_category_only_hauls.corr()[\"predict_no_haul\"]" ] }, { @@ -3835,21 +4055,21 @@ "\n", "# get a list of the results columns to use for the sql select statement\n", "select_columns = df_results_setnumber.columns.to_list()\n", - "select_columns = ['results.'+x for x in select_columns]\n", + "select_columns = [\"results.\" + x for x in select_columns]\n", "\n", "# creating a df of only HaulStop events\n", - "category = 'Haul Stop'\n", - "dfHaulStop = df_events.loc[df_events['category']==category].copy()\n", - "dfHaulStop = dfHaulStop.loc[:,['start_time','end_time','category','set_number']]\n", - "dfHaulStop = dfHaulStop.reset_index().rename(columns={'index':'haul_stop_id'})\n", + "category = \"Haul Stop\"\n", + "dfHaulStop = df_events.loc[df_events[\"category\"] == category].copy()\n", + "dfHaulStop = dfHaulStop.loc[:, [\"start_time\", \"end_time\", \"category\", \"set_number\"]]\n", + "dfHaulStop = dfHaulStop.reset_index().rename(columns={\"index\": \"haul_stop_id\"})\n", "\n", - "column_label = 'is_'+ '_'.join(category.lower().split(' '))\n", + "column_label = \"is_\" + \"_\".join(category.lower().split(\" \"))\n", "\n", - "#Make the db in memory\n", - "conn = sqlite3.connect(':memory:')\n", - "#write the tables\n", - "df_haul_stop_results.to_sql('results', conn, index=False)\n", - 
"dfHaulStop.to_sql('category', conn, index=False)\n", + "# Make the db in memory\n", + "conn = sqlite3.connect(\":memory:\")\n", + "# write the tables\n", + "df_haul_stop_results.to_sql(\"results\", conn, index=False)\n", + "dfHaulStop.to_sql(\"category\", conn, index=False)\n", "\n", "# sql query to join haul stop events to the results if the prediction window is between the haul stop start/end or if the haul stop start time is between the prediction window\n", "query = f\"\"\"\n", @@ -3859,26 +4079,41 @@ " category.haul_stop_id,\n", " category.start_time as haul_stop_start,\n", " category.end_time as haul_stop_end\n", - " \n", + "\n", " from results\n", - " left join category on \n", - " results.utc_start_datetime between category.start_time and category.end_time or \n", - " category.start_time between results.utc_start_datetime and results.utc_end_datetime \n", + " left join category on\n", + " results.utc_start_datetime between category.start_time and category.end_time or\n", + " category.start_time between results.utc_start_datetime and results.utc_end_datetime\n", " \"\"\"\n", "\n", "df_haul_stop_results = pd.read_sql_query(query, conn)\n", "# setting up a column of predict_no_haul that is opposite of predict_haul\n", - "df_haul_stop_results['predict_no_haul'] = df_haul_stop_results['predict_haul'].map({0:1, 1:0})\n", - "df_haul_stop_results = df_haul_stop_results.loc[df_haul_stop_results['is_haul']==1]\n", - "df_haul_stop_results = df_haul_stop_results[['utc_start_datetime', 'utc_end_datetime','is_haul', 'predict_haul', 'predict_no_haul','set_number','is_haul_stop','haul_stop_id', 'haul_stop_start', 'haul_stop_end']]\n", - "\n", - "#converting columns to datetime\n", - "df_haul_stop_results['utc_end_datetime'] = pd.to_datetime(df_haul_stop_results['utc_end_datetime'])\n", - "df_haul_stop_results['utc_start_datetime'] = pd.to_datetime(df_haul_stop_results['utc_start_datetime'])\n", - "df_haul_stop_results['haul_stop_end'] = pd.to_datetime(df_haul_stop_results['haul_stop_end'])\n", - "df_haul_stop_results['haul_stop_start'] = pd.to_datetime(df_haul_stop_results['haul_stop_start'])\n", - "dfHaulStop['end_time'] = pd.to_datetime(dfHaulStop['end_time'])\n", - "dfHaulStop['start_time'] = pd.to_datetime(dfHaulStop['start_time'])" + "df_haul_stop_results[\"predict_no_haul\"] = df_haul_stop_results[\"predict_haul\"].map({0: 1, 1: 0})\n", + "df_haul_stop_results = df_haul_stop_results.loc[df_haul_stop_results[\"is_haul\"] == 1]\n", + "df_haul_stop_results = df_haul_stop_results[\n", + " [\n", + " \"utc_start_datetime\",\n", + " \"utc_end_datetime\",\n", + " \"is_haul\",\n", + " \"predict_haul\",\n", + " \"predict_no_haul\",\n", + " \"set_number\",\n", + " \"is_haul_stop\",\n", + " \"haul_stop_id\",\n", + " \"haul_stop_start\",\n", + " \"haul_stop_end\",\n", + " ]\n", + "]\n", + "\n", + "# converting columns to datetime\n", + "df_haul_stop_results[\"utc_end_datetime\"] = pd.to_datetime(df_haul_stop_results[\"utc_end_datetime\"])\n", + "df_haul_stop_results[\"utc_start_datetime\"] = pd.to_datetime(\n", + " df_haul_stop_results[\"utc_start_datetime\"]\n", + ")\n", + "df_haul_stop_results[\"haul_stop_end\"] = pd.to_datetime(df_haul_stop_results[\"haul_stop_end\"])\n", + "df_haul_stop_results[\"haul_stop_start\"] = pd.to_datetime(df_haul_stop_results[\"haul_stop_start\"])\n", + "dfHaulStop[\"end_time\"] = pd.to_datetime(dfHaulStop[\"end_time\"])\n", + "dfHaulStop[\"start_time\"] = pd.to_datetime(dfHaulStop[\"start_time\"])" ] }, { @@ -3895,20 +4130,20 @@ "select_columns = 
df_results3_setnumber.columns.to_list()\n", "\n", "\n", - "select_columns = ['results.'+x for x in select_columns]\n", + "select_columns = [\"results.\" + x for x in select_columns]\n", "\n", - "category = 'Haul Stop'\n", - "dfHaulStop3 = df_events3.loc[df_events3['category']==category].copy()\n", - "dfHaulStop3 = dfHaulStop3.loc[:,['start_time','end_time','category','set_number']]\n", - "dfHaulStop3 = dfHaulStop3.reset_index().rename(columns={'index':'haul_stop_id'})\n", + "category = \"Haul Stop\"\n", + "dfHaulStop3 = df_events3.loc[df_events3[\"category\"] == category].copy()\n", + "dfHaulStop3 = dfHaulStop3.loc[:, [\"start_time\", \"end_time\", \"category\", \"set_number\"]]\n", + "dfHaulStop3 = dfHaulStop3.reset_index().rename(columns={\"index\": \"haul_stop_id\"})\n", "\n", - "column_label = 'is_'+ '_'.join(category.lower().split(' '))\n", + "column_label = \"is_\" + \"_\".join(category.lower().split(\" \"))\n", "\n", - "#Make the db in memory\n", - "conn = sqlite3.connect(':memory:')\n", - "#write the tables\n", - "df_haul_stop_results3.to_sql('results', conn, index=False)\n", - "dfHaulStop3.to_sql('category', conn, index=False)\n", + "# Make the db in memory\n", + "conn = sqlite3.connect(\":memory:\")\n", + "# write the tables\n", + "df_haul_stop_results3.to_sql(\"results\", conn, index=False)\n", + "dfHaulStop3.to_sql(\"category\", conn, index=False)\n", "\n", "\n", "query = f\"\"\"\n", @@ -3918,28 +4153,45 @@ " category.haul_stop_id,\n", " category.start_time as haul_stop_start,\n", " category.end_time as haul_stop_end\n", - " \n", + "\n", " from results\n", - " left join category on \n", - " results.utc_start_datetime between category.start_time and category.end_time or \n", - " category.start_time between results.utc_start_datetime and results.utc_end_datetime \n", + " left join category on\n", + " results.utc_start_datetime between category.start_time and category.end_time or\n", + " category.start_time between results.utc_start_datetime and results.utc_end_datetime\n", " \"\"\"\n", "\n", "print(query)\n", "\n", "df_haul_stop_results3 = pd.read_sql_query(query, conn)\n", - "df_haul_stop_results3['predict_no_haul'] = df_haul_stop_results3['predict_haul'].map({0:1, 1:0})\n", - "df_haul_stop_results3 = df_haul_stop_results3.loc[df_haul_stop_results3['is_haul']==1]\n", - "df_haul_stop_results3 = df_haul_stop_results3[['utc_start_datetime', 'utc_end_datetime','is_haul', 'predict_haul', 'predict_no_haul','set_number','is_haul_stop','haul_stop_id', 'haul_stop_start', 'haul_stop_end']]\n", - "\n", - "df_haul_stop_results3['utc_end_datetime'] = pd.to_datetime(df_haul_stop_results3['utc_end_datetime'])\n", - "df_haul_stop_results3['utc_start_datetime'] = pd.to_datetime(df_haul_stop_results3['utc_start_datetime'])\n", - "\n", - "df_haul_stop_results3['haul_stop_end'] = pd.to_datetime(df_haul_stop_results3['haul_stop_end'])\n", - "df_haul_stop_results3['haul_stop_start'] = pd.to_datetime(df_haul_stop_results3['haul_stop_start'])\n", - "\n", - "dfHaulStop3['end_time'] = pd.to_datetime(dfHaulStop3['end_time'])\n", - "dfHaulStop3['start_time'] = pd.to_datetime(dfHaulStop3['start_time'])" + "df_haul_stop_results3[\"predict_no_haul\"] = df_haul_stop_results3[\"predict_haul\"].map({0: 1, 1: 0})\n", + "df_haul_stop_results3 = df_haul_stop_results3.loc[df_haul_stop_results3[\"is_haul\"] == 1]\n", + "df_haul_stop_results3 = df_haul_stop_results3[\n", + " [\n", + " \"utc_start_datetime\",\n", + " \"utc_end_datetime\",\n", + " \"is_haul\",\n", + " \"predict_haul\",\n", + " \"predict_no_haul\",\n", + 
" \"set_number\",\n", + " \"is_haul_stop\",\n", + " \"haul_stop_id\",\n", + " \"haul_stop_start\",\n", + " \"haul_stop_end\",\n", + " ]\n", + "]\n", + "\n", + "df_haul_stop_results3[\"utc_end_datetime\"] = pd.to_datetime(\n", + " df_haul_stop_results3[\"utc_end_datetime\"]\n", + ")\n", + "df_haul_stop_results3[\"utc_start_datetime\"] = pd.to_datetime(\n", + " df_haul_stop_results3[\"utc_start_datetime\"]\n", + ")\n", + "\n", + "df_haul_stop_results3[\"haul_stop_end\"] = pd.to_datetime(df_haul_stop_results3[\"haul_stop_end\"])\n", + "df_haul_stop_results3[\"haul_stop_start\"] = pd.to_datetime(df_haul_stop_results3[\"haul_stop_start\"])\n", + "\n", + "dfHaulStop3[\"end_time\"] = pd.to_datetime(dfHaulStop3[\"end_time\"])\n", + "dfHaulStop3[\"start_time\"] = pd.to_datetime(dfHaulStop3[\"start_time\"])" ] }, { @@ -3962,22 +4214,32 @@ ], "source": [ "# get the total number of minutes of haul events recorded by bv for trip 3\n", - "print('trip 3 durations')\n", - "brancol3_bv_sets['duration_minutes'] = brancol3_bv_sets['duration'].apply(lambda x: x.total_seconds()/60)\n", + "print(\"trip 3 durations\")\n", + "brancol3_bv_sets[\"duration_minutes\"] = brancol3_bv_sets[\"duration\"].apply(\n", + " lambda x: x.total_seconds() / 60\n", + ")\n", "print(f'bv haul duration: {sum(brancol3_bv_sets['duration_minutes'])}')\n", "\n", - "df_haul_stop_results3['haul_duration'] = df_haul_stop_results3.apply(lambda x: (x['utc_end_datetime'] - x['utc_start_datetime']).total_seconds()/60, axis = 1)\n", + "df_haul_stop_results3[\"haul_duration\"] = df_haul_stop_results3.apply(\n", + " lambda x: (x[\"utc_end_datetime\"] - x[\"utc_start_datetime\"]).total_seconds() / 60, axis=1\n", + ")\n", "print(f'bv haul duration aligned with ai results: {sum(df_haul_stop_results3['haul_duration'])}')\n", "\n", "# get total duration of haul stops from the haul stops that have available results\n", - "only_stops3 = df_haul_stop_results3.loc[df_haul_stop_results3['is_haul_stop']==1].copy()\n", - "only_stops3 = only_stops3[['haul_stop_id','haul_stop_start','haul_stop_end']].drop_duplicates()\n", - "only_stops3['duration'] = only_stops3.apply(lambda x: (x['haul_stop_end'] - x['haul_stop_start']).total_seconds()/60, axis = 1)\n", - "haul_stop_total3 = sum(only_stops3['duration'])\n", - "print(f'haul stop total duration: {haul_stop_total3}')\n", + "only_stops3 = df_haul_stop_results3.loc[df_haul_stop_results3[\"is_haul_stop\"] == 1].copy()\n", + "only_stops3 = only_stops3[[\"haul_stop_id\", \"haul_stop_start\", \"haul_stop_end\"]].drop_duplicates()\n", + "only_stops3[\"duration\"] = only_stops3.apply(\n", + " lambda x: (x[\"haul_stop_end\"] - x[\"haul_stop_start\"]).total_seconds() / 60, axis=1\n", + ")\n", + "haul_stop_total3 = sum(only_stops3[\"duration\"])\n", + "print(f\"haul stop total duration: {haul_stop_total3}\")\n", "\n", - "haul_stops_group3 = df_haul_stop_results3.groupby(['haul_stop_id','haul_stop_start','haul_stop_end']).agg({'predict_no_haul':'max'}).reset_index()\n", - "print(f'number of haul stops that aligned with no haul prediction: {len(haul_stops_group3)}')" + "haul_stops_group3 = (\n", + " df_haul_stop_results3.groupby([\"haul_stop_id\", \"haul_stop_start\", \"haul_stop_end\"])\n", + " .agg({\"predict_no_haul\": \"max\"})\n", + " .reset_index()\n", + ")\n", + "print(f\"number of haul stops that aligned with no haul prediction: {len(haul_stops_group3)}\")" ] }, { @@ -4000,23 +4262,33 @@ ], "source": [ "# get the total number of minutes of haul events recorded by bv for trip 2\n", - "print('trip 2 durations')\n", - 
"brancol2_bv_sets['duration_minutes'] = brancol2_bv_sets['duration'].apply(lambda x: x.total_seconds()/60)\n", + "print(\"trip 2 durations\")\n", + "brancol2_bv_sets[\"duration_minutes\"] = brancol2_bv_sets[\"duration\"].apply(\n", + " lambda x: x.total_seconds() / 60\n", + ")\n", "print(f'bv total haul duration: {sum(brancol2_bv_sets['duration_minutes'])}')\n", "\n", "# get the total number of minutes of bv haul events where a bv haul aligned with the ai results (if there were no ai catch count results, then those times are excluded)\n", - "df_haul_stop_results['haul_duration'] = df_haul_stop_results.apply(lambda x: (x['utc_end_datetime'] - x['utc_start_datetime']).total_seconds()/60, axis = 1)\n", + "df_haul_stop_results[\"haul_duration\"] = df_haul_stop_results.apply(\n", + " lambda x: (x[\"utc_end_datetime\"] - x[\"utc_start_datetime\"]).total_seconds() / 60, axis=1\n", + ")\n", "print(f'bv haul duration aligned with ai results: {sum(df_haul_stop_results['haul_duration'])}')\n", "\n", "# get total duration of haul stops from the haul stops that have available results\n", - "only_stops = df_haul_stop_results.loc[df_haul_stop_results['is_haul_stop']==1].copy()\n", - "only_stops = only_stops[['haul_stop_id','haul_stop_start','haul_stop_end']].drop_duplicates()\n", - "only_stops['duration'] = only_stops.apply(lambda x: (x['haul_stop_end'] - x['haul_stop_start']).total_seconds()/60, axis = 1)\n", - "haul_stop_total = sum(only_stops['duration'])\n", - "print(f'haul stop total duration: {haul_stop_total}')\n", + "only_stops = df_haul_stop_results.loc[df_haul_stop_results[\"is_haul_stop\"] == 1].copy()\n", + "only_stops = only_stops[[\"haul_stop_id\", \"haul_stop_start\", \"haul_stop_end\"]].drop_duplicates()\n", + "only_stops[\"duration\"] = only_stops.apply(\n", + " lambda x: (x[\"haul_stop_end\"] - x[\"haul_stop_start\"]).total_seconds() / 60, axis=1\n", + ")\n", + "haul_stop_total = sum(only_stops[\"duration\"])\n", + "print(f\"haul stop total duration: {haul_stop_total}\")\n", "\n", - "haul_stops_group = df_haul_stop_results.groupby(['haul_stop_id','haul_stop_start','haul_stop_end']).agg({'predict_no_haul':'max'}).reset_index()\n", - "print(f'number of haul stops that aligned with no haul prediction: {len(haul_stops_group)}')" + "haul_stops_group = (\n", + " df_haul_stop_results.groupby([\"haul_stop_id\", \"haul_stop_start\", \"haul_stop_end\"])\n", + " .agg({\"predict_no_haul\": \"max\"})\n", + " .reset_index()\n", + ")\n", + "print(f\"number of haul stops that aligned with no haul prediction: {len(haul_stops_group)}\")" ] }, { @@ -4126,7 +4398,7 @@ } ], "source": [ - "dfHaulStop['duration'] = dfHaulStop['end_time']-dfHaulStop['start_time']\n", + "dfHaulStop[\"duration\"] = dfHaulStop[\"end_time\"] - dfHaulStop[\"start_time\"]\n", "dfHaulStop.head()" ] }, @@ -4139,10 +4411,10 @@ "source": [ "def find_overlap_duration(row):\n", " # finds the overlap time of haul stops with the predicted no_haul events\n", - " if row['is_haul_stop'] == 1:\n", - " latest_start = max(row['utc_start_datetime'],row['haul_stop_start'])\n", - " earliest_end = min(row['utc_end_datetime'],row['haul_stop_end'])\n", - " duration = (earliest_end - latest_start).total_seconds()/60\n", + " if row[\"is_haul_stop\"] == 1:\n", + " latest_start = max(row[\"utc_start_datetime\"], row[\"haul_stop_start\"])\n", + " earliest_end = min(row[\"utc_end_datetime\"], row[\"haul_stop_end\"])\n", + " duration = (earliest_end - latest_start).total_seconds() / 60\n", " else:\n", " duration = 0\n", " return duration" @@ -4155,8 +4427,9 
@@ "metadata": {}, "outputs": [], "source": [ - "df_haul_stop_results['overlap_minutes'] = df_haul_stop_results.apply(lambda x: find_overlap_duration(x), axis = 1)\n", - "\n" + "df_haul_stop_results[\"overlap_minutes\"] = df_haul_stop_results.apply(\n", + " lambda x: find_overlap_duration(x), axis=1\n", + ")\n" ] }, { @@ -4174,7 +4447,7 @@ "metadata": {}, "outputs": [], "source": [ - "df_results3['duration'] = df_results3['utc_end_datetime']-df_results3['utc_start_datetime']\n", + "df_results3[\"duration\"] = df_results3[\"utc_end_datetime\"] - df_results3[\"utc_start_datetime\"]\n", "df_onlyhaul3 = df_results3.loc[df_results3.is_haul == 1].copy()" ] }, @@ -4196,8 +4469,8 @@ } ], "source": [ - "df_onlyhaul3['duration_minutes'] = df_results3['duration'].apply(lambda x: x.total_seconds()/60)\n", - "sum(df_onlyhaul3['duration_minutes'].loc[df_onlyhaul3['predict_haul']==0])/60" + "df_onlyhaul3[\"duration_minutes\"] = df_results3[\"duration\"].apply(lambda x: x.total_seconds() / 60)\n", + "sum(df_onlyhaul3[\"duration_minutes\"].loc[df_onlyhaul3[\"predict_haul\"] == 0]) / 60" ] }, { @@ -4219,11 +4492,19 @@ ], "source": [ "# get duration of overlap\n", - "haulstop_total2 = sum(df_haul_stop_results['overlap_minutes']) # total number of minutes where a haul stop over laps with a haul?\n", - "overlap_nohaul2 = sum(df_haul_stop_results.loc[df_haul_stop_results['predict_haul']==0]['overlap_minutes']) # haul stops where predicted haul is 0, \n", - "overlap_haul2 = sum(df_haul_stop_results.loc[df_haul_stop_results['predict_haul']==1]['overlap_minutes']) # hauls stops where there is a haul predicted\n", - "total_nohaul2 = sum(df_haul_stop_results.loc[df_haul_stop_results['predict_haul']==0]['haul_duration']) # total prediction of haul gaps\n", - "overlap_nohaul2/haulstop_total2" + "haulstop_total2 = sum(\n", + " df_haul_stop_results[\"overlap_minutes\"]\n", + ") # total number of minutes where a haul stop over laps with a haul?\n", + "overlap_nohaul2 = sum(\n", + " df_haul_stop_results.loc[df_haul_stop_results[\"predict_haul\"] == 0][\"overlap_minutes\"]\n", + ") # haul stops where predicted haul is 0,\n", + "overlap_haul2 = sum(\n", + " df_haul_stop_results.loc[df_haul_stop_results[\"predict_haul\"] == 1][\"overlap_minutes\"]\n", + ") # hauls stops where there is a haul predicted\n", + "total_nohaul2 = sum(\n", + " df_haul_stop_results.loc[df_haul_stop_results[\"predict_haul\"] == 0][\"haul_duration\"]\n", + ") # total prediction of haul gaps\n", + "overlap_nohaul2 / haulstop_total2" ] }, { @@ -4245,13 +4526,21 @@ ], "source": [ "# get duration of overlap\n", - "df_haul_stop_results3['overlap_minutes'] = df_haul_stop_results3.apply(lambda x: find_overlap_duration(x), axis = 1)\n", - "overlap_total3 = sum(df_haul_stop_results3['overlap_minutes'])\n", - "haulstop_total3 = sum(df_haul_stop_results3['overlap_minutes'])\n", - "overlap_nohaul3 = sum(df_haul_stop_results3.loc[df_haul_stop_results3['predict_haul']==0]['overlap_minutes'])\n", - "overlap_haul3 = sum(df_haul_stop_results3.loc[df_haul_stop_results3['predict_haul']==1]['overlap_minutes'])\n", - "total_nohaul3 = sum(df_haul_stop_results3.loc[df_haul_stop_results3['predict_haul']==0]['haul_duration'])\n", - "overlap_nohaul3/haulstop_total3 #percentage of nohauls that are covered by haul stops?" 
+ "df_haul_stop_results3[\"overlap_minutes\"] = df_haul_stop_results3.apply(\n", + " lambda x: find_overlap_duration(x), axis=1\n", + ")\n", + "overlap_total3 = sum(df_haul_stop_results3[\"overlap_minutes\"])\n", + "haulstop_total3 = sum(df_haul_stop_results3[\"overlap_minutes\"])\n", + "overlap_nohaul3 = sum(\n", + " df_haul_stop_results3.loc[df_haul_stop_results3[\"predict_haul\"] == 0][\"overlap_minutes\"]\n", + ")\n", + "overlap_haul3 = sum(\n", + " df_haul_stop_results3.loc[df_haul_stop_results3[\"predict_haul\"] == 1][\"overlap_minutes\"]\n", + ")\n", + "total_nohaul3 = sum(\n", + " df_haul_stop_results3.loc[df_haul_stop_results3[\"predict_haul\"] == 0][\"haul_duration\"]\n", + ")\n", + "overlap_nohaul3 / haulstop_total3 # percentage of nohauls that are covered by haul stops?" ] }, { @@ -4262,9 +4551,9 @@ "outputs": [], "source": [ "def format_hours_minutes(minutes):\n", - " hrs = minutes//60\n", - " mins = minutes%60\n", - " formatted = \"%dh %dm\" %(hrs,mins) \n", + " hrs = minutes // 60\n", + " mins = minutes % 60\n", + " formatted = \"%dh %dm\" % (hrs, mins)\n", " return formatted" ] }, @@ -4297,63 +4586,93 @@ ], "source": [ "# venn diagram for trip 2 and 3 combined\n", - "from matplotlib_venn import venn2, venn2_circles\n", - "plt.figure(figsize=(3,3))\n", + "from matplotlib_venn import venn2\n", + "\n", + "plt.figure(figsize=(3, 3))\n", "# Use the venn2 function\n", "\n", "nohaul_color = colors[18]\n", "overlap_color = colors[17]\n", "haulstop_color = colors[13]\n", "\n", - "haulstop_id = '100'\n", - "overlap_id = '110'\n", - "nohaul_id = '010'\n", + "haulstop_id = \"100\"\n", + "overlap_id = \"110\"\n", + "nohaul_id = \"010\"\n", "\n", - "haulstop_total = haulstop_total2+haulstop_total3\n", - "total_nohaul = total_nohaul2+total_nohaul3\n", - "overlap_nohaul = overlap_nohaul2+overlap_nohaul3\n", + "haulstop_total = haulstop_total2 + haulstop_total3\n", + "total_nohaul = total_nohaul2 + total_nohaul3\n", + "overlap_nohaul = overlap_nohaul2 + overlap_nohaul3\n", "\n", "haulstop_total\n", - "v = venn2(subsets = (haulstop_total, total_nohaul, overlap_nohaul), set_labels = ('Haul Stops', 'Haul Prediction Gaps'))\n", + "v = venn2(\n", + " subsets=(haulstop_total, total_nohaul, overlap_nohaul),\n", + " set_labels=(\"Haul Stops\", \"Haul Prediction Gaps\"),\n", + ")\n", "\n", - "v.get_label_by_id(haulstop_id).set_text('')\n", - "v.get_label_by_id(overlap_id).set_text('')\n", - "v.get_label_by_id(nohaul_id).set_text('')\n", - "v.get_label_by_id('A').set_fontsize(10)\n", - "v.get_label_by_id('B').set_fontsize(10)\n", + "v.get_label_by_id(haulstop_id).set_text(\"\")\n", + "v.get_label_by_id(overlap_id).set_text(\"\")\n", + "v.get_label_by_id(nohaul_id).set_text(\"\")\n", + "v.get_label_by_id(\"A\").set_fontsize(10)\n", + "v.get_label_by_id(\"B\").set_fontsize(10)\n", "\n", "\n", - "v.get_patch_by_id(overlap_id).set_color(overlap_color) # overlap\n", + "v.get_patch_by_id(overlap_id).set_color(overlap_color) # overlap\n", "v.get_patch_by_id(overlap_id).set_alpha(1)\n", "\n", - "v.get_patch_by_id(nohaul_id).set_color(nohaul_color) # no haul prediction\n", + "v.get_patch_by_id(nohaul_id).set_color(nohaul_color) # no haul prediction\n", "v.get_patch_by_id(nohaul_id).set_alpha(1)\n", "\n", - "v.get_patch_by_id(haulstop_id).set_color(haulstop_color) # haul stops\n", + "v.get_patch_by_id(haulstop_id).set_color(haulstop_color) # haul stops\n", "v.get_patch_by_id(haulstop_id).set_alpha(1)\n", "\n", "\n", - "\n", - "plt.annotate(format_hours_minutes(total_nohaul-overlap_nohaul), 
xy=v.get_label_by_id(nohaul_id).get_position(), xytext=(0,0), ha='center',\n", - " textcoords='offset points', fontsize = 10, color = 'white', weight = 'bold',\n", - " path_effects=[pe.withStroke(linewidth=4, foreground=colors[17], alpha = .6)]\n", - " )\n", - "\n", - "plt.annotate(format_hours_minutes(overlap_nohaul), xy=v.get_label_by_id(overlap_id).get_position(), xytext=(-20,50), ha='center',\n", - " textcoords='offset points', fontsize = 10, color = overlap_color, weight = 'bold',\n", - " # path_effects=[pe.withStroke(linewidth=4, foreground=overlap_color, alpha = 1)],\n", - " arrowprops=dict(arrowstyle='->',lw = 1.5, connectionstyle='arc3,rad=-0.3',color='white', edgecolor=overlap_color, \n", - " path_effects=[pe.withStroke(linewidth=4, foreground=overlap_color, alpha = .6)]),\n", - " # bbox=dict(boxstyle='round,pad=0.2', fc=overlap_color, edgecolor = overlap_color, alpha = 1 )\n", - " )\n", - "\n", - "plt.annotate(format_hours_minutes(haulstop_total-overlap_nohaul), xy=v.get_label_by_id(haulstop_id).get_position(), xytext=(0,0), ha='center',\n", - " textcoords='offset points', fontsize = 10, color = 'white', weight = 'bold', \n", - " path_effects=[pe.withStroke(linewidth=4, foreground=colors[12], alpha = .6)]\n", - " # bbox=dict(boxstyle='round,pad=0.5', fc=, edgecolor = overlap_color, alpha = 1 )\n", - " )\n", - "\n", - "plt.savefig('haul_stop_venn.png', bbox_inches='tight')" + "plt.annotate(\n", + " format_hours_minutes(total_nohaul - overlap_nohaul),\n", + " xy=v.get_label_by_id(nohaul_id).get_position(),\n", + " xytext=(0, 0),\n", + " ha=\"center\",\n", + " textcoords=\"offset points\",\n", + " fontsize=10,\n", + " color=\"white\",\n", + " weight=\"bold\",\n", + " path_effects=[pe.withStroke(linewidth=4, foreground=colors[17], alpha=0.6)],\n", + ")\n", + "\n", + "plt.annotate(\n", + " format_hours_minutes(overlap_nohaul),\n", + " xy=v.get_label_by_id(overlap_id).get_position(),\n", + " xytext=(-20, 50),\n", + " ha=\"center\",\n", + " textcoords=\"offset points\",\n", + " fontsize=10,\n", + " color=overlap_color,\n", + " weight=\"bold\",\n", + " # path_effects=[pe.withStroke(linewidth=4, foreground=overlap_color, alpha = 1)],\n", + " arrowprops=dict(\n", + " arrowstyle=\"->\",\n", + " lw=1.5,\n", + " connectionstyle=\"arc3,rad=-0.3\",\n", + " color=\"white\",\n", + " edgecolor=overlap_color,\n", + " path_effects=[pe.withStroke(linewidth=4, foreground=overlap_color, alpha=0.6)],\n", + " ),\n", + " # bbox=dict(boxstyle='round,pad=0.2', fc=overlap_color, edgecolor = overlap_color, alpha = 1 )\n", + ")\n", + "\n", + "plt.annotate(\n", + " format_hours_minutes(haulstop_total - overlap_nohaul),\n", + " xy=v.get_label_by_id(haulstop_id).get_position(),\n", + " xytext=(0, 0),\n", + " ha=\"center\",\n", + " textcoords=\"offset points\",\n", + " fontsize=10,\n", + " color=\"white\",\n", + " weight=\"bold\",\n", + " path_effects=[pe.withStroke(linewidth=4, foreground=colors[12], alpha=0.6)],\n", + " # bbox=dict(boxstyle='round,pad=0.5', fc=, edgecolor = overlap_color, alpha = 1 )\n", + ")\n", + "\n", + "plt.savefig(\"haul_stop_venn.png\", bbox_inches=\"tight\")" ] }, { @@ -4374,7 +4693,7 @@ } ], "source": [ - "overlap_nohaul/haulstop_total" + "overlap_nohaul / haulstop_total" ] }, { @@ -4396,58 +4715,88 @@ ], "source": [ "# venn diagram for trip 3\n", - "from matplotlib_venn import venn2, venn2_circles\n", - "plt.figure(figsize=(3,3))\n", + "from matplotlib_venn import venn2\n", + "\n", + "plt.figure(figsize=(3, 3))\n", "# Use the venn2 function\n", "\n", "nohaul_color = colors[18]\n", 
"overlap_color = colors[17]\n", "haulstop_color = colors[13]\n", "\n", - "haulstop_id = '100'\n", - "overlap_id = '110'\n", - "nohaul_id = '010'\n", + "haulstop_id = \"100\"\n", + "overlap_id = \"110\"\n", + "nohaul_id = \"010\"\n", "\n", - "v = venn2(subsets = (overlap_total3, total_nohaul3, overlap_nohaul3), set_labels = ('Haul Stops', 'Haul Prediction Gaps'))\n", + "v = venn2(\n", + " subsets=(overlap_total3, total_nohaul3, overlap_nohaul3),\n", + " set_labels=(\"Haul Stops\", \"Haul Prediction Gaps\"),\n", + ")\n", "\n", - "v.get_label_by_id(haulstop_id).set_text('')\n", - "v.get_label_by_id(overlap_id).set_text('')\n", - "v.get_label_by_id(nohaul_id).set_text('')\n", - "v.get_label_by_id('A').set_fontsize(10)\n", - "v.get_label_by_id('B').set_fontsize(10)\n", + "v.get_label_by_id(haulstop_id).set_text(\"\")\n", + "v.get_label_by_id(overlap_id).set_text(\"\")\n", + "v.get_label_by_id(nohaul_id).set_text(\"\")\n", + "v.get_label_by_id(\"A\").set_fontsize(10)\n", + "v.get_label_by_id(\"B\").set_fontsize(10)\n", "\n", "\n", - "v.get_patch_by_id(overlap_id).set_color(overlap_color) # overlap\n", + "v.get_patch_by_id(overlap_id).set_color(overlap_color) # overlap\n", "v.get_patch_by_id(overlap_id).set_alpha(1)\n", "\n", - "v.get_patch_by_id(nohaul_id).set_color(nohaul_color) # no haul prediction\n", + "v.get_patch_by_id(nohaul_id).set_color(nohaul_color) # no haul prediction\n", "v.get_patch_by_id(nohaul_id).set_alpha(1)\n", "\n", - "v.get_patch_by_id(haulstop_id).set_color(haulstop_color) # haul stops\n", + "v.get_patch_by_id(haulstop_id).set_color(haulstop_color) # haul stops\n", "v.get_patch_by_id(haulstop_id).set_alpha(1)\n", "\n", "\n", - "\n", - "plt.annotate(format_hours_minutes(total_nohaul3-overlap_nohaul3), xy=v.get_label_by_id(nohaul_id).get_position(), xytext=(0,0), ha='center',\n", - " textcoords='offset points', fontsize = 10, color = 'white', weight = 'bold',\n", - " path_effects=[pe.withStroke(linewidth=4, foreground=colors[17], alpha = .6)]\n", - " )\n", - "\n", - "plt.annotate(format_hours_minutes(overlap_nohaul3), xy=v.get_label_by_id(overlap_id).get_position(), xytext=(-20,50), ha='center',\n", - " textcoords='offset points', fontsize = 10, color = overlap_color, weight = 'bold',\n", - " # path_effects=[pe.withStroke(linewidth=4, foreground=overlap_color, alpha = 1)],\n", - " arrowprops=dict(arrowstyle='->',lw = 1.5, connectionstyle='arc3,rad=-0.3',color='white', edgecolor=overlap_color, \n", - " path_effects=[pe.withStroke(linewidth=4, foreground=overlap_color, alpha = .6)]),\n", - " # bbox=dict(boxstyle='round,pad=0.2', fc=overlap_color, edgecolor = overlap_color, alpha = 1 )\n", - " )\n", - "\n", - "plt.annotate(format_hours_minutes(overlap_total3-overlap_nohaul3), xy=v.get_label_by_id(haulstop_id).get_position(), xytext=(0,0), ha='center',\n", - " textcoords='offset points', fontsize = 10, color = 'white', weight = 'bold', \n", - " path_effects=[pe.withStroke(linewidth=4, foreground=colors[12], alpha = .6)]\n", - " # bbox=dict(boxstyle='round,pad=0.5', fc=, edgecolor = overlap_color, alpha = 1 )\n", - " )\n", - "\n", - "plt.savefig('haul_stop_venn3.png', bbox_inches='tight')" + "plt.annotate(\n", + " format_hours_minutes(total_nohaul3 - overlap_nohaul3),\n", + " xy=v.get_label_by_id(nohaul_id).get_position(),\n", + " xytext=(0, 0),\n", + " ha=\"center\",\n", + " textcoords=\"offset points\",\n", + " fontsize=10,\n", + " color=\"white\",\n", + " weight=\"bold\",\n", + " path_effects=[pe.withStroke(linewidth=4, foreground=colors[17], alpha=0.6)],\n", + ")\n", + "\n", + 
"plt.annotate(\n", + " format_hours_minutes(overlap_nohaul3),\n", + " xy=v.get_label_by_id(overlap_id).get_position(),\n", + " xytext=(-20, 50),\n", + " ha=\"center\",\n", + " textcoords=\"offset points\",\n", + " fontsize=10,\n", + " color=overlap_color,\n", + " weight=\"bold\",\n", + " # path_effects=[pe.withStroke(linewidth=4, foreground=overlap_color, alpha = 1)],\n", + " arrowprops=dict(\n", + " arrowstyle=\"->\",\n", + " lw=1.5,\n", + " connectionstyle=\"arc3,rad=-0.3\",\n", + " color=\"white\",\n", + " edgecolor=overlap_color,\n", + " path_effects=[pe.withStroke(linewidth=4, foreground=overlap_color, alpha=0.6)],\n", + " ),\n", + " # bbox=dict(boxstyle='round,pad=0.2', fc=overlap_color, edgecolor = overlap_color, alpha = 1 )\n", + ")\n", + "\n", + "plt.annotate(\n", + " format_hours_minutes(overlap_total3 - overlap_nohaul3),\n", + " xy=v.get_label_by_id(haulstop_id).get_position(),\n", + " xytext=(0, 0),\n", + " ha=\"center\",\n", + " textcoords=\"offset points\",\n", + " fontsize=10,\n", + " color=\"white\",\n", + " weight=\"bold\",\n", + " path_effects=[pe.withStroke(linewidth=4, foreground=colors[12], alpha=0.6)],\n", + " # bbox=dict(boxstyle='round,pad=0.5', fc=, edgecolor = overlap_color, alpha = 1 )\n", + ")\n", + "\n", + "plt.savefig(\"haul_stop_venn3.png\", bbox_inches=\"tight\")" ] }, { @@ -4522,7 +4871,7 @@ } ], "source": [ - "(8*60)+16" + "(8 * 60) + 16" ] }, { @@ -4564,7 +4913,7 @@ } ], "source": [ - "overlap_total%60" + "overlap_total % 60" ] }, { @@ -4598,8 +4947,8 @@ "# Perform Chi-Square Test for each pair of events\n", "chi2_dict = {}\n", "for category in category_cols:\n", - " chi2, p = chi_square_test('predict_haul', category, df_category_only_hauls)\n", - " chi2_dict[category] = {'chi2': chi2, 'p':p}" + " chi2, p = chi_square_test(\"predict_haul\", category, df_category_only_hauls)\n", + " chi2_dict[category] = {\"chi2\": chi2, \"p\": p}" ] }, { @@ -4889,10 +5238,10 @@ } ], "source": [ - "x = 'is_haul_stop'\n", - "y = 'predict_no_haul'\n", - "z = 'predict_haul'\n", - "coocc[x][y]/coocc[x][x]" + "x = \"is_haul_stop\"\n", + "y = \"predict_no_haul\"\n", + "z = \"predict_haul\"\n", + "coocc[x][y] / coocc[x][x]" ] }, { @@ -4913,7 +5262,7 @@ } ], "source": [ - "coocc[y][y]+coocc[z][z]" + "coocc[y][y] + coocc[z][z]" ] }, { @@ -4931,12 +5280,12 @@ "metadata": {}, "outputs": [], "source": [ - "def association_confidence(x,y,coocc):\n", - " confidence = coocc[x][y]/coocc[x][x]\n", + "def association_confidence(x, y, coocc):\n", + " confidence = coocc[x][y] / coocc[x][x]\n", "\n", - " fraction_y = coocc[y][y]/len_cats\n", + " fraction_y = coocc[y][y] / len_cats\n", "\n", - " lift = confidence/fraction_y\n", + " lift = confidence / fraction_y\n", "\n", " return confidence, lift" ] @@ -4973,8 +5322,8 @@ "source": [ "for category in category_cols:\n", " print(category)\n", - " confidence, lift = association_confidence(category,'predict_no_haul', coocc)\n", - " print(f'confidence: {confidence}, lift: {lift}')" + " confidence, lift = association_confidence(category, \"predict_no_haul\", coocc)\n", + " print(f\"confidence: {confidence}, lift: {lift}\")" ] }, { @@ -5009,8 +5358,8 @@ "source": [ "for category in category_cols:\n", " print(category)\n", - " confidence, lift = association_confidence(category,'predict_no_haul', coocc)\n", - " print(f'confidence: {confidence}, lift: {lift}')" + " confidence, lift = association_confidence(category, \"predict_no_haul\", coocc)\n", + " print(f\"confidence: {confidence}, lift: {lift}\")" ] }, { diff --git 
a/notebooks/timeseries_classifier_model.ipynb b/notebooks/timeseries_classifier_model.ipynb index 29019cb..340cdbc 100644 --- a/notebooks/timeseries_classifier_model.ipynb +++ b/notebooks/timeseries_classifier_model.ipynb @@ -17,7 +17,6 @@ "metadata": {}, "outputs": [], "source": [ - " \n", "%autoreload 2" ] }, @@ -40,17 +39,19 @@ "from sklearn.linear_model import LinearRegression\n", "\n", "import warnings\n", - "warnings.filterwarnings('ignore')\n", + "\n", + "warnings.filterwarnings(\"ignore\")\n", "\n", "sns.set_theme()\n", "\n", - "import itertools \n", + "import itertools\n", "import matplotlib.gridspec as gridspec\n", "\n", "from matplotlib.dates import DayLocator, HourLocator, DateFormatter, drange\n", "\n", "import warnings\n", - "warnings.filterwarnings('ignore')\n", + "\n", + "warnings.filterwarnings(\"ignore\")\n", "import json\n", "from tsai.all import *\n", "from IPython.display import display, Markdown\n", @@ -81,7 +82,29 @@ "metadata": {}, "outputs": [], "source": [ - "colors = sns.color_palette(['#184EAD','#648fff','#88ccee','#ae9ef7','#6844d5','#332288','#c52dac','#ef4341','#84164c','#cb6577','#ff6100','#90550f','#c78d1b','#ffb003','#ddcc77','#a2c662','#40a018','#117347','#43aa99'])" + "colors = sns.color_palette(\n", + " [\n", + " \"#184EAD\",\n", + " \"#648fff\",\n", + " \"#88ccee\",\n", + " \"#ae9ef7\",\n", + " \"#6844d5\",\n", + " \"#332288\",\n", + " \"#c52dac\",\n", + " \"#ef4341\",\n", + " \"#84164c\",\n", + " \"#cb6577\",\n", + " \"#ff6100\",\n", + " \"#90550f\",\n", + " \"#c78d1b\",\n", + " \"#ffb003\",\n", + " \"#ddcc77\",\n", + " \"#a2c662\",\n", + " \"#40a018\",\n", + " \"#117347\",\n", + " \"#43aa99\",\n", + " ]\n", + ")" ] }, { @@ -91,7 +114,27 @@ "metadata": {}, "outputs": [], "source": [ - "color_list = ['#184EAD','#648fff','#88ccee','#ae9ef7','#6844d5','#332288','#c52dac','#ef4341','#84164c','#cb6577','#ff6100','#90550f','#c78d1b','#ffb003','#ddcc77','#a2c662','#40a018','#117347','#43aa99']" + "color_list = [\n", + " \"#184EAD\",\n", + " \"#648fff\",\n", + " \"#88ccee\",\n", + " \"#ae9ef7\",\n", + " \"#6844d5\",\n", + " \"#332288\",\n", + " \"#c52dac\",\n", + " \"#ef4341\",\n", + " \"#84164c\",\n", + " \"#cb6577\",\n", + " \"#ff6100\",\n", + " \"#90550f\",\n", + " \"#c78d1b\",\n", + " \"#ffb003\",\n", + " \"#ddcc77\",\n", + " \"#a2c662\",\n", + " \"#40a018\",\n", + " \"#117347\",\n", + " \"#43aa99\",\n", + "]" ] }, { @@ -102,14 +145,36 @@ "outputs": [], "source": [ "%matplotlib inline\n", + "\n", + "\n", "def show_color_pallete():\n", - "# fig, ax = plt.subplots()\n", - " color_list = ['#184EAD','#648fff','#88ccee','#ae9ef7','#6844d5','#332288','#c52dac','#ef4341','#84164c','#cb6577','#ff6100','#90550f','#c78d1b','#ffb003','#ddcc77','#a2c662','#40a018','#117347','#43aa99']\n", + " # fig, ax = plt.subplots()\n", + " color_list = [\n", + " \"#184EAD\",\n", + " \"#648fff\",\n", + " \"#88ccee\",\n", + " \"#ae9ef7\",\n", + " \"#6844d5\",\n", + " \"#332288\",\n", + " \"#c52dac\",\n", + " \"#ef4341\",\n", + " \"#84164c\",\n", + " \"#cb6577\",\n", + " \"#ff6100\",\n", + " \"#90550f\",\n", + " \"#c78d1b\",\n", + " \"#ffb003\",\n", + " \"#ddcc77\",\n", + " \"#a2c662\",\n", + " \"#40a018\",\n", + " \"#117347\",\n", + " \"#43aa99\",\n", + " ]\n", " sns.palplot(color_list, size=2)\n", " ax = plt.gca()\n", " for i, name in enumerate(color_list):\n", - " label = f'[{i}] {name}'\n", - " ax.text(i, -.57, label,horizontalalignment='center', fontsize = 10) \n", + " label = f\"[{i}] {name}\"\n", + " ax.text(i, -0.57, label, horizontalalignment=\"center\", fontsize=10)\n", " 
plt.show()" ] }, @@ -136,12 +201,12 @@ "# r2 = model.score(x,y)\n", "# coefficients = model.coef_\n", "# intercept = model.intercept_\n", - " \n", + "\n", "\n", "# beta0 = r'$intercept = \\hat\\beta_0 =$' + str(round(intercept[0],2))\n", - " \n", + "\n", "# beta1 = r'$slope = \\hat\\beta_1 =$' + str(round(coefficients[0][0],2))\n", - " \n", + "\n", "# r_squared = r'$R^2 =$' + str(round(r2,2))\n", "\n", "# textstr = '\\n'.join((\n", @@ -176,17 +241,17 @@ ], "source": [ "# ST Patrick Trips\n", - "StPatrick1data = get_data(boat = 'stpatrick', trip_no = 0)\n", - "StPatrick2data = get_data(boat = 'stpatrick', trip_no = 1)\n", + "StPatrick1data = get_data(boat=\"stpatrick\", trip_no=0)\n", + "StPatrick2data = get_data(boat=\"stpatrick\", trip_no=1)\n", "\n", - "counts_StPatrick1 = StPatrick1data['all_counts']\n", - "counts_StPatrick2 = StPatrick2data['all_counts']\n", + "counts_StPatrick1 = StPatrick1data[\"all_counts\"]\n", + "counts_StPatrick2 = StPatrick2data[\"all_counts\"]\n", "\n", - "stpatrick1_elog = StPatrick1data['elogs']\n", - "stpatrick2_elog = StPatrick2data['elogs']\n", + "stpatrick1_elog = StPatrick1data[\"elogs\"]\n", + "stpatrick2_elog = StPatrick2data[\"elogs\"]\n", "\n", - "stpatrick1_bv_set_counts = StPatrick1data['bv_set_counts']\n", - "stpatrick1_bv_set_counts = StPatrick2data['bv_set_counts']" + "stpatrick1_bv_set_counts = StPatrick1data[\"bv_set_counts\"]\n", + "stpatrick1_bv_set_counts = StPatrick2data[\"bv_set_counts\"]" ] }, { @@ -196,8 +261,8 @@ "metadata": {}, "outputs": [], "source": [ - "stpatrick1_bv_sets = StPatrick1data['bv_sets']\n", - "stpatrick2_bv_sets = StPatrick2data['bv_sets']" + "stpatrick1_bv_sets = StPatrick1data[\"bv_sets\"]\n", + "stpatrick2_bv_sets = StPatrick2data[\"bv_sets\"]" ] }, { @@ -366,24 +431,24 @@ ], "source": [ "# Brancol Trips\n", - "Brancol1data = get_data(boat = 'brancol', trip_no = 0)\n", - "Brancol2data = get_data(boat = 'brancol', trip_no = 1)\n", + "Brancol1data = get_data(boat=\"brancol\", trip_no=0)\n", + "Brancol2data = get_data(boat=\"brancol\", trip_no=1)\n", "\n", "\n", - "counts_Brancol1 = Brancol1data['all_counts']\n", - "counts_Brancol2 = Brancol2data['all_counts']\n", + "counts_Brancol1 = Brancol1data[\"all_counts\"]\n", + "counts_Brancol2 = Brancol2data[\"all_counts\"]\n", "\n", - "brancol1_elog = Brancol1data['elogs']\n", - "brancol2_elog = Brancol2data['elogs']\n", + "brancol1_elog = Brancol1data[\"elogs\"]\n", + "brancol2_elog = Brancol2data[\"elogs\"]\n", "\n", - "brancol1_bv_sets = Brancol1data['bv_sets']\n", - "brancol2_bv_sets = Brancol2data['bv_sets']\n", + "brancol1_bv_sets = Brancol1data[\"bv_sets\"]\n", + "brancol2_bv_sets = Brancol2data[\"bv_sets\"]\n", "\n", - "brancol1_bv_set_counts = Brancol1data['bv_set_counts']\n", - "brancol2_bv_set_counts = Brancol2data['bv_set_counts']\n", + "brancol1_bv_set_counts = Brancol1data[\"bv_set_counts\"]\n", + "brancol2_bv_set_counts = Brancol2data[\"bv_set_counts\"]\n", "\n", - "brancol2trip = Brancol2data['trip_info']\n", - "brancol1trip = Brancol1data['trip_info']" + "brancol2trip = Brancol2data[\"trip_info\"]\n", + "brancol1trip = Brancol1data[\"trip_info\"]" ] }, { @@ -420,37 +485,35 @@ } ], "source": [ + "fig, ax = plt.subplots(2, 1, figsize=(40, 10))\n", "\n", - "\n", - "fig, ax = plt.subplots(2, 1, figsize=(40,10))\n", - "\n", - "dfBrancol1 = Brancol1data['ai_sets']\n", + "dfBrancol1 = Brancol1data[\"ai_sets\"]\n", "data = dfBrancol1\n", - "sets = Brancol1data['bv_sets']\n", + "sets = Brancol1data[\"bv_sets\"]\n", "haul_starts = 
sets.haul_start_datetime.unique().dropna()\n", "haul_ends = sets.haul_end_datetime.unique().dropna()\n", - "ymax = data['count'].max()\n", + "ymax = data[\"count\"].max()\n", "\n", - "ax[0].title.set_text('Brancol Trip 1')\n", - "sns.lineplot(x = 'utc_start_datetime', y = 'count', data = data, ax = ax[0], alpha = .5)\n", - "#draw verticle lines (I can do this in one line)\n", - "ax[0].vlines(haul_starts, 0, ymax, colors='green')\n", - "ax[0].vlines(haul_ends, 0, ymax, colors='r')\n", - "ax[0].set_xlabel('')\n", + "ax[0].title.set_text(\"Brancol Trip 1\")\n", + "sns.lineplot(x=\"utc_start_datetime\", y=\"count\", data=data, ax=ax[0], alpha=0.5)\n", + "# draw vertical lines (I can do this in one line)\n", + "ax[0].vlines(haul_starts, 0, ymax, colors=\"green\")\n", + "ax[0].vlines(haul_ends, 0, ymax, colors=\"r\")\n", + "ax[0].set_xlabel(\"\")\n", "\n", - "dfBrancol2 = Brancol2data['ai_sets']\n", + "dfBrancol2 = Brancol2data[\"ai_sets\"]\n", "data = dfBrancol2\n", - "sets = Brancol2data['bv_sets']\n", + "sets = Brancol2data[\"bv_sets\"]\n", "haul_starts = sets.haul_start_datetime.unique().dropna()\n", "haul_ends = sets.haul_end_datetime.unique().dropna()\n", - "ymax = data['count'].max()\n", + "ymax = data[\"count\"].max()\n", "\n", - "ax[1].title.set_text('Brancol Trip 2')\n", - "sns.lineplot(x = 'utc_start_datetime', y = 'count', data = data, ax = ax[1], alpha = .5)\n", - "#draw verticle lines (I can do this in one line)\n", - "ax[1].vlines(haul_starts, 0, ymax, colors='green')\n", - "ax[1].vlines(haul_ends, 0, ymax, colors='r')\n", - "ax[1].set_xlabel('')\n", + "ax[1].title.set_text(\"Brancol Trip 2\")\n", + "sns.lineplot(x=\"utc_start_datetime\", y=\"count\", data=data, ax=ax[1], alpha=0.5)\n", + "# draw vertical lines (I can do this in one line)\n", + "ax[1].vlines(haul_starts, 0, ymax, colors=\"green\")\n", + "ax[1].vlines(haul_ends, 0, ymax, colors=\"r\")\n", + "ax[1].set_xlabel(\"\")\n", "\n", "plt.show()" ] @@ -482,50 +545,50 @@ "source": [ "def plot_hlines(ax, df, y_val, start_col, end_col, width, color, label):\n", " ax.hlines(\n", - " y = np.full(len(df), y_val),\n", - " xmin = df[start_col].values.reshape((-1,1)),\n", - " xmax =df[end_col].values.reshape((-1,1)),\n", - " linewidth = width,colors= color, label = label\n", + " y=np.full(len(df), y_val),\n", + " xmin=df[start_col].values.reshape((-1, 1)),\n", + " xmax=df[end_col].values.reshape((-1, 1)),\n", + " linewidth=width,\n", + " colors=color,\n", + " label=label,\n", + " )\n", "\n", + "\n", "def plot_set_hlines(ax, df, source, color_dict):\n", - " if source == 'elog':\n", - " set_start_col = 'systemstartsetdatetime'\n", - " set_end_col = 'systemendsetdatetime'\n", - " haul_start_col = 'systemstarthauldatetime'\n", - " haul_end_col = 'systemendhauldatetime'\n", + " if source == \"elog\":\n", + " set_start_col = \"systemstartsetdatetime\"\n", + " set_end_col = \"systemendsetdatetime\"\n", + " haul_start_col = \"systemstarthauldatetime\"\n", + " haul_end_col = \"systemendhauldatetime\"\n", " y_val = 1\n", "\n", - "\n", - " elif source == 'bv':\n", - " set_start_col = 'set_start_datetime'\n", - " set_end_col = 'set_end_datetime'\n", - " haul_start_col = 'haul_start_datetime'\n", - " haul_end_col = 'haul_end_datetime'\n", + " elif source == \"bv\":\n", + " set_start_col = \"set_start_datetime\"\n", + " set_end_col = \"set_end_datetime\"\n", + " haul_start_col = \"haul_start_datetime\"\n", + " haul_end_col = \"haul_end_datetime\"\n", " y_val = 0\n", - " \n", - " colors = color_dict[source] \n", - " \n", - " \n", - " 
#plot_hauling\n", - " plot_hlines(ax, df,y_val, haul_start_col, haul_end_col, 12, colors['haul'], 'haul')\n", "\n", - " #plot tweener time\n", - " plot_hlines(ax, df,y_val, set_end_col, haul_start_col, 12, colors['between'], 'mid')\n", + " colors = color_dict[source]\n", + "\n", + " # plot_hauling\n", + " plot_hlines(ax, df, y_val, haul_start_col, haul_end_col, 12, colors[\"haul\"], \"haul\")\n", + "\n", + " # plot tweener time\n", + " plot_hlines(ax, df, y_val, set_end_col, haul_start_col, 12, colors[\"between\"], \"mid\")\n", "\n", - " #plot setting\n", - " plot_hlines(ax, df, y_val, set_start_col, set_end_col, 12, colors['set'], 'set')\n", + " # plot setting\n", + " plot_hlines(ax, df, y_val, set_start_col, set_end_col, 12, colors[\"set\"], \"set\")\n", "\n", "\n", "def annotate_counts(ax, df, count_col, x_col, y_value):\n", - " props = dict(boxstyle='round', facecolor='white', alpha=0.35)\n", + " props = dict(boxstyle=\"round\", facecolor=\"white\", alpha=0.35)\n", " for idx, row in df.iterrows():\n", - " text = f'count: {row[count_col]}'\n", + " text = f\"count: {row[count_col]}\"\n", " x_value = row[x_col]\n", - " ax.text( x_value,y_value , text, fontsize=10, horizontalalignment='right', bbox=props)\n", - " \n", - " \n", - " # ax.text(.02, .9, f'r2={rvalue ** 2:.2f}, p={pvalue:.2g}, rmse={rmse:.2f}', transform=ax.transAxes) " + " ax.text(x_value, y_value, text, fontsize=10, horizontalalignment=\"right\", bbox=props)\n", + "\n", + " # ax.text(.02, .9, f'r2={rvalue ** 2:.2f}, p={pvalue:.2g}, rmse={rmse:.2f}', transform=ax.transAxes)" ] }, { @@ -536,35 +599,34 @@ "outputs": [], "source": [ "def plot_set_bars(ax, df, source, color_dict):\n", - " if source == 'elog':\n", - " set_start_col = 'systemstartsetdatetime'\n", - " set_end_col = 'systemendsetdatetime'\n", - " haul_start_col = 'systemstarthauldatetime'\n", - " haul_end_col = 'systemendhauldatetime'\n", + " if source == \"elog\":\n", + " set_start_col = \"systemstartsetdatetime\"\n", + " set_end_col = \"systemendsetdatetime\"\n", + " haul_start_col = \"systemstarthauldatetime\"\n", + " haul_end_col = \"systemendhauldatetime\"\n", " y_val = 1.7\n", - " \n", - " elif source == 'bv':\n", - " set_start_col = 'set_start_datetime'\n", - " set_end_col = 'set_end_datetime'\n", - " haul_start_col = 'haul_start_datetime'\n", - " haul_end_col = 'haul_end_datetime'\n", - " y_val = .7\n", - "\n", - " df['set_duration'] = df[set_end_col] - df[set_start_col]\n", - " df['haul_duration'] = df[haul_end_col] - df[haul_start_col]\n", - " df['mid_duration'] = df[haul_start_col] - df[set_end_col]\n", - " \n", - " set_x = list(zip(df[set_start_col], df['set_duration']))\n", - " haul_x = list(zip(df[haul_start_col], df['haul_duration']))\n", - " mid_x = list(zip(df[set_end_col], df['mid_duration']))\n", - " \n", - " y = (y_val, .6)\n", + "\n", + " elif source == \"bv\":\n", + " set_start_col = \"set_start_datetime\"\n", + " set_end_col = \"set_end_datetime\"\n", + " haul_start_col = \"haul_start_datetime\"\n", + " haul_end_col = \"haul_end_datetime\"\n", + " y_val = 0.7\n", + "\n", + " df[\"set_duration\"] = df[set_end_col] - df[set_start_col]\n", + " df[\"haul_duration\"] = df[haul_end_col] - df[haul_start_col]\n", + " df[\"mid_duration\"] = df[haul_start_col] - df[set_end_col]\n", + "\n", + " set_x = list(zip(df[set_start_col], df[\"set_duration\"]))\n", + " haul_x = list(zip(df[haul_start_col], df[\"haul_duration\"]))\n", + " mid_x = list(zip(df[set_end_col], df[\"mid_duration\"]))\n", + "\n", + " y = (y_val, 0.6)\n", "\n", " colors = 
color_dict[source]\n", - " ax.broken_barh(mid_x, y, facecolors = colors['mid'], edgecolor = 'face')\n", - " ax.broken_barh(haul_x, y, facecolors = colors['haul'], edgecolor = 'face')\n", - " ax.broken_barh(set_x, y, facecolors = colors['set'], edgecolor = 'face')\n", - " " + " ax.broken_barh(mid_x, y, facecolors=colors[\"mid\"], edgecolor=\"face\")\n", + " ax.broken_barh(haul_x, y, facecolors=colors[\"haul\"], edgecolor=\"face\")\n", + " ax.broken_barh(set_x, y, facecolors=colors[\"set\"], edgecolor=\"face\")\n" ] }, { @@ -574,48 +636,42 @@ "metadata": {}, "outputs": [], "source": [ - "def plot_elog_comparisons(ax, dfElog, dfBV, title, legend = True, annotate_counts = False, display_axis= True):\n", - "\n", + "def plot_elog_comparisons(\n", + " ax, dfElog, dfBV, title, legend=True, annotate_counts=False, display_axis=True\n", + "):\n", " # ax[0].autofmt_xdate()\n", "\n", - " \n", - " ax.set_yticks([1,2],('bv','elogs'))\n", + " ax.set_yticks([1, 2], (\"bv\", \"elogs\"))\n", " # ax.set_yticks([0,1,2],('bv','elogs',' '))\n", - " fig.suptitle(titles['main'], fontsize = 20)\n", - " \n", + " fig.suptitle(titles[\"main\"], fontsize=20)\n", + "\n", " # df1 = brancol1_elog\n", " # df1sets =brancol1_bv_sets\n", "\n", - " \n", - "\n", " if annotate_counts:\n", - " dfElog['totalcount'] = dfElog['bycatchcount'].astype(int) + dfElog['catchcount'].astype(int)\n", - " dfBV['retained_count'] = dfBV['retained_count'].astype('Int64')\n", - " annotate_counts(ax, dfElog, 'totalcount', 'systemstarthauldatetime', 1.2)\n", - " annotate_counts(ax, dfBV, 'retained_count', 'haul_start_datetime', 0.2)\n", - " \n", + " dfElog[\"totalcount\"] = dfElog[\"bycatchcount\"].astype(int) + dfElog[\"catchcount\"].astype(int)\n", + " dfBV[\"retained_count\"] = dfBV[\"retained_count\"].astype(\"Int64\")\n", + " annotate_counts(ax, dfElog, \"totalcount\", \"systemstarthauldatetime\", 1.2)\n", + " annotate_counts(ax, dfBV, \"retained_count\", \"haul_start_datetime\", 0.2)\n", "\n", - " plot_set_bars(ax, dfElog, 'elog', color_dict)\n", - " plot_set_bars(ax, dfBV, 'bv', color_dict)\n", + " plot_set_bars(ax, dfElog, \"elog\", color_dict)\n", + " plot_set_bars(ax, dfBV, \"bv\", color_dict)\n", "\n", - " \n", + " ax.set_title(title, x=0.1, y=1, fontsize=9)\n", "\n", - " ax.set_title(title,x = .1, y = 1, fontsize = 9)\n", - " \n", " # ax.autoscale()\n", " # ax[0].set_ylim(-.5,1.5)\n", " # ax[0].tick_params(axis='x', labelrotation=45)\n", "\n", " if legend:\n", " legend_elements = []\n", - " for label, color in color_dict['elog'].items():\n", - " \n", - " legend_elements.append(Patch(facecolor=color, edgecolor=color,\n", - " label=label))\n", - " ax.legend(handles = legend_elements, loc='center', bbox_to_anchor=(.5, -1), ncol = 3, fontsize = 8)\n", + " for label, color in color_dict[\"elog\"].items():\n", + " legend_elements.append(Patch(facecolor=color, edgecolor=color, label=label))\n", + " ax.legend(\n", + " handles=legend_elements, loc=\"center\", bbox_to_anchor=(0.5, -1), ncol=3, fontsize=8\n", + " )\n", "\n", - " \n", - " #use consise date formater\n", + " # use concise date formatter\n", "\n", " if display_axis:\n", " locator = DayLocator()\n", @@ -642,8 +698,8 @@ "outputs": [], "source": [ "color_dict = {\n", - " 'bv': {'set':'#40a018', 'haul':'#117347', 'mid':'#a2c662'},\n", - " 'elog': {'set':'#40a018', 'haul':'#117347', 'mid':'#a2c662'},\n", + " \"bv\": {\"set\": \"#40a018\", \"haul\": \"#117347\", \"mid\": \"#a2c662\"},\n", + " \"elog\": {\"set\": \"#40a018\", \"haul\": \"#117347\", \"mid\": \"#a2c662\"},\n", " # 
'elog':{'set':'#648fff', 'haul':'#184EAD', 'mid':'#88ccee'}\n", "}" ] @@ -681,21 +737,26 @@ "source": [ "# metrics.ConfusionMatrixDisplay(cm).plot(cmap = 'Blues',ax = ax)\n", "def plot_confusion_matrix(cm, ax, interp, title):\n", - "\n", - " ax.imshow(cm, interpolation='nearest', cmap = 'Blues')\n", + " ax.imshow(cm, interpolation=\"nearest\", cmap=\"Blues\")\n", " tick_marks = np.arange(len(interp.vocab))\n", " ax.set_xticks(tick_marks, interp.vocab, rotation=0)\n", " ax.set_yticks(tick_marks, interp.vocab, rotation=0)\n", - " ax.set_xlabel('Predicted')\n", - " ax.set_ylabel('Actual')\n", - " ax.set_ylim(len(interp.vocab)-.5,-.5)\n", + " ax.set_xlabel(\"Predicted\")\n", + " ax.set_ylabel(\"Actual\")\n", + " ax.set_ylim(len(interp.vocab) - 0.5, -0.5)\n", " ax.grid(False)\n", - " \n", - " thresh = cm.max() / 2.\n", + "\n", + " thresh = cm.max() / 2.0\n", " for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):\n", - " coeff = f'{cm[i, j]}'\n", - " plt.text(j, i, coeff, horizontalalignment=\"center\", verticalalignment=\"center\", color=\"white\"\n", - " if cm[i, j] > thresh else \"black\")" + " coeff = f\"{cm[i, j]}\"\n", + " plt.text(\n", + " j,\n", + " i,\n", + " coeff,\n", + " horizontalalignment=\"center\",\n", + " verticalalignment=\"center\",\n", + " color=\"white\" if cm[i, j] > thresh else \"black\",\n", + " )" ] }, { @@ -706,14 +767,14 @@ "outputs": [], "source": [ "def prep_data(df):\n", - " df.sort_values(by = 'utc_start_datetime', inplace = True)\n", - " X = df.drop(columns = 'is_haul')\n", - " y = df['is_haul'].astype('int').to_numpy()\n", + " df.sort_values(by=\"utc_start_datetime\", inplace=True)\n", + " X = df.drop(columns=\"is_haul\")\n", + " y = df[\"is_haul\"].astype(\"int\").to_numpy()\n", "\n", - " X.loc[:,'utc_start_datetime'] = X.loc[:,'utc_start_datetime'].astype('int64')\n", - " X = np.atleast_3d(X).transpose(0,2,1)\n", + " X.loc[:, \"utc_start_datetime\"] = X.loc[:, \"utc_start_datetime\"].astype(\"int64\")\n", + " X = np.atleast_3d(X).transpose(0, 2, 1)\n", "\n", - " haul_map = {1:'haul', 0:'no_haul'}\n", + " haul_map = {1: \"haul\", 0: \"no_haul\"}\n", " labeler = ReLabeler(haul_map)\n", " y = labeler(y)\n", " return X, y" @@ -735,8 +796,8 @@ "outputs": [], "source": [ "# training data\n", - "dfAiSets_Brancol1 = Brancol1data['ai_sets'].copy()\n", - "dfAiSets_Brancol1.set_index('utc_start_datetime', inplace = True)" + "dfAiSets_Brancol1 = Brancol1data[\"ai_sets\"].copy()\n", + "dfAiSets_Brancol1.set_index(\"utc_start_datetime\", inplace=True)" ] }, { @@ -747,8 +808,8 @@ "outputs": [], "source": [ "# testing data\n", - "dfAiSets_Brancol2 = Brancol2data['ai_sets'].copy()\n", - "dfAiSets_Brancol2.set_index('utc_start_datetime', inplace = True)" + "dfAiSets_Brancol2 = Brancol2data[\"ai_sets\"].copy()\n", + "dfAiSets_Brancol2.set_index(\"utc_start_datetime\", inplace=True)" ] }, { @@ -758,12 +819,12 @@ "metadata": {}, "outputs": [], "source": [ - "win = '2h'\n", - "agg_dict = {'weighted_count':'sum','count':'sum'}\n", - "keep_cols = ['weighted_count','detection_confidence','count','is_haul', 'id']\n", + "win = \"2h\"\n", + "agg_dict = {\"weighted_count\": \"sum\", \"count\": \"sum\"}\n", + "keep_cols = [\"weighted_count\", \"detection_confidence\", \"count\", \"is_haul\", \"id\"]\n", "\n", - "df_train = add_rolling_aggregates(dfAiSets_Brancol1, '2h', agg_dict, keep_cols)\n", - "df_test = add_rolling_aggregates(dfAiSets_Brancol2, '2h', agg_dict, keep_cols)" + "df_train = add_rolling_aggregates(dfAiSets_Brancol1, \"2h\", agg_dict, keep_cols)\n", + 
"df_test = add_rolling_aggregates(dfAiSets_Brancol2, \"2h\", agg_dict, keep_cols)" ] }, { @@ -774,9 +835,9 @@ "outputs": [], "source": [ "# df_train['id'].fillna(0, inplace = True)\n", - "df_train['id'] = df_train['id'].astype(int)\n", + "df_train[\"id\"] = df_train[\"id\"].astype(int)\n", "# df_test['id'].fillna(0, inplace = True)\n", - "df_test['id'] = df_test['id'].astype(int)" + "df_test[\"id\"] = df_test[\"id\"].astype(int)" ] }, { @@ -786,8 +847,8 @@ "metadata": {}, "outputs": [], "source": [ - "df_train.dropna(inplace = True)\n", - "df_test.dropna(inplace = True)" + "df_train.dropna(inplace=True)\n", + "df_test.dropna(inplace=True)" ] }, { @@ -826,7 +887,7 @@ "metadata": {}, "outputs": [], "source": [ - "X,y = prep_data(df_train)\n", + "X, y = prep_data(df_train)\n", "X_test, y_test = prep_data(df_test)" ] }, @@ -869,16 +930,18 @@ ], "source": [ "## train, test, validation splits\n", - "splits = get_splits(y, \n", - " n_splits=1, \n", - " valid_size=0.3, \n", - " test_size=0.1, \n", - " shuffle=True, \n", - " balance=True, \n", - " stratify=True,\n", - " random_state=42, \n", - " show_plot=True, \n", - " verbose=True)\n", + "splits = get_splits(\n", + " y,\n", + " n_splits=1,\n", + " valid_size=0.3,\n", + " test_size=0.1,\n", + " shuffle=True,\n", + " balance=True,\n", + " stratify=True,\n", + " random_state=42,\n", + " show_plot=True,\n", + " verbose=True,\n", + ")\n", "splits" ] }, @@ -891,11 +954,11 @@ "source": [ "## dataset and loaders\n", "\n", - "tfms = [None, [Categorize()]]\n", + "tfms = [None, [Categorize()]]\n", "dsets = TSDatasets(X, y, tfms=tfms, splits=splits)\n", - " \n", + "\n", "bs = 10\n", - "dls = TSDataLoaders.from_dsets(dsets.train, dsets.valid, bs=[bs, bs*2])" + "dls = TSDataLoaders.from_dsets(dsets.train, dsets.valid, bs=[bs, bs * 2])" ] }, { @@ -907,18 +970,18 @@ "source": [ "# all the different models to test\n", "archs = [\n", - " (RNNPlus, {'n_layers':3, 'bidirectional': True} ),\n", - " (LSTMPlus,{'n_layers':3, 'bidirectional': True} ),\n", - " (LSTMPlus,{'n_layers':4, 'bidirectional': True} ),\n", - " (GRUPlus, {'n_layers':3, 'bidirectional': True} ), \n", - " (RNNPlus, {'n_layers':4, 'bidirectional': True} ),\n", - " (RNNPlus, {'n_layers':4, 'bidirectional': True}), \n", - " (LSTM, {'n_layers':3, 'bidirectional': False}), \n", - " (RNN, {'n_layers':3, 'bidirectional': True} ), \n", - " (LSTM, {'n_layers':3, 'bidirectional': True} ),\n", - " (LSTM, {'n_layers':4, 'bidirectional': True} ),\n", - " (GRU, {'n_layers':3, 'bidirectional': True} ), \n", - " ]" + " (RNNPlus, {\"n_layers\": 3, \"bidirectional\": True}),\n", + " (LSTMPlus, {\"n_layers\": 3, \"bidirectional\": True}),\n", + " (LSTMPlus, {\"n_layers\": 4, \"bidirectional\": True}),\n", + " (GRUPlus, {\"n_layers\": 3, \"bidirectional\": True}),\n", + " (RNNPlus, {\"n_layers\": 4, \"bidirectional\": True}),\n", + " (RNNPlus, {\"n_layers\": 4, \"bidirectional\": True}),\n", + " (LSTM, {\"n_layers\": 3, \"bidirectional\": False}),\n", + " (RNN, {\"n_layers\": 3, \"bidirectional\": True}),\n", + " (LSTM, {\"n_layers\": 3, \"bidirectional\": True}),\n", + " (LSTM, {\"n_layers\": 4, \"bidirectional\": True}),\n", + " (GRU, {\"n_layers\": 3, \"bidirectional\": True}),\n", + "]" ] }, { @@ -935,11 +998,11 @@ "# results = pd.DataFrame(columns=['arch', 'hyperparams', 'total params', 'train loss', 'valid loss', 'accuracy', 'time'])\n", "# models = {}\n", "# for i, (arch, k) in enumerate(archs):\n", - " \n", + "\n", "# model = create_model(arch, dls=dls, **k)\n", - " \n", + "\n", "# 
print(model.__class__.__name__)\n", - " \n", + "\n", "# learn = Learner(dls, model, metrics=accuracy)\n", "# start = time.time()\n", "# learn.fit_one_cycle(20, 1e-3)\n", @@ -947,9 +1010,9 @@ "# vals = learn.recorder.values[-1]\n", "# results.loc[i] = [arch.__name__, k, count_parameters(model), vals[0], vals[1], vals[2], int(elapsed)]\n", "# results.sort_values(by='accuracy', ascending=False, ignore_index=True, inplace=True)\n", - " \n", + "\n", "# models[f'{arch.__name__} {k}'] = learn\n", - " \n", + "\n", "# clear_output()\n", "# display(results)\n", "# return models" @@ -971,7 +1034,7 @@ ], "source": [ "if \"RNNPlus {'n_layers': 3, 'bidirectional': True}\" in models.keys():\n", - " print('yup!')" + " print(\"yup!\")" ] }, { @@ -1173,26 +1236,32 @@ "from IPython.display import clear_output\n", "\n", "\n", - "\n", - "results = pd.DataFrame(columns=['arch', 'hyperparams', 'total params', 'train loss', 'valid loss', 'accuracy', 'time'])\n", + "results = pd.DataFrame(\n", + " columns=[\"arch\", \"hyperparams\", \"total params\", \"train loss\", \"valid loss\", \"accuracy\", \"time\"]\n", + ")\n", "models = {}\n", "for i, (arch, k) in enumerate(archs):\n", - "\n", - "\n", - "\n", " model = create_model(arch, dls=dls, **k)\n", - " \n", + "\n", " print(model.__class__.__name__)\n", - " \n", - " learn = Learner(dls, model, metrics=accuracy)\n", + "\n", + " learn = Learner(dls, model, metrics=accuracy)\n", " start = time.time()\n", " learn.fit_one_cycle(20, 1e-3)\n", " elapsed = time.time() - start\n", " vals = learn.recorder.values[-1]\n", - " results.loc[i] = [arch.__name__, k, count_parameters(model), vals[0], vals[1], vals[2], int(elapsed)]\n", - " results.sort_values(by='accuracy', ascending=False, ignore_index=True, inplace=True)\n", - "\n", - " models[f'{arch.__name__} {k}'] = learn\n", + " results.loc[i] = [\n", + " arch.__name__,\n", + " k,\n", + " count_parameters(model),\n", + " vals[0],\n", + " vals[1],\n", + " vals[2],\n", + " int(elapsed),\n", + " ]\n", + " results.sort_values(by=\"accuracy\", ascending=False, ignore_index=True, inplace=True)\n", + "\n", + " models[f\"{arch.__name__} {k}\"] = learn\n", "\n", " clear_output()\n", " display(results)" @@ -2062,7 +2131,7 @@ " # add a new subplot iteratively using nrows and cols\n", " ax = plt.subplot(nrows, ncols, n + 1)\n", " ax.set_title(arch)\n", - " # plt.sca(ax) \n", + " # plt.sca(ax)\n", " interp = ClassificationInterpretation.from_learner(model)\n", " plot_confusion_matrix(interp.confusion_matrix(), ax, interp, arch)\n", " # print(type(fig))\n", @@ -2103,13 +2172,13 @@ "metadata": {}, "outputs": [], "source": [ - "# choosing the RNN Plus model and saving it \n", - "arch, k = (RNNPlus, {'n_layers':4, 'bidirectional': True})\n", + "# choosing the RNN Plus model and saving it\n", + "arch, k = (RNNPlus, {\"n_layers\": 4, \"bidirectional\": True})\n", "model = create_model(arch, dls=dls, **k)\n", "\n", - "learner = load_model('models/rnn_plus_haul_classifier.pth', model, opt = None, with_opt = False)\n", + "learner = load_model(\"models/rnn_plus_haul_classifier.pth\", model, opt=None, with_opt=False)\n", "\n", - "learner = Learner(dls, model, metrics=accuracy)" + "learner = Learner(dls, model, metrics=accuracy)" ] }, { @@ -2135,22 +2204,30 @@ "metadata": {}, "outputs": [], "source": [ - "def plot_event_bars(df, ax,label, datetime_col,duration=None,end_col = None, duration_col = None, y_val=.7, y_height = .6, color= '#43aa99' ):\n", - " \n", + "def plot_event_bars(\n", + " df,\n", + " ax,\n", + " label,\n", + " datetime_col,\n", + " 
duration=None,\n", + " end_col=None,\n", + " duration_col=None,\n", + " y_val=0.7,\n", + " y_height=0.6,\n", + " color=\"#43aa99\",\n", + "):\n", " if duration:\n", " x_duration = np.full(len(df), pd.Timedelta(duration))\n", " elif end_col:\n", - " df['duration'] = df[end_col]- df[datetime_col]\n", - " x_duration = df['duration']\n", + " df[\"duration\"] = df[end_col] - df[datetime_col]\n", + " x_duration = df[\"duration\"]\n", " elif duration_col:\n", " x_duration = df[duration_col]\n", - " \n", + "\n", " x = list(zip(df[datetime_col], x_duration))\n", " y = (y_val, y_height)\n", "\n", - " ax.broken_barh(x, y, facecolors = color, edgecolor = 'face', label = label, clip_on=False)\n", - "\n", - " " + " ax.broken_barh(x, y, facecolors=color, edgecolor=\"face\", label=label, clip_on=False)\n" ] }, { @@ -2162,10 +2239,15 @@ "source": [ "def plot_event_vspan(df_events, ax, color_dict):\n", " for category, color in color_dict.items():\n", - " df_category = df_events.loc[df_events['category']==category]\n", - " \n", + " df_category = df_events.loc[df_events[\"category\"] == category]\n", + "\n", " for idx, row in df_category.iterrows():\n", - " ax.axvspan(*mdates.date2num([row['start_time'], row['end_time']]), color=color, edgecolor = 'face',alpha=0.5)" + " ax.axvspan(\n", + " *mdates.date2num([row[\"start_time\"], row[\"end_time\"]]),\n", + " color=color,\n", + " edgecolor=\"face\",\n", + " alpha=0.5,\n", + " )" ] }, { @@ -2175,25 +2257,31 @@ "metadata": {}, "outputs": [], "source": [ - "def plot_set_vspan(df_sets, ax, color = '#a2c662'):\n", + "def plot_set_vspan(df_sets, ax, color=\"#a2c662\"):\n", " for idx, row in df_sets.iterrows():\n", - " ax.axvspan(*mdates.date2num([row['haul_start_datetime'], row['haul_end_datetime']]), color=color, edgecolor = '#40a018',alpha=0.5)\n", + " ax.axvspan(\n", + " *mdates.date2num([row[\"haul_start_datetime\"], row[\"haul_end_datetime\"]]),\n", + " color=color,\n", + " edgecolor=\"#40a018\",\n", + " alpha=0.5,\n", + " )\n", + "\n", "\n", "def get_video_times(vessel, trip_info):\n", - " trip_start_date = trip_info['trip_start_date']\n", - " trip_end_date = trip_info['trip_end_date']\n", + " trip_start_date = trip_info[\"trip_start_date\"]\n", + " trip_end_date = trip_info[\"trip_end_date\"]\n", "\n", " sql = f\"\"\"\n", - " SELECT \n", - " v.start_datetime, \n", + " SELECT\n", + " v.start_datetime,\n", " v.cam_name\n", - " \n", - " from {vessel}_v1_video_files v \n", + "\n", + " from {vessel}_v1_video_files v\n", " where start_datetime > '{trip_start_date}' and start_datetime < '{trip_end_date}'\n", " \"\"\"\n", " video_df = wr.athena.read_sql_query(sql, database=\"tnc_edge\")\n", " video_df.start_datetime = pd.to_datetime(video_df.start_datetime)\n", - " video_df['utc_start_datetime'] = video_df['start_datetime'].dt.tz_convert(None)\n", + " video_df[\"utc_start_datetime\"] = video_df[\"start_datetime\"].dt.tz_convert(None)\n", " # video_df['utc_end_datetime'] = video_df['utc_start_datetime'] + pd.Timedelta(minutes = 5)\n", " return video_df" ] @@ -2205,33 +2293,35 @@ "metadata": {}, "outputs": [], "source": [ - "def annotate_notes(ax, df, text_col, text_xy = (-60, 30)):\n", - " arrowprops=dict(arrowstyle=\"->\",connectionstyle=\"arc3,rad=.2\", color = 'black')\n", + "def annotate_notes(ax, df, text_col, text_xy=(-60, 30)):\n", + " arrowprops = dict(arrowstyle=\"->\", connectionstyle=\"arc3,rad=.2\", color=\"black\")\n", " y_var = 20\n", " annots = []\n", - " bbox_args = dict(boxstyle='round', facecolor='black', alpha=0.35)\n", + " bbox_args = 
dict(boxstyle=\"round\", facecolor=\"black\", alpha=0.35)\n", " for idx, row in df.iterrows():\n", " # y_var = x_vars[n]\n", " text = row[text_col]\n", - " data_xy = (row['start_time'], 1.7)\n", - " \n", + " data_xy = (row[\"start_time\"], 1.7)\n", + "\n", " an = ax.annotate(\n", " text,\n", - " xy=data_xy, xycoords='data',\n", - " xytext=text_xy, textcoords='offset points',\n", + " xy=data_xy,\n", + " xycoords=\"data\",\n", + " xytext=text_xy,\n", + " textcoords=\"offset points\",\n", " arrowprops=arrowprops,\n", - " bbox = bbox_args,\n", - " color = 'white'\n", + " bbox=bbox_args,\n", + " color=\"white\",\n", " )\n", - " \n", + "\n", " annots.append(an)\n", - " \n", + "\n", " x, y = text_xy\n", - " \n", - " y = y+y_var\n", + "\n", + " y = y + y_var\n", " y_var = y_var * -1\n", - " \n", - " text_xy = (x,y)\n", + "\n", + " text_xy = (x, y)\n", "\n", " return annots" ] @@ -2251,7 +2341,7 @@ "metadata": {}, "outputs": [], "source": [ - "dfVector5 = get_vector_data('brancol',5,brancol2trip)" + "dfVector5 = get_vector_data(\"brancol\", 5, brancol2trip)" ] }, { @@ -2362,7 +2452,7 @@ } ], "source": [ - "dfVector4 = get_vector_data('brancol',4,brancol2trip)\n", + "dfVector4 = get_vector_data(\"brancol\", 4, brancol2trip)\n", "dfVector4.head()" ] }, @@ -2373,8 +2463,7 @@ "metadata": {}, "outputs": [], "source": [ - "\n", - "video_Brancol2 = get_video_times('brancol', brancol2trip)" + "video_Brancol2 = get_video_times(\"brancol\", brancol2trip)" ] }, { @@ -2394,14 +2483,18 @@ "metadata": {}, "outputs": [], "source": [ - "df_notes = pd.read_csv('../data/reviewer_notes.csv')\n", - "df_notes['start_time'] = pd.to_datetime(df_notes['start_time'], format = 'mixed')\n", + "df_notes = pd.read_csv(\"../data/reviewer_notes.csv\")\n", + "df_notes[\"start_time\"] = pd.to_datetime(df_notes[\"start_time\"], format=\"mixed\")\n", "\n", - "video_events = df_notes.loc[(df_notes['category'] == 'No Video') |(df_notes['category'] == 'Camera Covered')]\n", + "video_events = df_notes.loc[\n", + " (df_notes[\"category\"] == \"No Video\") | (df_notes[\"category\"] == \"Camera Covered\")\n", + "]\n", "\n", "video_events.dtypes\n", "\n", - "video_eventsBrancol2 = video_events.loc[(video_events['vessel'] == 'Brancol') & (video_events['trip_number']==2)]" + "video_eventsBrancol2 = video_events.loc[\n", + " (video_events[\"vessel\"] == \"Brancol\") & (video_events[\"trip_number\"] == 2)\n", + "]" ] }, { @@ -2411,7 +2504,7 @@ "metadata": {}, "outputs": [], "source": [ - "sns.set_style(\"whitegrid\", {'axes.grid' : False})" + "sns.set_style(\"whitegrid\", {\"axes.grid\": False})" ] }, { @@ -2421,7 +2514,6 @@ "metadata": {}, "outputs": [], "source": [ - "\n", "import matplotlib.ticker as ticker" ] }, @@ -2459,34 +2551,55 @@ ], "source": [ "# df_test.reset_index(inplace = True)\n", - "bbox_args = dict(boxstyle='round', facecolor='black', alpha=0.35)\n", + "bbox_args = dict(boxstyle=\"round\", facecolor=\"black\", alpha=0.35)\n", "# bbox_args = dict(boxstyle=\"round\", fc=\"0.8\")\n", "arrow_args = dict(arrowstyle=\"->\")\n", "plt.tight_layout()\n", - "fig, ax = plt.subplots(3,1,figsize=(16,4), sharex = True, gridspec_kw={'height_ratios': [3, 1, 1]}, )\n", + "fig, ax = plt.subplots(\n", + " 3,\n", + " 1,\n", + " figsize=(16, 4),\n", + " sharex=True,\n", + " gridspec_kw={\"height_ratios\": [3, 1, 1]},\n", + ")\n", "\n", "plot_set_vspan(brancol2_bv_sets, ax[0])\n", "plot_set_vspan(brancol2_bv_sets, ax[1])\n", - "plot_event_bars(df_results.loc[df_results.predict_haul ==1], ax[0], 'predicted_hauls', 'utc_start_datetime', duration = 
'5m', y_val = .7, )\n", - "plot_event_bars(video_Brancol2.loc[video_Brancol2['cam_name']=='cam1'], ax[0], 'video coverage', 'utc_start_datetime', duration = '5m', y_val = 1.4, color = '#117347')\n", - "\n", - "\n", - "sns.lineplot(x = 'datetime', y = 'score', data = dfVector4, ax = ax[1], label = 'vector 4')\n", - "sns.lineplot(x = 'datetime', y = 'score', data = dfVector5, ax = ax[2], label = 'vector 5')\n", - "\n", - "ax[0].set_yticks([1,1.7],('predicted_hauls','video coverage'))\n", - "ax[0].set_ylim([.5, 3])\n", + "plot_event_bars(\n", + " df_results.loc[df_results.predict_haul == 1],\n", + " ax[0],\n", + " \"predicted_hauls\",\n", + " \"utc_start_datetime\",\n", + " duration=\"5m\",\n", + " y_val=0.7,\n", + ")\n", + "plot_event_bars(\n", + " video_Brancol2.loc[video_Brancol2[\"cam_name\"] == \"cam1\"],\n", + " ax[0],\n", + " \"video coverage\",\n", + " \"utc_start_datetime\",\n", + " duration=\"5m\",\n", + " y_val=1.4,\n", + " color=\"#117347\",\n", + ")\n", + "\n", + "\n", + "sns.lineplot(x=\"datetime\", y=\"score\", data=dfVector4, ax=ax[1], label=\"vector 4\")\n", + "sns.lineplot(x=\"datetime\", y=\"score\", data=dfVector5, ax=ax[2], label=\"vector 5\")\n", + "\n", + "ax[0].set_yticks([1, 1.7], (\"predicted_hauls\", \"video coverage\"))\n", + "ax[0].set_ylim([0.5, 3])\n", "locator = DayLocator()\n", "formatter = mdates.ConciseDateFormatter(locator)\n", - "ax[1].tick_params(axis = 'y', labelsize = 7)\n", - "ax[2].tick_params(axis = 'y', labelsize = 7)\n", + "ax[1].tick_params(axis=\"y\", labelsize=7)\n", + "ax[2].tick_params(axis=\"y\", labelsize=7)\n", "ax[2].xaxis.set_major_locator(locator)\n", "ax[2].xaxis.set_major_formatter(formatter)\n", "\n", - "ax[0].spines['bottom'].set_visible(False)\n", - "ax[1].spines['top'].set_visible(False)\n", - "ax[2].spines['top'].set_visible(False)\n", - "annots = annotate_notes(ax[0],video_eventsBrancol2, 'category')\n", + "ax[0].spines[\"bottom\"].set_visible(False)\n", + "ax[1].spines[\"top\"].set_visible(False)\n", + "ax[2].spines[\"top\"].set_visible(False)\n", + "annots = annotate_notes(ax[0], video_eventsBrancol2, \"category\")\n", "\n", "# setup(axs[5], title=\"AutoLocator()\")\n", "locator1 = ticker.MultipleLocator(0.03, offset=0.02)\n", @@ -2513,7 +2626,7 @@ "label_coords = []\n", "for ann in annots:\n", " box = matplotlib.text.Text.get_window_extent(ann)\n", - " coords = ax[0].transAxes.inverted().transform(box)\n", + " coords = ax[0].transAxes.inverted().transform(box)\n", "\n", " label_coords.append(coords)" ] @@ -2525,7 +2638,7 @@ "metadata": {}, "outputs": [], "source": [ - "ai_countsBrancol2 = Brancol2data['ai_sets']" + "ai_countsBrancol2 = Brancol2data[\"ai_sets\"]" ] }, { @@ -3139,7 +3252,7 @@ "metadata": {}, "outputs": [], "source": [ - "bvCounts_Brancol2 = Brancol2data['all_counts']" + "bvCounts_Brancol2 = Brancol2data[\"all_counts\"]" ] }, { @@ -3414,7 +3527,7 @@ } ], "source": [ - "df_results.loc[df_results['count']>0].head()" + "df_results.loc[df_results[\"count\"] > 0].head()" ] }, { @@ -3458,79 +3571,124 @@ } ], "source": [ - "\n", - "\n", "# df_test.reset_index(inplace = True)\n", - "bbox_args = dict(boxstyle='round', facecolor='black', alpha=0.35)\n", + "bbox_args = dict(boxstyle=\"round\", facecolor=\"black\", alpha=0.35)\n", "# plt.subplots_adjust(wspace=0, hspace=-.2)\n", "# bbox_args = dict(boxstyle=\"round\", fc=\"0.8\")\n", "arrow_args = dict(arrowstyle=\"->\")\n", "# plt.tight_layout()\n", - "fig, ax = plt.subplots(2,1,figsize=(11,2), sharex = True\n", - " # , gridspec_kw={'height_ratios': [2, 1]}, \n", - " )\n", 
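> Note on the idiom in these figure cells: `plot_event_bars` wraps `ax.broken_barh`, which draws each event as a horizontal bar from a list of `(start, width)` pairs. A minimal standalone sketch of that pattern, using made-up event times rather than the notebook's data (`mdates.date2num` plus a width in days is one explicit way to feed datetimes to `broken_barh`):

```python
import matplotlib.dates as mdates
import matplotlib.pyplot as plt
import pandas as pd

# made-up event start times and a fixed 2-hour duration
starts = pd.to_datetime(["2023-11-01 06:00", "2023-11-01 14:30"])
width_days = pd.Timedelta("2h") / pd.Timedelta(days=1)

fig, ax = plt.subplots(figsize=(8, 1.5))
# broken_barh wants (xmin, xwidth) pairs; convert the datetimes to
# matplotlib date floats, whose unit is days
xranges = [(s, width_days) for s in mdates.date2num(starts)]
ax.broken_barh(xranges, (0.7, 0.6), facecolors="#43aa99", edgecolor="face")
ax.xaxis.set_major_formatter(mdates.ConciseDateFormatter(mdates.AutoDateLocator()))
plt.show()
```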
+ "fig, ax = plt.subplots(\n", + " 2,\n", + " 1,\n", + " figsize=(11, 2),\n", + " sharex=True,\n", + " # , gridspec_kw={'height_ratios': [2, 1]},\n", + ")\n", "# plt.tight_layout()\n", "# trip2 = Brancol2data['trip_info']\n", "# plot_set_vspan(brancol2_bv_sets, ax[0])\n", "# plot_set_vspan(brancol2_bv_sets, ax[1])\n", - "ax[0].hlines([.175, .5, .825],.01,.99, transform=ax[0].transAxes, colors = 'grey', lw = .3, zorder = 0)\n", - "\n", - "\n", - "\n", - "\n", - " # matplotlib.pyplot.hlines(y, xmin, xmax, colors=None, linestyles='solid', label='', *, data=None, **kwargs)\n", - "\n", - "ax2 = ax[1].twinx() \n", - "ln1 = sns.lineplot(x = 'utc_start_datetime', y = 'count', data = df_results, ax = ax[1], label = 'AI Counts', color ='#184EAD', clip_on=False, lw = .4)\n", - "ln2 = sns.lineplot(x = 'start_datetime', y = 'bv_count', data = bvCounts_Brancol2, ax = ax2, label = 'Reviewer Counts', color ='#a2c662', clip_on=False, lw = .4)\n", - "\n", - "ax[0].set_yticks([.175,.5,.825],('Predicted Hauls','Elog Hauls','Reviewer Hauls'))\n", + "ax[0].hlines(\n", + " [0.175, 0.5, 0.825], 0.01, 0.99, transform=ax[0].transAxes, colors=\"grey\", lw=0.3, zorder=0\n", + ")\n", + "\n", + "\n", + "# matplotlib.pyplot.hlines(y, xmin, xmax, colors=None, linestyles='solid', label='', *, data=None, **kwargs)\n", + "\n", + "ax2 = ax[1].twinx()\n", + "ln1 = sns.lineplot(\n", + " x=\"utc_start_datetime\",\n", + " y=\"count\",\n", + " data=df_results,\n", + " ax=ax[1],\n", + " label=\"AI Counts\",\n", + " color=\"#184EAD\",\n", + " clip_on=False,\n", + " lw=0.4,\n", + ")\n", + "ln2 = sns.lineplot(\n", + " x=\"start_datetime\",\n", + " y=\"bv_count\",\n", + " data=bvCounts_Brancol2,\n", + " ax=ax2,\n", + " label=\"Reviewer Counts\",\n", + " color=\"#a2c662\",\n", + " clip_on=False,\n", + " lw=0.4,\n", + ")\n", + "\n", + "ax[0].set_yticks([0.175, 0.5, 0.825], (\"Predicted Hauls\", \"Elog Hauls\", \"Reviewer Hauls\"))\n", "ax[0].set_ylim([0, 1])\n", "# ax[1].set_ylim([0, 50])\n", "locator = DayLocator()\n", "formatter = mdates.ConciseDateFormatter(locator)\n", - "ax[1].tick_params(axis = 'both', labelsize = 9)\n", + "ax[1].tick_params(axis=\"both\", labelsize=9)\n", "ax[1].xaxis.set_major_locator(locator)\n", "ax[1].xaxis.set_major_formatter(formatter)\n", - "ax[1].set_xlabel('Datetime (UTC)')\n", - "ax[1].set_ylabel('AI Fish Count')\n", - "ax2.set_ylabel('Reviewer Fish Count')\n", + "ax[1].set_xlabel(\"Datetime (UTC)\")\n", + "ax[1].set_ylabel(\"AI Fish Count\")\n", + "ax2.set_ylabel(\"Reviewer Fish Count\")\n", "\n", "# ax[0].spines['bottom'].set_visible(False)\n", "# ax[1].spines['top'].set_visible(False)\n", "\n", - "plot_event_bars(brancol2_bv_sets, ax[0], 'Reviewer Hauls', 'haul_start_datetime', end_col = 'haul_end_datetime', y_val = .675, y_height = .3, color = '#a2c662')\n", - "plot_event_bars(brancol2_elog, ax[0], 'elog_hauls','systemstarthauldatetime', end_col = 'systemendhauldatetime', y_val = .35, color = '#117347', y_height = .3)\n", - "plot_event_bars(df_results.loc[df_results.predict_haul ==1], ax[0], 'predicted_hauls', 'utc_start_datetime', duration = '5m', y_val = .025, y_height = .3)\n", + "plot_event_bars(\n", + " brancol2_bv_sets,\n", + " ax[0],\n", + " \"Reviewer Hauls\",\n", + " \"haul_start_datetime\",\n", + " end_col=\"haul_end_datetime\",\n", + " y_val=0.675,\n", + " y_height=0.3,\n", + " color=\"#a2c662\",\n", + ")\n", + "plot_event_bars(\n", + " brancol2_elog,\n", + " ax[0],\n", + " \"elog_hauls\",\n", + " \"systemstarthauldatetime\",\n", + " end_col=\"systemendhauldatetime\",\n", + " 
y_val=0.35,\n", + " color=\"#117347\",\n", + " y_height=0.3,\n", + ")\n", + "plot_event_bars(\n", + " df_results.loc[df_results.predict_haul == 1],\n", + " ax[0],\n", + " \"predicted_hauls\",\n", + " \"utc_start_datetime\",\n", + " duration=\"5m\",\n", + " y_val=0.025,\n", + " y_height=0.3,\n", + ")\n", "\n", "\n", "from matplotlib.lines import Line2D\n", "from matplotlib.patches import Patch\n", "\n", - "legend_elements = [Patch(facecolor='#a2c662', edgecolor='#a2c662',\n", - " label='BV Hauls'),\n", - " Patch(facecolor='#117347', edgecolor='#117347',\n", - " label='Elog Hauls'),\n", - " Patch(facecolor='#43aa99', edgecolor='#43aa99',\n", - " label='Predicted Hauls'),\n", - " Line2D([0], [0], color='#184EAD', lw=2, label='AI Counts')]\n", + "legend_elements = [\n", + " Patch(facecolor=\"#a2c662\", edgecolor=\"#a2c662\", label=\"BV Hauls\"),\n", + " Patch(facecolor=\"#117347\", edgecolor=\"#117347\", label=\"Elog Hauls\"),\n", + " Patch(facecolor=\"#43aa99\", edgecolor=\"#43aa99\", label=\"Predicted Hauls\"),\n", + " Line2D([0], [0], color=\"#184EAD\", lw=2, label=\"AI Counts\"),\n", + "]\n", "\n", "# ax[0].legend(handles=legend_elements, loc='upper left', fontsize = 9)\n", "\n", "\n", - "x0,x1 = ax[0].get_xlim()\n", + "x0, x1 = ax[0].get_xlim()\n", "\n", "h1, l1 = ax[1].get_legend_handles_labels()\n", "h2, l2 = ax2.get_legend_handles_labels()\n", - "ax[1].legend(h1+h2, l1+l2, loc=2)\n", + "ax[1].legend(h1 + h2, l1 + l2, loc=2)\n", "\n", - "ax[0].set_xlim(x0, x1) \n", + "ax[0].set_xlim(x0, x1)\n", "ax2.get_legend().remove()\n", "# plt.legend(fontsize=20)\n", "\n", "plt.subplots_adjust(wspace=0, hspace=0)\n", "\n", - "plt.savefig('haul_detection.png')\n", + "plt.savefig(\"haul_detection.png\")\n", "\n", "plt.show()" ] @@ -3564,7 +3722,7 @@ "metadata": {}, "outputs": [], "source": [ - "x0,x1 = ax[1].get_xlim()" + "x0, x1 = ax[1].get_xlim()" ] }, { @@ -3585,7 +3743,7 @@ } ], "source": [ - "x1+300" + "x1 + 300" ] }, { @@ -3647,8 +3805,8 @@ "metadata": {}, "outputs": [], "source": [ - "df_notes = pd.read_csv('../data/reviewer_notes.csv')\n", - "df_notes['start_time'] = pd.to_datetime(df_notes['start_time'], format = 'mixed')" + "df_notes = pd.read_csv(\"../data/reviewer_notes.csv\")\n", + "df_notes[\"start_time\"] = pd.to_datetime(df_notes[\"start_time\"], format=\"mixed\")" ] }, { @@ -3658,7 +3816,7 @@ "metadata": {}, "outputs": [], "source": [ - "df_notes['end_time'] = pd.to_datetime(df_notes['end_time'], format = 'mixed')" + "df_notes[\"end_time\"] = pd.to_datetime(df_notes[\"end_time\"], format=\"mixed\")" ] }, { @@ -3668,11 +3826,15 @@ "metadata": {}, "outputs": [], "source": [ - "video_events = df_notes.loc[(df_notes['category'] == 'No Video') |(df_notes['category'] == 'Camera Covered')]\n", + "video_events = df_notes.loc[\n", + " (df_notes[\"category\"] == \"No Video\") | (df_notes[\"category\"] == \"Camera Covered\")\n", + "]\n", "\n", "video_events.dtypes\n", "\n", - "video_eventsBrancol2 = video_events.loc[(video_events['vessel'] == 'Brancol') & (video_events['trip_number']==2)]" + "video_eventsBrancol2 = video_events.loc[\n", + " (video_events[\"vessel\"] == \"Brancol\") & (video_events[\"trip_number\"] == 2)\n", + "]" ] }, { @@ -3706,7 +3868,7 @@ } ], "source": [ - "df_notes['category'].value_counts()" + "df_notes[\"category\"].value_counts()" ] }, { @@ -3716,7 +3878,7 @@ "metadata": {}, "outputs": [], "source": [ - "eventsBrancol2 = df_notes.loc[(df_notes['vessel'] == 'Brancol') & (df_notes['trip_number']==2)]" + "eventsBrancol2 = df_notes.loc[(df_notes[\"vessel\"] == \"Brancol\") 
& (df_notes[\"trip_number\"] == 2)]" ] }, { @@ -3753,7 +3915,7 @@ } ], "source": [ - "eventsBrancol2['category'].value_counts()" + "eventsBrancol2[\"category\"].value_counts()" ] }, { @@ -3784,7 +3946,13 @@ "metadata": {}, "outputs": [], "source": [ - "category_dict = {'Haul Stop':colors[13],'Other Gear':colors[12],'Camera Covered':colors[10],'No Video': colors[6], 'Abnormal Catch':colors[8]}" + "category_dict = {\n", + " \"Haul Stop\": colors[13],\n", + " \"Other Gear\": colors[12],\n", + " \"Camera Covered\": colors[10],\n", + " \"No Video\": colors[6],\n", + " \"Abnormal Catch\": colors[8],\n", + "}" ] }, { @@ -3835,15 +4003,23 @@ "metadata": {}, "outputs": [], "source": [ - "def plot_event_category_bars(df_events, ax, category_color_dict, y_val_start, y_height ):\n", + "def plot_event_category_bars(df_events, ax, category_color_dict, y_val_start, y_height):\n", " n = len(category_color_dict)\n", " y_vals = create_array(n, y_val_start, y_height)\n", " for idx, (category, color) in enumerate(category_color_dict.items()):\n", - " df_category = df_events.loc[df_events['category']==category].copy()\n", + " df_category = df_events.loc[df_events[\"category\"] == category].copy()\n", " y_val = y_vals[idx]\n", - " \n", - " \n", - " plot_event_bars(df_category, ax,category, 'start_time',end_col = 'end_time', y_val=y_val, y_height = y_height, color= color )" + "\n", + " plot_event_bars(\n", + " df_category,\n", + " ax,\n", + " category,\n", + " \"start_time\",\n", + " end_col=\"end_time\",\n", + " y_val=y_val,\n", + " y_height=y_height,\n", + " color=color,\n", + " )" ] }, { @@ -3853,24 +4029,32 @@ "metadata": {}, "outputs": [], "source": [ - "def plot_event_bars(df, ax,label, datetime_col,duration=None,end_col = None, duration_col = None, y_val=.7, y_height = .6, color= '#43aa99' ):\n", - " \n", + "def plot_event_bars(\n", + " df,\n", + " ax,\n", + " label,\n", + " datetime_col,\n", + " duration=None,\n", + " end_col=None,\n", + " duration_col=None,\n", + " y_val=0.7,\n", + " y_height=0.6,\n", + " color=\"#43aa99\",\n", + "):\n", " if duration:\n", " x_duration = np.full(len(df), pd.Timedelta(duration))\n", " elif end_col:\n", - " df['duration'] = df[end_col]- df[datetime_col]\n", - " x_duration = df['duration']\n", + " df[\"duration\"] = df[end_col] - df[datetime_col]\n", + " x_duration = df[\"duration\"]\n", " elif duration_col:\n", " x_duration = df[duration_col]\n", " else:\n", " x_duration = np.full(len(df), 2)\n", - " \n", + "\n", " x = list(zip(df[datetime_col], x_duration))\n", " y = (y_val, y_height)\n", "\n", - " ax.broken_barh(x, y, facecolors = color, edgecolor = 'face', label = label, clip_on=False)\n", - "\n", - " " + " ax.broken_barh(x, y, facecolors=color, edgecolor=\"face\", label=label, clip_on=False)\n" ] }, { @@ -3937,7 +4121,7 @@ "metadata": {}, "outputs": [], "source": [ - "y_vals = [1.2, " + "y_vals = [1.2," ] }, { @@ -3968,7 +4152,7 @@ "metadata": {}, "outputs": [], "source": [ - "y_labels = ['','predicted_hauls'] + list(category_dict.keys())" + "y_labels = [\"\", \"predicted_hauls\"] + list(category_dict.keys())" ] }, { @@ -4016,7 +4200,7 @@ } ], "source": [ - "len(create_array(n, 1.2, .2))" + "len(create_array(n, 1.2, 0.2))" ] }, { @@ -4040,7 +4224,7 @@ } ], "source": [ - "eventsBrancol2['category'].value_counts()" + "eventsBrancol2[\"category\"].value_counts()" ] }, { @@ -4061,7 +4245,7 @@ } ], "source": [ - "category_dict.pop('Other Gear')" + "category_dict.pop(\"Other Gear\")" ] }, { @@ -4131,65 +4315,73 @@ } ], "source": [ - "\n", - "\n", - "\n", "# 
df_test.reset_index(inplace = True)\n", - "bbox_args = dict(boxstyle='round', facecolor='black', alpha=0.35)\n", + "bbox_args = dict(boxstyle=\"round\", facecolor=\"black\", alpha=0.35)\n", "# plt.subplots_adjust(wspace=0, hspace=-.2)\n", "# bbox_args = dict(boxstyle=\"round\", fc=\"0.8\")\n", "arrow_args = dict(arrowstyle=\"->\")\n", "# plt.tight_layout()\n", - "fig, ax = plt.subplots(1,1,figsize=(11,2), sharex = True\n", - " # , gridspec_kw={'height_ratios': [2, 1]}, \n", - " )\n", + "fig, ax = plt.subplots(\n", + " 1,\n", + " 1,\n", + " figsize=(11, 2),\n", + " sharex=True,\n", + " # , gridspec_kw={'height_ratios': [2, 1]},\n", + ")\n", "\n", "# trip2 = Brancol2data['trip_info']\n", "plot_set_vspan(brancol2_bv_sets, ax)\n", "# plot_set_vspan(brancol2_bv_sets, ax[1])\n", - "plot_event_bars(df_results.loc[df_results.predict_haul ==1], ax, 'predicted_hauls', 'utc_start_datetime', duration = '5m', y_val = .6, y_height = .5)\n", - "plot_event_category_bars(eventsBrancol2, ax, category_dict, 1.2, .1)\n", + "plot_event_bars(\n", + " df_results.loc[df_results.predict_haul == 1],\n", + " ax,\n", + " \"predicted_hauls\",\n", + " \"utc_start_datetime\",\n", + " duration=\"5m\",\n", + " y_val=0.6,\n", + " y_height=0.5,\n", + ")\n", + "plot_event_category_bars(eventsBrancol2, ax, category_dict, 1.2, 0.1)\n", "\n", "\n", "# plot_event_bars(brancol2_elog, ax[0], 'elog_hauls','systemstarthauldatetime', end_col = 'systemendhauldatetime', y_val = 1.2, color = '#117347', y_height = .4)\n", "# sns.lineplot(x = 'utc_start_datetime', y = 'count', data = df_results, ax = ax[1], label = 'AI Counts', color ='#184EAD', clip_on=False, lw = .4)\n", "\n", "n = len(category_dict)\n", - "y_vals = [0,.9] + list(create_array(n, 1.2, .2))\n", - "y_labels = ['','predicted_hauls'] + list(category_dict.keys())\n", + "y_vals = [0, 0.9] + list(create_array(n, 1.2, 0.2))\n", + "y_labels = [\"\", \"predicted_hauls\"] + list(category_dict.keys())\n", "\n", - "ax.set_yticks(y_vals,y_labels)\n", - "ax.set_ylim([.5, max(y_vals)+.2])\n", + "ax.set_yticks(y_vals, y_labels)\n", + "ax.set_ylim([0.5, max(y_vals) + 0.2])\n", "# ax[1].set_ylim([0, 50])\n", "locator = DayLocator()\n", "formatter = mdates.ConciseDateFormatter(locator)\n", - "ax.tick_params(axis = 'both', labelsize = 9)\n", + "ax.tick_params(axis=\"both\", labelsize=9)\n", "ax.xaxis.set_major_locator(locator)\n", "ax.xaxis.set_major_formatter(formatter)\n", "\n", - "ax.spines['bottom'].set_visible(False)\n", + "ax.spines[\"bottom\"].set_visible(False)\n", "# ax[1].spines['top'].set_visible(False)\n", "ax.legend()\n", "\n", "from matplotlib.lines import Line2D\n", "from matplotlib.patches import Patch\n", "\n", - "legend_elements = [Patch(facecolor='#a2c662', edgecolor='#40a018', alpha = .4,\n", - " label='BV Hauls'),\n", - " Patch(facecolor='#117347', edgecolor='#117347',\n", - " label='Elog Hauls'),\n", - " Patch(facecolor='#43aa99', edgecolor='#43aa99',\n", - " label='Predicted Hauls'),\n", - " Line2D([0], [0], color='#184EAD', lw=2, label='AI Counts')]\n", + "legend_elements = [\n", + " Patch(facecolor=\"#a2c662\", edgecolor=\"#40a018\", alpha=0.4, label=\"BV Hauls\"),\n", + " Patch(facecolor=\"#117347\", edgecolor=\"#117347\", label=\"Elog Hauls\"),\n", + " Patch(facecolor=\"#43aa99\", edgecolor=\"#43aa99\", label=\"Predicted Hauls\"),\n", + " Line2D([0], [0], color=\"#184EAD\", lw=2, label=\"AI Counts\"),\n", + "]\n", "\n", "# ax[0].legend(handles=legend_elements, loc='upper left', fontsize = 9)\n", "# ax[1].get_legend().remove()\n", "\n", "# 
plt.legend(fontsize=20)\n", "\n", - "plt.subplots_adjust(wspace=0, hspace=-.2)\n", + "plt.subplots_adjust(wspace=0, hspace=-0.2)\n", "\n", - "plt.savefig('haul_detection.png')\n", + "plt.savefig(\"haul_detection.png\")\n", "plt.show()" ] }, @@ -4200,7 +4392,7 @@ "metadata": {}, "outputs": [], "source": [ - "df = pd.read_csv('your.csv', " + "df = pd.read_csv('your.csv'," ] }, { @@ -4220,7 +4412,9 @@ "metadata": {}, "outputs": [], "source": [ - "dfSystem_Brancol = pd.read_csv('../data/sessions_brancol.csv',on_bad_lines=lambda x: bad_lines.append(str(x)), engine='python')" + "dfSystem_Brancol = pd.read_csv(\n", + " \"../data/sessions_brancol.csv\", on_bad_lines=lambda x: bad_lines.append(str(x)), engine=\"python\"\n", + ")" ] }, { diff --git a/notebooks/tnc-edge-catch-plots.ipynb b/notebooks/tnc-edge-catch-plots.ipynb index 657af92..8082d00 100644 --- a/notebooks/tnc-edge-catch-plots.ipynb +++ b/notebooks/tnc-edge-catch-plots.ipynb @@ -34,14 +34,14 @@ "\n", "aws_config = {}\n", "\n", - "aws_config['profile_name'] ='XXXXXX'\n", - "aws_config['region_name'] = 'us-east-1'\n", + "aws_config[\"profile_name\"] = \"XXXXXX\"\n", + "aws_config[\"region_name\"] = \"us-east-1\"\n", "\n", "import boto3\n", "\n", "boto3.setup_default_session(**aws_config)\n", "\n", - "s3 = boto3.client('s3')\n", + "s3 = boto3.client(\"s3\")\n", "\n", "# s3.list_objects(Bucket='51-gema-dev-dp-raw' , Prefix='tnc_edge/')" ] @@ -58,19 +58,21 @@ "import json\n", "import re\n", "from datetime import datetime, timezone\n", + "\n", + "\n", "def display_full(x):\n", - " pandas.set_option('display.max_rows', 1000)\n", - " pandas.set_option('display.min_rows', 400)\n", - " pandas.set_option('display.max_columns', None)\n", - " pandas.set_option('display.width', 2000)\n", - " pandas.set_option('display.float_format', '{:20,.2f}'.format)\n", - " pandas.set_option('display.max_colwidth', None)\n", + " pandas.set_option(\"display.max_rows\", 1000)\n", + " pandas.set_option(\"display.min_rows\", 400)\n", + " pandas.set_option(\"display.max_columns\", None)\n", + " pandas.set_option(\"display.width\", 2000)\n", + " pandas.set_option(\"display.float_format\", \"{:20,.2f}\".format)\n", + " pandas.set_option(\"display.max_colwidth\", None)\n", " display(x)\n", - " pandas.reset_option('display.max_rows')\n", - " pandas.reset_option('display.max_columns')\n", - " pandas.reset_option('display.width')\n", - " pandas.reset_option('display.float_format')\n", - " pandas.reset_option('display.max_colwidth')\n" + " pandas.reset_option(\"display.max_rows\")\n", + " pandas.reset_option(\"display.max_columns\")\n", + " pandas.reset_option(\"display.width\")\n", + " pandas.reset_option(\"display.float_format\")\n", + " pandas.reset_option(\"display.max_colwidth\")\n" ] }, { @@ -80,12 +82,15 @@ "metadata": {}, "outputs": [], "source": [ - "elog_df = awswrangler.athena.read_sql_query(f\"SELECT id,jsonblob,datetime from stpatrick_v1_deckhandevents where jsonblob like '%\\\"eventType\\\": \\\"tripDetailsEvent\\\"%' and datetime < '2024-02-01';\", database='tnc_edge')\n", - "elog_df['jsonblob'] = elog_df['jsonblob'].apply(lambda x: re.sub('\"gearPhoto\": \"[^\"]*\"', '', x))\n", + "elog_df = awswrangler.athena.read_sql_query(\n", + " f\"SELECT id,jsonblob,datetime from stpatrick_v1_deckhandevents where jsonblob like '%\\\"eventType\\\": \\\"tripDetailsEvent\\\"%' and datetime < '2024-02-01';\",\n", + " database=\"tnc_edge\",\n", + ")\n", + "elog_df[\"jsonblob\"] = elog_df[\"jsonblob\"].apply(lambda x: re.sub('\"gearPhoto\": \"[^\"]*\"', \"\", x))\n", "# 
elog_df['euuid'] = elog_df['json'].apply(lambda x: x['eventId'])\n", "# elog_df['tuuid'] = elog_df['json'].apply(lambda x: x['tripId'])\n", "\n", - "display_full(elog_df.sort_values('datetime'))" + "display_full(elog_df.sort_values(\"datetime\"))" ] }, { @@ -95,7 +100,10 @@ "metadata": {}, "outputs": [], "source": [ - "s = awswrangler.athena.read_sql_query(f\"SELECT stpatrick_v1_video_files.*, stpatrick_v1_ondeckdata.video_uri, stpatrick_v1_ondeckdata.cocoannotations_uri, stpatrick_v1_ondeckdata.datetime, stpatrick_v1_ondeckdata.overallcount, stpatrick_v1_ondeckdata.overallruntimems, stpatrick_v1_ondeckdata.tracked_confidence, stpatrick_v1_ondeckdata.status, stpatrick_v1_ondeckdata.overallcatches, stpatrick_v1_ondeckdata.overalldiscards, stpatrick_v1_ondeckdata.detection_confidence FROM stpatrick_v1_video_files left join stpatrick_v1_ondeckdata on decrypted_path = video_uri where cam_name = 'cam1' and start_datetime > '2024-03-01' order by start_datetime asc limit 10000\", database='tnc_edge')\n", + "s = awswrangler.athena.read_sql_query(\n", + " f\"SELECT stpatrick_v1_video_files.*, stpatrick_v1_ondeckdata.video_uri, stpatrick_v1_ondeckdata.cocoannotations_uri, stpatrick_v1_ondeckdata.datetime, stpatrick_v1_ondeckdata.overallcount, stpatrick_v1_ondeckdata.overallruntimems, stpatrick_v1_ondeckdata.tracked_confidence, stpatrick_v1_ondeckdata.status, stpatrick_v1_ondeckdata.overallcatches, stpatrick_v1_ondeckdata.overalldiscards, stpatrick_v1_ondeckdata.detection_confidence FROM stpatrick_v1_video_files left join stpatrick_v1_ondeckdata on decrypted_path = video_uri where cam_name = 'cam1' and start_datetime > '2024-03-01' order by start_datetime asc limit 10000\",\n", + " database=\"tnc_edge\",\n", + ")\n", "display(s)" ] }, @@ -106,10 +114,10 @@ "metadata": {}, "outputs": [], "source": [ - "pandas.set_option('display.max_rows', 500)\n", - "pandas.set_option('display.min_rows', 500)\n", + "pandas.set_option(\"display.max_rows\", 500)\n", + "pandas.set_option(\"display.min_rows\", 500)\n", "\n", - "display(s[['start_datetime','status']])" + "display(s[[\"start_datetime\", \"status\"]])" ] }, { @@ -578,7 +586,10 @@ } ], "source": [ - "s = awswrangler.athena.read_sql_query(f\"SELECT * FROM stpatrick_v1_deckhandevents_mostrecentlonglineevent_jsonextracted where datetime > '2024-03-01'\", database='tnc_edge')\n", + "s = awswrangler.athena.read_sql_query(\n", + " f\"SELECT * FROM stpatrick_v1_deckhandevents_mostrecentlonglineevent_jsonextracted where datetime > '2024-03-01'\",\n", + " database=\"tnc_edge\",\n", + ")\n", "s" ] }, @@ -601,11 +612,31 @@ ], "source": [ "d = s.copy()\n", - "d = d.sort_values('systemstartsetdatetime')\n", - "for i in [\"systemstartsetdatetime\",\"systemendsetdatetime\",\"systemstarthauldatetime\",\"systemendhauldatetime\"]:\n", + "d = d.sort_values(\"systemstartsetdatetime\")\n", + "for i in [\n", + " \"systemstartsetdatetime\",\n", + " \"systemendsetdatetime\",\n", + " \"systemstarthauldatetime\",\n", + " \"systemendhauldatetime\",\n", + "]:\n", " d[i] = pandas.to_datetime(s[i])\n", " d[i] = d[i].transform(lambda x: x.astimezone(timezone.utc))\n", - "d[[\"systemstartsetdatetime\",\"systemstartsetlatitude\",\"systemstartsetlongitude\",\"systemendsetdatetime\",\"systemendsetlatitude\",\"systemendsetlongitude\",\"systemstarthauldatetime\",\"systemstarthaullatitude\",\"systemstarthaullongitude\",\"systemendhauldatetime\",\"systemendhaullatitude\",\"systemendhaullongitude\"]].to_csv()" + "d[\n", + " [\n", + " \"systemstartsetdatetime\",\n", + " 
\"systemstartsetlatitude\",\n", + " \"systemstartsetlongitude\",\n", + " \"systemendsetdatetime\",\n", + " \"systemendsetlatitude\",\n", + " \"systemendsetlongitude\",\n", + " \"systemstarthauldatetime\",\n", + " \"systemstarthaullatitude\",\n", + " \"systemstarthaullongitude\",\n", + " \"systemendhauldatetime\",\n", + " \"systemendhaullatitude\",\n", + " \"systemendhaullongitude\",\n", + " ]\n", + "].to_csv()" ] }, { @@ -696,7 +727,9 @@ } ], "source": [ - "trip_df = awswrangler.athena.read_sql_query(f\"SELECT * FROM brancol_v1_bv_trips\", database=\"tnc_edge\")\n", + "trip_df = awswrangler.athena.read_sql_query(\n", + " f\"SELECT * FROM brancol_v1_bv_trips\", database=\"tnc_edge\"\n", + ")\n", "trip_df" ] }, @@ -788,7 +821,9 @@ } ], "source": [ - "trip_df = awswrangler.athena.read_sql_query(f\"SELECT * FROM stpatrick_v1_bv_trips\", database=\"tnc_edge\")\n", + "trip_df = awswrangler.athena.read_sql_query(\n", + " f\"SELECT * FROM stpatrick_v1_bv_trips\", database=\"tnc_edge\"\n", + ")\n", "trip_df" ] }, @@ -1163,7 +1198,10 @@ ], "source": [ "# sets_df = awswrangler.athena.read_sql_query(f\"SELECT * FROM brancol_v1_bv_sets\", database=\"tnc_edge\")\n", - "sets_df = awswrangler.athena.read_sql_query(f\"SELECT * FROM stpatrick_v1_deckhandevents_mostrecentlonglineevent_jsonextracted where datetime > '2024-03-01'\", database=\"tnc_edge\")\n", + "sets_df = awswrangler.athena.read_sql_query(\n", + " f\"SELECT * FROM stpatrick_v1_deckhandevents_mostrecentlonglineevent_jsonextracted where datetime > '2024-03-01'\",\n", + " database=\"tnc_edge\",\n", + ")\n", "sets_df" ] }, @@ -1541,15 +1579,18 @@ ], "source": [ "# boat='brancol'\n", - "boat = 'stpatrick'\n", + "boat = \"stpatrick\"\n", "\n", - "trip_id = trip_df['trip_id'].values[0]\n", - "trip_start_date = trip_df['trip_start_date'].values[0]\n", - "trip_end_date = trip_df['trip_end_date'].values[0]\n", + "trip_id = trip_df[\"trip_id\"].values[0]\n", + "trip_start_date = trip_df[\"trip_start_date\"].values[0]\n", + "trip_end_date = trip_df[\"trip_end_date\"].values[0]\n", "\n", - "bv_df = awswrangler.athena.read_sql_query(f\"SELECT bv_f.* FROM {boat}_v1_bv_fish bv_f \\\n", + "bv_df = awswrangler.athena.read_sql_query(\n", + " f\"SELECT bv_f.* FROM {boat}_v1_bv_fish bv_f \\\n", "join {boat}_v1_bv_sets bv_s on bv_f.set_id = bv_s.set_id \\\n", - "where trip_id = '{trip_id}'\", database=\"tnc_edge\")\n", + "where trip_id = '{trip_id}'\",\n", + " database=\"tnc_edge\",\n", + ")\n", "\n", "bv_df" ] @@ -1915,14 +1956,14 @@ } ], "source": [ - "\n", - "if boat != 'brancol':\n", - " raise Error('wrong boat')\n", + "if boat != \"brancol\":\n", + " raise Error(\"wrong boat\")\n", "aif_df = awswrangler.athena.read_sql_query(\n", " f\"SELECT aifd.*, v.start_datetime FROM {boat}_v1_aifishdata aifd \\\n", " join {boat}_v1_video_files v on aifd.video_uri = v.decrypted_path \\\n", " where start_datetime >= '{trip_start_date}' and start_datetime <= '{trip_end_date}'\",\n", - " database=\"tnc_edge\")\n", + " database=\"tnc_edge\",\n", + ")\n", "\n", "aif_df" ] @@ -2312,14 +2353,14 @@ } ], "source": [ - "\n", - "if boat != 'stpatrick':\n", - " raise Error('wrong boat')\n", + "if boat != \"stpatrick\":\n", + " raise Error(\"wrong boat\")\n", "ond_df = awswrangler.athena.read_sql_query(\n", " f\"SELECT ond.*, v.start_datetime FROM {boat}_v1_ondeckdata ond \\\n", " join {boat}_v1_video_files v on ond.video_uri = v.decrypted_path \\\n", " where start_datetime >= '{trip_start_date}' and start_datetime <= '{trip_end_date}'\",\n", - " database=\"tnc_edge\")\n", + " 
database=\"tnc_edge\",\n", + ")\n", "\n", "ond_df" ] @@ -2354,6 +2395,7 @@ "source": [ "from dateutil.parser import parse as parse_dt\n", "from datetime import datetime, timedelta, timezone, date, time\n", + "\n", "# import datetime\n", "import pandas as pd\n", "\n", @@ -2364,28 +2406,38 @@ "\n", "\n", "# maxmin = pd.DataFrame({\n", - "# \"max\": ping_series.resample('1d').max(), \n", + "# \"max\": ping_series.resample('1d').max(),\n", "# \"min\": ping_series.resample('1d').min()\n", "# })\n", "# print(maxmin.sort_index().to_string())\n", "\n", "bv_df.catch_datetime = pd.to_datetime(bv_df.catch_datetime)\n", "\n", - "bv_df = bv_df.append(pd.DataFrame([\n", - " [(pd.Timestamp(trip_start_date) + timedelta(0)).replace(tzinfo=timezone.utc), ''],\n", - " [(pd.Timestamp(trip_start_date) + timedelta(hours=23,minutes=59)).replace(tzinfo=timezone.utc), ''],\n", - "], columns=['catch_datetime', 'fish_id']))\n", + "bv_df = bv_df.append(\n", + " pd.DataFrame(\n", + " [\n", + " [(pd.Timestamp(trip_start_date) + timedelta(0)).replace(tzinfo=timezone.utc), \"\"],\n", + " [\n", + " (pd.Timestamp(trip_start_date) + timedelta(hours=23, minutes=59)).replace(\n", + " tzinfo=timezone.utc\n", + " ),\n", + " \"\",\n", + " ],\n", + " ],\n", + " columns=[\"catch_datetime\", \"fish_id\"],\n", + " )\n", + ")\n", "\n", "# bv_df\n", "\n", - "cnt = bv_df.groupby('catch_datetime').count()[['fish_id']]\n", + "cnt = bv_df.groupby(\"catch_datetime\").count()[[\"fish_id\"]]\n", "\n", "# cnt\n", "bv_cnt_ts = cnt.resample(timedelta(minutes=30)).sum(min_count=1)\n", - "bv_cnt_ts['bv_count'] = bv_cnt_ts.pop('fish_id')\n", + "bv_cnt_ts[\"bv_count\"] = bv_cnt_ts.pop(\"fish_id\")\n", "\n", "# bv_cnt_ts.plot()\n", - "bv_cnt_ts.plot(figsize=(100,5))\n" + "bv_cnt_ts.plot(figsize=(100, 5))\n" ] }, { @@ -2523,16 +2575,15 @@ } ], "source": [ - "\n", - "if boat != 'brancol':\n", - " raise Error('wrong boat')\n", + "if boat != \"brancol\":\n", + " raise Error(\"wrong boat\")\n", "aif_df\n", "aif_df.start_datetime = pd.to_datetime(aif_df.start_datetime)\n", "\n", - "cnt = aif_df.groupby('start_datetime').sum()[['count']]\n", + "cnt = aif_df.groupby(\"start_datetime\").sum()[[\"count\"]]\n", "\n", "# cnt.count()\n", - "cnt['count'] = pd.to_numeric(cnt['count'])\n", + "cnt[\"count\"] = pd.to_numeric(cnt[\"count\"])\n", "# cnt.dtypes\n", "# cnt = cnt[cnt != '']\n", "# # cnt[cnt.index[0]]\n", @@ -2541,11 +2592,11 @@ "aif_cnt_ts = cnt.resample(timedelta(minutes=30)).sum(min_count=1)\n", "# aif_cnt_ts\n", "# aif_cnt_ts.loc['2024-01-16']\n", - "aif_cnt_ts['aifish_count'] = aif_cnt_ts.pop('count')\n", + "aif_cnt_ts[\"aifish_count\"] = aif_cnt_ts.pop(\"count\")\n", "\n", "display(aif_cnt_ts)\n", "\n", - "aif_cnt_ts.plot(figsize=(100,5))\n" + "aif_cnt_ts.plot(figsize=(100, 5))\n" ] }, { @@ -2683,16 +2734,15 @@ } ], "source": [ - "\n", - "if boat != 'stpatrick':\n", - " raise Error('wrong boat')\n", + "if boat != \"stpatrick\":\n", + " raise Error(\"wrong boat\")\n", "ond_df\n", "ond_df.start_datetime = pd.to_datetime(ond_df.start_datetime)\n", "\n", - "cnt = ond_df.groupby('start_datetime').sum()[['overallcount']]\n", + "cnt = ond_df.groupby(\"start_datetime\").sum()[[\"overallcount\"]]\n", "\n", "# cnt.count()\n", - "cnt['count'] = pd.to_numeric(cnt['overallcount'])\n", + "cnt[\"count\"] = pd.to_numeric(cnt[\"overallcount\"])\n", "# cnt.dtypes\n", "# cnt = cnt[cnt != '']\n", "# # cnt[cnt.index[0]]\n", @@ -2701,11 +2751,11 @@ "ond_cnt_ts = cnt.resample(timedelta(minutes=30)).sum(min_count=1)\n", "# ond_cnt_ts\n", "# ond_cnt_ts.loc['2024-01-16']\n", 
- "ond_cnt_ts['ondeck_count'] = ond_cnt_ts.pop('count')\n", + "ond_cnt_ts[\"ondeck_count\"] = ond_cnt_ts.pop(\"count\")\n", "\n", "# display(aif_cnt_ts)\n", "\n", - "ond_cnt_ts.plot(figsize=(100,5))\n" + "ond_cnt_ts.plot(figsize=(100, 5))\n" ] }, { @@ -2751,13 +2801,12 @@ } ], "source": [ - "\n", - "if boat == 'brancol':\n", + "if boat == \"brancol\":\n", " both_ts = aif_cnt_ts.merge(bv_cnt_ts, left_index=True, right_index=True)\n", - "if boat == 'stpatrick':\n", + "if boat == \"stpatrick\":\n", " both_ts = ond_cnt_ts.merge(bv_cnt_ts, left_index=True, right_index=True)\n", "\n", - "both_ts.plot(figsize=(100,10))\n" + "both_ts.plot(figsize=(100, 10))\n" ] }, { @@ -3258,11 +3307,13 @@ } ], "source": [ + "if boat != \"brancol\":\n", + " raise Error(\"wrong boat\")\n", "\n", - "if boat != 'brancol':\n", - " raise Error('wrong boat')\n", - "\n", - "elog_df = awswrangler.athena.read_sql_query(f\"SELECT * from brancol_v1_deckhandevents_mostrecentlonglineevent_jsonextracted where systemstartsetdatetime >= '{trip_start_date}' and systemendhauldatetime <= '{trip_end_date}'\", database=\"tnc_edge\")\n", + "elog_df = awswrangler.athena.read_sql_query(\n", + " f\"SELECT * from brancol_v1_deckhandevents_mostrecentlonglineevent_jsonextracted where systemstartsetdatetime >= '{trip_start_date}' and systemendhauldatetime <= '{trip_end_date}'\",\n", + " database=\"tnc_edge\",\n", + ")\n", "\n", "# '{trip_start_date}' and start_datetime < '{trip_end_date}'\n", "elog_df" @@ -3285,13 +3336,15 @@ } ], "source": [ + "if boat != \"stpatrick\":\n", + " raise Error(\"wrong boat\")\n", "\n", - "if boat != 'stpatrick':\n", - " raise Error('wrong boat')\n", + "elog_df = awswrangler.athena.read_sql_query(\n", + " f\"SELECT * from stpatrick_v1_deckhandevents_mostrecentlonglineevent_jsonextracted where systemstartsetdatetime >= '{trip_start_date}' and systemendhauldatetime <= '{trip_end_date}'\",\n", + " database=\"tnc_edge\",\n", + ")\n", "\n", - "elog_df = awswrangler.athena.read_sql_query(f\"SELECT * from stpatrick_v1_deckhandevents_mostrecentlonglineevent_jsonextracted where systemstartsetdatetime >= '{trip_start_date}' and systemendhauldatetime <= '{trip_end_date}'\", database=\"tnc_edge\")\n", - "\n", - "display(elog_df[elog_df.columns.difference(['jsonblob'])].to_string())\n", + "display(elog_df[elog_df.columns.difference([\"jsonblob\"])].to_string())\n", "\n", "# elog_df" ] @@ -3401,22 +3454,22 @@ "source": [ "elog_df_ts = None\n", "\n", - "for (k, row) in elog_df.iterrows():\n", - " start_haul = parse_dt(row['systemstarthauldatetime'])\n", - " end_haul = parse_dt(row['systemendhauldatetime'])\n", - "# print(end_haul - start_haul)\n", + "for k, row in elog_df.iterrows():\n", + " start_haul = parse_dt(row[\"systemstarthauldatetime\"])\n", + " end_haul = parse_dt(row[\"systemendhauldatetime\"])\n", + " # print(end_haul - start_haul)\n", " i = pd.DatetimeIndex([start_haul, end_haul])\n", " df = pd.DataFrame(index=i)\n", - "# df['mycol'] = [0, 0]\n", + " # df['mycol'] = [0, 0]\n", " df = df.resample(timedelta(minutes=5)).sum()\n", - " df['elog_count'] = (float(row['catchcount']) + float(row['bycatchcount'])) / len(df.index)\n", - "# print(df)\n", - "# break\n", + " df[\"elog_count\"] = (float(row[\"catchcount\"]) + float(row[\"bycatchcount\"])) / len(df.index)\n", + " # print(df)\n", + " # break\n", " if elog_df_ts is None:\n", " elog_df_ts = df\n", " else:\n", " elog_df_ts = elog_df_ts.append(df)\n", - " \n", + "\n", "elog_df_ts\n", "\n", "\n", @@ -3457,7 +3510,7 @@ "source": [ "triple_df = elog_df_ts.merge(both_ts, 
left_index=True, right_index=True)\n", "\n", - "triple_df.plot(figsize=(200,30))" + "triple_df.plot(figsize=(200, 30))" ] }, { @@ -4019,10 +4072,12 @@ } ], "source": [ - "aif_df['tmp'] = pd.to_datetime(aif_df['start_datetime'])\n", + "aif_df[\"tmp\"] = pd.to_datetime(aif_df[\"start_datetime\"])\n", "\n", - "aif_df.loc[aif_df['tmp'] >= parse_dt('2024-01-13 22:25:00Z')].loc[aif_df['tmp'] <= parse_dt('2024-01-13 23:55:00Z')].sort_values('tmp')\n", - "# \n", + "aif_df.loc[aif_df[\"tmp\"] >= parse_dt(\"2024-01-13 22:25:00Z\")].loc[\n", + " aif_df[\"tmp\"] <= parse_dt(\"2024-01-13 23:55:00Z\")\n", + "].sort_values(\"tmp\")\n", + "#\n", "\n", "# aif_df" ] diff --git a/notebooks/tnc-edge-data-integration.ipynb b/notebooks/tnc-edge-data-integration.ipynb index 26f8ef6..435e0a4 100644 --- a/notebooks/tnc-edge-data-integration.ipynb +++ b/notebooks/tnc-edge-data-integration.ipynb @@ -41,44 +41,46 @@ "\n", "aws_config = {}\n", "\n", - "aws_config['profile_name'] ='XXXXXXXX'\n", - "aws_config['region_name'] = 'us-east-1'\n", + "aws_config[\"profile_name\"] = \"XXXXXXXX\"\n", + "aws_config[\"region_name\"] = \"us-east-1\"\n", "\n", "import boto3\n", "\n", "boto3.setup_default_session(**aws_config)\n", "\n", - "s3 = boto3.client('s3')\n", + "s3 = boto3.client(\"s3\")\n", "\n", "# s3.list_objects(Bucket='51-gema-dev-dp-raw' , Prefix='tnc_edge/')\n", "\n", - "import pandas\n", - "import numpy\n", + "import io\n", "import math\n", + "from datetime import UTC, datetime, timedelta\n", + "\n", "import awswrangler\n", - "from datetime import datetime, timezone, timedelta\n", + "import numpy\n", + "import pandas\n", "from dateutil import parser\n", - "import pytz\n", - "import io\n", + "\n", "\n", "def display_full(x):\n", - " pandas.set_option('display.max_rows', 5000)\n", - " pandas.set_option('display.min_rows', 1000)\n", - " pandas.set_option('display.max_columns', None)\n", - " pandas.set_option('display.width', 2000)\n", - " pandas.set_option('display.float_format', '{:20,.2f}'.format)\n", - " pandas.set_option('display.max_colwidth', None)\n", + " pandas.set_option(\"display.max_rows\", 5000)\n", + " pandas.set_option(\"display.min_rows\", 1000)\n", + " pandas.set_option(\"display.max_columns\", None)\n", + " pandas.set_option(\"display.width\", 2000)\n", + " pandas.set_option(\"display.float_format\", \"{:20,.2f}\".format)\n", + " pandas.set_option(\"display.max_colwidth\", None)\n", " display(x)\n", - " pandas.reset_option('display.max_rows')\n", - " pandas.reset_option('display.max_columns')\n", - " pandas.reset_option('display.width')\n", - " pandas.reset_option('display.float_format')\n", - " pandas.reset_option('display.max_colwidth')\n", + " pandas.reset_option(\"display.max_rows\")\n", + " pandas.reset_option(\"display.max_columns\")\n", + " pandas.reset_option(\"display.width\")\n", + " pandas.reset_option(\"display.float_format\")\n", + " pandas.reset_option(\"display.max_colwidth\")\n", + "\n", "\n", "try:\n", " import pyperclip\n", "except ModuleNotFoundError:\n", - " print('no copypaste functionality today... please `pip install pyperclip`')" + " print(\"no copypaste functionality today... 
please `pip install pyperclip`\")" ] }, { @@ -104,10 +106,10 @@ " buf = io.StringIO(pyperclip.paste())\n", " for l in buf.readlines():\n", " l = l.strip()\n", - "# print(l)\n", + " # print(l)\n", " try:\n", " pasted_array.append(parser.isoparse(l))\n", - " except ValueError as e:\n", + " except ValueError:\n", " pass\n", "pasted_array" ] @@ -1212,12 +1214,16 @@ } ], "source": [ - "branc_gps = awswrangler.athena.read_sql_query(f\"SELECT * from brancol_v1_gpsdata where datetime > '2024-01-01'\", database='tnc_edge')\n", - "branc_gps['datetime'] = pandas.to_datetime(branc_gps['datetime'], utc=True)\n", - "branc_gps['gps_datetime'] = pandas.to_datetime(branc_gps['gps_datetime'], utc=True)\n", - "stpat_gps = awswrangler.athena.read_sql_query(f\"SELECT * from stpatrick_v1_gpsdata where datetime > '2024-01-01'\", database='tnc_edge')\n", - "stpat_gps['datetime'] = pandas.to_datetime(stpat_gps['datetime'], utc=True)\n", - "stpat_gps['gps_datetime'] = pandas.to_datetime(stpat_gps['gps_datetime'], utc=True)\n", + "branc_gps = awswrangler.athena.read_sql_query(\n", + " \"SELECT * from brancol_v1_gpsdata where datetime > '2024-01-01'\", database=\"tnc_edge\"\n", + ")\n", + "branc_gps[\"datetime\"] = pandas.to_datetime(branc_gps[\"datetime\"], utc=True)\n", + "branc_gps[\"gps_datetime\"] = pandas.to_datetime(branc_gps[\"gps_datetime\"], utc=True)\n", + "stpat_gps = awswrangler.athena.read_sql_query(\n", + " \"SELECT * from stpatrick_v1_gpsdata where datetime > '2024-01-01'\", database=\"tnc_edge\"\n", + ")\n", + "stpat_gps[\"datetime\"] = pandas.to_datetime(stpat_gps[\"datetime\"], utc=True)\n", + "stpat_gps[\"gps_datetime\"] = pandas.to_datetime(stpat_gps[\"gps_datetime\"], utc=True)\n", "\n", "branc_gps" ] @@ -1244,14 +1250,14 @@ "source": [ "gps_rows = len(branc_gps) + len(stpat_gps)\n", "\n", - "print(branc_gps[['datetime', \t'gps_datetime', \t'lat', \t'lon']].to_csv()[:300])\n", + "print(branc_gps[[\"datetime\", \"gps_datetime\", \"lat\", \"lon\"]].to_csv()[:300])\n", "\n", - "gps_bytes = \\\n", - " len(branc_gps[['datetime', \t'gps_datetime', \t'lat', \t'lon']].to_csv()) + \\\n", - " len(stpat_gps[['datetime', \t'gps_datetime', \t'lat', \t'lon']].to_csv())\n", + "gps_bytes = len(branc_gps[[\"datetime\", \"gps_datetime\", \"lat\", \"lon\"]].to_csv()) + len(\n", + " stpat_gps[[\"datetime\", \"gps_datetime\", \"lat\", \"lon\"]].to_csv()\n", + ")\n", "\n", "\n", - "print(\"gps rows\", gps_rows, 'gps MiB', gps_bytes/1024/1024)" + "print(\"gps rows\", gps_rows, \"gps MiB\", gps_bytes / 1024 / 1024)" ] }, { @@ -3016,19 +3022,45 @@ } ], "source": [ - "branc_vids = awswrangler.athena.read_sql_query(f\"SELECT * from brancol_v1_video_files where last_modified <> '' and start_datetime = ''\", database='tnc_edge')\n", + "branc_vids = awswrangler.athena.read_sql_query(\n", + " \"SELECT * from brancol_v1_video_files where last_modified <> '' and start_datetime = ''\",\n", + " database=\"tnc_edge\",\n", + ")\n", "print(\"could not process \", len(branc_vids))\n", "\n", - "branc_vids = awswrangler.athena.read_sql_query(f\"SELECT * from brancol_v1_video_files where start_datetime > '2024-01-01'\", database='tnc_edge')\n", - "for col in ['last_modified','decrypted_datetime','start_datetime','reencoded_datetime']:\n", + "branc_vids = awswrangler.athena.read_sql_query(\n", + " \"SELECT * from brancol_v1_video_files where start_datetime > '2024-01-01'\", database=\"tnc_edge\"\n", + ")\n", + "for col in [\"last_modified\", \"decrypted_datetime\", \"start_datetime\", \"reencoded_datetime\"]:\n", " branc_vids[col] = 
pandas.to_datetime(branc_vids[col], utc=True)\n", - "branc_vids = branc_vids.drop(columns=['md_timestamp_added', 'md_file_name', 'md_ingest_uuid', 'partition_0', 'partition_1', 'partition_2'])\n", - " \n", - "stpat_vids = awswrangler.athena.read_sql_query(f\"SELECT * from stpatrick_v1_video_files where start_datetime > '2024-01-01'\", database='tnc_edge')\n", - "for col in ['last_modified','decrypted_datetime','start_datetime','reencoded_datetime']:\n", + "branc_vids = branc_vids.drop(\n", + " columns=[\n", + " \"md_timestamp_added\",\n", + " \"md_file_name\",\n", + " \"md_ingest_uuid\",\n", + " \"partition_0\",\n", + " \"partition_1\",\n", + " \"partition_2\",\n", + " ]\n", + ")\n", + "\n", + "stpat_vids = awswrangler.athena.read_sql_query(\n", + " \"SELECT * from stpatrick_v1_video_files where start_datetime > '2024-01-01'\",\n", + " database=\"tnc_edge\",\n", + ")\n", + "for col in [\"last_modified\", \"decrypted_datetime\", \"start_datetime\", \"reencoded_datetime\"]:\n", " stpat_vids[col] = pandas.to_datetime(stpat_vids[col], utc=True)\n", - "stpat_vids = stpat_vids.drop(columns=['md_timestamp_added', 'md_file_name', 'md_ingest_uuid', 'partition_0', 'partition_1', 'partition_2'])\n", - " \n", + "stpat_vids = stpat_vids.drop(\n", + " columns=[\n", + " \"md_timestamp_added\",\n", + " \"md_file_name\",\n", + " \"md_ingest_uuid\",\n", + " \"partition_0\",\n", + " \"partition_1\",\n", + " \"partition_2\",\n", + " ]\n", + ")\n", + "\n", "\n", "stpat_vids" ] @@ -3065,15 +3097,26 @@ "source": [ "vids_rows = len(branc_vids) + len(stpat_vids)\n", "\n", - "collist = ['original_path', 'last_modified', 'decrypted_path', 'decrypted_datetime', 'stdout', 'stderr', 'start_datetime', 'cam_name', 'reencoded_path', 'reencoded_datetime', 'reencoded_stdout', 'reencoded_stderr']\n", + "collist = [\n", + " \"original_path\",\n", + " \"last_modified\",\n", + " \"decrypted_path\",\n", + " \"decrypted_datetime\",\n", + " \"stdout\",\n", + " \"stderr\",\n", + " \"start_datetime\",\n", + " \"cam_name\",\n", + " \"reencoded_path\",\n", + " \"reencoded_datetime\",\n", + " \"reencoded_stdout\",\n", + " \"reencoded_stderr\",\n", + "]\n", "print(branc_vids[collist].loc[0].to_csv())\n", "\n", - "vids_bytes = \\\n", - " len(branc_vids[collist].to_csv()) + \\\n", - " len(stpat_vids[collist].to_csv())\n", + "vids_bytes = len(branc_vids[collist].to_csv()) + len(stpat_vids[collist].to_csv())\n", "\n", "\n", - "print(\"vids rows\", vids_rows, 'vids MiB', vids_bytes/1024/1024)" + "print(\"vids rows\", vids_rows, \"vids MiB\", vids_bytes / 1024 / 1024)" ] }, { @@ -3091,14 +3134,22 @@ } ], "source": [ - "a = branc_vids.loc[branc_vids.apply(lambda x: pandas.notna(x['original_path']) and pandas.isna(x['decrypted_path']), axis=1)]\n", + "a = branc_vids.loc[\n", + " branc_vids.apply(\n", + " lambda x: pandas.notna(x[\"original_path\"]) and pandas.isna(x[\"decrypted_path\"]), axis=1\n", + " )\n", + "]\n", "# print(\"failed to look at videos\",len(a))\n", "# display_full(a.sort_values('start_datetime'))\n", "\n", - "b = stpat_vids.loc[stpat_vids.apply(lambda x: pandas.notna(x['original_path']) and pandas.isna(x['decrypted_path']), axis=1)]\n", + "b = stpat_vids.loc[\n", + " stpat_vids.apply(\n", + " lambda x: pandas.notna(x[\"original_path\"]) and pandas.isna(x[\"decrypted_path\"]), axis=1\n", + " )\n", + "]\n", "\n", "\n", - "print(\"failed to look at # videos:\",len(a) + len(b))" + "print(\"failed to look at # videos:\", len(a) + len(b))" ] }, { @@ -3119,29 +3170,46 @@ } ], "source": [ - "a = branc_vids.loc[branc_vids.apply(lambda 
x: pandas.notna(x['reencoded_path']), axis=1)]\n", + "a = branc_vids.loc[branc_vids.apply(lambda x: pandas.notna(x[\"reencoded_path\"]), axis=1)]\n", "\n", "# display_full(a.loc[409])\n", "\n", - "a = branc_vids.loc[branc_vids.apply(lambda x: pandas.notna(x['reencoded_stdout']) and 'Execution ended after' not in x['reencoded_stdout'], axis=1)]\n", + "a = branc_vids.loc[\n", + " branc_vids.apply(\n", + " lambda x: pandas.notna(x[\"reencoded_stdout\"])\n", + " and \"Execution ended after\" not in x[\"reencoded_stdout\"],\n", + " axis=1,\n", + " )\n", + "]\n", "# display_full(a.loc[681])\n", "# display_full(a.loc[1387])\n", "# display_full(a)\n", "print(\"branc errored transcodes\", len(a))\n", "\n", "\n", - "b = stpat_vids.loc[stpat_vids.apply(lambda x: pandas.notna(x['reencoded_stdout']) and 'Execution ended after' not in x['reencoded_stdout'], axis=1)]\n", + "b = stpat_vids.loc[\n", + " stpat_vids.apply(\n", + " lambda x: pandas.notna(x[\"reencoded_stdout\"])\n", + " and \"Execution ended after\" not in x[\"reencoded_stdout\"],\n", + " axis=1,\n", + " )\n", + "]\n", "# display_full(b.loc[18376])\n", "print(\"stpat errored transcodes\", len(b))\n", "\n", "# taken from operational logs when copying avi files onto usb sticks\n", - "average_size_per_avi = 314460123.9/1024/1024\n", + "average_size_per_avi = 314460123.9 / 1024 / 1024\n", "# a\n", - "print('average_size_per_avi ', average_size_per_avi)\n", + "print(\"average_size_per_avi \", average_size_per_avi)\n", "\n", - "a = branc_vids.loc[branc_vids.apply(lambda x: pandas.notna(x['decrypted_path']), axis=1)]\n", - "b = stpat_vids.loc[stpat_vids.apply(lambda x: pandas.notna(x['decrypted_path']), axis=1)]\n", - "print(\"video copied\", len(b) + len(a), \"MiB of video copied (estimate)\", (len(b) + len(a))*average_size_per_avi)" + "a = branc_vids.loc[branc_vids.apply(lambda x: pandas.notna(x[\"decrypted_path\"]), axis=1)]\n", + "b = stpat_vids.loc[stpat_vids.apply(lambda x: pandas.notna(x[\"decrypted_path\"]), axis=1)]\n", + "print(\n", + " \"video copied\",\n", + " len(b) + len(a),\n", + " \"MiB of video copied (estimate)\",\n", + " (len(b) + len(a)) * average_size_per_avi,\n", + ")" ] }, { @@ -4249,12 +4317,23 @@ "# print(\"could not process \", len(branc_aiout))\n", "\n", "\n", - "branc_aiout = awswrangler.athena.read_sql_query(f\"SELECT * from brancol_v1_aifishdata where datetime > '2024-01-01'\", database='tnc_edge')\n", - "for col in ['datetime']:\n", + "branc_aiout = awswrangler.athena.read_sql_query(\n", + " \"SELECT * from brancol_v1_aifishdata where datetime > '2024-01-01'\", database=\"tnc_edge\"\n", + ")\n", + "for col in [\"datetime\"]:\n", " branc_aiout[col] = pandas.to_datetime(branc_aiout[col], utc=True)\n", - "for col in ['count','runtimems','detection_confidence']:\n", + "for col in [\"count\", \"runtimems\", \"detection_confidence\"]:\n", " branc_aiout[col] = pandas.to_numeric(branc_aiout[col])\n", - "branc_aiout = branc_aiout.drop(columns=['md_timestamp_added', 'md_file_name', 'md_ingest_uuid', 'partition_0', 'partition_1', 'partition_2'])\n", + "branc_aiout = branc_aiout.drop(\n", + " columns=[\n", + " \"md_timestamp_added\",\n", + " \"md_file_name\",\n", + " \"md_ingest_uuid\",\n", + " \"partition_0\",\n", + " \"partition_1\",\n", + " \"partition_2\",\n", + " ]\n", + ")\n", "branc_aiout" ] }, @@ -5359,8 +5438,8 @@ } ], "source": [ - "a = branc_aiout.loc[branc_aiout['runtimems'] < 100]\n", - "a.sort_values('runtimems')\n" + "a = branc_aiout.loc[branc_aiout[\"runtimems\"] < 100]\n", + "a.sort_values(\"runtimems\")\n" ] }, { 
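> The hunk below reflows the cell that flags crashed AI runs and buckets video-copy lateness into log-spaced timedelta bins for the bar chart. A minimal sketch of just the binning step (`numpy.logspace` edges fed to `pandas.cut`), with made-up lateness values in place of the real data:

```python
import math
from datetime import timedelta

import numpy as np
import pandas as pd

# made-up video-copy lateness values, in minutes
lateness = pd.to_timedelta([1.5, 3.0, 12.0, 45.0, 600.0], unit="m").to_series()
max_minutes = lateness.max().total_seconds() / 60

# log-spaced edges from 1 minute to just past the maximum,
# converted to timedeltas so pandas.cut can bin the series
edges = np.logspace(math.log10(1), math.log10(1 + max_minutes), num=10)
bins = [timedelta(minutes=m) for m in edges]
print(pd.cut(lateness, bins=bins).value_counts().sort_index())
```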
@@ -5399,39 +5478,72 @@ } ], "source": [ - "\n", - "branc_vidsaiout = branc_vids.loc[branc_vids['cam_name'] == 'cam1'].join(branc_aiout.set_index('video_uri'), on='decrypted_path', how='left')\n", - "branc_vidsaiout['videocopy_found_unable_to_copy'] = pandas.notna(branc_vidsaiout['original_path']) & pandas.isna(branc_vidsaiout['decrypted_path'])\n", + "branc_vidsaiout = branc_vids.loc[branc_vids[\"cam_name\"] == \"cam1\"].join(\n", + " branc_aiout.set_index(\"video_uri\"), on=\"decrypted_path\", how=\"left\"\n", + ")\n", + "branc_vidsaiout[\"videocopy_found_unable_to_copy\"] = pandas.notna(\n", + " branc_vidsaiout[\"original_path\"]\n", + ") & pandas.isna(branc_vidsaiout[\"decrypted_path\"])\n", "\n", "\n", - "branc_vidsaiout['videocopy_lateness'] = branc_vidsaiout['decrypted_datetime'] - branc_vidsaiout['start_datetime']\n", + "branc_vidsaiout[\"videocopy_lateness\"] = (\n", + " branc_vidsaiout[\"decrypted_datetime\"] - branc_vidsaiout[\"start_datetime\"]\n", + ")\n", "\n", - "branc_vidsaiout['videocopy_ok'] = pandas.notna(branc_vidsaiout['decrypted_path'])\n", + "branc_vidsaiout[\"videocopy_ok\"] = pandas.notna(branc_vidsaiout[\"decrypted_path\"])\n", "# branc_vidsaiout.loc[pandas.isna(branc_vidsaiout['processing_uri'])]\n", - "branc_vidsaiout['ai_vidok_but_did_not_try'] = branc_vidsaiout['videocopy_ok'] & pandas.isna(branc_vidsaiout['processing_uri'])\n", - "branc_vidsaiout['ai_crash_no_output'] = branc_vidsaiout['videocopy_ok'] & ~ branc_vidsaiout['ai_vidok_but_did_not_try'] & (branc_vidsaiout['status'] == 'queued')\n", - "branc_vidsaiout['ai_outputed_but_cant_parse'] = branc_vidsaiout['videocopy_ok'] & ~ branc_vidsaiout['ai_vidok_but_did_not_try'] & (branc_vidsaiout['status'] == 'parsing')\n", + "branc_vidsaiout[\"ai_vidok_but_did_not_try\"] = branc_vidsaiout[\"videocopy_ok\"] & pandas.isna(\n", + " branc_vidsaiout[\"processing_uri\"]\n", + ")\n", + "branc_vidsaiout[\"ai_crash_no_output\"] = (\n", + " branc_vidsaiout[\"videocopy_ok\"]\n", + " & ~branc_vidsaiout[\"ai_vidok_but_did_not_try\"]\n", + " & (branc_vidsaiout[\"status\"] == \"queued\")\n", + ")\n", + "branc_vidsaiout[\"ai_outputed_but_cant_parse\"] = (\n", + " branc_vidsaiout[\"videocopy_ok\"]\n", + " & ~branc_vidsaiout[\"ai_vidok_but_did_not_try\"]\n", + " & (branc_vidsaiout[\"status\"] == \"parsing\")\n", + ")\n", "\n", "\n", - "i = branc_vidsaiout.loc[(branc_vidsaiout['count'] > 0) | (branc_vidsaiout['detection_confidence'] > 0)].sort_values('runtimems')\n", - "smallest_runtimems_with_nonzero_detections = i.loc[i.index[0]]['runtimems']\n", - "print('smallest_runtimems_with_nonzero_detections',smallest_runtimems_with_nonzero_detections)\n", + "i = branc_vidsaiout.loc[\n", + " (branc_vidsaiout[\"count\"] > 0) | (branc_vidsaiout[\"detection_confidence\"] > 0)\n", + "].sort_values(\"runtimems\")\n", + "smallest_runtimems_with_nonzero_detections = i.loc[i.index[0]][\"runtimems\"]\n", + "print(\"smallest_runtimems_with_nonzero_detections\", smallest_runtimems_with_nonzero_detections)\n", "\n", - "branc_vidsaiout['ai_crash_output_too_fast'] = (branc_vidsaiout['runtimems'] < 17464).fillna(False)\n", - "branc_vidsaiout['ai_ok'] = ((branc_vidsaiout['runtimems'] > 17464) & (branc_vidsaiout['status'] == 'done')).fillna(False)\n", + "branc_vidsaiout[\"ai_crash_output_too_fast\"] = (branc_vidsaiout[\"runtimems\"] < 17464).fillna(False)\n", + "branc_vidsaiout[\"ai_ok\"] = (\n", + " (branc_vidsaiout[\"runtimems\"] > 17464) & (branc_vidsaiout[\"status\"] == \"done\")\n", + ").fillna(False)\n", "\n", "# bad_boolean_rows_check = 
branc_vidsaiout.loc[branc_vidsaiout[['ai_vidok_but_did_not_try','ai_crash_no_output','ai_outputed_but_cant_parse','ai_crash_output_too_fast','ai_ok']].applymap(int).sum(axis=1) > 1]\n", "# bad_boolean_rows_check\n", - "branc_vidsaiout.loc[branc_vidsaiout['videocopy_lateness'] > timedelta(minutes=12)].sort_values('videocopy_lateness')\n", + "branc_vidsaiout.loc[branc_vidsaiout[\"videocopy_lateness\"] > timedelta(minutes=12)].sort_values(\n", + " \"videocopy_lateness\"\n", + ")\n", "\n", - "print('max lateness', branc_vidsaiout['videocopy_lateness'].map(lambda x: x.total_seconds()/60).max())\n", + "print(\n", + " \"max lateness\",\n", + " branc_vidsaiout[\"videocopy_lateness\"].map(lambda x: x.total_seconds() / 60).max(),\n", + ")\n", "\n", "min_lateness = 1\n", - "bins = numpy.logspace(math.log(min_lateness,10), math.log(1+branc_vidsaiout['videocopy_lateness'].map(lambda x: x.total_seconds()/60).max(),10), num=50)\n", + "bins = numpy.logspace(\n", + " math.log10(min_lateness),\n", + " math.log10(1 + branc_vidsaiout[\"videocopy_lateness\"].map(lambda x: x.total_seconds() / 60).max()),\n", + " num=50,\n", + ")\n", "bins = list(map(lambda x: timedelta(minutes=x), bins))\n", "bins\n", - "branc_vidsaiout['videocopy_latenessbucket'] = pandas.cut(branc_vidsaiout.loc[branc_vidsaiout['videocopy_lateness'] > timedelta(minutes=min_lateness)]['videocopy_lateness'], bins=bins)\n", - "branc_vidsaiout.groupby('videocopy_latenessbucket')['videocopy_lateness'].count().plot.bar()" + "branc_vidsaiout[\"videocopy_latenessbucket\"] = pandas.cut(\n", + " branc_vidsaiout.loc[branc_vidsaiout[\"videocopy_lateness\"] > timedelta(minutes=min_lateness)][\n", + " \"videocopy_lateness\"\n", + " ],\n", + " bins=bins,\n", + ")\n", + "branc_vidsaiout.groupby(\"videocopy_latenessbucket\")[\"videocopy_lateness\"].count().plot.bar()" ] }, { @@ -5457,12 +5569,16 @@ } ], "source": [ - "branc_vid_late_threshold=11\n", - "print('old ok count', branc_vidsaiout['videocopy_ok'].value_counts())\n", - "branc_vidsaiout['videocopy_late'] = (branc_vidsaiout['videocopy_ok'] & (branc_vidsaiout['videocopy_lateness'] > timedelta(minutes=branc_vid_late_threshold)))\n", - "print('late count', branc_vidsaiout['videocopy_late'].value_counts())\n", - "branc_vidsaiout['videocopy_ok'] = branc_vidsaiout.apply(lambda x: x['videocopy_ok'] and not x['videocopy_late'], axis=1)\n", - "print('new ok count', branc_vidsaiout['videocopy_ok'].value_counts())" + "branc_vid_late_threshold = 11\n", + "print(\"old ok count\", branc_vidsaiout[\"videocopy_ok\"].value_counts())\n", + "branc_vidsaiout[\"videocopy_late\"] = branc_vidsaiout[\"videocopy_ok\"] & (\n", + " branc_vidsaiout[\"videocopy_lateness\"] > timedelta(minutes=branc_vid_late_threshold)\n", + ")\n", + "print(\"late count\", branc_vidsaiout[\"videocopy_late\"].value_counts())\n", + "branc_vidsaiout[\"videocopy_ok\"] = branc_vidsaiout.apply(\n", + " lambda x: x[\"videocopy_ok\"] and not x[\"videocopy_late\"], axis=1\n", + ")\n", + "print(\"new ok count\", branc_vidsaiout[\"videocopy_ok\"].value_counts())" ] }, { @@ -5485,31 +5601,78 @@ } ], "source": [ - "thaloslogs_brancol_uptime_ts_df = pandas.read_pickle('thaloslogs_brancol_uptime_ts_df.pickle')\n", - "thaloslogs_brancol_isup = thaloslogs_brancol_uptime_ts_df.loc[thaloslogs_brancol_uptime_ts_df['up'] == 1]\n", - "thaloslogs_brancol_isup = thaloslogs_brancol_isup.loc[thaloslogs_brancol_isup.index < '2024-04-02']\n", - "thaloslogs_brancol_isup['up']\n", - "branc_vidsaiout2 = branc_vidsaiout.join(thaloslogs_brancol_isup['up'], on='start_datetime', 
how='outer')\n", - "print(\"outer_join_disjointed count, thaloslogs thought it was down\", len(branc_vidsaiout2.loc[branc_vidsaiout2['up'].isna()]))\n", - "print(\"outer_join_disjointed count, thaloslogs thought it was up\", len(branc_vidsaiout2.loc[branc_vidsaiout2['original_path'].isna()]))\n", - "branc_vidsaiout2['videocopy_no_video'] = branc_vidsaiout2['original_path'].isna()\n", - "for col in ['videocopy_found_unable_to_copy','videocopy_ok','videocopy_late','ai_vidok_but_did_not_try','ai_crash_no_output','ai_outputed_but_cant_parse','ai_crash_output_too_fast','ai_ok']:\n", + "thaloslogs_brancol_uptime_ts_df = pandas.read_pickle(\"thaloslogs_brancol_uptime_ts_df.pickle\")\n", + "thaloslogs_brancol_isup = thaloslogs_brancol_uptime_ts_df.loc[\n", + " thaloslogs_brancol_uptime_ts_df[\"up\"] == 1\n", + "]\n", + "thaloslogs_brancol_isup = thaloslogs_brancol_isup.loc[thaloslogs_brancol_isup.index < \"2024-04-02\"]\n", + "thaloslogs_brancol_isup[\"up\"]\n", + "branc_vidsaiout2 = branc_vidsaiout.join(\n", + " thaloslogs_brancol_isup[\"up\"], on=\"start_datetime\", how=\"outer\"\n", + ")\n", + "print(\n", + " \"outer_join_disjointed count, thaloslogs thought it was down\",\n", + " len(branc_vidsaiout2.loc[branc_vidsaiout2[\"up\"].isna()]),\n", + ")\n", + "print(\n", + " \"outer_join_disjointed count, thaloslogs thought it was up\",\n", + " len(branc_vidsaiout2.loc[branc_vidsaiout2[\"original_path\"].isna()]),\n", + ")\n", + "branc_vidsaiout2[\"videocopy_no_video\"] = branc_vidsaiout2[\"original_path\"].isna()\n", + "for col in [\n", + " \"videocopy_found_unable_to_copy\",\n", + " \"videocopy_ok\",\n", + " \"videocopy_late\",\n", + " \"ai_vidok_but_did_not_try\",\n", + " \"ai_crash_no_output\",\n", + " \"ai_outputed_but_cant_parse\",\n", + " \"ai_crash_output_too_fast\",\n", + " \"ai_ok\",\n", + "]:\n", " branc_vidsaiout2[col] = branc_vidsaiout2[col].fillna(False)\n", - " \n", + "\n", "# display_full(branc_vidsaiout2.loc[(branc_vidsaiout2['ai_crash_output_too_fast'] == False) & (branc_vidsaiout2['count'] == 0.0)])\n", "\n", - "tmp = pandas.DataFrame({'zerosize_datetime': pandas.to_datetime(pasted_array), 'is_zerosize': True})\n", + "tmp = pandas.DataFrame({\"zerosize_datetime\": pandas.to_datetime(pasted_array), \"is_zerosize\": True})\n", "tmp\n", - "tmp = tmp.set_index('zerosize_datetime')\n", + "tmp = tmp.set_index(\"zerosize_datetime\")\n", "\n", - "branc_vidsaiout2 = branc_vidsaiout2.join(tmp, on='start_datetime', how='left')\n", + "branc_vidsaiout2 = branc_vidsaiout2.join(tmp, on=\"start_datetime\", how=\"left\")\n", "\n", - "branc_vidsaiout2['is_zerosize'] = branc_vidsaiout2['is_zerosize'].fillna(False)\n", + "branc_vidsaiout2[\"is_zerosize\"] = branc_vidsaiout2[\"is_zerosize\"].fillna(False)\n", "branc_vidsaiout2\n", - "print('a', ((branc_vidsaiout2['ai_crash_output_too_fast'] == False) & (branc_vidsaiout2['count'] == 0.0) & (branc_vidsaiout2['is_zerosize'] == True)).sum() )\n", - "print('b', ((branc_vidsaiout2['ai_crash_output_too_fast'] == False) & (branc_vidsaiout2['count'] == 0.0) & (branc_vidsaiout2['is_zerosize'] == False)).sum() )\n", - "print('a', ((branc_vidsaiout2['ai_crash_output_too_fast'] == True) & (branc_vidsaiout2['count'] == 0.0) & (branc_vidsaiout2['is_zerosize'] == True)).sum() )\n", - "print('b', ((branc_vidsaiout2['ai_crash_output_too_fast'] == True) & (branc_vidsaiout2['count'] == 0.0) & (branc_vidsaiout2['is_zerosize'] == False)).sum() )\n", + "print(\n", + " \"a\",\n", + " (\n", + " (branc_vidsaiout2[\"ai_crash_output_too_fast\"] == False)\n", + " & 
(branc_vidsaiout2[\"count\"] == 0.0)\n", + " & (branc_vidsaiout2[\"is_zerosize\"] == True)\n", + " ).sum(),\n", + ")\n", + "print(\n", + " \"b\",\n", + " (\n", + " (branc_vidsaiout2[\"ai_crash_output_too_fast\"] == False)\n", + " & (branc_vidsaiout2[\"count\"] == 0.0)\n", + " & (branc_vidsaiout2[\"is_zerosize\"] == False)\n", + " ).sum(),\n", + ")\n", + "print(\n", + " \"a\",\n", + " (\n", + " (branc_vidsaiout2[\"ai_crash_output_too_fast\"] == True)\n", + " & (branc_vidsaiout2[\"count\"] == 0.0)\n", + " & (branc_vidsaiout2[\"is_zerosize\"] == True)\n", + " ).sum(),\n", + ")\n", + "print(\n", + " \"b\",\n", + " (\n", + " (branc_vidsaiout2[\"ai_crash_output_too_fast\"] == True)\n", + " & (branc_vidsaiout2[\"count\"] == 0.0)\n", + " & (branc_vidsaiout2[\"is_zerosize\"] == False)\n", + " ).sum(),\n", + ")\n", "# display_full(branc_vidsaiout2.loc[(branc_vidsaiout2['ai_crash_output_too_fast'] == True) & (branc_vidsaiout2['count'] == 0.0) & (branc_vidsaiout2['is_zerosize'] == False)])" ] }, @@ -5520,20 +5683,37 @@ "metadata": {}, "outputs": [], "source": [ - "\n", "# branc_aiout = awswrangler.athena.read_sql_query(f\"SELECT * from brancol_v1_aifishdata where datetime > '2024-01-01' limit 10;\", database='tnc_edge')\n", "# print(\"could not process \", len(branc_aiout))\n", "\n", "\n", - "\n", - "\n", - "stpat_aiout = awswrangler.athena.read_sql_query(f\"SELECT * from stpatrick_v1_ondeckdata where datetime > '2024-01-01'\", database='tnc_edge')\n", - "for col in ['datetime']:\n", + "stpat_aiout = awswrangler.athena.read_sql_query(\n", + " \"SELECT * from stpatrick_v1_ondeckdata where datetime > '2024-01-01'\", database=\"tnc_edge\"\n", + ")\n", + "for col in [\"datetime\"]:\n", " stpat_aiout[col] = pandas.to_datetime(stpat_aiout[col], utc=True)\n", - "for col in ['overallcount','overallruntimems','tracked_confidence','overallcatches','overalldiscards','detection_confidence']:\n", + "for col in [\n", + " \"overallcount\",\n", + " \"overallruntimems\",\n", + " \"tracked_confidence\",\n", + " \"overallcatches\",\n", + " \"overalldiscards\",\n", + " \"detection_confidence\",\n", + "]:\n", " stpat_aiout[col] = pandas.to_numeric(stpat_aiout[col])\n", - "stpat_aiout = stpat_aiout.drop(columns=['md_timestamp_added', 'md_file_name', 'md_ingest_uuid', 'partition_0', 'partition_1', 'partition_2'])\n", - "stpat_aiout = stpat_aiout.loc[stpat_aiout['status'].notna()] # rows with status NA were actidentally run by the old ondeck model, remove them" + "stpat_aiout = stpat_aiout.drop(\n", + " columns=[\n", + " \"md_timestamp_added\",\n", + " \"md_file_name\",\n", + " \"md_ingest_uuid\",\n", + " \"partition_0\",\n", + " \"partition_1\",\n", + " \"partition_2\",\n", + " ]\n", + ")\n", + "stpat_aiout = stpat_aiout.loc[\n", + " stpat_aiout[\"status\"].notna()\n", + "] # rows with status NA were actidentally run by the old ondeck model, remove them" ] }, { @@ -5572,27 +5752,49 @@ } ], "source": [ - "\n", - "stpat_vidsaiout =stpat_vids.loc[stpat_vids['cam_name'] == 'cam1'].join(stpat_aiout.set_index('video_uri'), on='decrypted_path', how='left')\n", - "stpat_vidsaiout['videocopy_found_unable_to_copy'] = pandas.notna(stpat_vidsaiout['original_path']) & pandas.isna(stpat_vidsaiout['decrypted_path'])\n", + "stpat_vidsaiout = stpat_vids.loc[stpat_vids[\"cam_name\"] == \"cam1\"].join(\n", + " stpat_aiout.set_index(\"video_uri\"), on=\"decrypted_path\", how=\"left\"\n", + ")\n", + "stpat_vidsaiout[\"videocopy_found_unable_to_copy\"] = pandas.notna(\n", + " stpat_vidsaiout[\"original_path\"]\n", + ") & 
pandas.isna(stpat_vidsaiout[\"decrypted_path\"])\n", "\n", "\n", - "stpat_vidsaiout['videocopy_lateness'] = stpat_vidsaiout['decrypted_datetime'] - stpat_vidsaiout['start_datetime']\n", + "stpat_vidsaiout[\"videocopy_lateness\"] = (\n", + " stpat_vidsaiout[\"decrypted_datetime\"] - stpat_vidsaiout[\"start_datetime\"]\n", + ")\n", "\n", - "stpat_vidsaiout['videocopy_ok'] = pandas.notna(stpat_vidsaiout['decrypted_path'])\n", + "stpat_vidsaiout[\"videocopy_ok\"] = pandas.notna(stpat_vidsaiout[\"decrypted_path\"])\n", "# stpat_vidsaiout.loc[pandas.isna(stpat_vidsaiout['processing_uri'])]\n", - "stpat_vidsaiout['ai_vidok_but_did_not_try'] = stpat_vidsaiout['videocopy_ok'] & pandas.isna(stpat_vidsaiout['cocoannotations_uri'])\n", - "stpat_vidsaiout['ai_crash_no_output'] = stpat_vidsaiout['videocopy_ok'] & ~ stpat_vidsaiout['ai_vidok_but_did_not_try'] & (stpat_vidsaiout['status'] == 'queued')\n", - "stpat_vidsaiout['ai_outputed_but_cant_parse'] = stpat_vidsaiout['videocopy_ok'] & ~ stpat_vidsaiout['ai_vidok_but_did_not_try'] & (stpat_vidsaiout['status'] == 'parsing')\n", + "stpat_vidsaiout[\"ai_vidok_but_did_not_try\"] = stpat_vidsaiout[\"videocopy_ok\"] & pandas.isna(\n", + " stpat_vidsaiout[\"cocoannotations_uri\"]\n", + ")\n", + "stpat_vidsaiout[\"ai_crash_no_output\"] = (\n", + " stpat_vidsaiout[\"videocopy_ok\"]\n", + " & ~stpat_vidsaiout[\"ai_vidok_but_did_not_try\"]\n", + " & (stpat_vidsaiout[\"status\"] == \"queued\")\n", + ")\n", + "stpat_vidsaiout[\"ai_outputed_but_cant_parse\"] = (\n", + " stpat_vidsaiout[\"videocopy_ok\"]\n", + " & ~stpat_vidsaiout[\"ai_vidok_but_did_not_try\"]\n", + " & (stpat_vidsaiout[\"status\"] == \"parsing\")\n", + ")\n", "\n", "\n", - "i = stpat_vidsaiout.loc[(stpat_vidsaiout['overallcount'] > 0) | (stpat_vidsaiout['detection_confidence'] > 0)].sort_values('overallruntimems')\n", + "i = stpat_vidsaiout.loc[\n", + " (stpat_vidsaiout[\"overallcount\"] > 0) | (stpat_vidsaiout[\"detection_confidence\"] > 0)\n", + "].sort_values(\"overallruntimems\")\n", "# display_full(i)\n", - "smallest_runtimems_with_nonzero_detections = i.loc[i.index[0]]['overallruntimems']\n", - "print('smallest_runtimems_with_nonzero_detections',smallest_runtimems_with_nonzero_detections)\n", + "smallest_runtimems_with_nonzero_detections = i.loc[i.index[0]][\"overallruntimems\"]\n", + "print(\"smallest_runtimems_with_nonzero_detections\", smallest_runtimems_with_nonzero_detections)\n", "\n", - "stpat_vidsaiout['ai_crash_output_too_fast'] = (stpat_vidsaiout['overallruntimems'] < 1).fillna(False)\n", - "stpat_vidsaiout['ai_ok'] = ((stpat_vidsaiout['overallruntimems'] > 1) & ((stpat_vidsaiout['status'] == 'done') | (stpat_vidsaiout['status'] == 'doneskiphalf'))).fillna(False)\n", + "stpat_vidsaiout[\"ai_crash_output_too_fast\"] = (stpat_vidsaiout[\"overallruntimems\"] < 1).fillna(\n", + " False\n", + ")\n", + "stpat_vidsaiout[\"ai_ok\"] = (\n", + " (stpat_vidsaiout[\"overallruntimems\"] > 1)\n", + " & ((stpat_vidsaiout[\"status\"] == \"done\") | (stpat_vidsaiout[\"status\"] == \"doneskiphalf\"))\n", + ").fillna(False)\n", "\n", "\n", "# bad_boolean_rows_check = stpat_vidsaiout.loc[stpat_vidsaiout[['ai_vidok_but_did_not_try','ai_crash_no_output','ai_outputed_but_cant_parse','ai_crash_output_too_fast','ai_ok']].applymap(int).sum(axis=1) > 1]\n", @@ -5603,16 +5805,30 @@ "# display_full(stpat_vidsaiout.loc[stpat_vidsaiout['ai_crash_output_too_fast']])\n", "# display_full(stpat_vidsaiout.loc[stpat_vidsaiout['ai_ok']])\n", "\n", - "stpat_vidsaiout.loc[stpat_vidsaiout['videocopy_lateness'] > 
timedelta(minutes=17)].sort_values('videocopy_lateness')\n", + "stpat_vidsaiout.loc[stpat_vidsaiout[\"videocopy_lateness\"] > timedelta(minutes=17)].sort_values(\n", + " \"videocopy_lateness\"\n", + ")\n", "\n", - "print('max lateness', stpat_vidsaiout['videocopy_lateness'].map(lambda x: x.total_seconds()/60).max())\n", + "print(\n", + " \"max lateness\",\n", + " stpat_vidsaiout[\"videocopy_lateness\"].map(lambda x: x.total_seconds() / 60).max(),\n", + ")\n", "\n", "min_lateness = 4\n", - "bins = numpy.logspace(math.log(min_lateness,10), math.log(1+stpat_vidsaiout['videocopy_lateness'].map(lambda x: x.total_seconds()/60).max(),10), num=50)\n", + "bins = numpy.logspace(\n", + " math.log10(min_lateness),\n", + " math.log10(1 + stpat_vidsaiout[\"videocopy_lateness\"].map(lambda x: x.total_seconds() / 60).max()),\n", + " num=50,\n", + ")\n", "bins = list(map(lambda x: timedelta(minutes=x), bins))\n", "bins\n", - "stpat_vidsaiout['videocopy_latenessbucket'] = pandas.cut(stpat_vidsaiout.loc[stpat_vidsaiout['videocopy_lateness'] > timedelta(minutes=min_lateness)]['videocopy_lateness'], bins=bins)\n", - "stpat_vidsaiout.groupby('videocopy_latenessbucket')['videocopy_lateness'].count().plot.bar()" + "stpat_vidsaiout[\"videocopy_latenessbucket\"] = pandas.cut(\n", + " stpat_vidsaiout.loc[stpat_vidsaiout[\"videocopy_lateness\"] > timedelta(minutes=min_lateness)][\n", + " \"videocopy_lateness\"\n", + " ],\n", + " bins=bins,\n", + ")\n", + "stpat_vidsaiout.groupby(\"videocopy_latenessbucket\")[\"videocopy_lateness\"].count().plot.bar()" ] }, { @@ -5638,12 +5854,16 @@ } ], "source": [ - "stpat_vid_late_threshold=17\n", - "print('old ok count', stpat_vidsaiout['videocopy_ok'].value_counts())\n", - "stpat_vidsaiout['videocopy_late'] = (stpat_vidsaiout['videocopy_ok'] & (stpat_vidsaiout['videocopy_lateness'] > timedelta(minutes=stpat_vid_late_threshold)))\n", - "print('late count', stpat_vidsaiout['videocopy_late'].value_counts())\n", - "stpat_vidsaiout['videocopy_ok'] = stpat_vidsaiout.apply(lambda x: x['videocopy_ok'] and not x['videocopy_late'], axis=1)\n", - "print('new ok count', stpat_vidsaiout['videocopy_ok'].value_counts())" + "stpat_vid_late_threshold = 17\n", + "print(\"old ok count\", stpat_vidsaiout[\"videocopy_ok\"].value_counts())\n", + "stpat_vidsaiout[\"videocopy_late\"] = stpat_vidsaiout[\"videocopy_ok\"] & (\n", + " stpat_vidsaiout[\"videocopy_lateness\"] > timedelta(minutes=stpat_vid_late_threshold)\n", + ")\n", + "print(\"late count\", stpat_vidsaiout[\"videocopy_late\"].value_counts())\n", + "stpat_vidsaiout[\"videocopy_ok\"] = stpat_vidsaiout.apply(\n", + " lambda x: x[\"videocopy_ok\"] and not x[\"videocopy_late\"], axis=1\n", + ")\n", + "print(\"new ok count\", stpat_vidsaiout[\"videocopy_ok\"].value_counts())" ] }, { @@ -5662,15 +5882,36 @@ } ], "source": [ - "thaloslogs_stpatrick_uptime_ts_df = pandas.read_pickle('thaloslogs_stpatrick_uptime_ts_df.pickle')\n", - "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_uptime_ts_df.loc[thaloslogs_stpatrick_uptime_ts_df['up'] == 1]\n", - "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_isup.loc[thaloslogs_stpatrick_isup.index < '2024-04-08']\n", - "thaloslogs_stpatrick_isup['up']\n", - "stpat_vidsaiout2 = stpat_vidsaiout.join(thaloslogs_stpatrick_isup['up'], on='start_datetime', how='outer')\n", - "print(\"outer_join_disjointed count, thaloslogs thought it was down\", len(stpat_vidsaiout2.loc[stpat_vidsaiout2['up'].isna()]))\n", - "print(\"outer_join_disjointed count, thaloslogs thought it was up\", 
len(stpat_vidsaiout2.loc[stpat_vidsaiout2['original_path'].isna()]))\n", - "stpat_vidsaiout2['videocopy_no_video'] = stpat_vidsaiout2['original_path'].isna()\n", - "for col in ['videocopy_found_unable_to_copy','videocopy_ok','videocopy_late','ai_vidok_but_did_not_try','ai_crash_no_output','ai_outputed_but_cant_parse','ai_crash_output_too_fast','ai_ok']:\n", + "thaloslogs_stpatrick_uptime_ts_df = pandas.read_pickle(\"thaloslogs_stpatrick_uptime_ts_df.pickle\")\n", + "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_uptime_ts_df.loc[\n", + " thaloslogs_stpatrick_uptime_ts_df[\"up\"] == 1\n", + "]\n", + "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_isup.loc[\n", + " thaloslogs_stpatrick_isup.index < \"2024-04-08\"\n", + "]\n", + "thaloslogs_stpatrick_isup[\"up\"]\n", + "stpat_vidsaiout2 = stpat_vidsaiout.join(\n", + " thaloslogs_stpatrick_isup[\"up\"], on=\"start_datetime\", how=\"outer\"\n", + ")\n", + "print(\n", + " \"outer_join_disjointed count, thaloslogs thought it was down\",\n", + " len(stpat_vidsaiout2.loc[stpat_vidsaiout2[\"up\"].isna()]),\n", + ")\n", + "print(\n", + " \"outer_join_disjointed count, thaloslogs thought it was up\",\n", + " len(stpat_vidsaiout2.loc[stpat_vidsaiout2[\"original_path\"].isna()]),\n", + ")\n", + "stpat_vidsaiout2[\"videocopy_no_video\"] = stpat_vidsaiout2[\"original_path\"].isna()\n", + "for col in [\n", + " \"videocopy_found_unable_to_copy\",\n", + " \"videocopy_ok\",\n", + " \"videocopy_late\",\n", + " \"ai_vidok_but_did_not_try\",\n", + " \"ai_crash_no_output\",\n", + " \"ai_outputed_but_cant_parse\",\n", + " \"ai_crash_output_too_fast\",\n", + " \"ai_ok\",\n", + "]:\n", " stpat_vidsaiout2[col] = stpat_vidsaiout2[col].fillna(False)\n", "# display_full(stpat_vidsaiout2.loc[stpat_vidsaiout2['videocopy_no_video']])\n", "# display_full(stpat_vidsaiout2)" @@ -5691,29 +5932,68 @@ } ], "source": [ - "a = branc_vidsaiout2[['original_path','decrypted_datetime','start_datetime','status','videocopy_no_video','videocopy_found_unable_to_copy','videocopy_late','videocopy_ok','ai_vidok_but_did_not_try','ai_crash_no_output','ai_outputed_but_cant_parse','ai_crash_output_too_fast','ai_ok']]\n", - "b = stpat_vidsaiout2[['original_path','decrypted_datetime','start_datetime','status','videocopy_no_video','videocopy_found_unable_to_copy','videocopy_late','videocopy_ok','ai_vidok_but_did_not_try','ai_crash_no_output','ai_outputed_but_cant_parse','ai_crash_output_too_fast','ai_ok']]\n", + "a = branc_vidsaiout2[\n", + " [\n", + " \"original_path\",\n", + " \"decrypted_datetime\",\n", + " \"start_datetime\",\n", + " \"status\",\n", + " \"videocopy_no_video\",\n", + " \"videocopy_found_unable_to_copy\",\n", + " \"videocopy_late\",\n", + " \"videocopy_ok\",\n", + " \"ai_vidok_but_did_not_try\",\n", + " \"ai_crash_no_output\",\n", + " \"ai_outputed_but_cant_parse\",\n", + " \"ai_crash_output_too_fast\",\n", + " \"ai_ok\",\n", + " ]\n", + "]\n", + "b = stpat_vidsaiout2[\n", + " [\n", + " \"original_path\",\n", + " \"decrypted_datetime\",\n", + " \"start_datetime\",\n", + " \"status\",\n", + " \"videocopy_no_video\",\n", + " \"videocopy_found_unable_to_copy\",\n", + " \"videocopy_late\",\n", + " \"videocopy_ok\",\n", + " \"ai_vidok_but_did_not_try\",\n", + " \"ai_crash_no_output\",\n", + " \"ai_outputed_but_cant_parse\",\n", + " \"ai_crash_output_too_fast\",\n", + " \"ai_ok\",\n", + " ]\n", + "]\n", "# display_full(a)\n", "\n", "c = a.append(b, ignore_index=True)\n", "\n", - "c.to_pickle('integration_state_evaluations_videocopy_ai.pickle')\n", + 
"c.to_pickle(\"integration_state_evaluations_videocopy_ai.pickle\")\n", "\n", "# show rows with at least one NA\n", "# display_full(c.loc[c.applymap(pandas.isna).any(axis=1)])\n", "# display_full(c.loc[(c['videocopy_no_video']) & (c['start_datetime'] < pandas.Timestamp('2024-01-13 00:00:00+00:00'))].sort_values('start_datetime') )\n", - "print(\"disk outage accounts for max\",\n", - " len(c.loc[(c['videocopy_no_video']) & (c['start_datetime'] < pandas.Timestamp('2024-01-13 00:00:00+00:00'))]),\n", - " \" out of \", \n", - " len(c.loc[(c['videocopy_no_video'])]))\n", + "print(\n", + " \"disk outage accounts for max\",\n", + " len(\n", + " c.loc[\n", + " (c[\"videocopy_no_video\"])\n", + " & (c[\"start_datetime\"] < pandas.Timestamp(\"2024-01-13 00:00:00+00:00\"))\n", + " ]\n", + " ),\n", + " \" out of \",\n", + " len(c.loc[(c[\"videocopy_no_video\"])]),\n", + ")\n", "# print(len(b.loc[(b['videocopy_no_video']) & (b['start_datetime'] < pandas.Timestamp('2024-01-13 00:00:00+00:00'))]))\n", "\n", "# print(len(a.loc[(a['videocopy_no_video']) & (a['start_datetime'] > pandas.Timestamp('2024-01-13 00:00:00+00:00')) & (a['start_datetime'] < pandas.Timestamp('2024-02-01 00:00:00+00:00'))]))\n", "# print(len(b.loc[(b['videocopy_no_video']) & (b['start_datetime'] > pandas.Timestamp('2024-01-13 00:00:00+00:00')) & (b['start_datetime'] < pandas.Timestamp('2024-02-01 00:00:00+00:00'))]))\n", - " \n", + "\n", "# print(len(a.loc[(a['videocopy_no_video']) & (a['start_datetime'] > pandas.Timestamp('2024-02-01 00:00:00+00:00')) & (a['start_datetime'] < pandas.Timestamp('2024-03-01 00:00:00+00:00'))]))\n", "# print(len(b.loc[(b['videocopy_no_video']) & (b['start_datetime'] > pandas.Timestamp('2024-02-01 00:00:00+00:00')) & (b['start_datetime'] < pandas.Timestamp('2024-03-01 00:00:00+00:00'))]))\n", - " \n", + "\n", "# print(len(a.loc[(a['videocopy_no_video']) & (a['start_datetime'] > pandas.Timestamp('2024-03-01 00:00:00+00:00')) & (a['start_datetime'] < pandas.Timestamp('2024-04-07 00:00:00+00:00'))]))\n", "# print(len(b.loc[(b['videocopy_no_video']) & (b['start_datetime'] > pandas.Timestamp('2024-03-01 00:00:00+00:00')) & (b['start_datetime'] < pandas.Timestamp('2024-04-07 00:00:00+00:00'))]))\n", "\n", @@ -6322,12 +6602,15 @@ } ], "source": [ - "stpat_gpsdata = awswrangler.athena.read_sql_query(f\"SELECT * from stpatrick_v1_gpsdata where datetime > '2024-01-01' and gps_datetime > '2024-01-01'\", database='tnc_edge')\n", - "for col in ['datetime', 'gps_datetime']:\n", + "stpat_gpsdata = awswrangler.athena.read_sql_query(\n", + " \"SELECT * from stpatrick_v1_gpsdata where datetime > '2024-01-01' and gps_datetime > '2024-01-01'\",\n", + " database=\"tnc_edge\",\n", + ")\n", + "for col in [\"datetime\", \"gps_datetime\"]:\n", " stpat_gpsdata[col] = pandas.to_datetime(stpat_gpsdata[col], utc=True)\n", - "for col in ['lat', 'lon']:\n", + "for col in [\"lat\", \"lon\"]:\n", " stpat_gpsdata[col] = pandas.to_numeric(stpat_gpsdata[col])\n", - "stpat_gpsdata = stpat_gpsdata[['datetime', 'gps_datetime','lat', 'lon']]\n", + "stpat_gpsdata = stpat_gpsdata[[\"datetime\", \"gps_datetime\", \"lat\", \"lon\"]]\n", "stpat_gpsdata" ] }, @@ -7396,49 +7679,64 @@ } ], "source": [ - "stpat_gpsdata['lateness'] = stpat_gpsdata['datetime'] - stpat_gpsdata['gps_datetime']\n", + "stpat_gpsdata[\"lateness\"] = stpat_gpsdata[\"datetime\"] - stpat_gpsdata[\"gps_datetime\"]\n", "stpat_gpsdata\n", "\n", - "stpat_gpsdata.loc[stpat_gpsdata['lateness'] > timedelta(minutes=30)].sort_values('lateness')\n", + 
"stpat_gpsdata.loc[stpat_gpsdata[\"lateness\"] > timedelta(minutes=30)].sort_values(\"lateness\")\n", "\n", - "print('max lateness', stpat_gpsdata['lateness'].map(lambda x: x.total_seconds()/60).max())\n", + "print(\"max lateness\", stpat_gpsdata[\"lateness\"].map(lambda x: x.total_seconds() / 60).max())\n", "\n", "min_lateness = 21.2\n", - "max_lateness = 1440 # 1 day\n", + "max_lateness = 1440 # 1 day\n", "# bins = numpy.logspace(math.log(min_lateness,10), math.log(1+stpat_gpsdata['lateness'].map(lambda x: x.total_seconds()/60).max(),10), num=100)\n", - "bins = numpy.logspace(math.log(min_lateness,10), math.log(max_lateness,10), num=100)\n", + "bins = numpy.logspace(math.log10(min_lateness), math.log10(max_lateness), num=100)\n", "bins = list(map(lambda x: timedelta(minutes=x), bins))\n", "bins\n", - "stpat_gpsdata['latenessbucket'] = pandas.cut(stpat_gpsdata.loc[stpat_gpsdata['lateness'] > timedelta(minutes=min_lateness)]['lateness'], bins=bins)\n", - "stpat_gpsdata.groupby('latenessbucket')['lateness'].count().plot.bar(figsize=(10,3))\n", + "stpat_gpsdata[\"latenessbucket\"] = pandas.cut(\n", + " stpat_gpsdata.loc[stpat_gpsdata[\"lateness\"] > timedelta(minutes=min_lateness)][\"lateness\"],\n", + " bins=bins,\n", + ")\n", + "stpat_gpsdata.groupby(\"latenessbucket\")[\"lateness\"].count().plot.bar(figsize=(10, 3))\n", "\n", "print(\"using lateness cutoff of 21minutes\")\n", "\n", - "thaloslogs_stpatrick_uptime_ts_df = pandas.read_pickle('thaloslogs_stpatrick_uptime_ts_df.pickle')\n", - "thaloslogs_stpatrick_downevent = thaloslogs_stpatrick_uptime_ts_df.loc[thaloslogs_stpatrick_uptime_ts_df['status'] == ' down']\n", + "thaloslogs_stpatrick_uptime_ts_df = pandas.read_pickle(\"thaloslogs_stpatrick_uptime_ts_df.pickle\")\n", + "thaloslogs_stpatrick_downevent = thaloslogs_stpatrick_uptime_ts_df.loc[\n", + " thaloslogs_stpatrick_uptime_ts_df[\"status\"] == \" down\"\n", + "]\n", "\n", - "thaloslogs_stpatrick_downevent = thaloslogs_stpatrick_downevent.loc[thaloslogs_stpatrick_downevent.index < '2024-04-08']\n", - "downevent_dts = thaloslogs_stpatrick_downevent.loc[thaloslogs_stpatrick_downevent['datetime'].notna()]['datetime']\n", + "thaloslogs_stpatrick_downevent = thaloslogs_stpatrick_downevent.loc[\n", + " thaloslogs_stpatrick_downevent.index < \"2024-04-08\"\n", + "]\n", + "downevent_dts = thaloslogs_stpatrick_downevent.loc[\n", + " thaloslogs_stpatrick_downevent[\"datetime\"].notna()\n", + "][\"datetime\"]\n", "downevent_dts = pandas.to_datetime(downevent_dts, utc=True)\n", "\n", - "testtime = datetime.fromisoformat('2024-01-03T22:29:41+00:00').astimezone(timezone.utc)\n", + "testtime = datetime.fromisoformat(\"2024-01-03T22:29:41+00:00\").astimezone(UTC)\n", "\n", "# pandas.to_datetime(datetime.now().astimezone(timezone.utc))\n", "# pandas.to_datetime(downevent_dts, utc=True)\n", "# downevent_dts.map(lambda y: abs((testtime - y).total_seconds()) < 21*60).any()\n", "\n", "\n", - "#following works, but is very slow\n", + "# following works, but is very slow\n", "# stpat_gpsdata['near_down_event'] = stpat_gpsdata['gps_datetime'].map(lambda x: downevent_dts.map(lambda y: abs((x - y).total_seconds()) < 21*60).any())\n", "\n", "td_21m = timedelta(minutes=21, seconds=12)\n", - "#following works, is slightly less slow\n", - "stpat_gpsdata['just_before_downevent'] = stpat_gpsdata['gps_datetime'].map(lambda x: ((downevent_dts >= x - td_21m) & (downevent_dts <= x )).any() )\n", - "stpat_gpsdata['just_after_downevent'] = stpat_gpsdata['gps_datetime'].map(lambda x: ((downevent_dts >= x ) & 
(downevent_dts <= x + td_21m)).any() )\n", + "# following works, is slightly less slow\n", + "stpat_gpsdata[\"just_before_downevent\"] = stpat_gpsdata[\"gps_datetime\"].map(\n", + " lambda x: ((downevent_dts >= x - td_21m) & (downevent_dts <= x)).any()\n", + ")\n", + "stpat_gpsdata[\"just_after_downevent\"] = stpat_gpsdata[\"gps_datetime\"].map(\n", + " lambda x: ((downevent_dts >= x) & (downevent_dts <= x + td_21m)).any()\n", + ")\n", "\n", "\n", - "stpat_gpsdata['gps_late_integration_err'] = (stpat_gpsdata['lateness'] > td_21m ) & (stpat_gpsdata['just_after_downevent'] == False)\n", - "display(stpat_gpsdata.loc[stpat_gpsdata['gps_late_integration_err']])\n", + "stpat_gpsdata[\"gps_late_integration_err\"] = (stpat_gpsdata[\"lateness\"] > td_21m) & (\n", + " stpat_gpsdata[\"just_after_downevent\"] == False\n", + ")\n", + "display(stpat_gpsdata.loc[stpat_gpsdata[\"gps_late_integration_err\"]])\n", "# stpat_gpsdata.loc[(stpat_gpsdata['lateness'] > td_21m ) & (stpat_gpsdata['just_after_downevent'] == True)].sort_values('gps_datetime')\n", "\n", "# stpat_gpsdata['status'] = stpat_gpsdata.apply(lambda x: thaloslogs_stpatrick_uptime_ts_df.loc[x['gps_datetime'].replace(minute=math.floor(x['gps_datetime'].minute/5)*5,second=0)]['status'],axis=1)\n", @@ -7447,10 +7745,19 @@ "# if I were to declare that on status == 'down' there is no integration error, I would only save like 14 rows. Not worth my time.\n", "\n", "\n", - "print('len', len(stpat_gpsdata['gps_late_integration_err']))\n", - "print('num of int errors', stpat_gpsdata['gps_late_integration_err'].sum())\n", - "print('num of ok ', (stpat_gpsdata['gps_late_integration_err'] == False).sum())\n", - "print('num of errors on boundary', ((stpat_gpsdata['lateness'] > td_21m ) & ((stpat_gpsdata['just_before_downevent'] == True) | (stpat_gpsdata['just_after_downevent'] == True)) ).sum())\n" + "print(\"len\", len(stpat_gpsdata[\"gps_late_integration_err\"]))\n", + "print(\"num of int errors\", stpat_gpsdata[\"gps_late_integration_err\"].sum())\n", + "print(\"num of ok \", (stpat_gpsdata[\"gps_late_integration_err\"] == False).sum())\n", + "print(\n", + " \"num of errors on boundary\",\n", + " (\n", + " (stpat_gpsdata[\"lateness\"] > td_21m)\n", + " & (\n", + " (stpat_gpsdata[\"just_before_downevent\"] == True)\n", + " | (stpat_gpsdata[\"just_after_downevent\"] == True)\n", + " )\n", + " ).sum(),\n", + ")\n" ] }, { @@ -7470,17 +7777,21 @@ ], "source": [ "# display_full(stpat_gpsdata.loc[stpat_gpsdata['lateness'] > td_21m].sort_values('lateness'))\n", - "print(\"bef\" , len(stpat_gpsdata.loc[stpat_gpsdata['just_before_downevent']]))\n", + "print(\"bef\", len(stpat_gpsdata.loc[stpat_gpsdata[\"just_before_downevent\"]]))\n", "# display_full(stpat_gpsdata.loc[stpat_gpsdata['just_before_downevent']].sort_values('gps_datetime'))\n", - "print(\"aft\" ,len(stpat_gpsdata.loc[stpat_gpsdata['just_after_downevent']]))\n", + "print(\"aft\", len(stpat_gpsdata.loc[stpat_gpsdata[\"just_after_downevent\"]]))\n", "# display_full(stpat_gpsdata.loc[stpat_gpsdata['just_after_downevent']].sort_values('gps_datetime'))\n", - "thaloslogs_stpatrick_uptime_ts_df['datetime'] = pandas.to_datetime(thaloslogs_stpatrick_uptime_ts_df['datetime'], utc=True)\n", - "updownevents = thaloslogs_stpatrick_uptime_ts_df[thaloslogs_stpatrick_uptime_ts_df['datetime'].notna()].set_index('datetime')\n", + "thaloslogs_stpatrick_uptime_ts_df[\"datetime\"] = pandas.to_datetime(\n", + " thaloslogs_stpatrick_uptime_ts_df[\"datetime\"], utc=True\n", + ")\n", + "updownevents = 
thaloslogs_stpatrick_uptime_ts_df[\n", + " thaloslogs_stpatrick_uptime_ts_df[\"datetime\"].notna()\n", + "].set_index(\"datetime\")\n", "\n", - "stpat_gpsdata2 = stpat_gpsdata.set_index('gps_datetime')\n", - "stpat_gpsdata2 = stpat_gpsdata2.join(updownevents, how='outer')\n", + "stpat_gpsdata2 = stpat_gpsdata.set_index(\"gps_datetime\")\n", + "stpat_gpsdata2 = stpat_gpsdata2.join(updownevents, how=\"outer\")\n", "stpat_gpsdata2 = stpat_gpsdata2.sort_index()\n", - "stpat_gpsdata2['status'] = stpat_gpsdata2['status'].ffill()\n", + "stpat_gpsdata2[\"status\"] = stpat_gpsdata2[\"status\"].ffill()\n", "# display_full(stpat_gpsdata2.loc[(stpat_gpsdata2['status'] == ' down') | (stpat_gpsdata2['status'].shift(1) == ' down') | (stpat_gpsdata2['status'].shift(-1) == ' down')])" ] }, @@ -8086,12 +8397,15 @@ } ], "source": [ - "branc_gpsdata = awswrangler.athena.read_sql_query(f\"SELECT * from brancol_v1_gpsdata where datetime > '2024-01-01' and gps_datetime > '2024-01-01'\", database='tnc_edge')\n", - "for col in ['datetime', 'gps_datetime']:\n", + "branc_gpsdata = awswrangler.athena.read_sql_query(\n", + " \"SELECT * from brancol_v1_gpsdata where datetime > '2024-01-01' and gps_datetime > '2024-01-01'\",\n", + " database=\"tnc_edge\",\n", + ")\n", + "for col in [\"datetime\", \"gps_datetime\"]:\n", " branc_gpsdata[col] = pandas.to_datetime(branc_gpsdata[col], utc=True)\n", - "for col in ['lat', 'lon']:\n", + "for col in [\"lat\", \"lon\"]:\n", " branc_gpsdata[col] = pandas.to_numeric(branc_gpsdata[col])\n", - "branc_gpsdata = branc_gpsdata[['datetime', 'gps_datetime','lat', 'lon']]\n", + "branc_gpsdata = branc_gpsdata[[\"datetime\", \"gps_datetime\", \"lat\", \"lon\"]]\n", "branc_gpsdata" ] }, @@ -8129,63 +8443,109 @@ } ], "source": [ - "branc_gpsdata['lateness'] = branc_gpsdata['datetime'] - branc_gpsdata['gps_datetime']\n", + "branc_gpsdata[\"lateness\"] = branc_gpsdata[\"datetime\"] - branc_gpsdata[\"gps_datetime\"]\n", "branc_gpsdata\n", "\n", - "branc_gpsdata.loc[branc_gpsdata['lateness'] > timedelta(minutes=30)].sort_values('lateness')\n", + "branc_gpsdata.loc[branc_gpsdata[\"lateness\"] > timedelta(minutes=30)].sort_values(\"lateness\")\n", "\n", - "print('max lateness', branc_gpsdata['lateness'].map(lambda x: x.total_seconds()/60).max())\n", + "print(\"max lateness\", branc_gpsdata[\"lateness\"].map(lambda x: x.total_seconds() / 60).max())\n", "\n", "min_lateness = 21.2\n", - "max_lateness = 1440 # 1 day\n", + "max_lateness = 1440 # 1 day\n", "# bins = numpy.logspace(math.log(min_lateness,10), math.log(1+branc_gpsdata['lateness'].map(lambda x: x.total_seconds()/60).max(),10), num=100)\n", - "bins = numpy.logspace(math.log(min_lateness,10), math.log(max_lateness,10), num=100)\n", + "bins = numpy.logspace(math.log10(min_lateness), math.log10(max_lateness), num=100)\n", "bins = list(map(lambda x: timedelta(minutes=x), bins))\n", "bins\n", - "branc_gpsdata['latenessbucket'] = pandas.cut(branc_gpsdata.loc[branc_gpsdata['lateness'] > timedelta(minutes=min_lateness)]['lateness'], bins=bins)\n", - "branc_gpsdata.groupby('latenessbucket')['lateness'].count().plot.bar(figsize=(10,3))\n", + "branc_gpsdata[\"latenessbucket\"] = pandas.cut(\n", + " branc_gpsdata.loc[branc_gpsdata[\"lateness\"] > timedelta(minutes=min_lateness)][\"lateness\"],\n", + " bins=bins,\n", + ")\n", + "branc_gpsdata.groupby(\"latenessbucket\")[\"lateness\"].count().plot.bar(figsize=(10, 3))\n", "\n", "print(\"using lateness cutoff of 21minutes\")\n", "\n", - "thaloslogs_brancol_uptime_ts_df = 
pandas.read_pickle('thaloslogs_brancol_uptime_ts_df.pickle')\n", - "thaloslogs_brancol_downevent = thaloslogs_brancol_uptime_ts_df.loc[thaloslogs_brancol_uptime_ts_df['status'] == ' down']\n", + "thaloslogs_brancol_uptime_ts_df = pandas.read_pickle(\"thaloslogs_brancol_uptime_ts_df.pickle\")\n", + "thaloslogs_brancol_downevent = thaloslogs_brancol_uptime_ts_df.loc[\n", + " thaloslogs_brancol_uptime_ts_df[\"status\"] == \" down\"\n", + "]\n", "\n", - "thaloslogs_brancol_downevent = thaloslogs_brancol_downevent.loc[thaloslogs_brancol_downevent.index < '2024-04-08']\n", - "downevent_dts = thaloslogs_brancol_downevent.loc[thaloslogs_brancol_downevent['datetime'].notna()]['datetime']\n", + "thaloslogs_brancol_downevent = thaloslogs_brancol_downevent.loc[\n", + " thaloslogs_brancol_downevent.index < \"2024-04-08\"\n", + "]\n", + "downevent_dts = thaloslogs_brancol_downevent.loc[thaloslogs_brancol_downevent[\"datetime\"].notna()][\n", + " \"datetime\"\n", + "]\n", "downevent_dts = pandas.to_datetime(downevent_dts, utc=True)\n", "\n", - "testtime = datetime.fromisoformat('2024-01-03T22:29:41+00:00').astimezone(timezone.utc)\n", + "testtime = datetime.fromisoformat(\"2024-01-03T22:29:41+00:00\").astimezone(UTC)\n", "\n", "# pandas.to_datetime(datetime.now().astimezone(timezone.utc))\n", "# pandas.to_datetime(downevent_dts, utc=True)\n", "# downevent_dts.map(lambda y: abs((testtime - y).total_seconds()) < 21*60).any()\n", "\n", "\n", - "#following works, but is very slow\n", + "# following works, but is very slow\n", "# branc_gpsdata['near_down_event'] = branc_gpsdata['gps_datetime'].map(lambda x: downevent_dts.map(lambda y: abs((x - y).total_seconds()) < 21*60).any())\n", "\n", "td_21m = timedelta(minutes=21, seconds=12)\n", "\n", - "#following works, is slightly less slow\n", - "branc_gpsdata['just_before_downevent'] = branc_gpsdata['gps_datetime'].map(lambda x: ((downevent_dts >= x - td_21m) & (downevent_dts <= x )).any() )\n", - "branc_gpsdata['just_after_downevent'] = branc_gpsdata['gps_datetime'].map(lambda x: ((downevent_dts >= x ) & (downevent_dts <= x + td_21m)).any() )\n", + "# following works, is slightly less slow\n", + "branc_gpsdata[\"just_before_downevent\"] = branc_gpsdata[\"gps_datetime\"].map(\n", + " lambda x: ((downevent_dts >= x - td_21m) & (downevent_dts <= x)).any()\n", + ")\n", + "branc_gpsdata[\"just_after_downevent\"] = branc_gpsdata[\"gps_datetime\"].map(\n", + " lambda x: ((downevent_dts >= x) & (downevent_dts <= x + td_21m)).any()\n", + ")\n", "\n", "\n", - "branc_gpsdata['status'] = branc_gpsdata.apply(lambda x: thaloslogs_brancol_uptime_ts_df.loc[x['gps_datetime'].replace(minute=math.floor(x['gps_datetime'].minute/5)*5,second=0)]['status'],axis=1)\n", + "branc_gpsdata[\"status\"] = branc_gpsdata.apply(\n", + " lambda x: thaloslogs_brancol_uptime_ts_df.loc[\n", + " x[\"gps_datetime\"].replace(minute=math.floor(x[\"gps_datetime\"].minute / 5) * 5, second=0)\n", + " ][\"status\"],\n", + " axis=1,\n", + ")\n", "\n", - "print('branc num of lates where because down', len(branc_gpsdata.loc[(branc_gpsdata['lateness'] > td_21m ) & (branc_gpsdata['status'] == ' down')]))\n", - "print('branc num of lates where because before_down', len(branc_gpsdata.loc[(branc_gpsdata['lateness'] > td_21m ) & (branc_gpsdata['just_before_downevent'])]))\n", + "print(\n", + " \"branc num of lates where because down\",\n", + " len(\n", + " branc_gpsdata.loc[\n", + " (branc_gpsdata[\"lateness\"] > td_21m) & (branc_gpsdata[\"status\"] == \" down\")\n", + " ]\n", + " ),\n", + ")\n", + "print(\n", + " 
\"branc num of lates where because before_down\",\n", + " len(\n", + " branc_gpsdata.loc[\n", + " (branc_gpsdata[\"lateness\"] > td_21m) & (branc_gpsdata[\"just_before_downevent\"])\n", + " ]\n", + " ),\n", + ")\n", "# if I were to declare that on status == 'down' there is no integration error, I would only save like 14 rows. Not worth my time.\n", "\n", - "branc_gpsdata['gps_late_integration_err'] = (branc_gpsdata['lateness'] > td_21m ) & (branc_gpsdata['just_after_downevent'] == False) & (branc_gpsdata['just_before_downevent'] == False) & (branc_gpsdata['status'] == ' up') \n", + "branc_gpsdata[\"gps_late_integration_err\"] = (\n", + " (branc_gpsdata[\"lateness\"] > td_21m)\n", + " & (branc_gpsdata[\"just_after_downevent\"] == False)\n", + " & (branc_gpsdata[\"just_before_downevent\"] == False)\n", + " & (branc_gpsdata[\"status\"] == \" up\")\n", + ")\n", "# display_full(branc_gpsdata.loc[branc_gpsdata['gps_late_integration_err']].sort_values('gps_datetime'))\n", "# branc_gpsdata.loc[(branc_gpsdata['lateness'] > td_21m ) & (branc_gpsdata['just_after_downevent'] == True)].sort_values('gps_datetime')\n", "\n", "\n", - "print('len', len(branc_gpsdata['gps_late_integration_err']))\n", - "print('num of int errors', branc_gpsdata['gps_late_integration_err'].sum())\n", - "print('num of ok ', (branc_gpsdata['gps_late_integration_err'] == False).sum())\n", - "print('num of errors on boundary', ((branc_gpsdata['lateness'] > td_21m ) & ((branc_gpsdata['just_before_downevent'] == True) | (branc_gpsdata['just_after_downevent'] == True)) ).sum())\n" + "print(\"len\", len(branc_gpsdata[\"gps_late_integration_err\"]))\n", + "print(\"num of int errors\", branc_gpsdata[\"gps_late_integration_err\"].sum())\n", + "print(\"num of ok \", (branc_gpsdata[\"gps_late_integration_err\"] == False).sum())\n", + "print(\n", + " \"num of errors on boundary\",\n", + " (\n", + " (branc_gpsdata[\"lateness\"] > td_21m)\n", + " & (\n", + " (branc_gpsdata[\"just_before_downevent\"] == True)\n", + " | (branc_gpsdata[\"just_after_downevent\"] == True)\n", + " )\n", + " ).sum(),\n", + ")\n" ] }, { @@ -8195,21 +8555,22 @@ "metadata": {}, "outputs": [], "source": [ - "from pathlib import Path\n", - "from collections import defaultdict\n", "import json\n", + "from collections import defaultdict\n", + "from pathlib import Path\n", "\n", - "a='brancol_jan_aifishoutput'\n", - "b='brancol_feb_aifish_output'\n", - "c='brancol_mar_aifish_output'\n", + "a = \"brancol_jan_aifishoutput\"\n", + "b = \"brancol_feb_aifish_output\"\n", + "c = \"brancol_mar_aifish_output\"\n", "\n", - "aiff = [ x for i in [a, b, c] for x in Path(i).iterdir() ]\n", + "aiff = [x for i in [a, b, c] for x in Path(i).iterdir()]\n", "\n", - "aiff = pandas.DataFrame({'f': aiff})\n", + "aiff = pandas.DataFrame({\"f\": aiff})\n", "# len(aiff)\n", - "aiff['name'] = aiff['f'].apply(lambda x: x.name)\n", - "aiff = aiff.loc[aiff['name'].apply(lambda x: x.endswith('.json'))]\n", - "aiff['forjoin'] = aiff['name'].apply(lambda x: '/videos/output/' + x)\n", + "aiff[\"name\"] = aiff[\"f\"].apply(lambda x: x.name)\n", + "aiff = aiff.loc[aiff[\"name\"].apply(lambda x: x.endswith(\".json\"))]\n", + "aiff[\"forjoin\"] = aiff[\"name\"].apply(lambda x: \"/videos/output/\" + x)\n", + "\n", "\n", "def do_fcnt_allcnt_mdc(f: Path):\n", " with f.open() as d:\n", @@ -8220,34 +8581,41 @@ " # error handling here\n", " return (0, 0, 0)\n", "\n", - " fish_detections = list(filter(lambda d: d.get('class_name') == 'fish', detections))\n", + " fish_detections = list(filter(lambda d: 
d.get(\"class_name\") == \"fish\", detections))\n", "\n", " if len(fish_detections) == 0:\n", " # error handling here\n", " return (0, len(detections), 0)\n", "\n", - " detectionconfidences = list(filter(lambda x: x is not None, map(lambda d: d.get('object_confidence'), fish_detections)))\n", + " detectionconfidences = list(\n", + " filter(\n", + " lambda x: x is not None,\n", + " map(lambda d: d.get(\"object_confidence\"), fish_detections),\n", + " )\n", + " )\n", " # = max(map(lambda detection: detection.get('object_confidence'), detections))\n", " # trackedconfidences = []\n", "\n", " tracks = defaultdict(list)\n", " for d in fish_detections:\n", - " tracks[d.get('track')].append(d)\n", + " tracks[d.get(\"track\")].append(d)\n", "\n", " cnt = len(tracks.keys())\n", "\n", " if len(detectionconfidences) > 0:\n", - " meandetectionconfidence = float(sum(detectionconfidences)) / float(len(detectionconfidences))\n", + " meandetectionconfidence = float(sum(detectionconfidences)) / float(\n", + " len(detectionconfidences)\n", + " )\n", " else:\n", " meandetectionconfidence = 0\n", - " \n", + "\n", " return (cnt, len(detections), meandetectionconfidence)\n", - " \n", + "\n", " except json.JSONDecodeError:\n", - " print('json error in ' + f.name)\n", - " \n", + " print(\"json error in \" + f.name)\n", + "\n", "\n", - "aiff['fcnt_allcnt_mdc'] = aiff['f'].apply(do_fcnt_allcnt_mdc)\n", + "aiff[\"fcnt_allcnt_mdc\"] = aiff[\"f\"].apply(do_fcnt_allcnt_mdc)\n", "aiff\n", "# branc_vidsaiout[output_uri]" ] @@ -10256,31 +10624,65 @@ } ], "source": [ - "aiff['fcnt'] = aiff['fcnt_allcnt_mdc'].apply(lambda x: pandas.NA if x is None else x[0])\n", - "aiff['allcnt'] = aiff['fcnt_allcnt_mdc'].apply(lambda x: pandas.NA if x is None else x[1])\n", - "aiff['mdc'] = aiff['fcnt_allcnt_mdc'].apply(lambda x: pandas.NA if x is None else x[2])\n", + "aiff[\"fcnt\"] = aiff[\"fcnt_allcnt_mdc\"].apply(lambda x: pandas.NA if x is None else x[0])\n", + "aiff[\"allcnt\"] = aiff[\"fcnt_allcnt_mdc\"].apply(lambda x: pandas.NA if x is None else x[1])\n", + "aiff[\"mdc\"] = aiff[\"fcnt_allcnt_mdc\"].apply(lambda x: pandas.NA if x is None else x[2])\n", "\n", "display(branc_vidsaiout.columns)\n", "\n", - "toofast_df = branc_vidsaiout.join(aiff.set_index('forjoin'), on='output_uri')\n", - "\n", - "print('matchingcounts', (toofast_df['fcnt'] == toofast_df['count']).sum())\n", - "print('nonmatchingcounts', (toofast_df['fcnt'] != toofast_df['count']).sum())\n", - "print('nonmatchingcounts higher', (toofast_df['fcnt'] > toofast_df['count']).sum())\n", - "print('nonmatchingcounts lower', (toofast_df['fcnt'] < toofast_df['count']).sum())\n", - "print('0fish,somenonfish', ((toofast_df['fcnt'] == 0 ) & ( toofast_df['allcnt'] > 0)).sum())\n", - "\n", - "\n", - "print('nonmatchingcounts that match toofast', ((toofast_df['fcnt'] != toofast_df['count']) & (toofast_df['ai_crash_output_too_fast'])).sum())\n", - "print('somenonefish that match toofast', (( toofast_df['allcnt'] > 0) & (toofast_df['ai_crash_output_too_fast'])).sum())\n", - "print('legit toofast', ((toofast_df['fcnt'] == toofast_df['count']) & (toofast_df['allcnt'] == toofast_df['count']) & (toofast_df['ai_crash_output_too_fast'])).sum())\n", - "toofast_df.loc[(toofast_df['fcnt'] == toofast_df['count']) & (toofast_df['allcnt'] == toofast_df['count']) & (toofast_df['ai_crash_output_too_fast'])][['start_datetime', 'output_uri', 'datetime',\n", - " 'count', 'runtimems', 'detection_confidence', 'status',\n", - " 'videocopy_found_unable_to_copy', 'videocopy_lateness', 'videocopy_ok',\n", 
- " 'ai_vidok_but_did_not_try', 'ai_crash_no_output',\n", - " 'ai_outputed_but_cant_parse', 'ai_crash_output_too_fast', 'ai_ok',\n", - " 'videocopy_latenessbucket', 'videocopy_late',\n", - " 'fcnt','allcnt', 'mdc']]" + "toofast_df = branc_vidsaiout.join(aiff.set_index(\"forjoin\"), on=\"output_uri\")\n", + "\n", + "print(\"matchingcounts\", (toofast_df[\"fcnt\"] == toofast_df[\"count\"]).sum())\n", + "print(\"nonmatchingcounts\", (toofast_df[\"fcnt\"] != toofast_df[\"count\"]).sum())\n", + "print(\"nonmatchingcounts higher\", (toofast_df[\"fcnt\"] > toofast_df[\"count\"]).sum())\n", + "print(\"nonmatchingcounts lower\", (toofast_df[\"fcnt\"] < toofast_df[\"count\"]).sum())\n", + "print(\"0fish,somenonfish\", ((toofast_df[\"fcnt\"] == 0) & (toofast_df[\"allcnt\"] > 0)).sum())\n", + "\n", + "\n", + "print(\n", + " \"nonmatchingcounts that match toofast\",\n", + " ((toofast_df[\"fcnt\"] != toofast_df[\"count\"]) & (toofast_df[\"ai_crash_output_too_fast\"])).sum(),\n", + ")\n", + "print(\n", + " \"somenonefish that match toofast\",\n", + " ((toofast_df[\"allcnt\"] > 0) & (toofast_df[\"ai_crash_output_too_fast\"])).sum(),\n", + ")\n", + "print(\n", + " \"legit toofast\",\n", + " (\n", + " (toofast_df[\"fcnt\"] == toofast_df[\"count\"])\n", + " & (toofast_df[\"allcnt\"] == toofast_df[\"count\"])\n", + " & (toofast_df[\"ai_crash_output_too_fast\"])\n", + " ).sum(),\n", + ")\n", + "toofast_df.loc[\n", + " (toofast_df[\"fcnt\"] == toofast_df[\"count\"])\n", + " & (toofast_df[\"allcnt\"] == toofast_df[\"count\"])\n", + " & (toofast_df[\"ai_crash_output_too_fast\"])\n", + "][\n", + " [\n", + " \"start_datetime\",\n", + " \"output_uri\",\n", + " \"datetime\",\n", + " \"count\",\n", + " \"runtimems\",\n", + " \"detection_confidence\",\n", + " \"status\",\n", + " \"videocopy_found_unable_to_copy\",\n", + " \"videocopy_lateness\",\n", + " \"videocopy_ok\",\n", + " \"ai_vidok_but_did_not_try\",\n", + " \"ai_crash_no_output\",\n", + " \"ai_outputed_but_cant_parse\",\n", + " \"ai_crash_output_too_fast\",\n", + " \"ai_ok\",\n", + " \"videocopy_latenessbucket\",\n", + " \"videocopy_late\",\n", + " \"fcnt\",\n", + " \"allcnt\",\n", + " \"mdc\",\n", + " ]\n", + "]" ] }, { @@ -10299,9 +10701,9 @@ } ], "source": [ - "rug = pandas.read_pickle('integration_state_evaluations_videocopy_ai.pickle')\n", - "print('aitoofast', rug['ai_crash_output_too_fast'].sum())\n", - "print('aitoofast', (~rug['ai_crash_output_too_fast']).sum())" + "rug = pandas.read_pickle(\"integration_state_evaluations_videocopy_ai.pickle\")\n", + "print(\"aitoofast\", rug[\"ai_crash_output_too_fast\"].sum())\n", + "print(\"aitoofast\", (~rug[\"ai_crash_output_too_fast\"]).sum())" ] } ], diff --git a/notebooks/tnc-edge-gps-speed.ipynb b/notebooks/tnc-edge-gps-speed.ipynb index 828a874..24a4293 100644 --- a/notebooks/tnc-edge-gps-speed.ipynb +++ b/notebooks/tnc-edge-gps-speed.ipynb @@ -17,16 +17,16 @@ "metadata": {}, "outputs": [], "source": [ - "import pandas\n", - "import numpy as np\n", - "from datetime import datetime, date, time, timezone, timedelta\n", - "from dateutil.parser import parse as parse_datetime\n", + "\n", "# help(np.argwhere)\n", - "import re\n", + "from datetime import timedelta\n", + "\n", "import awswrangler as wr\n", "import boto3\n", - "import math\n", - "boto3.setup_default_session(profile_name='XXXXXXXX')\n" + "import numpy as np\n", + "import pandas\n", + "\n", + "boto3.setup_default_session(profile_name=\"XXXXXXXX\")\n" ] }, { @@ -183,7 +183,10 @@ } ], "source": [ - "gps_df = wr.athena.read_sql_query(\"SELECT datetime 
,gps_datetime ,lat ,lon from stpatrick_v1_gpsdata where gps_datetime > '2024-01-01';\", database=\"tnc_edge\")\n", + "gps_df = wr.athena.read_sql_query(\n", + " \"SELECT datetime ,gps_datetime ,lat ,lon from stpatrick_v1_gpsdata where gps_datetime > '2024-01-01';\",\n", + " database=\"tnc_edge\",\n", + ")\n", "\n", "\n", "gps_df" @@ -414,41 +417,54 @@ } ], "source": [ - "if gps_df['lat'].dtype != 'float64':\n", - " gps_df['lat'] = pandas.to_numeric(gps_df['lat'])\n", - "if gps_df['lon'].dtype != 'float64':\n", - " gps_df['lon'] = pandas.to_numeric(gps_df['lon'])\n", - "if gps_df['gps_datetime'].dtype != 'object':\n", - " gps_df['gps_datetime'] = pandas.to_datetime(gps_df['gps_datetime'])\n", + "if gps_df[\"lat\"].dtype != \"float64\":\n", + " gps_df[\"lat\"] = pandas.to_numeric(gps_df[\"lat\"])\n", + "if gps_df[\"lon\"].dtype != \"float64\":\n", + " gps_df[\"lon\"] = pandas.to_numeric(gps_df[\"lon\"])\n", + "if gps_df[\"gps_datetime\"].dtype != \"object\":\n", + " gps_df[\"gps_datetime\"] = pandas.to_datetime(gps_df[\"gps_datetime\"])\n", "\n", "\n", - "gps_df = gps_df.sort_values('gps_datetime')\n", - " \n", + "gps_df = gps_df.sort_values(\"gps_datetime\")\n", "\n", - "gps_df['prev_lat'] = gps_df['lat'].shift(1)\n", - "gps_df['prev_lon'] = gps_df['lon'].shift(1)\n", - "gps_df['prev_dt'] = gps_df['gps_datetime'].shift(1)\n", "\n", - "if gps_df['prev_dt'].dtype != 'object':\n", - " gps_df['prev_dt'] = pandas.to_datetime(gps_df['prev_dt'])\n", + "gps_df[\"prev_lat\"] = gps_df[\"lat\"].shift(1)\n", + "gps_df[\"prev_lon\"] = gps_df[\"lon\"].shift(1)\n", + "gps_df[\"prev_dt\"] = gps_df[\"gps_datetime\"].shift(1)\n", "\n", + "if gps_df[\"prev_dt\"].dtype != \"object\":\n", + " gps_df[\"prev_dt\"] = pandas.to_datetime(gps_df[\"prev_dt\"])\n", "\n", - "gps_df['kph'] = np.power(\n", - " np.power((gps_df['lat'] - gps_df['prev_lat'])*110.574, 2) + \n", - " np.power(np.cos(gps_df['lat']*3.14159265/180)*(gps_df['lon']-gps_df['prev_lon'])*111.320, 2), 0.5\n", - " )/((gps_df['gps_datetime'] - gps_df['prev_dt']).dt.total_seconds()/3600)\n", "\n", - "gps_df['clockheading'] = (6 - \n", - " np.sign(gps_df['lon']-gps_df['prev_lon'])\n", - " *(np.sign(gps_df['lon']-gps_df['prev_lon'])\n", - " *np.arctan(\n", - " (gps_df['lat'] - gps_df['prev_lat'])\n", - " /(np.cos(gps_df['lat']*3.14159265/180)*(gps_df['lon']-gps_df['prev_lon'])))/3.14159 + 0.5 )\n", - " * 6 )\n", + "gps_df[\"kph\"] = np.power(\n", + " np.power((gps_df[\"lat\"] - gps_df[\"prev_lat\"]) * 110.574, 2)\n", + " + np.power(\n", + " np.cos(gps_df[\"lat\"] * 3.14159265 / 180) * (gps_df[\"lon\"] - gps_df[\"prev_lon\"]) * 111.320, 2\n", + " ),\n", + " 0.5,\n", + ") / ((gps_df[\"gps_datetime\"] - gps_df[\"prev_dt\"]).dt.total_seconds() / 3600)\n", + "\n", + "gps_df[\"clockheading\"] = (\n", + " 6\n", + " - np.sign(gps_df[\"lon\"] - gps_df[\"prev_lon\"])\n", + " * (\n", + " np.sign(gps_df[\"lon\"] - gps_df[\"prev_lon\"])\n", + " * np.arctan(\n", + " (gps_df[\"lat\"] - gps_df[\"prev_lat\"])\n", + " / (np.cos(gps_df[\"lat\"] * 3.14159265 / 180) * (gps_df[\"lon\"] - gps_df[\"prev_lon\"]))\n", + " )\n", + " / 3.14159\n", + " + 0.5\n", + " )\n", + " * 6\n", + ")\n", "\n", "# if the lon difference == 0, then the math above can't tell if it's north or south. 
It defaults to south (6).\n", "# check here and conditionally set to north\n", - "gps_df.loc[(gps_df['lon']-gps_df['prev_lon'] == 0) & ( gps_df['lat'] - gps_df['prev_lat'] > 0), 'clockheading'] = 0\n", + "gps_df.loc[\n", + " (gps_df[\"lon\"] - gps_df[\"prev_lon\"] == 0) & (gps_df[\"lat\"] - gps_df[\"prev_lat\"] > 0),\n", + " \"clockheading\",\n", + "] = 0\n", "\n", "gps_df" ] @@ -686,7 +702,7 @@ "# gps_df.loc[range(30465,30485)]\n", "\n", "# gps_df['gps_datetime']\n", - "gps_df[gps_df['kph'] <= 0.1]\n", + "gps_df[gps_df[\"kph\"] <= 0.1]\n", "\n", "# gps_df[gps_df['gps_datetime'] - gps_df['prev_dt'] <= np.timedelta64(0) ]\n" ] @@ -719,18 +735,17 @@ } ], "source": [ - "\n", - "gps_df['gps_datetime'] = pandas.to_datetime(gps_df['gps_datetime'], utc=True)\n", + "gps_df[\"gps_datetime\"] = pandas.to_datetime(gps_df[\"gps_datetime\"], utc=True)\n", "\n", "\n", - "avg = gps_df.groupby('gps_datetime').mean()[['kph', 'clockheading']]\n", + "avg = gps_df.groupby(\"gps_datetime\").mean()[[\"kph\", \"clockheading\"]]\n", "\n", "# i = pandas.DatetimeIndex([gps_df['gps_datetime'].min(), gps_df['gps_datetime'].max()])\n", "# gps_df_ts = pandas.DataFrame(index=i)\n", "# df['mycol'] = [0, 0]\n", "gps_df_ts = avg.resample(timedelta(minutes=30)).mean()\n", "\n", - "gps_df_ts.plot(figsize=(100,5))" + "gps_df_ts.plot(figsize=(100, 5))" ] } ], diff --git a/notebooks/tnc-edge-network-uptime.ipynb b/notebooks/tnc-edge-network-uptime.ipynb index c53afd9..b33de4c 100644 --- a/notebooks/tnc-edge-network-uptime.ipynb +++ b/notebooks/tnc-edge-network-uptime.ipynb @@ -36,36 +36,36 @@ "\n", "aws_config = {}\n", "\n", - "aws_config['profile_name'] ='XXXXXXXX'\n", - "aws_config['region_name'] = 'us-east-1'\n", + "aws_config[\"profile_name\"] = \"XXXXXXXX\"\n", + "aws_config[\"region_name\"] = \"us-east-1\"\n", "\n", "import boto3\n", "\n", "boto3.setup_default_session(**aws_config)\n", "\n", - "s3 = boto3.client('s3')\n", + "s3 = boto3.client(\"s3\")\n", "\n", "# s3.list_objects(Bucket='51-gema-dev-dp-raw' , Prefix='tnc_edge/')\n", "\n", - "import pandas\n", + "from datetime import timedelta\n", + "\n", "import awswrangler\n", - "from datetime import datetime, timezone, timedelta\n", - "from dateutil import parser\n", - "import pytz\n", + "import pandas\n", + "\n", "\n", "def display_full(x):\n", - " pandas.set_option('display.max_rows', 1000)\n", - " pandas.set_option('display.min_rows', 400)\n", - " pandas.set_option('display.max_columns', None)\n", - " pandas.set_option('display.width', 2000)\n", - " pandas.set_option('display.float_format', '{:20,.2f}'.format)\n", - " pandas.set_option('display.max_colwidth', None)\n", + " pandas.set_option(\"display.max_rows\", 1000)\n", + " pandas.set_option(\"display.min_rows\", 400)\n", + " pandas.set_option(\"display.max_columns\", None)\n", + " pandas.set_option(\"display.width\", 2000)\n", + " pandas.set_option(\"display.float_format\", \"{:20,.2f}\".format)\n", + " pandas.set_option(\"display.max_colwidth\", None)\n", " display(x)\n", - " pandas.reset_option('display.max_rows')\n", - " pandas.reset_option('display.max_columns')\n", - " pandas.reset_option('display.width')\n", - " pandas.reset_option('display.float_format')\n", - " pandas.reset_option('display.max_colwidth')\n" + " pandas.reset_option(\"display.max_rows\")\n", + " pandas.reset_option(\"display.max_columns\")\n", + " pandas.reset_option(\"display.width\")\n", + " pandas.reset_option(\"display.float_format\")\n", + " pandas.reset_option(\"display.max_colwidth\")\n" ] }, { @@ -75,7 +75,9 @@ "metadata": {}, 
"outputs": [], "source": [ - "branc_dhe = awswrangler.athena.read_sql_query(f\"SELECT * from brancol_v1_deckhandevents where datetime > '2024-01-01'\", database='tnc_edge')\n", + "branc_dhe = awswrangler.athena.read_sql_query(\n", + " \"SELECT * from brancol_v1_deckhandevents where datetime > '2024-01-01'\", database=\"tnc_edge\"\n", + ")\n", "branc_dhe.datetime = pandas.to_datetime(branc_dhe.datetime, utc=True)\n", "display(branc_dhe)" ] @@ -87,7 +89,9 @@ "metadata": {}, "outputs": [], "source": [ - "stp_dhe = awswrangler.athena.read_sql_query(f\"SELECT * from stpatrick_v1_deckhandevents where datetime > '2024-01-01'\", database='tnc_edge')\n", + "stp_dhe = awswrangler.athena.read_sql_query(\n", + " \"SELECT * from stpatrick_v1_deckhandevents where datetime > '2024-01-01'\", database=\"tnc_edge\"\n", + ")\n", "stp_dhe.datetime = pandas.to_datetime(stp_dhe.datetime, utc=True)\n", "display(stp_dhe)" ] @@ -122,39 +126,44 @@ "source": [ "import json\n", "\n", - "branc_dhe['boat'] = 'brancol'\n", - "stp_dhe['boat'] = 'stpatrick'\n", + "branc_dhe[\"boat\"] = \"brancol\"\n", + "stp_dhe[\"boat\"] = \"stpatrick\"\n", "\n", - "s = pandas.concat([branc_dhe,stp_dhe])\n", - "if 'jsonblob' in s.columns:\n", - " s['json'] = s['jsonblob'].map(json.loads)\n", - " s = s.drop('jsonblob', axis=1)\n", + "s = pandas.concat([branc_dhe, stp_dhe])\n", + "if \"jsonblob\" in s.columns:\n", + " s[\"json\"] = s[\"jsonblob\"].map(json.loads)\n", + " s = s.drop(\"jsonblob\", axis=1)\n", "# display_full(s['json'].loc[s['json'].map(lambda x: x['eventType'] == 'tripDetailsEvent')])\n", "\n", "# return\n", - "s['jsondatetime'] = s['json'].map(lambda x: x['lastCompletedTimestamp'] if x['eventType'] == 'longlineEvent' \\\n", - " else x['lastCompletedTimestamp'] if x['eventType'] == 'tripDetailsEvent' else x['eventType'])\n", - "\n", - "s['jsondatetime'] = pandas.to_datetime(s['jsondatetime'].map(lambda x: x*1000000000), utc=True)\n", - "s['diff'] = s['datetime'] - s['jsondatetime']\n", + "s[\"jsondatetime\"] = s[\"json\"].map(\n", + " lambda x: x[\"lastCompletedTimestamp\"]\n", + " if x[\"eventType\"] == \"longlineEvent\"\n", + " else x[\"lastCompletedTimestamp\"]\n", + " if x[\"eventType\"] == \"tripDetailsEvent\"\n", + " else x[\"eventType\"]\n", + ")\n", "\n", + "s[\"jsondatetime\"] = pandas.to_datetime(s[\"jsondatetime\"].map(lambda x: x * 1000000000), utc=True)\n", + "s[\"diff\"] = s[\"datetime\"] - s[\"jsondatetime\"]\n", "\n", "\n", "# s['diff'].plot()\n", - "binsandlabels = [[timedelta(seconds=0), ''],\n", - " [timedelta(seconds=1), '1sec'],\n", - " [timedelta(seconds=5), '5secs'],\n", - " [timedelta(seconds=30), '30secs'],\n", - " [timedelta(minutes=1), '1min'],\n", - " [timedelta(minutes=5), '5mins'],\n", - " [timedelta(minutes=30), '30mins'],\n", - " [timedelta(minutes=60), '1hour'],\n", - " [timedelta(days=2), '2days'],\n", - " ]\n", + "binsandlabels = [\n", + " [timedelta(seconds=0), \"\"],\n", + " [timedelta(seconds=1), \"1sec\"],\n", + " [timedelta(seconds=5), \"5secs\"],\n", + " [timedelta(seconds=30), \"30secs\"],\n", + " [timedelta(minutes=1), \"1min\"],\n", + " [timedelta(minutes=5), \"5mins\"],\n", + " [timedelta(minutes=30), \"30mins\"],\n", + " [timedelta(minutes=60), \"1hour\"],\n", + " [timedelta(days=2), \"2days\"],\n", + "]\n", "bins = [x[0] for x in binsandlabels]\n", "labels = [x[1] for x in binsandlabels][1:]\n", - "s['elog submission delay'] = pandas.cut(s['diff'], bins, labels=labels)\n", - "s.groupby('elog submission delay').count()['id'].plot(kind='bar', figsize=(7,3))" + "s[\"elog submission 
delay\"] = pandas.cut(s[\"diff\"], bins, labels=labels)\n", + "s.groupby(\"elog submission delay\").count()[\"id\"].plot(kind=\"bar\", figsize=(7, 3))" ] }, { @@ -956,7 +965,7 @@ } ], "source": [ - "display_full(s[['boat','datetime', 'jsondatetime','diff']])" + "display_full(s[[\"boat\", \"datetime\", \"jsondatetime\", \"diff\"]])" ] }, { @@ -1747,10 +1756,13 @@ } ], "source": [ - "stp_inettests = awswrangler.athena.read_sql_query(f\"SELECT id,name,type,vector_id,score,detail,datetime from stpatrick_v1_tests where vector_id = '1' and datetime > '2024-01-01'\", database='tnc_edge')\n", + "stp_inettests = awswrangler.athena.read_sql_query(\n", + " \"SELECT id,name,type,vector_id,score,detail,datetime from stpatrick_v1_tests where vector_id = '1' and datetime > '2024-01-01'\",\n", + " database=\"tnc_edge\",\n", + ")\n", "stp_inettests.datetime = pandas.to_datetime(stp_inettests.datetime, utc=True)\n", - "stp_inettests['score'] = pandas.to_numeric(stp_inettests['score'])\n", - "stp_inettests = stp_inettests.sort_values('datetime')\n", + "stp_inettests[\"score\"] = pandas.to_numeric(stp_inettests[\"score\"])\n", + "stp_inettests = stp_inettests.sort_values(\"datetime\")\n", "# display_full(stp_inettests)\n", "stp_inettests" ] @@ -1783,13 +1795,13 @@ } ], "source": [ - "stp_inet_ts = stp_inettests.set_index('datetime')\n", + "stp_inet_ts = stp_inettests.set_index(\"datetime\")\n", "\n", - "stp_inet_ts['score'] = stp_inet_ts['score'].apply(lambda x: 0 if x < 0.7 else 1)\n", + "stp_inet_ts[\"score\"] = stp_inet_ts[\"score\"].apply(lambda x: 0 if x < 0.7 else 1)\n", "\n", "stp_inet_ts = stp_inet_ts.resample(timedelta(minutes=30)).first()\n", "\n", - "stp_inet_ts['score'].plot(figsize=(150,3))" + "stp_inet_ts[\"score\"].plot(figsize=(150, 3))" ] }, { @@ -2580,10 +2592,13 @@ } ], "source": [ - "branc_inettests = awswrangler.athena.read_sql_query(f\"SELECT id,name,type,vector_id,score,detail,datetime from brancol_v1_tests where vector_id = '1' and datetime > '2024-01-01'\", database='tnc_edge')\n", + "branc_inettests = awswrangler.athena.read_sql_query(\n", + " \"SELECT id,name,type,vector_id,score,detail,datetime from brancol_v1_tests where vector_id = '1' and datetime > '2024-01-01'\",\n", + " database=\"tnc_edge\",\n", + ")\n", "branc_inettests.datetime = pandas.to_datetime(branc_inettests.datetime, utc=True)\n", - "branc_inettests['score'] = pandas.to_numeric(branc_inettests['score'])\n", - "branc_inettests = branc_inettests.sort_values('datetime')\n", + "branc_inettests[\"score\"] = pandas.to_numeric(branc_inettests[\"score\"])\n", + "branc_inettests = branc_inettests.sort_values(\"datetime\")\n", "# display_full(branc_inettests)\n", "branc_inettests" ] @@ -2616,11 +2631,11 @@ } ], "source": [ - "branc_inet_ts = branc_inettests.set_index('datetime')\n", - "branc_inet_ts['score'] = branc_inet_ts['score'].apply(lambda x: 0 if x < 0.7 else 1)\n", + "branc_inet_ts = branc_inettests.set_index(\"datetime\")\n", + "branc_inet_ts[\"score\"] = branc_inet_ts[\"score\"].apply(lambda x: 0 if x < 0.7 else 1)\n", "\n", "branc_inet_ts = branc_inet_ts.resample(timedelta(minutes=30)).first()\n", - "branc_inet_ts['score'].plot(figsize=(150,3))" + "branc_inet_ts[\"score\"].plot(figsize=(150, 3))" ] }, { @@ -2642,10 +2657,10 @@ } ], "source": [ - "b_len= branc_inettests['score'].notna().sum()\n", - "b_sum= branc_inettests['score'].apply(lambda x: 0 if x < 0.7 else 1).sum()\n", - "s_len= stp_inettests['score'].notna().sum()\n", - "s_sum= stp_inettests['score'].apply(lambda x: 0 if x < 0.7 else 1).sum()\n", + "b_len = 
branc_inettests[\"score\"].notna().sum()\n", + "b_sum = branc_inettests[\"score\"].apply(lambda x: 0 if x < 0.7 else 1).sum()\n", + "s_len = stp_inettests[\"score\"].notna().sum()\n", + "s_sum = stp_inettests[\"score\"].apply(lambda x: 0 if x < 0.7 else 1).sum()\n", "\n", "print(\"b len\", b_len)\n", "print(\"b sum\", b_sum)\n", diff --git a/notebooks/tnc-edge-ondeck-ops-df.ipynb b/notebooks/tnc-edge-ondeck-ops-df.ipynb index 3a7b5f0..c38a4d1 100644 --- a/notebooks/tnc-edge-ondeck-ops-df.ipynb +++ b/notebooks/tnc-edge-ondeck-ops-df.ipynb @@ -19,15 +19,13 @@ "source": [ "aws_config = {}\n", "\n", - "aws_config['profile_name'] ='XXXXXXXXXXX'\n", - "aws_config['region_name'] = 'us-east-1'\n", + "aws_config[\"profile_name\"] = \"XXXXXXXXXXX\"\n", + "aws_config[\"region_name\"] = \"us-east-1\"\n", "\n", + "\n", + "import awswrangler\n", "import boto3\n", "import pandas\n", - "import awswrangler\n", - "from datetime import datetime, timezone\n", - "\n", - "\n", "\n", "boto3.setup_default_session(**aws_config)\n" ] @@ -40,19 +38,18 @@ "outputs": [], "source": [ "def display_full(x):\n", - " pandas.set_option('display.max_rows', 1000)\n", - " pandas.set_option('display.min_rows', 400)\n", - " pandas.set_option('display.max_columns', None)\n", - " pandas.set_option('display.width', 2000)\n", - " pandas.set_option('display.float_format', '{:20,.2f}'.format)\n", - " pandas.set_option('display.max_colwidth', None)\n", + " pandas.set_option(\"display.max_rows\", 1000)\n", + " pandas.set_option(\"display.min_rows\", 400)\n", + " pandas.set_option(\"display.max_columns\", None)\n", + " pandas.set_option(\"display.width\", 2000)\n", + " pandas.set_option(\"display.float_format\", \"{:20,.2f}\".format)\n", + " pandas.set_option(\"display.max_colwidth\", None)\n", " display(x)\n", - " pandas.reset_option('display.max_rows')\n", - " pandas.reset_option('display.max_columns')\n", - " pandas.reset_option('display.width')\n", - " pandas.reset_option('display.float_format')\n", - " pandas.reset_option('display.max_colwidth')\n", - "\n" + " pandas.reset_option(\"display.max_rows\")\n", + " pandas.reset_option(\"display.max_columns\")\n", + " pandas.reset_option(\"display.width\")\n", + " pandas.reset_option(\"display.float_format\")\n", + " pandas.reset_option(\"display.max_colwidth\")\n" ] }, { @@ -62,16 +59,19 @@ "metadata": {}, "outputs": [], "source": [ - "s = awswrangler.athena.read_sql_query(f\"SELECT stpatrick_v1_video_files.*, \\\n", + "s = awswrangler.athena.read_sql_query(\n", + " \"SELECT stpatrick_v1_video_files.*, \\\n", "stpatrick_v1_ondeckdata.video_uri, stpatrick_v1_ondeckdata.cocoannotations_uri, stpatrick_v1_ondeckdata.datetime, \\\n", "stpatrick_v1_ondeckdata.overallcount, stpatrick_v1_ondeckdata.overallruntimems, \\\n", "stpatrick_v1_ondeckdata.tracked_confidence, stpatrick_v1_ondeckdata.status, \\\n", "stpatrick_v1_ondeckdata.overallcatches, stpatrick_v1_ondeckdata.overalldiscards, \\\n", "stpatrick_v1_ondeckdata.detection_confidence FROM stpatrick_v1_video_files \\\n", "left join stpatrick_v1_ondeckdata on decrypted_path = video_uri \\\n", - "where cam_name = 'cam1' and start_datetime > '2024-01-01' limit 50000\", database='tnc_edge')\n", + "where cam_name = 'cam1' and start_datetime > '2024-01-01' limit 50000\",\n", + " database=\"tnc_edge\",\n", + ")\n", "\n", - "s = s.sort_values('start_datetime')\n", + "s = s.sort_values(\"start_datetime\")\n", "\n", "display(s)" ] @@ -83,7 +83,7 @@ "metadata": {}, "outputs": [], "source": [ - "s.to_csv('tmp.csv')" + "s.to_csv(\"tmp.csv\")" ] }, { @@ 
-2417,7 +2417,7 @@ } ], "source": [ - "display(s.loc[s['status'] == 'errored'])" + "display(s.loc[s[\"status\"] == \"errored\"])" ] } ], diff --git a/notebooks/tnc-edge-system-uptime.ipynb b/notebooks/tnc-edge-system-uptime.ipynb index cdcdae1..960dc49 100644 --- a/notebooks/tnc-edge-system-uptime.ipynb +++ b/notebooks/tnc-edge-system-uptime.ipynb @@ -50,14 +50,14 @@ "\n", "aws_config = {}\n", "\n", - "aws_config['profile_name'] ='XXXXXX'\n", - "aws_config['region_name'] = 'us-east-1'\n", + "aws_config[\"profile_name\"] = \"XXXXXX\"\n", + "aws_config[\"region_name\"] = \"us-east-1\"\n", "\n", "import boto3\n", "\n", "boto3.setup_default_session(**aws_config)\n", "\n", - "s3 = boto3.client('s3')\n", + "s3 = boto3.client(\"s3\")\n", "\n", "# s3.list_objects(Bucket='51-gema-dev-dp-raw' , Prefix='tnc_edge/')" ] @@ -69,25 +69,27 @@ "metadata": {}, "outputs": [], "source": [ - "import pandas\n", + "from datetime import datetime, timedelta\n", + "\n", "import awswrangler\n", - "from datetime import datetime, timezone, timedelta\n", - "from dateutil import parser\n", + "import pandas\n", "import pytz\n", + "from dateutil import parser\n", + "\n", "\n", "def display_full(x):\n", - " pandas.set_option('display.max_rows', 1000)\n", - " pandas.set_option('display.min_rows', 400)\n", - " pandas.set_option('display.max_columns', None)\n", - " pandas.set_option('display.width', 2000)\n", - " pandas.set_option('display.float_format', '{:20,.2f}'.format)\n", - " pandas.set_option('display.max_colwidth', None)\n", + " pandas.set_option(\"display.max_rows\", 1000)\n", + " pandas.set_option(\"display.min_rows\", 400)\n", + " pandas.set_option(\"display.max_columns\", None)\n", + " pandas.set_option(\"display.width\", 2000)\n", + " pandas.set_option(\"display.float_format\", \"{:20,.2f}\".format)\n", + " pandas.set_option(\"display.max_colwidth\", None)\n", " display(x)\n", - " pandas.reset_option('display.max_rows')\n", - " pandas.reset_option('display.max_columns')\n", - " pandas.reset_option('display.width')\n", - " pandas.reset_option('display.float_format')\n", - " pandas.reset_option('display.max_colwidth')\n" + " pandas.reset_option(\"display.max_rows\")\n", + " pandas.reset_option(\"display.max_columns\")\n", + " pandas.reset_option(\"display.width\")\n", + " pandas.reset_option(\"display.float_format\")\n", + " pandas.reset_option(\"display.max_colwidth\")\n" ] }, { @@ -217,19 +219,25 @@ } ], "source": [ - "sunrisesunset = pandas.read_csv('cr_sunrise_sunset.csv')\n", - "crtz = pytz.timezone('America/Costa_Rica')\n", + "sunrisesunset = pandas.read_csv(\"cr_sunrise_sunset.csv\")\n", + "crtz = pytz.timezone(\"America/Costa_Rica\")\n", "\n", - "# the following doesn't work for some reason? 
\n", + "# the following doesn't work for some reason?\n", "# so I need to replace tz in a separate step\n", "# sunrisesunset['sunrise'] = sunrisesunset.apply(lambda x: datetime.combine(parser.parse(x['date']).date(),parser.parse(x['sunrise']).time()).replace(tzinfo=crtz), axis=1)\n", "# sunrisesunset['sunset'] = sunrisesunset.apply(lambda x: datetime.combine(parser.parse(x['date']).date(),parser.parse(x['sunset']).time()).replace(tzinfo=crtz), axis=1)\n", "\n", - "sunrisesunset['sunrise'] = sunrisesunset.apply(lambda x: datetime.combine(parser.parse(x['date']).date(),parser.parse(x['sunrise']).time()), axis=1)\n", - "sunrisesunset['sunset'] = sunrisesunset.apply(lambda x: datetime.combine(parser.parse(x['date']).date(),parser.parse(x['sunset']).time()), axis=1)\n", + "sunrisesunset[\"sunrise\"] = sunrisesunset.apply(\n", + " lambda x: datetime.combine(parser.parse(x[\"date\"]).date(), parser.parse(x[\"sunrise\"]).time()),\n", + " axis=1,\n", + ")\n", + "sunrisesunset[\"sunset\"] = sunrisesunset.apply(\n", + " lambda x: datetime.combine(parser.parse(x[\"date\"]).date(), parser.parse(x[\"sunset\"]).time()),\n", + " axis=1,\n", + ")\n", "\n", - "sunrisesunset['sunrise'] = sunrisesunset['sunrise'].map(lambda x: x.replace(tzinfo=crtz))\n", - "sunrisesunset['sunset'] = sunrisesunset['sunset'].map(lambda x: x.replace(tzinfo=crtz))\n", + "sunrisesunset[\"sunrise\"] = sunrisesunset[\"sunrise\"].map(lambda x: x.replace(tzinfo=crtz))\n", + "sunrisesunset[\"sunset\"] = sunrisesunset[\"sunset\"].map(lambda x: x.replace(tzinfo=crtz))\n", "\n", "sunrisesunset" ] @@ -241,7 +249,10 @@ "metadata": {}, "outputs": [], "source": [ - "s = awswrangler.athena.read_sql_query(f\"SELECT brancol_v1_tests.* from brancol_v1_tests where vector_id in ('1', '3') and datetime > '2024-01-01' order by id asc limit 10000\", database='tnc_edge')\n", + "s = awswrangler.athena.read_sql_query(\n", + " \"SELECT brancol_v1_tests.* from brancol_v1_tests where vector_id in ('1', '3') and datetime > '2024-01-01' order by id asc limit 10000\",\n", + " database=\"tnc_edge\",\n", + ")\n", "s.datetime = pandas.to_datetime(s.datetime, utc=True)\n", "display(s)" ] @@ -1781,9 +1792,9 @@ } ], "source": [ - "s['lan_errors'] = s.loc[s['vector_id'] == '3']['score']\n", - "s['wan_errors'] = s.loc[s['vector_id'] == '1']['score']\n", - "s = s.drop(columns=['score'])\n", + "s[\"lan_errors\"] = s.loc[s[\"vector_id\"] == \"3\"][\"score\"]\n", + "s[\"wan_errors\"] = s.loc[s[\"vector_id\"] == \"1\"][\"score\"]\n", + "s = s.drop(columns=[\"score\"])\n", "s" ] }, @@ -1794,8 +1805,7 @@ "metadata": {}, "outputs": [], "source": [ - "from dateutil.parser import parse as parse_dt\n", - "from datetime import datetime, timedelta, timezone, date, time\n", + "from datetime import time\n", "# import datetime\n" ] }, @@ -1836,31 +1846,31 @@ ], "source": [ "# dir(s['score'].dtype.num)\n", - "display(type(s['datetime'].dtype))\n", - "if not s['datetime'].dtype == 'datetime64':\n", - " s['datetime'] = pandas.to_datetime(s['datetime'], utc=True)\n", + "display(type(s[\"datetime\"].dtype))\n", + "if s[\"datetime\"].dtype != \"datetime64\":\n", + " s[\"datetime\"] = pandas.to_datetime(s[\"datetime\"], utc=True)\n", "# s['datetime'] = pandas.to_datetime(s['datetime'], utc=True)\n", "\n", - " \n", + "\n", "# if not pandas.api.types.is_numeric_dtype(s['score'].dtype):\n", "# s['score'] = pandas.to_numeric(s['score'])\n", - "if not pandas.api.types.is_numeric_dtype(s['lan_errors'].dtype):\n", - " s['lan_errors'] = pandas.to_numeric(s['lan_errors'])\n", - "if not 
pandas.api.types.is_numeric_dtype(s['wan_errors'].dtype):\n", - " s['wan_errors'] = pandas.to_numeric(s['wan_errors'])\n", + "if not pandas.api.types.is_numeric_dtype(s[\"lan_errors\"].dtype):\n", + " s[\"lan_errors\"] = pandas.to_numeric(s[\"lan_errors\"])\n", + "if not pandas.api.types.is_numeric_dtype(s[\"wan_errors\"].dtype):\n", + " s[\"wan_errors\"] = pandas.to_numeric(s[\"wan_errors\"])\n", "\n", "# display(s)\n", - " \n", - "s_gb = s.groupby('datetime').mean(numeric_only=True)\n", + "\n", + "s_gb = s.groupby(\"datetime\").mean(numeric_only=True)\n", "\n", "# display(s_gb.index)\n", "cnt_ts = s_gb.resample(timedelta(minutes=60)).mean()\n", "\n", - "ax = cnt_ts.plot(figsize=(100,2))\n", + "ax = cnt_ts.plot(figsize=(100, 2))\n", "xticks = pandas.date_range(start=cnt_ts.index.min(), end=cnt_ts.index.max(), freq=timedelta(days=1))\n", "# display(xticks)\n", "ax.set_xticks(xticks.to_pydatetime())\n", - "ax.set_xticklabels([x.strftime('%D') for x in xticks], rotation=77)\n", + "ax.set_xticklabels([x.strftime(\"%D\") for x in xticks], rotation=77)\n", "# ax.xticks(rotation=90);\n", "display(ax)\n", "# ax" @@ -2287,43 +2297,61 @@ } ], "source": [ - "\n", - "\n", - "branc_box_cam1_filedatetimes_df = pandas.read_csv('brancol_box_cam1_filedatetimes.txt', names=['datetime'])\n", - "branc_box_cam1_filedatetimes_df['datetime'] = pandas.to_datetime(branc_box_cam1_filedatetimes_df['datetime'], utc=True)\n", - "branc_box_cam1_filedatetimes_df['cam1'] = 1\n", - "branc_box_cam1_filedatetimes_df.index = branc_box_cam1_filedatetimes_df['datetime']\n", - "branc_box_cam1_filedatetimes_df = branc_box_cam1_filedatetimes_df[['cam1']]\n", - "branc_box_cam2_filedatetimes_df = pandas.read_csv('brancol_box_cam2_filedatetimes.txt', names=['datetime'])\n", - "branc_box_cam2_filedatetimes_df['datetime'] = pandas.to_datetime(branc_box_cam2_filedatetimes_df['datetime'], utc=True)\n", - "branc_box_cam2_filedatetimes_df['cam2'] = 1\n", - "branc_box_cam2_filedatetimes_df.index = branc_box_cam2_filedatetimes_df['datetime']\n", - "branc_box_cam2_filedatetimes_df = branc_box_cam2_filedatetimes_df[['cam2']]\n", - "\n", - "branc_box_filedatetimes_df = branc_box_cam1_filedatetimes_df.join(branc_box_cam2_filedatetimes_df, how='outer')\n", - "branc_box_filedatetimes_df = branc_box_filedatetimes_df.loc['2024']\n", + "branc_box_cam1_filedatetimes_df = pandas.read_csv(\n", + " \"brancol_box_cam1_filedatetimes.txt\", names=[\"datetime\"]\n", + ")\n", + "branc_box_cam1_filedatetimes_df[\"datetime\"] = pandas.to_datetime(\n", + " branc_box_cam1_filedatetimes_df[\"datetime\"], utc=True\n", + ")\n", + "branc_box_cam1_filedatetimes_df[\"cam1\"] = 1\n", + "branc_box_cam1_filedatetimes_df.index = branc_box_cam1_filedatetimes_df[\"datetime\"]\n", + "branc_box_cam1_filedatetimes_df = branc_box_cam1_filedatetimes_df[[\"cam1\"]]\n", + "branc_box_cam2_filedatetimes_df = pandas.read_csv(\n", + " \"brancol_box_cam2_filedatetimes.txt\", names=[\"datetime\"]\n", + ")\n", + "branc_box_cam2_filedatetimes_df[\"datetime\"] = pandas.to_datetime(\n", + " branc_box_cam2_filedatetimes_df[\"datetime\"], utc=True\n", + ")\n", + "branc_box_cam2_filedatetimes_df[\"cam2\"] = 1\n", + "branc_box_cam2_filedatetimes_df.index = branc_box_cam2_filedatetimes_df[\"datetime\"]\n", + "branc_box_cam2_filedatetimes_df = branc_box_cam2_filedatetimes_df[[\"cam2\"]]\n", + "\n", + "branc_box_filedatetimes_df = branc_box_cam1_filedatetimes_df.join(\n", + " branc_box_cam2_filedatetimes_df, how=\"outer\"\n", + ")\n", + "branc_box_filedatetimes_df = 
branc_box_filedatetimes_df.loc[\"2024\"]\n", "branc_box_filedatetimes_df = branc_box_filedatetimes_df.fillna(0)\n", "\n", - "branc_box_filedatetimes_df.to_pickle('brancol_box_filedatetimes.pickle')\n", - "\n", - "stp_box_cam1_filedatetimes_df = pandas.read_csv('stpatrick_box_cam1_filedatetimes.txt', names=['datetime'])\n", - "stp_box_cam1_filedatetimes_df['datetime'] = pandas.to_datetime(stp_box_cam1_filedatetimes_df['datetime'], utc=True)\n", - "stp_box_cam1_filedatetimes_df['cam1'] = 1\n", - "stp_box_cam1_filedatetimes_df.index = stp_box_cam1_filedatetimes_df['datetime']\n", - "stp_box_cam1_filedatetimes_df = stp_box_cam1_filedatetimes_df[['cam1']]\n", - "\n", - "stp_box_cam2_filedatetimes_df = pandas.read_csv('stpatrick_box_cam2_filedatetimes.txt', names=['datetime'])\n", - "stp_box_cam2_filedatetimes_df['datetime'] = pandas.to_datetime(stp_box_cam2_filedatetimes_df['datetime'], utc=True)\n", - "stp_box_cam2_filedatetimes_df['cam2'] = 1\n", - "stp_box_cam2_filedatetimes_df.index = stp_box_cam2_filedatetimes_df['datetime']\n", - "stp_box_cam2_filedatetimes_df = stp_box_cam2_filedatetimes_df[['cam2']]\n", - "\n", - "\n", - "stp_box_filedatetimes_df = stp_box_cam1_filedatetimes_df.join(stp_box_cam2_filedatetimes_df, how='outer')\n", - "stp_box_filedatetimes_df = stp_box_filedatetimes_df.loc['2024']\n", + "branc_box_filedatetimes_df.to_pickle(\"brancol_box_filedatetimes.pickle\")\n", + "\n", + "stp_box_cam1_filedatetimes_df = pandas.read_csv(\n", + " \"stpatrick_box_cam1_filedatetimes.txt\", names=[\"datetime\"]\n", + ")\n", + "stp_box_cam1_filedatetimes_df[\"datetime\"] = pandas.to_datetime(\n", + " stp_box_cam1_filedatetimes_df[\"datetime\"], utc=True\n", + ")\n", + "stp_box_cam1_filedatetimes_df[\"cam1\"] = 1\n", + "stp_box_cam1_filedatetimes_df.index = stp_box_cam1_filedatetimes_df[\"datetime\"]\n", + "stp_box_cam1_filedatetimes_df = stp_box_cam1_filedatetimes_df[[\"cam1\"]]\n", + "\n", + "stp_box_cam2_filedatetimes_df = pandas.read_csv(\n", + " \"stpatrick_box_cam2_filedatetimes.txt\", names=[\"datetime\"]\n", + ")\n", + "stp_box_cam2_filedatetimes_df[\"datetime\"] = pandas.to_datetime(\n", + " stp_box_cam2_filedatetimes_df[\"datetime\"], utc=True\n", + ")\n", + "stp_box_cam2_filedatetimes_df[\"cam2\"] = 1\n", + "stp_box_cam2_filedatetimes_df.index = stp_box_cam2_filedatetimes_df[\"datetime\"]\n", + "stp_box_cam2_filedatetimes_df = stp_box_cam2_filedatetimes_df[[\"cam2\"]]\n", + "\n", + "\n", + "stp_box_filedatetimes_df = stp_box_cam1_filedatetimes_df.join(\n", + " stp_box_cam2_filedatetimes_df, how=\"outer\"\n", + ")\n", + "stp_box_filedatetimes_df = stp_box_filedatetimes_df.loc[\"2024\"]\n", "stp_box_filedatetimes_df = stp_box_filedatetimes_df.fillna(0)\n", "\n", - "stp_box_filedatetimes_df.to_pickle('stpatrick_box_filedatetimes.pickle')\n" + "stp_box_filedatetimes_df.to_pickle(\"stpatrick_box_filedatetimes.pickle\")\n" ] }, { @@ -2354,9 +2382,12 @@ } ], "source": [ - "\n", - "stp_box_filedatetimes_df = pandas.read_pickle('stpatrick_box_filedatetimes.pickle').rename(columns={'cam1': 'stp_cam1', 'cam2': 'stp_cam2'})\n", - "branc_box_filedatetimes_df = pandas.read_pickle('brancol_box_filedatetimes.pickle').rename(columns={'cam1': 'bra_cam1', 'cam2': 'bra_cam2'})\n", + "stp_box_filedatetimes_df = pandas.read_pickle(\"stpatrick_box_filedatetimes.pickle\").rename(\n", + " columns={\"cam1\": \"stp_cam1\", \"cam2\": \"stp_cam2\"}\n", + ")\n", + "branc_box_filedatetimes_df = pandas.read_pickle(\"brancol_box_filedatetimes.pickle\").rename(\n", + " columns={\"cam1\": \"bra_cam1\", \"cam2\": 
\"bra_cam2\"}\n", + ")\n", "\n", "# aa = branc_box_filedatetimes_df.join(stp_box_filedatetimes_df, how='outer')\n", "# aa = aa.fillna(0)\n", @@ -2364,24 +2395,28 @@ "# aa\n", "# branc_box_filedatetimes_df\n", "\n", - "df = pandas.read_pickle('stpatrick_box_filedatetimes.pickle')\n", + "df = pandas.read_pickle(\"stpatrick_box_filedatetimes.pickle\")\n", "df = df.apply(lambda x: x.cam1 > 0 and x.cam2 > 0, axis=1)\n", "p = df.resample(timedelta(minutes=5)).sum()\n", "\n", "# p = p.loc[p.index >= '2024-02-17 00:00:00-06']\n", "# p = p.loc[p.index <= '2024-02-21 00:00:00-06']\n", - "p.index = p.index.map(lambda x: x.tz_convert('America/Costa_Rica'))\n", + "p.index = p.index.map(lambda x: x.tz_convert(\"America/Costa_Rica\"))\n", "# p.index\n", - "ax = p.plot(figsize=(100,2), label='Video Output')\n", + "ax = p.plot(figsize=(100, 2), label=\"Video Output\")\n", "# display_full(rs['2023-12-27 19'])\n", "\n", "first = True\n", - "for time in sunrisesunset['sunrise']:\n", - " ax.axvline(x=pandas.to_datetime(time), color='b', linestyle='--', label='sunrise' if first else None)\n", + "for time in sunrisesunset[\"sunrise\"]:\n", + " ax.axvline(\n", + " x=pandas.to_datetime(time), color=\"b\", linestyle=\"--\", label=\"sunrise\" if first else None\n", + " )\n", " first = False\n", "first = True\n", - "for time in sunrisesunset['sunset']:\n", - " ax.axvline(x=pandas.to_datetime(time), color='r', linestyle='--', label='sunset' if first else None)\n", + "for time in sunrisesunset[\"sunset\"]:\n", + " ax.axvline(\n", + " x=pandas.to_datetime(time), color=\"r\", linestyle=\"--\", label=\"sunset\" if first else None\n", + " )\n", " first = False\n", "\n", "# Show the plot\n", @@ -2416,9 +2451,12 @@ } ], "source": [ - "\n", - "stp_box_filedatetimes_df = pandas.read_pickle('stpatrick_box_filedatetimes.pickle').rename(columns={'cam1': 'stp_cam1', 'cam2': 'stp_cam2'})\n", - "branc_box_filedatetimes_df = pandas.read_pickle('brancol_box_filedatetimes.pickle').rename(columns={'cam1': 'bra_cam1', 'cam2': 'bra_cam2'})\n", + "stp_box_filedatetimes_df = pandas.read_pickle(\"stpatrick_box_filedatetimes.pickle\").rename(\n", + " columns={\"cam1\": \"stp_cam1\", \"cam2\": \"stp_cam2\"}\n", + ")\n", + "branc_box_filedatetimes_df = pandas.read_pickle(\"brancol_box_filedatetimes.pickle\").rename(\n", + " columns={\"cam1\": \"bra_cam1\", \"cam2\": \"bra_cam2\"}\n", + ")\n", "\n", "# aa = branc_box_filedatetimes_df.join(stp_box_filedatetimes_df, how='outer')\n", "# aa = aa.fillna(0)\n", @@ -2426,27 +2464,35 @@ "# aa\n", "# branc_box_filedatetimes_df\n", "\n", - "df = pandas.read_pickle('brancol_box_filedatetimes.pickle')\n", + "df = pandas.read_pickle(\"brancol_box_filedatetimes.pickle\")\n", "df = df.apply(lambda x: x.cam1 > 0 and x.cam2 > 0, axis=1)\n", "p = df.resample(timedelta(minutes=5)).sum()\n", "\n", - "p = p.loc[p.index >= '2024-02-17 00:00:00-06']\n", - "p = p.loc[p.index <= '2024-02-21 00:00:00-06']\n", - "p.index = p.index.map(lambda x: x.tz_convert('America/Costa_Rica'))\n", + "p = p.loc[p.index >= \"2024-02-17 00:00:00-06\"]\n", + "p = p.loc[p.index <= \"2024-02-21 00:00:00-06\"]\n", + "p.index = p.index.map(lambda x: x.tz_convert(\"America/Costa_Rica\"))\n", "# p.index\n", - "ax = p.plot(figsize=(6,2), label='Video Output')\n", + "ax = p.plot(figsize=(6, 2), label=\"Video Output\")\n", "# display_full(rs['2023-12-27 19'])\n", "\n", - "sunrises = sunrisesunset['sunrise'].loc[(sunrisesunset['sunrise'] > '2024-02-17') & (sunrisesunset['sunrise'] < '2024-02-21')]\n", + "sunrises = 
sunrisesunset[\"sunrise\"].loc[\n", + " (sunrisesunset[\"sunrise\"] > \"2024-02-17\") & (sunrisesunset[\"sunrise\"] < \"2024-02-21\")\n", + "]\n", "\n", - "sunsets = sunrisesunset['sunset'].loc[(sunrisesunset['sunrise'] > '2024-02-17') & (sunrisesunset['sunrise'] < '2024-02-21')]\n", + "sunsets = sunrisesunset[\"sunset\"].loc[\n", + " (sunrisesunset[\"sunrise\"] > \"2024-02-17\") & (sunrisesunset[\"sunrise\"] < \"2024-02-21\")\n", + "]\n", "first = True\n", "for time in sunrises:\n", - " ax.axvline(x=pandas.to_datetime(time), color='b', linestyle='--', label='sunrise' if first else None)\n", + " ax.axvline(\n", + " x=pandas.to_datetime(time), color=\"b\", linestyle=\"--\", label=\"sunrise\" if first else None\n", + " )\n", " first = False\n", "first = True\n", "for time in sunsets:\n", - " ax.axvline(x=pandas.to_datetime(time), color='r', linestyle='--', label='sunset' if first else None)\n", + " ax.axvline(\n", + " x=pandas.to_datetime(time), color=\"r\", linestyle=\"--\", label=\"sunset\" if first else None\n", + " )\n", " first = False\n", "\n", "# Show the plot\n", @@ -2481,9 +2527,12 @@ } ], "source": [ - "\n", - "stp_box_filedatetimes_df = pandas.read_pickle('stpatrick_box_filedatetimes.pickle').rename(columns={'cam1': 'stp_cam1', 'cam2': 'stp_cam2'})\n", - "branc_box_filedatetimes_df = pandas.read_pickle('brancol_box_filedatetimes.pickle').rename(columns={'cam1': 'bra_cam1', 'cam2': 'bra_cam2'})\n", + "stp_box_filedatetimes_df = pandas.read_pickle(\"stpatrick_box_filedatetimes.pickle\").rename(\n", + " columns={\"cam1\": \"stp_cam1\", \"cam2\": \"stp_cam2\"}\n", + ")\n", + "branc_box_filedatetimes_df = pandas.read_pickle(\"brancol_box_filedatetimes.pickle\").rename(\n", + " columns={\"cam1\": \"bra_cam1\", \"cam2\": \"bra_cam2\"}\n", + ")\n", "\n", "# aa = branc_box_filedatetimes_df.join(stp_box_filedatetimes_df, how='outer')\n", "# aa = aa.fillna(0)\n", @@ -2491,32 +2540,40 @@ "# aa\n", "# branc_box_filedatetimes_df\n", "\n", - "df = pandas.read_pickle('brancol_box_filedatetimes.pickle')\n", + "df = pandas.read_pickle(\"brancol_box_filedatetimes.pickle\")\n", "df = df.apply(lambda x: x.cam1 > 0 and x.cam2 > 0, axis=1)\n", "p = df.resample(timedelta(minutes=5)).sum()\n", "\n", - "p = p.loc[p.index >= '2024-02-09 00:00:00-06']\n", - "p = p.loc[p.index <= '2024-02-13 00:00:00-06']\n", - "p.index = p.index.map(lambda x: x.tz_convert('America/Costa_Rica'))\n", + "p = p.loc[p.index >= \"2024-02-09 00:00:00-06\"]\n", + "p = p.loc[p.index <= \"2024-02-13 00:00:00-06\"]\n", + "p.index = p.index.map(lambda x: x.tz_convert(\"America/Costa_Rica\"))\n", "# p.index\n", - "ax = p.plot(figsize=(9,2), label='Video Output')\n", + "ax = p.plot(figsize=(9, 2), label=\"Video Output\")\n", "# display_full(rs['2023-12-27 19'])\n", "\n", - "sunrises = sunrisesunset['sunrise'].loc[(sunrisesunset['sunrise'] > '2024-02-09') & (sunrisesunset['sunrise'] < '2024-02-13')]\n", + "sunrises = sunrisesunset[\"sunrise\"].loc[\n", + " (sunrisesunset[\"sunrise\"] > \"2024-02-09\") & (sunrisesunset[\"sunrise\"] < \"2024-02-13\")\n", + "]\n", "\n", - "sunsets = sunrisesunset['sunset'].loc[(sunrisesunset['sunrise'] > '2024-02-09') & (sunrisesunset['sunrise'] < '2024-02-13')]\n", + "sunsets = sunrisesunset[\"sunset\"].loc[\n", + " (sunrisesunset[\"sunrise\"] > \"2024-02-09\") & (sunrisesunset[\"sunrise\"] < \"2024-02-13\")\n", + "]\n", "first = True\n", "for time in sunrises:\n", - " ax.axvline(x=pandas.to_datetime(time), color='g', linestyle='-', label='sunrise' if first else None)\n", + " ax.axvline(\n", + " 
x=pandas.to_datetime(time), color=\"g\", linestyle=\"-\", label=\"sunrise\" if first else None\n", + " )\n", " first = False\n", "first = True\n", "for time in sunsets:\n", - " ax.axvline(x=pandas.to_datetime(time), color='r', linestyle='-', label='sunset' if first else None)\n", + " ax.axvline(\n", + " x=pandas.to_datetime(time), color=\"r\", linestyle=\"-\", label=\"sunset\" if first else None\n", + " )\n", " first = False\n", "\n", "ax.legend()\n", "ax.set_yticks([])\n", - "ax.set_xlabel('')" + "ax.set_xlabel(\"\")" ] }, { @@ -2635,9 +2692,12 @@ } ], "source": [ - "\n", - "stp_box_filedatetimes_df = pandas.read_pickle('stpatrick_box_filedatetimes.pickle').rename(columns={'cam1': 'stp_cam1', 'cam2': 'stp_cam2'})\n", - "branc_box_filedatetimes_df = pandas.read_pickle('brancol_box_filedatetimes.pickle').rename(columns={'cam1': 'bra_cam1', 'cam2': 'bra_cam2'})\n", + "stp_box_filedatetimes_df = pandas.read_pickle(\"stpatrick_box_filedatetimes.pickle\").rename(\n", + " columns={\"cam1\": \"stp_cam1\", \"cam2\": \"stp_cam2\"}\n", + ")\n", + "branc_box_filedatetimes_df = pandas.read_pickle(\"brancol_box_filedatetimes.pickle\").rename(\n", + " columns={\"cam1\": \"bra_cam1\", \"cam2\": \"bra_cam2\"}\n", + ")\n", "\n", "# aa = branc_box_filedatetimes_df.join(stp_box_filedatetimes_df, how='outer')\n", "# aa = aa.fillna(0)\n", @@ -2645,13 +2705,16 @@ "# aa\n", "# branc_box_filedatetimes_df\n", "\n", - "recorded_sum=0\n", - "triptime_sum=0\n", + "recorded_sum = 0\n", + "triptime_sum = 0\n", "\n", "\n", - "df = pandas.read_pickle('brancol_box_filedatetimes.pickle')\n", + "df = pandas.read_pickle(\"brancol_box_filedatetimes.pickle\")\n", "df = df.apply(lambda x: 1 if x.cam1 > 0 and x.cam2 > 0 else 0, axis=1)\n", - "brancol_trip1_startend = (parser.parse('2024-01-05 16:14:42Z'), parser.parse('2024-01-26 01:35:17Z'),)\n", + "brancol_trip1_startend = (\n", + " parser.parse(\"2024-01-05 16:14:42Z\"),\n", + " parser.parse(\"2024-01-26 01:35:17Z\"),\n", + ")\n", "brancoltrip1 = df.loc[df.index > brancol_trip1_startend[0]]\n", "brancoltrip1 = brancoltrip1.loc[brancoltrip1.index < brancol_trip1_startend[1]]\n", "brancoltrip1.loc[brancol_trip1_startend[0]] = 0\n", @@ -2660,9 +2723,12 @@ "p = brancoltrip1.resample(timedelta(minutes=5)).sum()\n", "recorded_sum += p.sum()\n", "triptime_sum += len(p)\n", - "display('brancol trip 1: ', p.sum() / len(p))\n", + "display(\"brancol trip 1: \", p.sum() / len(p))\n", "\n", - "brancol_trip2_startend = (parser.parse('2024-02-03 17:16:47Z'), parser.parse('2024-02-26 23:32:03Z') + timedelta(hours=7.0966),)\n", + "brancol_trip2_startend = (\n", + " parser.parse(\"2024-02-03 17:16:47Z\"),\n", + " parser.parse(\"2024-02-26 23:32:03Z\") + timedelta(hours=7.0966),\n", + ")\n", "\n", "brancoltrip2 = df.loc[df.index > brancol_trip2_startend[0]]\n", "brancoltrip2 = brancoltrip2.loc[brancoltrip2.index < brancol_trip2_startend[1]]\n", @@ -2670,12 +2736,15 @@ "brancoltrip2.loc[brancol_trip2_startend[1]] = 0\n", "# brancoltrip1\n", "p = brancoltrip2.resample(timedelta(minutes=5)).sum()\n", - "recorded_sum += p.sum() \n", + "recorded_sum += p.sum()\n", "triptime_sum += len(p)\n", - "display('brancol trip 2: ', p.sum() / len(p))\n", + "display(\"brancol trip 2: \", p.sum() / len(p))\n", "\n", "\n", - "brancol_trip3_startend = (parser.parse('2024-03-05 15:26:13Z'), parser.parse('2024-04-01 01:21:47Z') + timedelta(hours=8.4725),)\n", + "brancol_trip3_startend = (\n", + " parser.parse(\"2024-03-05 15:26:13Z\"),\n", + " parser.parse(\"2024-04-01 01:21:47Z\") + timedelta(hours=8.4725),\n", + 
")\n", "\n", "brancoltrip3 = df.loc[df.index > brancol_trip3_startend[0]]\n", "brancoltrip3 = brancoltrip3.loc[brancoltrip3.index < brancol_trip3_startend[1]]\n", @@ -2683,15 +2752,18 @@ "brancoltrip3.loc[brancol_trip3_startend[1]] = 0\n", "# brancoltrip3\n", "p = brancoltrip3.resample(timedelta(minutes=5)).sum()\n", - "recorded_sum += p.sum() \n", + "recorded_sum += p.sum()\n", "triptime_sum += len(p)\n", - "display('brancol trip 3: ', p.sum() / len(p))\n", + "display(\"brancol trip 3: \", p.sum() / len(p))\n", "\n", "\n", - "df = pandas.read_pickle('stpatrick_box_filedatetimes.pickle')\n", + "df = pandas.read_pickle(\"stpatrick_box_filedatetimes.pickle\")\n", "df = df.apply(lambda x: 1 if x.cam1 > 0 and x.cam2 > 0 else 0, axis=1)\n", "\n", - "stpatrick_trip1_startend = (parser.parse('2024-01-03 13:22:25Z'), parser.parse('2024-01-28 06:40:04Z'),)\n", + "stpatrick_trip1_startend = (\n", + " parser.parse(\"2024-01-03 13:22:25Z\"),\n", + " parser.parse(\"2024-01-28 06:40:04Z\"),\n", + ")\n", "\n", "\n", "stpatricktrip1 = df.loc[df.index > stpatrick_trip1_startend[0]]\n", @@ -2701,12 +2773,15 @@ "# brancoltrip3\n", "p = stpatricktrip1.resample(timedelta(minutes=5)).sum()\n", "# display_full(p)\n", - "recorded_sum += p.sum() \n", + "recorded_sum += p.sum()\n", "triptime_sum += len(p)\n", - "display('stpatrick trip 1: ', p.sum() / len(p))\n", + "display(\"stpatrick trip 1: \", p.sum() / len(p))\n", "\n", "# 10.9 miles / 6 knots = 1.57 hours\n", - "stpatrick_trip2_startend = (parser.parse('2024-02-03 13:44:11Z'), parser.parse('2024-02-28 03:42:55Z') + timedelta(hours=1.578),)\n", + "stpatrick_trip2_startend = (\n", + " parser.parse(\"2024-02-03 13:44:11Z\"),\n", + " parser.parse(\"2024-02-28 03:42:55Z\") + timedelta(hours=1.578),\n", + ")\n", "\n", "stpatricktrip2 = df.loc[df.index > stpatrick_trip2_startend[0]]\n", "stpatricktrip2 = stpatricktrip2.loc[stpatricktrip2.index < stpatrick_trip2_startend[1]]\n", @@ -2714,12 +2789,12 @@ "stpatricktrip2.loc[stpatrick_trip2_startend[1]] = 0\n", "# brancoltrip3\n", "p = stpatricktrip2.resample(timedelta(minutes=5)).sum()\n", - "recorded_sum += p.sum() \n", + "recorded_sum += p.sum()\n", "triptime_sum += len(p)\n", - "display('stpatrick trip 2: ', p.sum() / len(p))\n", + "display(\"stpatrick trip 2: \", p.sum() / len(p))\n", "\n", "display(\"recording uptime for all trips:\")\n", - "display(f\"{(recorded_sum / triptime_sum * 100):.1f}%\" )" + "display(f\"{(recorded_sum / triptime_sum * 100):.1f}%\")" ] }, { @@ -2730,12 +2805,18 @@ "outputs": [], "source": [ "ts = pandas.DataFrame()\n", - "ts['ts'] = [sunrisesunset.index.min().replace(tzinfo=crtz), sunrisesunset.index.max().replace(tzinfo=crtz)] \n", + "ts[\"ts\"] = [\n", + " sunrisesunset.index.min().replace(tzinfo=crtz),\n", + " sunrisesunset.index.max().replace(tzinfo=crtz),\n", + "]\n", "\n", - "ts.index = ts['ts']\n", + "ts.index = ts[\"ts\"]\n", "ts = ts.resample(timedelta(minutes=5)).sum()\n", "type(sunrisesunset.index[0])\n", - "ts['is_daytime'] = ts.index.map(lambda x: sunrisesunset.loc[pandas.Timestamp(x.date())]['sunrise'] < x and sunrisesunset.loc[pandas.Timestamp(x.date())]['sunset'] > x )\n", + "ts[\"is_daytime\"] = ts.index.map(\n", + " lambda x: sunrisesunset.loc[pandas.Timestamp(x.date())][\"sunrise\"] < x\n", + " and sunrisesunset.loc[pandas.Timestamp(x.date())][\"sunset\"] > x\n", + ")\n", "is_daytime = ts" ] }, @@ -2837,9 +2918,12 @@ } ], "source": [ - "\n", - "stp_box_filedatetimes_df = pandas.read_pickle('stpatrick_box_filedatetimes.pickle').rename(columns={'cam1': 'stp_cam1', 'cam2': 
'stp_cam2'})\n", - "branc_box_filedatetimes_df = pandas.read_pickle('brancol_box_filedatetimes.pickle').rename(columns={'cam1': 'bra_cam1', 'cam2': 'bra_cam2'})\n", + "stp_box_filedatetimes_df = pandas.read_pickle(\"stpatrick_box_filedatetimes.pickle\").rename(\n", + " columns={\"cam1\": \"stp_cam1\", \"cam2\": \"stp_cam2\"}\n", + ")\n", + "branc_box_filedatetimes_df = pandas.read_pickle(\"brancol_box_filedatetimes.pickle\").rename(\n", + " columns={\"cam1\": \"bra_cam1\", \"cam2\": \"bra_cam2\"}\n", + ")\n", "\n", "# aa = branc_box_filedatetimes_df.join(stp_box_filedatetimes_df, how='outer')\n", "# aa = aa.fillna(0)\n", @@ -2847,55 +2931,72 @@ "# aa\n", "# branc_box_filedatetimes_df\n", "\n", - "recorded_sum=0\n", - "triptime_sum=0\n", - "\n", - "\n", - "df = pandas.read_pickle('brancol_box_filedatetimes.pickle')\n", - "df['bothcams'] = df.apply(lambda x: 1 if x.cam1 > 0 and x.cam2 > 0 else 0, axis=1)\n", - "brancol_trip1_startend = (parser.parse('2024-01-05 16:14:42Z'), parser.parse('2024-01-26 01:35:17Z'),)\n", - "brancol_trip2_startend = (parser.parse('2024-02-03 17:16:47Z'), parser.parse('2024-02-26 23:32:03Z') + timedelta(hours=7.0966),)\n", - "brancol_trip3_startend = (parser.parse('2024-03-05 15:26:13Z'), parser.parse('2024-04-01 01:21:47Z') + timedelta(hours=8.4725),)\n", + "recorded_sum = 0\n", + "triptime_sum = 0\n", + "\n", + "\n", + "df = pandas.read_pickle(\"brancol_box_filedatetimes.pickle\")\n", + "df[\"bothcams\"] = df.apply(lambda x: 1 if x.cam1 > 0 and x.cam2 > 0 else 0, axis=1)\n", + "brancol_trip1_startend = (\n", + " parser.parse(\"2024-01-05 16:14:42Z\"),\n", + " parser.parse(\"2024-01-26 01:35:17Z\"),\n", + ")\n", + "brancol_trip2_startend = (\n", + " parser.parse(\"2024-02-03 17:16:47Z\"),\n", + " parser.parse(\"2024-02-26 23:32:03Z\") + timedelta(hours=7.0966),\n", + ")\n", + "brancol_trip3_startend = (\n", + " parser.parse(\"2024-03-05 15:26:13Z\"),\n", + " parser.parse(\"2024-04-01 01:21:47Z\") + timedelta(hours=8.4725),\n", + ")\n", "df.loc[brancol_trip1_startend[0]] = 0\n", "df.loc[brancol_trip3_startend[1]] = 0\n", "df = df.resample(timedelta(minutes=5)).sum()\n", - "df = df.loc[(df.index > brancol_trip1_startend[0] ) & (df.index < brancol_trip1_startend[1]) \\\n", - " | (df.index > brancol_trip2_startend[0] ) & (df.index < brancol_trip2_startend[1]) \\\n", - " | (df.index > brancol_trip3_startend[0] ) & (df.index < brancol_trip3_startend[1]) ]\n", - "\n", + "df = df.loc[\n", + " (df.index > brancol_trip1_startend[0]) & (df.index < brancol_trip1_startend[1])\n", + " | (df.index > brancol_trip2_startend[0]) & (df.index < brancol_trip2_startend[1])\n", + " | (df.index > brancol_trip3_startend[0]) & (df.index < brancol_trip3_startend[1])\n", + "]\n", "\n", "\n", "df = df.join(is_daytime)\n", "\n", "df = df.loc[df.is_daytime]\n", - "display('brancol bothcams upimecount', df['bothcams'].sum())\n", - "display('brancol bothcams daytimecount', len(df['bothcams']))\n", - "display(\"brancol uptime during sunrisesunset\", df['bothcams'].sum() / len(df['bothcams']))\n", - "\n", - "numeratorsum = df['bothcams'].sum()\n", - "denominatorsum = len(df['bothcams'])\n", - "\n", - "\n", - "df = pandas.read_pickle('stpatrick_box_filedatetimes.pickle')\n", - "df['bothcams'] = df.apply(lambda x: 1 if x.cam1 > 0 and x.cam2 > 0 else 0, axis=1)\n", - "stpatrick_trip1_startend = (parser.parse('2024-01-03 13:22:25Z'), parser.parse('2024-01-28 06:40:04Z'),)\n", - "stpatrick_trip2_startend = (parser.parse('2024-02-03 13:44:11Z'), parser.parse('2024-02-28 03:42:55Z') + 
timedelta(hours=1.578),)\n", + "display(\"brancol bothcams uptimecount\", df[\"bothcams\"].sum())\n", + "display(\"brancol bothcams daytimecount\", len(df[\"bothcams\"]))\n", + "display(\"brancol uptime during sunrisesunset\", df[\"bothcams\"].sum() / len(df[\"bothcams\"]))\n", + "\n", + "numeratorsum = df[\"bothcams\"].sum()\n", + "denominatorsum = len(df[\"bothcams\"])\n", + "\n", + "\n", + "df = pandas.read_pickle(\"stpatrick_box_filedatetimes.pickle\")\n", + "df[\"bothcams\"] = df.apply(lambda x: 1 if x.cam1 > 0 and x.cam2 > 0 else 0, axis=1)\n", + "stpatrick_trip1_startend = (\n", + "    parser.parse(\"2024-01-03 13:22:25Z\"),\n", + "    parser.parse(\"2024-01-28 06:40:04Z\"),\n", + ")\n", + "stpatrick_trip2_startend = (\n", + "    parser.parse(\"2024-02-03 13:44:11Z\"),\n", + "    parser.parse(\"2024-02-28 03:42:55Z\") + timedelta(hours=1.578),\n", + ")\n", "df.loc[stpatrick_trip1_startend[0]] = 0\n", "df.loc[stpatrick_trip2_startend[1]] = 0\n", "df = df.resample(timedelta(minutes=5)).sum()\n", - "df = df.loc[(df.index > stpatrick_trip1_startend[0] ) & (df.index < stpatrick_trip1_startend[1]) \\\n", - "    | (df.index > stpatrick_trip2_startend[0] ) & (df.index < stpatrick_trip2_startend[1]) ]\n", - "\n", + "df = df.loc[\n", + "    (df.index > stpatrick_trip1_startend[0]) & (df.index < stpatrick_trip1_startend[1])\n", + "    | (df.index > stpatrick_trip2_startend[0]) & (df.index < stpatrick_trip2_startend[1])\n", + "]\n", "\n", "\n", "df = df.join(is_daytime)\n", "\n", "df = df.loc[df.is_daytime]\n", "\n", - "display(\"stpatrick uptime during sunrisesunset\", df['bothcams'].sum() / len(df['bothcams']))\n", + "display(\"stpatrick uptime during sunrisesunset\", df[\"bothcams\"].sum() / len(df[\"bothcams\"]))\n", "\n", - "numeratorsum += df['bothcams'].sum()\n", - "denominatorsum += len(df['bothcams'])\n", + "numeratorsum += df[\"bothcams\"].sum()\n", + "denominatorsum += len(df[\"bothcams\"])\n", "\n", "\n", "display(\"alltrips uptime during sunrisesunset\", numeratorsum / denominatorsum)" @@ -2932,12 +3033,11 @@ "import io\n", "import re\n", "\n", - "\n", - "brancol_fname = 'thalos_uptime_logs/sessions_brancol.csv'\n", + "brancol_fname = \"thalos_uptime_logs/sessions_brancol.csv\"\n", "# stpatrick_fname = 'thalos_uptime_logs/sessions_saintpatrick.csv'\n", "\n", "\n", - "r = re.compile('^\\d+(, .*){8}')\n", + "r = re.compile(r\"^\\d+(, .*){8}\")\n", "brancol_lines = []\n", "with open(brancol_fname) as brancol_f:\n", "    for l in brancol_f.readlines():\n", @@ -2946,31 +3046,48 @@ "# print(type(brancol_f), dir(brancol_f))\n", "\n", "# display(brancol_lines)\n", - "    \n", - "thalos_uptime_logs_brancol = pandas.read_csv(io.StringIO(''.join(brancol_lines)), names=['id', 'datetime', 'boat', 'certus', 'status', 'i', 'j', 'name', 'ip'])\n", - "    \n", + "\n", + "thalos_uptime_logs_brancol = pandas.read_csv(\n", + "    io.StringIO(\"\".join(brancol_lines)),\n", + "    names=[\"id\", \"datetime\", \"boat\", \"certus\", \"status\", \"i\", \"j\", \"name\", \"ip\"],\n", + ")\n", + "\n", "# thalos_uptime_logs_stpatrick = pandas.read_csv()\n", "\n", - "thalos_uptime_logs_brancol['datetime'] = pandas.to_datetime(thalos_uptime_logs_brancol['datetime'], utc=True)\n", - "thalos_uptime_logs_brancol = thalos_uptime_logs_brancol.loc[thalos_uptime_logs_brancol['datetime'] > '2024-01-01']\n", + "thalos_uptime_logs_brancol[\"datetime\"] = pandas.to_datetime(\n", + "    thalos_uptime_logs_brancol[\"datetime\"], utc=True\n", + ")\n", + "thalos_uptime_logs_brancol = thalos_uptime_logs_brancol.loc[\n", + "    thalos_uptime_logs_brancol[\"datetime\"] > 
\"2024-01-01\"\n", + "]\n", "\n", - "thalos_uptime_logs_brancol.index = thalos_uptime_logs_brancol['datetime']\n", + "thalos_uptime_logs_brancol.index = thalos_uptime_logs_brancol[\"datetime\"]\n", "\n", "# display(thalos_uptime_logs_brancol.index[4] )\n", "\n", "# display(thalos_uptime_logs_brancol.loc[parser.parse('2024-01-04 15:22:07')])\n", "\n", - "thalos_uptime_logs_brancol.loc[parser.parse('2024-01-01 00:00Z')] = [0, parser.parse('2024-01-01'), ' brancol', ' cer1', ' down', 0, 0, ' pop-prd-pthalos000', ' ']\n", + "thalos_uptime_logs_brancol.loc[parser.parse(\"2024-01-01 00:00Z\")] = [\n", + " 0,\n", + " parser.parse(\"2024-01-01\"),\n", + " \" brancol\",\n", + " \" cer1\",\n", + " \" down\",\n", + " 0,\n", + " 0,\n", + " \" pop-prd-pthalos000\",\n", + " \" \",\n", + "]\n", "\n", "# dir(thalos_uptime_logs_brancol.resample(timedelta(minutes=5)))\n", "thalos_brancol_ts_df = thalos_uptime_logs_brancol.resample(timedelta(minutes=5)).last()\n", "# u\n", - "thalos_brancol_ts_df['status'] = thalos_brancol_ts_df['status'].ffill()\n", - "thalos_brancol_ts_df['up'] = thalos_brancol_ts_df['status'].map(lambda x: 1 if x==' up' else 0)\n", + "thalos_brancol_ts_df[\"status\"] = thalos_brancol_ts_df[\"status\"].ffill()\n", + "thalos_brancol_ts_df[\"up\"] = thalos_brancol_ts_df[\"status\"].map(lambda x: 1 if x == \" up\" else 0)\n", "\n", - "thalos_brancol_ts_df.to_pickle('thaloslogs_brancol_uptime_ts_df.pickle')\n", + "thalos_brancol_ts_df.to_pickle(\"thaloslogs_brancol_uptime_ts_df.pickle\")\n", "\n", - "thalos_brancol_ts_df['up'].plot(figsize=(200,2))" + "thalos_brancol_ts_df[\"up\"].plot(figsize=(200, 2))" ] }, { @@ -3001,19 +3118,21 @@ } ], "source": [ - "brancol_file_uptime = pandas.read_pickle('brancol_box_filedatetimes.pickle')\n", - "brancol_file_uptime.loc[parser.parse('2024-01-01 00:00Z')] = [0, 0]\n", - "brancol_file_uptime['bothcamsup'] = brancol_file_uptime.apply(lambda x: x.cam1 > 0 and x.cam2 > 0, axis=1)\n", + "brancol_file_uptime = pandas.read_pickle(\"brancol_box_filedatetimes.pickle\")\n", + "brancol_file_uptime.loc[parser.parse(\"2024-01-01 00:00Z\")] = [0, 0]\n", + "brancol_file_uptime[\"bothcamsup\"] = brancol_file_uptime.apply(\n", + " lambda x: x.cam1 > 0 and x.cam2 > 0, axis=1\n", + ")\n", "# display(stpatrick_file_uptime)\n", "p = brancol_file_uptime.resample(timedelta(minutes=5)).last()\n", - "p['bothcamsup'] = p['bothcamsup'].fillna(0)\n", + "p[\"bothcamsup\"] = p[\"bothcamsup\"].fillna(0)\n", "\n", "# p = p.loc[p.index >= '2024-02-17 00:00:00-06']\n", "# p = p.loc[p.index <= '2024-02-21 00:00:00-06']\n", "# p.index = p.index.map(lambda x: x.tz_convert('America/Costa_Rica'))\n", "\n", - "p= p.join(thalos_brancol_ts_df['up'])\n", - "p[['bothcamsup', 'up']].plot(figsize=(100,2))" + "p = p.join(thalos_brancol_ts_df[\"up\"])\n", + "p[[\"bothcamsup\", \"up\"]].plot(figsize=(100, 2))" ] }, { @@ -3038,7 +3157,7 @@ } ], "source": [ - "disparities = p.loc[p['bothcamsup'] + p['up'] == 1]\n", + "disparities = p.loc[p[\"bothcamsup\"] + p[\"up\"] == 1]\n", "disparities.sum()" ] }, @@ -3073,12 +3192,11 @@ "import io\n", "import re\n", "\n", - "\n", "# brancol_fname = 'thalos_uptime_logs/sessions_brancol.csv'\n", - "stpatrick_fname = 'thalos_uptime_logs/sessions_saintpatrick.csv'\n", + "stpatrick_fname = \"thalos_uptime_logs/sessions_saintpatrick.csv\"\n", "\n", "\n", - "r = re.compile('^\\d+(, .*){8}')\n", + "r = re.compile(r\"^\\d+(, .*){8}\")\n", "stpatrick_lines = []\n", "with open(stpatrick_fname) as stpatrick_f:\n", " for l in stpatrick_f.readlines():\n", @@ -3087,31 +3205,50 @@ 
"# print(type(brancol_f), dir(brancol_f))\n", "\n", "# display(brancol_lines)\n", - " \n", - "thalos_uptime_logs_stpatrick = pandas.read_csv(io.StringIO(''.join(stpatrick_lines)), names=['id', 'datetime', 'boat', 'certus', 'status', 'i', 'j', 'name', 'ip'])\n", - " \n", + "\n", + "thalos_uptime_logs_stpatrick = pandas.read_csv(\n", + " io.StringIO(\"\".join(stpatrick_lines)),\n", + " names=[\"id\", \"datetime\", \"boat\", \"certus\", \"status\", \"i\", \"j\", \"name\", \"ip\"],\n", + ")\n", + "\n", "# thalos_uptime_logs_stpatrick = pandas.read_csv()\n", "\n", - "thalos_uptime_logs_stpatrick['datetime'] = pandas.to_datetime(thalos_uptime_logs_stpatrick['datetime'], utc=True)\n", - "thalos_uptime_logs_stpatrick = thalos_uptime_logs_stpatrick.loc[thalos_uptime_logs_stpatrick['datetime'] > '2024-01-01']\n", + "thalos_uptime_logs_stpatrick[\"datetime\"] = pandas.to_datetime(\n", + " thalos_uptime_logs_stpatrick[\"datetime\"], utc=True\n", + ")\n", + "thalos_uptime_logs_stpatrick = thalos_uptime_logs_stpatrick.loc[\n", + " thalos_uptime_logs_stpatrick[\"datetime\"] > \"2024-01-01\"\n", + "]\n", "\n", - "thalos_uptime_logs_stpatrick.index = thalos_uptime_logs_stpatrick['datetime']\n", + "thalos_uptime_logs_stpatrick.index = thalos_uptime_logs_stpatrick[\"datetime\"]\n", "\n", "# display(thalos_uptime_logs_brancol.index[4] )\n", "\n", "# display(thalos_uptime_logs_brancol.loc[parser.parse('2024-01-04 15:22:07')])\n", "\n", - "thalos_uptime_logs_stpatrick.loc[parser.parse('2024-01-01 00:00Z')] = [0, parser.parse('2024-01-01'), ' saintpatrick', ' cer1', ' down', 0, 0, ' pop-prd-pthalos000', ' ']\n", + "thalos_uptime_logs_stpatrick.loc[parser.parse(\"2024-01-01 00:00Z\")] = [\n", + " 0,\n", + " parser.parse(\"2024-01-01\"),\n", + " \" saintpatrick\",\n", + " \" cer1\",\n", + " \" down\",\n", + " 0,\n", + " 0,\n", + " \" pop-prd-pthalos000\",\n", + " \" \",\n", + "]\n", "\n", "# dir(thalos_uptime_logs_brancol.resample(timedelta(minutes=5)))\n", "thalos_stpatrick_ts_df = thalos_uptime_logs_stpatrick.resample(timedelta(minutes=5)).last()\n", "# u\n", - "thalos_stpatrick_ts_df['status'] = thalos_stpatrick_ts_df['status'].ffill()\n", - "thalos_stpatrick_ts_df['up'] = thalos_stpatrick_ts_df['status'].map(lambda x: 1 if x==' up' else 0)\n", + "thalos_stpatrick_ts_df[\"status\"] = thalos_stpatrick_ts_df[\"status\"].ffill()\n", + "thalos_stpatrick_ts_df[\"up\"] = thalos_stpatrick_ts_df[\"status\"].map(\n", + " lambda x: 1 if x == \" up\" else 0\n", + ")\n", "\n", - "thalos_stpatrick_ts_df.to_pickle('thaloslogs_stpatrick_uptime_ts_df.pickle')\n", + "thalos_stpatrick_ts_df.to_pickle(\"thaloslogs_stpatrick_uptime_ts_df.pickle\")\n", "\n", - "thalos_stpatrick_ts_df['up'].plot(figsize=(200,2))" + "thalos_stpatrick_ts_df[\"up\"].plot(figsize=(200, 2))" ] }, { @@ -3142,20 +3279,21 @@ } ], "source": [ - "\n", - "stpatrick_file_uptime = pandas.read_pickle('stpatrick_box_filedatetimes.pickle')\n", - "stpatrick_file_uptime.loc[parser.parse('2024-01-01 00:00Z')] = [0, 0]\n", - "stpatrick_file_uptime['bothcamsup'] = stpatrick_file_uptime.apply(lambda x: x.cam1 > 0 and x.cam2 > 0, axis=1)\n", + "stpatrick_file_uptime = pandas.read_pickle(\"stpatrick_box_filedatetimes.pickle\")\n", + "stpatrick_file_uptime.loc[parser.parse(\"2024-01-01 00:00Z\")] = [0, 0]\n", + "stpatrick_file_uptime[\"bothcamsup\"] = stpatrick_file_uptime.apply(\n", + " lambda x: x.cam1 > 0 and x.cam2 > 0, axis=1\n", + ")\n", "# display(stpatrick_file_uptime)\n", "q = stpatrick_file_uptime.resample(timedelta(minutes=5)).last()\n", - "q['bothcamsup'] = 
q['bothcamsup'].fillna(0)\n", + "q[\"bothcamsup\"] = q[\"bothcamsup\"].fillna(0)\n", "\n", "# p = p.loc[p.index >= '2024-02-17 00:00:00-06']\n", "# p = p.loc[p.index <= '2024-02-21 00:00:00-06']\n", "# p.index = p.index.map(lambda x: x.tz_convert('America/Costa_Rica'))\n", "\n", - "q= q.join(thalos_stpatrick_ts_df['up'])\n", - "q[['bothcamsup', 'up']].plot(figsize=(100,2))" + "q = q.join(thalos_stpatrick_ts_df[\"up\"])\n", + "q[[\"bothcamsup\", \"up\"]].plot(figsize=(100, 2))" ] }, { @@ -3180,7 +3318,7 @@ } ], "source": [ - "disparities = q.loc[q['bothcamsup'] + q['up'] == 1]\n", + "disparities = q.loc[q[\"bothcamsup\"] + q[\"up\"] == 1]\n", "disparities.sum()" ] }, @@ -3201,17 +3339,22 @@ } ], "source": [ - "thalos_system_logs_as_baseline = p.loc[p['up'] == 1]\n", - "br_numerator = thalos_system_logs_as_baseline['bothcamsup'].sum()\n", + "thalos_system_logs_as_baseline = p.loc[p[\"up\"] == 1]\n", + "br_numerator = thalos_system_logs_as_baseline[\"bothcamsup\"].sum()\n", "br_denominator = len(thalos_system_logs_as_baseline)\n", - "print('brancol video uptime with thalos system logs as baseline', br_numerator / br_denominator)\n", + "print(\"brancol video uptime with thalos system logs as baseline\", br_numerator / br_denominator)\n", "\n", - "thalos_system_logs_as_baseline = q.loc[q['up'] == 1]\n", - "stp_numerator = thalos_system_logs_as_baseline['bothcamsup'].sum()\n", + "thalos_system_logs_as_baseline = q.loc[q[\"up\"] == 1]\n", + "stp_numerator = thalos_system_logs_as_baseline[\"bothcamsup\"].sum()\n", "stp_denominator = len(thalos_system_logs_as_baseline)\n", - "print('saintpatrick video uptime with thalos system logs as baseline', stp_numerator / stp_denominator)\n", - "\n", - "print('all trips with thalos system logs as a baseline', (br_numerator + stp_numerator) / (br_denominator+stp_denominator))" + "print(\n", + " \"saintpatrick video uptime with thalos system logs as baseline\", stp_numerator / stp_denominator\n", + ")\n", + "\n", + "print(\n", + " \"all trips with thalos system logs as a baseline\",\n", + " (br_numerator + stp_numerator) / (br_denominator + stp_denominator),\n", + ")" ] }, { @@ -3259,70 +3402,107 @@ } ], "source": [ - "\n", - "branc_box_filedatetimes_df = pandas.read_pickle('brancol_box_filedatetimes.pickle')\n", - "\n", - "brancol_trip1_startend = (parser.parse('2024-01-05 16:14:42Z'), parser.parse('2024-01-26 01:35:17Z'),)\n", - "brancol_trip2_startend = (parser.parse('2024-02-03 17:16:47Z'), parser.parse('2024-02-26 23:32:03Z') + timedelta(hours=7.0966),)\n", - "brancol_trip3_startend = (parser.parse('2024-03-05 15:26:13Z'), parser.parse('2024-04-01 01:21:47Z') + timedelta(hours=8.4725),)\n", + "branc_box_filedatetimes_df = pandas.read_pickle(\"brancol_box_filedatetimes.pickle\")\n", + "\n", + "brancol_trip1_startend = (\n", + " parser.parse(\"2024-01-05 16:14:42Z\"),\n", + " parser.parse(\"2024-01-26 01:35:17Z\"),\n", + ")\n", + "brancol_trip2_startend = (\n", + " parser.parse(\"2024-02-03 17:16:47Z\"),\n", + " parser.parse(\"2024-02-26 23:32:03Z\") + timedelta(hours=7.0966),\n", + ")\n", + "brancol_trip3_startend = (\n", + " parser.parse(\"2024-03-05 15:26:13Z\"),\n", + " parser.parse(\"2024-04-01 01:21:47Z\") + timedelta(hours=8.4725),\n", + ")\n", "branc_box_filedatetimes_df.loc[brancol_trip1_startend[0]] = 0\n", "branc_box_filedatetimes_df.loc[brancol_trip3_startend[1]] = 0\n", "branc_box_filedatetimes_df = branc_box_filedatetimes_df.resample(timedelta(minutes=5)).last()\n", - "branc_box_filedatetimes_df['trip1'] = (branc_box_filedatetimes_df.index > 
brancol_trip1_startend[0] ) & (branc_box_filedatetimes_df.index < brancol_trip1_startend[1]) \n", - "branc_box_filedatetimes_df['trip2'] = (branc_box_filedatetimes_df.index > brancol_trip2_startend[0] ) & (branc_box_filedatetimes_df.index < brancol_trip2_startend[1]) \n", - "branc_box_filedatetimes_df['trip3'] = (branc_box_filedatetimes_df.index > brancol_trip3_startend[0] ) & (branc_box_filedatetimes_df.index < brancol_trip3_startend[1])\n", - "branc_box_filedatetimes_df['cam1'] = branc_box_filedatetimes_df['cam1'].fillna(0)\n", - "branc_box_filedatetimes_df['cam2'] = branc_box_filedatetimes_df['cam2'].fillna(0)\n", - "branc_box_filedatetimes_df['bothcams'] = branc_box_filedatetimes_df.apply(lambda x: 1 if x.cam1 > 0 and x.cam2 > 0 else 0, axis=1)\n", - "sr = sunrisesunset[['sunrise']]\n", - "sr['a'] = 1\n", - "sr = sr.set_index('sunrise')\n", + "branc_box_filedatetimes_df[\"trip1\"] = (\n", + " branc_box_filedatetimes_df.index > brancol_trip1_startend[0]\n", + ") & (branc_box_filedatetimes_df.index < brancol_trip1_startend[1])\n", + "branc_box_filedatetimes_df[\"trip2\"] = (\n", + " branc_box_filedatetimes_df.index > brancol_trip2_startend[0]\n", + ") & (branc_box_filedatetimes_df.index < brancol_trip2_startend[1])\n", + "branc_box_filedatetimes_df[\"trip3\"] = (\n", + " branc_box_filedatetimes_df.index > brancol_trip3_startend[0]\n", + ") & (branc_box_filedatetimes_df.index < brancol_trip3_startend[1])\n", + "branc_box_filedatetimes_df[\"cam1\"] = branc_box_filedatetimes_df[\"cam1\"].fillna(0)\n", + "branc_box_filedatetimes_df[\"cam2\"] = branc_box_filedatetimes_df[\"cam2\"].fillna(0)\n", + "branc_box_filedatetimes_df[\"bothcams\"] = branc_box_filedatetimes_df.apply(\n", + " lambda x: 1 if x.cam1 > 0 and x.cam2 > 0 else 0, axis=1\n", + ")\n", + "sr = sunrisesunset[[\"sunrise\"]]\n", + "sr[\"a\"] = 1\n", + "sr = sr.set_index(\"sunrise\")\n", "sr = sr.resample(timedelta(minutes=5)).last()\n", - "ss = sunrisesunset[['sunset']]\n", - "ss['b'] = 0\n", - "ss = ss.set_index('sunset')\n", + "ss = sunrisesunset[[\"sunset\"]]\n", + "ss[\"b\"] = 0\n", + "ss = ss.set_index(\"sunset\")\n", "ss = ss.resample(timedelta(minutes=5)).last()\n", - "srss = sr.join(ss, how='outer')\n", + "srss = sr.join(ss, how=\"outer\")\n", "# sj['a'] = sj['a'].fillna(value=None)\n", "# sj['b'] = sj['b'].fillna(value=None)\n", "# display(sj)\n", "\n", - "srss['sunup'] = srss.apply(lambda x: x['a'] if pandas.notna(x['a']) else x['b'] if pandas.notna(x['b']) else None, axis=1)\n", - "srss['sunup'] = srss['sunup'].ffill()\n", + "srss[\"sunup\"] = srss.apply(\n", + " lambda x: x[\"a\"] if pandas.notna(x[\"a\"]) else x[\"b\"] if pandas.notna(x[\"b\"]) else None, axis=1\n", + ")\n", + "srss[\"sunup\"] = srss[\"sunup\"].ffill()\n", "\n", - "branc_box_filedatetimes_df = branc_box_filedatetimes_df.join(srss['sunup'])\n", + "branc_box_filedatetimes_df = branc_box_filedatetimes_df.join(srss[\"sunup\"])\n", "# display_full(branc_box_filedatetimes_df)\n", "\n", - "stp_box_filedatetimes_df = pandas.read_pickle('stpatrick_box_filedatetimes.pickle')\n", + "stp_box_filedatetimes_df = pandas.read_pickle(\"stpatrick_box_filedatetimes.pickle\")\n", "\n", - "stpatrick_trip1_startend = (parser.parse('2024-01-03 13:22:25Z'), parser.parse('2024-01-28 06:40:04Z'),)\n", - "stpatrick_trip2_startend = (parser.parse('2024-02-03 13:44:11Z'), parser.parse('2024-02-28 03:42:55Z') + timedelta(hours=1.578),)\n", + "stpatrick_trip1_startend = (\n", + " parser.parse(\"2024-01-03 13:22:25Z\"),\n", + " parser.parse(\"2024-01-28 06:40:04Z\"),\n", + ")\n", + 
"stpatrick_trip2_startend = (\n", + " parser.parse(\"2024-02-03 13:44:11Z\"),\n", + " parser.parse(\"2024-02-28 03:42:55Z\") + timedelta(hours=1.578),\n", + ")\n", "stp_box_filedatetimes_df.loc[stpatrick_trip1_startend[0]] = 0\n", "stp_box_filedatetimes_df.loc[stpatrick_trip2_startend[1]] = 0\n", "stp_box_filedatetimes_df = stp_box_filedatetimes_df.resample(timedelta(minutes=5)).last()\n", "\n", - "stp_box_filedatetimes_df['trip1'] = (stp_box_filedatetimes_df.index > stpatrick_trip1_startend[0] ) & (stp_box_filedatetimes_df.index < stpatrick_trip1_startend[1]) \n", - "stp_box_filedatetimes_df['trip2'] = (stp_box_filedatetimes_df.index > stpatrick_trip2_startend[0] ) & (stp_box_filedatetimes_df.index < stpatrick_trip2_startend[1]) \n", - "stp_box_filedatetimes_df['trip3'] = False\n", - "stp_box_filedatetimes_df['cam1'] = stp_box_filedatetimes_df['cam1'].fillna(0)\n", - "stp_box_filedatetimes_df['cam2'] = stp_box_filedatetimes_df['cam2'].fillna(0)\n", - "stp_box_filedatetimes_df['bothcams'] = stp_box_filedatetimes_df.apply(lambda x: 1 if x.cam1 > 0 and x.cam2 > 0 else 0, axis=1)\n", - "\n", - "stp_box_filedatetimes_df = stp_box_filedatetimes_df.join(srss['sunup'])\n", + "stp_box_filedatetimes_df[\"trip1\"] = (\n", + " stp_box_filedatetimes_df.index > stpatrick_trip1_startend[0]\n", + ") & (stp_box_filedatetimes_df.index < stpatrick_trip1_startend[1])\n", + "stp_box_filedatetimes_df[\"trip2\"] = (\n", + " stp_box_filedatetimes_df.index > stpatrick_trip2_startend[0]\n", + ") & (stp_box_filedatetimes_df.index < stpatrick_trip2_startend[1])\n", + "stp_box_filedatetimes_df[\"trip3\"] = False\n", + "stp_box_filedatetimes_df[\"cam1\"] = stp_box_filedatetimes_df[\"cam1\"].fillna(0)\n", + "stp_box_filedatetimes_df[\"cam2\"] = stp_box_filedatetimes_df[\"cam2\"].fillna(0)\n", + "stp_box_filedatetimes_df[\"bothcams\"] = stp_box_filedatetimes_df.apply(\n", + " lambda x: 1 if x.cam1 > 0 and x.cam2 > 0 else 0, axis=1\n", + ")\n", + "\n", + "stp_box_filedatetimes_df = stp_box_filedatetimes_df.join(srss[\"sunup\"])\n", "# display_full(stp_box_filedatetimes_df)\n", "\n", + "\n", "class ReduceOutages:\n", " def __init__(self):\n", " self.outages = []\n", " self.outageStart = None\n", " self.last = None\n", + "\n", " def __call__(self, nx):\n", - "# print(dir(nx.index))\n", - "# if nx.Index == parser.parse('2024-02-06 23:25:00+00:00'):\n", - "# print(nx, self.last)\n", - " if not ((nx.trip1 or nx.trip2 or nx.trip3 ) and nx.sunup):\n", + " # print(dir(nx.index))\n", + " # if nx.Index == parser.parse('2024-02-06 23:25:00+00:00'):\n", + " # print(nx, self.last)\n", + " if not ((nx.trip1 or nx.trip2 or nx.trip3) and nx.sunup):\n", " if self.outageStart and self.last is not None:\n", - " self.outages.append((self.outageStart, nx.Index,))\n", + " self.outages.append(\n", + " (\n", + " self.outageStart,\n", + " nx.Index,\n", + " )\n", + " )\n", " self.outageStart = None\n", " self.last = None\n", " return\n", @@ -3334,51 +3514,68 @@ " if self.last.bothcams == 1 and nx.bothcams == 0:\n", " self.outageStart = nx.Index\n", " if self.last.bothcams == 0 and nx.bothcams == 1:\n", - " self.outages.append((self.outageStart, nx.Index,))\n", + " self.outages.append(\n", + " (\n", + " self.outageStart,\n", + " nx.Index,\n", + " )\n", + " )\n", " self.outageStart = None\n", " self.last = nx\n", - " \n", + "\n", + "\n", "branc_reduce_outages = ReduceOutages()\n", - "for i in branc_box_filedatetimes_df.itertuples(index=True, name='a'):\n", + "for i in branc_box_filedatetimes_df.itertuples(index=True, name=\"a\"):\n", " 
branc_reduce_outages(i)\n", "\n", "stp_reduce_outages = ReduceOutages()\n", - "for i in stp_box_filedatetimes_df.itertuples(index=True, name='a'):\n", + "for i in stp_box_filedatetimes_df.itertuples(index=True, name=\"a\"):\n", " stp_reduce_outages(i)\n", "\n", "# put the outages into a resample.first.ffill so that it can be plotted alongside uptime on the very long time graph\n", - "tmp = pandas.DataFrame([ x for (o_start, o_end) in branc_reduce_outages.outages for x in [{'datetime':o_start, 'outage':1, 'diff': o_end - o_start},{'datetime':o_end,'outage':0, 'diff':pandas.NA}]]).set_index('datetime')\n", + "tmp = pandas.DataFrame(\n", + " [\n", + " x\n", + " for (o_start, o_end) in branc_reduce_outages.outages\n", + " for x in [\n", + " {\"datetime\": o_start, \"outage\": 1, \"diff\": o_end - o_start},\n", + " {\"datetime\": o_end, \"outage\": 0, \"diff\": pandas.NA},\n", + " ]\n", + " ]\n", + ").set_index(\"datetime\")\n", "tmp = tmp.resample(timedelta(minutes=5)).first().ffill()\n", "\n", - "branc_box_filedatetimes_df['outage'] = tmp['outage']\n", - "branc_box_filedatetimes_df['diff'] = tmp['diff']\n", - "branc_box_filedatetimes_df['outage'] = branc_box_filedatetimes_df['outage'].fillna(0)\n", + "branc_box_filedatetimes_df[\"outage\"] = tmp[\"outage\"]\n", + "branc_box_filedatetimes_df[\"diff\"] = tmp[\"diff\"]\n", + "branc_box_filedatetimes_df[\"outage\"] = branc_box_filedatetimes_df[\"outage\"].fillna(0)\n", "# display(branc_box_filedatetimes_df[['bothcams','outage']].plot(figsize=(200,2)))\n", "\n", "all_outtages_df = pandas.DataFrame(branc_reduce_outages.outages + stp_reduce_outages.outages)\n", - "all_outtages_df['diff'] = all_outtages_df[1] - all_outtages_df[0]\n", - "binandlabels = [(timedelta(minutes=0), '0mins'),\n", - " (timedelta(minutes=5), '5mins'),\n", - " (timedelta(minutes=10),'10mins'),\n", - " (timedelta(minutes=15),'15mins'),\n", - " (timedelta(minutes=20), '20mins'),\n", - " (timedelta(minutes=40), '40mins'),\n", - " (timedelta(minutes=60), '60mins'),\n", - " (timedelta(minutes=80), '80mins'),\n", - " (timedelta(minutes=100), '100mins'),\n", - " (timedelta(minutes=120), '120mins'),\n", - " (timedelta(hours=3), '3hours'),\n", - " (timedelta(hours=4), '4hours'),\n", - " (timedelta(hours=5), '5hours'),\n", - " (timedelta(hours=6), '6hours'),\n", - " (timedelta(hours=8), '8hours'),\n", - " (timedelta(hours=10), '10hours'),\n", - " (timedelta(hours=12), '12hours'),]\n", + "all_outtages_df[\"diff\"] = all_outtages_df[1] - all_outtages_df[0]\n", + "binandlabels = [\n", + " (timedelta(minutes=0), \"0mins\"),\n", + " (timedelta(minutes=5), \"5mins\"),\n", + " (timedelta(minutes=10), \"10mins\"),\n", + " (timedelta(minutes=15), \"15mins\"),\n", + " (timedelta(minutes=20), \"20mins\"),\n", + " (timedelta(minutes=40), \"40mins\"),\n", + " (timedelta(minutes=60), \"60mins\"),\n", + " (timedelta(minutes=80), \"80mins\"),\n", + " (timedelta(minutes=100), \"100mins\"),\n", + " (timedelta(minutes=120), \"120mins\"),\n", + " (timedelta(hours=3), \"3hours\"),\n", + " (timedelta(hours=4), \"4hours\"),\n", + " (timedelta(hours=5), \"5hours\"),\n", + " (timedelta(hours=6), \"6hours\"),\n", + " (timedelta(hours=8), \"8hours\"),\n", + " (timedelta(hours=10), \"10hours\"),\n", + " (timedelta(hours=12), \"12hours\"),\n", + "]\n", "bins = [i[0] for i in binandlabels]\n", "labels = [i[1] for i in binandlabels][1:]\n", - "all_outtages_df['cut'] = pandas.cut(all_outtages_df['diff'], bins, labels=labels)\n", + "all_outtages_df[\"cut\"] = pandas.cut(all_outtages_df[\"diff\"], bins, 
labels=labels)\n", "\n", - "all_outtages_df.groupby('cut').count()['diff'].plot(kind='bar', figsize=(7,3), xlabel='' )\n" + "all_outtages_df.groupby(\"cut\").count()[\"diff\"].plot(kind=\"bar\", figsize=(7, 3), xlabel=\"\")\n" ] }, { @@ -3409,22 +3606,33 @@ } ], "source": [ - "# try to prove that captain behavior (long outages) accounted for most of the difference \n", + "# try to prove that captain behavior (long outages) accounted for most of the difference\n", "# between videoon/systemon and videoon/daylighton\n", "\n", - "sunupcount = ((branc_box_filedatetimes_df['trip1'] | branc_box_filedatetimes_df['trip2'] | branc_box_filedatetimes_df['trip3'] ) & branc_box_filedatetimes_df['sunup'] == 1).sum()\n", - "print('outagecount', (branc_box_filedatetimes_df['outage'] == 1).sum())\n", - "print('sunupcount', sunupcount)\n", + "sunupcount = (\n", + " (\n", + " branc_box_filedatetimes_df[\"trip1\"]\n", + " | branc_box_filedatetimes_df[\"trip2\"]\n", + " | branc_box_filedatetimes_df[\"trip3\"]\n", + " )\n", + " & branc_box_filedatetimes_df[\"sunup\"]\n", + " == 1\n", + ").sum()\n", + "print(\"outagecount\", (branc_box_filedatetimes_df[\"outage\"] == 1).sum())\n", + "print(\"sunupcount\", sunupcount)\n", "\n", - "print((sunupcount-1079)/sunupcount)\n", + "print((sunupcount - 1079) / sunupcount)\n", "\n", "\n", "# branc_box_filedatetimes_df.loc[branc_box_filedatetimes_df['diff'].map(pandas.notna)]\n", - "outagecount_minuslongoutages = ((branc_box_filedatetimes_df['outage'] == 1 ) & ((branc_box_filedatetimes_df['diff'] < timedelta(hours=2) ))).sum()\n", + "outagecount_minuslongoutages = (\n", + " (branc_box_filedatetimes_df[\"outage\"] == 1)\n", + " & (branc_box_filedatetimes_df[\"diff\"] < timedelta(hours=2))\n", + ").sum()\n", "\n", - "print('outagecount_minuslongoutages', outagecount_minuslongoutages)\n", + "print(\"outagecount_minuslongoutages\", outagecount_minuslongoutages)\n", "# print('sunupcount', ((branc_box_filedatetimes_df['trip1'] | branc_box_filedatetimes_df['trip2'] | branc_box_filedatetimes_df['trip3'] ) & branc_box_filedatetimes_df['sunup'] == 1).sum())\n", - "(sunupcount-outagecount_minuslongoutages)/sunupcount\n", + "(sunupcount - outagecount_minuslongoutages) / sunupcount\n", "\n", "# not compelling. 
89% -> 95% is not enough for me\n", "# I expected it to be closer to the 98% (from brancol video uptime with thalos system logs as baseline)" @@ -3437,36 +3645,42 @@ "metadata": {}, "outputs": [], "source": [ - "brancol_file_uptime = pandas.read_pickle('brancol_box_filedatetimes.pickle')\n", + "brancol_file_uptime = pandas.read_pickle(\"brancol_box_filedatetimes.pickle\")\n", "brancol_file_uptime = brancol_file_uptime.resample(timedelta(minutes=5)).sum()\n", - "branc_list_of_state_changes = brancol_file_uptime.join(brancol_file_uptime.shift(1),lsuffix='_next', rsuffix='_prev')\n", + "branc_list_of_state_changes = brancol_file_uptime.join(\n", + " brancol_file_uptime.shift(1), lsuffix=\"_next\", rsuffix=\"_prev\"\n", + ")\n", "# print(dir(branc_list_of_state_changes.index))\n", "branc_list_of_state_changes = branc_list_of_state_changes.loc[branc_list_of_state_changes.index[1:]]\n", + "\n", + "\n", "# branc_list_of_state_changes\n", "def newstate(x):\n", " retA = None\n", - " if x['cam1_prev'] < x['cam1_next']:\n", - " retA='up'\n", - " elif x['cam1_prev'] > x['cam1_next']:\n", - " retA='down'\n", - " \n", + " if x[\"cam1_prev\"] < x[\"cam1_next\"]:\n", + " retA = \"up\"\n", + " elif x[\"cam1_prev\"] > x[\"cam1_next\"]:\n", + " retA = \"down\"\n", + "\n", " retB = None\n", - " if x['cam2_prev'] < x['cam2_next']:\n", - " retB='up'\n", - " elif x['cam2_prev'] > x['cam2_next']:\n", - " retB='down'\n", - " \n", + " if x[\"cam2_prev\"] < x[\"cam2_next\"]:\n", + " retB = \"up\"\n", + " elif x[\"cam2_prev\"] > x[\"cam2_next\"]:\n", + " retB = \"down\"\n", + "\n", " if retA and retB and retA != retB:\n", - " return 'mixed'\n", + " return \"mixed\"\n", " elif retA:\n", " return retA\n", " elif retB:\n", " return retB\n", " else:\n", " return pandas.NA\n", - "branc_list_of_state_changes['newstate'] = branc_list_of_state_changes.apply(newstate, axis=1)\n", + "\n", + "\n", + "branc_list_of_state_changes[\"newstate\"] = branc_list_of_state_changes.apply(newstate, axis=1)\n", "# branc_list_of_state_changes\n", - "display(branc_list_of_state_changes.loc[pandas.notna(branc_list_of_state_changes['newstate'])])\n", + "display(branc_list_of_state_changes.loc[pandas.notna(branc_list_of_state_changes[\"newstate\"])])\n", "\n", "# brancol_file_uptime.loc[ (brancol_file_uptime['cam1'] == 0) & (brancol_file_uptime['cam2'] == 0) ]\n" ] diff --git a/notebooks/tnc-edge-vectorprocessing.ipynb b/notebooks/tnc-edge-vectorprocessing.ipynb index 8e970b9..698ddc8 100644 --- a/notebooks/tnc-edge-vectorprocessing.ipynb +++ b/notebooks/tnc-edge-vectorprocessing.ipynb @@ -38,38 +38,39 @@ "\n", "aws_config = {}\n", "\n", - "aws_config['profile_name'] ='XXXXXXXX'\n", - "aws_config['region_name'] = 'us-east-1'\n", + "aws_config[\"profile_name\"] = \"XXXXXXXX\"\n", + "aws_config[\"region_name\"] = \"us-east-1\"\n", "\n", "import boto3\n", "\n", "boto3.setup_default_session(**aws_config)\n", "\n", - "s3 = boto3.client('s3')\n", + "s3 = boto3.client(\"s3\")\n", "\n", "# s3.list_objects(Bucket='51-gema-dev-dp-raw' , Prefix='tnc_edge/')\n", "\n", - "import pandas\n", - "import numpy\n", "import math\n", + "from datetime import timedelta\n", + "\n", "import awswrangler\n", - "from datetime import datetime, timezone, timedelta\n", + "import numpy\n", + "import pandas\n", "from dateutil import parser\n", - "import pytz\n", + "\n", "\n", "def display_full(x):\n", - " pandas.set_option('display.max_rows', 5000)\n", - " pandas.set_option('display.min_rows', 1000)\n", - " pandas.set_option('display.max_columns', None)\n", - " 
pandas.set_option('display.width', 2000)\n", - " pandas.set_option('display.float_format', '{:20,.2f}'.format)\n", - " pandas.set_option('display.max_colwidth', None)\n", + " pandas.set_option(\"display.max_rows\", 5000)\n", + " pandas.set_option(\"display.min_rows\", 1000)\n", + " pandas.set_option(\"display.max_columns\", None)\n", + " pandas.set_option(\"display.width\", 2000)\n", + " pandas.set_option(\"display.float_format\", \"{:20,.2f}\".format)\n", + " pandas.set_option(\"display.max_colwidth\", None)\n", " display(x)\n", - " pandas.reset_option('display.max_rows')\n", - " pandas.reset_option('display.max_columns')\n", - " pandas.reset_option('display.width')\n", - " pandas.reset_option('display.float_format')\n", - " pandas.reset_option('display.max_colwidth')\n" + " pandas.reset_option(\"display.max_rows\")\n", + " pandas.reset_option(\"display.max_columns\")\n", + " pandas.reset_option(\"display.width\")\n", + " pandas.reset_option(\"display.float_format\")\n", + " pandas.reset_option(\"display.max_colwidth\")\n" ] }, { @@ -79,11 +80,14 @@ "metadata": {}, "outputs": [], "source": [ - "branc_equip_agg_df = awswrangler.athena.read_sql_query(f\"SELECT * from brancol_v1_tests where datetime > '2024-01-01' and vector_id='4'\", database='tnc_edge')\n", + "branc_equip_agg_df = awswrangler.athena.read_sql_query(\n", + " \"SELECT * from brancol_v1_tests where datetime > '2024-01-01' and vector_id='4'\",\n", + " database=\"tnc_edge\",\n", + ")\n", "# equip_agg_df = awswrangler.athena.read_sql_query(f\"SELECT * from brancol_v1_tests where vector_id = '4' limit 10\", database='tnc_edge')\n", - "branc_equip_agg_df['datetime'] = pandas.to_datetime(branc_equip_agg_df['datetime'], utc=True)\n", + "branc_equip_agg_df[\"datetime\"] = pandas.to_datetime(branc_equip_agg_df[\"datetime\"], utc=True)\n", "\n", - "branc_equip_agg_df = branc_equip_agg_df.sort_values('datetime')\n" + "branc_equip_agg_df = branc_equip_agg_df.sort_values(\"datetime\")\n" ] }, { @@ -93,11 +97,14 @@ "metadata": {}, "outputs": [], "source": [ - "stpat_equip_agg_df = awswrangler.athena.read_sql_query(f\"SELECT * from stpatrick_v1_tests where datetime > '2024-01-01' and vector_id='4'\", database='tnc_edge')\n", + "stpat_equip_agg_df = awswrangler.athena.read_sql_query(\n", + " \"SELECT * from stpatrick_v1_tests where datetime > '2024-01-01' and vector_id='4'\",\n", + " database=\"tnc_edge\",\n", + ")\n", "# equip_agg_df = awswrangler.athena.read_sql_query(f\"SELECT * from brancol_v1_tests where vector_id = '4' limit 10\", database='tnc_edge')\n", - "stpat_equip_agg_df['datetime'] = pandas.to_datetime(stpat_equip_agg_df['datetime'], utc=True)\n", + "stpat_equip_agg_df[\"datetime\"] = pandas.to_datetime(stpat_equip_agg_df[\"datetime\"], utc=True)\n", "\n", - "stpat_equip_agg_df = stpat_equip_agg_df.sort_values('datetime')\n" + "stpat_equip_agg_df = stpat_equip_agg_df.sort_values(\"datetime\")\n" ] }, { @@ -1603,17 +1610,25 @@ } ], "source": [ - "branc_equip_agg_df['diff'] = branc_equip_agg_df['datetime'] - branc_equip_agg_df.shift(1)['datetime']\n", + "branc_equip_agg_df[\"diff\"] = (\n", + " branc_equip_agg_df[\"datetime\"] - branc_equip_agg_df.shift(1)[\"datetime\"]\n", + ")\n", "# branc_equip_agg_df\n", "\n", - "min_lateness = 3.9995*60\n", - "bins = numpy.logspace(math.log(min_lateness,10), math.log(1+branc_equip_agg_df['diff'].map(lambda x: x.total_seconds()/60).max(),10), num=150)\n", + "min_lateness = 3.9995 * 60\n", + "bins = numpy.logspace(\n", + " math.log10(min_lateness),\n", + " math.log10(1 + 
branc_equip_agg_df[\"diff\"].map(lambda x: x.total_seconds() / 60).max()),\n", + " num=150,\n", + ")\n", "# bins = numpy.logspace(math.log(min_lateness,10), math.log(4.0005*60,10), num=50)\n", "bins = list(map(lambda x: timedelta(minutes=x), bins))\n", "bins\n", - "branc_equip_agg_df['diffbucket'] = pandas.cut(branc_equip_agg_df.loc[branc_equip_agg_df['diff'] > timedelta(minutes=min_lateness)]['diff'], bins=bins)\n", - "branc_equip_agg_df.groupby('diffbucket')['diff'].count().plot.bar(figsize=(10,3))\n", - "\n" + "branc_equip_agg_df[\"diffbucket\"] = pandas.cut(\n", + " branc_equip_agg_df.loc[branc_equip_agg_df[\"diff\"] > timedelta(minutes=min_lateness)][\"diff\"],\n", + " bins=bins,\n", + ")\n", + "branc_equip_agg_df.groupby(\"diffbucket\")[\"diff\"].count().plot.bar(figsize=(10, 3))\n" ] }, { @@ -1644,18 +1659,26 @@ } ], "source": [ - "stpat_equip_agg_df['diff'] = stpat_equip_agg_df['datetime'] - stpat_equip_agg_df.shift(1)['datetime']\n", + "stpat_equip_agg_df[\"diff\"] = (\n", + " stpat_equip_agg_df[\"datetime\"] - stpat_equip_agg_df.shift(1)[\"datetime\"]\n", + ")\n", "# branc_equip_agg_df\n", "\n", "# min_lateness=3.9*60\n", - "min_lateness = 3.9995*60\n", - "bins = numpy.logspace(math.log(min_lateness,10), math.log(1+stpat_equip_agg_df['diff'].map(lambda x: x.total_seconds()/60).max(),10), num=150)\n", - "bins = numpy.logspace(math.log(min_lateness,10), math.log(4.0005*60,10), num=50)\n", + "min_lateness = 3.9995 * 60\n", + "bins = numpy.logspace(\n", + " math.log10(min_lateness),\n", + " math.log10(1 + stpat_equip_agg_df[\"diff\"].map(lambda x: x.total_seconds() / 60).max()),\n", + " num=150,\n", + ")\n", + "bins = numpy.logspace(math.log10(min_lateness), math.log10(4.0005 * 60), num=50)\n", "bins = list(map(lambda x: timedelta(minutes=x), bins))\n", "bins\n", - "stpat_equip_agg_df['diffbucket'] = pandas.cut(stpat_equip_agg_df.loc[stpat_equip_agg_df['diff'] > timedelta(minutes=min_lateness)]['diff'], bins=bins)\n", - "stpat_equip_agg_df.groupby('diffbucket')['diff'].count().plot.bar(figsize=(10,3))\n", - "\n" + "stpat_equip_agg_df[\"diffbucket\"] = pandas.cut(\n", + " stpat_equip_agg_df.loc[stpat_equip_agg_df[\"diff\"] > timedelta(minutes=min_lateness)][\"diff\"],\n", + " bins=bins,\n", + ")\n", + "stpat_equip_agg_df.groupby(\"diffbucket\")[\"diff\"].count().plot.bar(figsize=(10, 3))\n" ] }, { @@ -1666,13 +1689,17 @@ "outputs": [], "source": [ "# stpat_equip_agg_df = awswrangler.athena.read_sql_query(f\"SELECT * from stpatrick_v1_tests where datetime > '2024-01-01' and vector_id='4'\", database='tnc_edge')\n", - "branc_thalosmount_df = awswrangler.athena.read_sql_query(f\"SELECT * from brancol_v1_tests where vector_id = '3'\", database='tnc_edge')\n", - "branc_thalosmount_df['datetime'] = pandas.to_datetime(branc_thalosmount_df['datetime'], utc=True)\n", - "branc_thalosmount_df = branc_thalosmount_df.sort_values('datetime')\n", + "branc_thalosmount_df = awswrangler.athena.read_sql_query(\n", + " \"SELECT * from brancol_v1_tests where vector_id = '3'\", database=\"tnc_edge\"\n", + ")\n", + "branc_thalosmount_df[\"datetime\"] = pandas.to_datetime(branc_thalosmount_df[\"datetime\"], utc=True)\n", + "branc_thalosmount_df = branc_thalosmount_df.sort_values(\"datetime\")\n", "\n", - "stpat_thalosmount_df = awswrangler.athena.read_sql_query(f\"SELECT * from stpatrick_v1_tests where vector_id = '3'\", database='tnc_edge')\n", - "stpat_thalosmount_df['datetime'] = pandas.to_datetime(stpat_thalosmount_df['datetime'], utc=True)\n", - "stpat_thalosmount_df = 
stpat_thalosmount_df.sort_values('datetime')" + "stpat_thalosmount_df = awswrangler.athena.read_sql_query(\n", + " \"SELECT * from stpatrick_v1_tests where vector_id = '3'\", database=\"tnc_edge\"\n", + ")\n", + "stpat_thalosmount_df[\"datetime\"] = pandas.to_datetime(stpat_thalosmount_df[\"datetime\"], utc=True)\n", + "stpat_thalosmount_df = stpat_thalosmount_df.sort_values(\"datetime\")" ] }, { @@ -1693,19 +1720,36 @@ } ], "source": [ - "branc_thalosmount_df['diff'] = branc_thalosmount_df['datetime'] - branc_thalosmount_df.shift(1)['datetime']\n", - "stpat_thalosmount_df['diff'] = stpat_thalosmount_df['datetime'] - stpat_thalosmount_df.shift(1)['datetime']\n", + "branc_thalosmount_df[\"diff\"] = (\n", + " branc_thalosmount_df[\"datetime\"] - branc_thalosmount_df.shift(1)[\"datetime\"]\n", + ")\n", + "stpat_thalosmount_df[\"diff\"] = (\n", + " stpat_thalosmount_df[\"datetime\"] - stpat_thalosmount_df.shift(1)[\"datetime\"]\n", + ")\n", "\n", "min_lateness = 9.995\n", - "bins = numpy.logspace(math.log(min_lateness,10), math.log(1+stpat_thalosmount_df['diff'].map(lambda x: x.total_seconds()/60).max(),10), num=120)\n", - "bins = numpy.logspace(math.log(min_lateness,10), math.log(10.005,10), num=50)\n", + "bins = numpy.logspace(\n", + " math.log10(min_lateness),\n", + " math.log10(1 + stpat_thalosmount_df[\"diff\"].map(lambda x: x.total_seconds() / 60).max()),\n", + " num=120,\n", + ")\n", + "bins = numpy.logspace(math.log10(min_lateness), math.log10(10.005), num=50)\n", "bins = list(map(lambda x: timedelta(minutes=x), bins))\n", "bins\n", - "stpat_thalosmount_df['diffbucket'] = pandas.cut(stpat_thalosmount_df.loc[stpat_thalosmount_df['diff'] > timedelta(minutes=min_lateness)]['diff'], bins=bins)\n", - "stpat_thalosmount_df.groupby('diffbucket')['diff'].count().plot.bar(figsize=(10,3))\n", - "branc_thalosmount_df['diffbucket'] = pandas.cut(branc_thalosmount_df.loc[branc_thalosmount_df['diff'] > timedelta(minutes=min_lateness)]['diff'], bins=bins)\n", - "# branc_thalosmount_df.groupby('diffbucket')['diff'].count().plot.bar(figsize=(10,3))\n", - "\n" + "stpat_thalosmount_df[\"diffbucket\"] = pandas.cut(\n", + " stpat_thalosmount_df.loc[stpat_thalosmount_df[\"diff\"] > timedelta(minutes=min_lateness)][\n", + " \"diff\"\n", + " ],\n", + " bins=bins,\n", + ")\n", + "stpat_thalosmount_df.groupby(\"diffbucket\")[\"diff\"].count().plot.bar(figsize=(10, 3))\n", + "branc_thalosmount_df[\"diffbucket\"] = pandas.cut(\n", + " branc_thalosmount_df.loc[branc_thalosmount_df[\"diff\"] > timedelta(minutes=min_lateness)][\n", + " \"diff\"\n", + " ],\n", + " bins=bins,\n", + ")\n", + "# branc_thalosmount_df.groupby('diffbucket')['diff'].count().plot.bar(figsize=(10,3))\n" ] }, { @@ -3659,30 +3703,55 @@ } ], "source": [ - "thaloslogs_stpatrick_isup = pandas.read_pickle('thaloslogs_stpatrick_uptime_ts_df.pickle')\n", + "thaloslogs_stpatrick_isup = pandas.read_pickle(\"thaloslogs_stpatrick_uptime_ts_df.pickle\")\n", "# thaloslogs_stpatrick_isup = thaloslogs_stpatrick_uptime_ts_df.loc[thaloslogs_stpatrick_uptime_ts_df['up'] == 1]\n", - "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_isup.loc[thaloslogs_stpatrick_isup.index < '2024-04-08']\n", + "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_isup.loc[\n", + " thaloslogs_stpatrick_isup.index < \"2024-04-08\"\n", + "]\n", "thaloslogs_stpatrick_isup.sort_index()\n", - "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_isup.resample(timedelta(minutes=10))[['up']].min()\n", + "thaloslogs_stpatrick_isup = 
thaloslogs_stpatrick_isup.resample(timedelta(minutes=10))[[\"up\"]].min()\n", "thaloslogs_stpatrick_isup\n", "\n", - "stpat_thalosmount_ts_df = stpat_thalosmount_df.set_index('datetime')\n", + "stpat_thalosmount_ts_df = stpat_thalosmount_df.set_index(\"datetime\")\n", "# stpat_thalosmount_ts_df = stpat_thalosmount_ts_df.resample(timedelta(minutes=5)).last()\n", "stpat_thalosmount_ts_df = stpat_thalosmount_ts_df.resample(timedelta(minutes=10)).last()\n", "display()\n", - "stpat_thalosmount_ts_df = stpat_thalosmount_ts_df.loc[stpat_thalosmount_ts_df.index >= thaloslogs_stpatrick_isup.index[0]]\n", - "stpat_thalosmount_ts_df = stpat_thalosmount_ts_df.join(thaloslogs_stpatrick_isup[['up']] , how='outer')\n", + "stpat_thalosmount_ts_df = stpat_thalosmount_ts_df.loc[\n", + " stpat_thalosmount_ts_df.index >= thaloslogs_stpatrick_isup.index[0]\n", + "]\n", + "stpat_thalosmount_ts_df = stpat_thalosmount_ts_df.join(\n", + " thaloslogs_stpatrick_isup[[\"up\"]], how=\"outer\"\n", + ")\n", "\n", "# stpat_thalosmount_ts_df.loc[ ( stpat_thalosmount_ts_df['id'].notna() ) & ( stpat_thalosmount_ts_df['up'] == 1 ) ]\n", "\n", - "print('off, no vector', (( stpat_thalosmount_ts_df['id'].isna() ) & ( stpat_thalosmount_ts_df['up'] == 0 )).sum())\n", - "print('off, vector ran somehow?', (( stpat_thalosmount_ts_df['id'].notna() ) & ( stpat_thalosmount_ts_df['up'] == 0 )).sum())\n", - "print('on, no vector?', (( stpat_thalosmount_ts_df['id'].isna() ) & ( stpat_thalosmount_ts_df['up'] == 1 )).sum())\n", - "print('on, vector worked.', (( stpat_thalosmount_ts_df['id'].notna() ) & ( stpat_thalosmount_ts_df['up'] == 1 )).sum())\n", - "print('total on', (( stpat_thalosmount_ts_df['up'] == 1 )).sum())\n", + "print(\n", + " \"off, no vector\",\n", + " ((stpat_thalosmount_ts_df[\"id\"].isna()) & (stpat_thalosmount_ts_df[\"up\"] == 0)).sum(),\n", + ")\n", + "print(\n", + " \"off, vector ran somehow?\",\n", + " ((stpat_thalosmount_ts_df[\"id\"].notna()) & (stpat_thalosmount_ts_df[\"up\"] == 0)).sum(),\n", + ")\n", + "print(\n", + " \"on, no vector?\",\n", + " ((stpat_thalosmount_ts_df[\"id\"].isna()) & (stpat_thalosmount_ts_df[\"up\"] == 1)).sum(),\n", + ")\n", + "print(\n", + " \"on, vector worked.\",\n", + " ((stpat_thalosmount_ts_df[\"id\"].notna()) & (stpat_thalosmount_ts_df[\"up\"] == 1)).sum(),\n", + ")\n", + "print(\"total on\", (stpat_thalosmount_ts_df[\"up\"] == 1).sum())\n", "\n", - "stpat_thalosmount_ts_df['prevup'] = stpat_thalosmount_ts_df['up'].shift(1)\n", - "display(stpat_thalosmount_ts_df.loc[(( stpat_thalosmount_ts_df['id'].isna() ) & (( stpat_thalosmount_ts_df['up'] == 1 )) | ( stpat_thalosmount_ts_df['up'] != stpat_thalosmount_ts_df['prevup'] ) ) ])" + "stpat_thalosmount_ts_df[\"prevup\"] = stpat_thalosmount_ts_df[\"up\"].shift(1)\n", + "display(\n", + " stpat_thalosmount_ts_df.loc[\n", + " (\n", + " (stpat_thalosmount_ts_df[\"id\"].isna()) & (stpat_thalosmount_ts_df[\"up\"] == 1)\n", + " | (stpat_thalosmount_ts_df[\"up\"] != stpat_thalosmount_ts_df[\"prevup\"])\n", + " )\n", + " ]\n", + ")" ] }, { @@ -5700,30 +5769,51 @@ } ], "source": [ - "thaloslogs_brancol_isup = pandas.read_pickle('thaloslogs_brancol_uptime_ts_df.pickle')\n", + "thaloslogs_brancol_isup = pandas.read_pickle(\"thaloslogs_brancol_uptime_ts_df.pickle\")\n", "# thaloslogs_brancol_isup = thaloslogs_brancol_uptime_ts_df.loc[thaloslogs_brancol_uptime_ts_df['up'] == 1]\n", - "thaloslogs_brancol_isup = thaloslogs_brancol_isup.loc[thaloslogs_brancol_isup.index < '2024-04-08']\n", + "thaloslogs_brancol_isup = 
thaloslogs_brancol_isup.loc[thaloslogs_brancol_isup.index < \"2024-04-08\"]\n", "thaloslogs_brancol_isup.sort_index()\n", - "thaloslogs_brancol_isup = thaloslogs_brancol_isup.resample(timedelta(minutes=10))[['up']].min()\n", + "thaloslogs_brancol_isup = thaloslogs_brancol_isup.resample(timedelta(minutes=10))[[\"up\"]].min()\n", "thaloslogs_brancol_isup\n", "\n", - "branc_thalosmount_ts_df = branc_thalosmount_df.set_index('datetime')\n", + "branc_thalosmount_ts_df = branc_thalosmount_df.set_index(\"datetime\")\n", "# branc_thalosmount_ts_df = branc_thalosmount_ts_df.resample(timedelta(minutes=5)).last()\n", "branc_thalosmount_ts_df = branc_thalosmount_ts_df.resample(timedelta(minutes=10)).last()\n", "display()\n", - "branc_thalosmount_ts_df = branc_thalosmount_ts_df.loc[branc_thalosmount_ts_df.index >= thaloslogs_brancol_isup.index[0]]\n", - "branc_thalosmount_ts_df = branc_thalosmount_ts_df.join(thaloslogs_brancol_isup[['up']] , how='outer')\n", + "branc_thalosmount_ts_df = branc_thalosmount_ts_df.loc[\n", + " branc_thalosmount_ts_df.index >= thaloslogs_brancol_isup.index[0]\n", + "]\n", + "branc_thalosmount_ts_df = branc_thalosmount_ts_df.join(thaloslogs_brancol_isup[[\"up\"]], how=\"outer\")\n", "\n", "# branc_thalosmount_ts_df.loc[ ( branc_thalosmount_ts_df['id'].notna() ) & ( branc_thalosmount_ts_df['up'] == 1 ) ]\n", "\n", - "print('off, no vector', (( branc_thalosmount_ts_df['id'].isna() ) & ( branc_thalosmount_ts_df['up'] == 0 )).sum())\n", - "print('off, vector ran somehow?', (( branc_thalosmount_ts_df['id'].notna() ) & ( branc_thalosmount_ts_df['up'] == 0 )).sum())\n", - "print('on, no vector?', (( branc_thalosmount_ts_df['id'].isna() ) & ( branc_thalosmount_ts_df['up'] == 1 )).sum())\n", - "print('on, vector worked.', (( branc_thalosmount_ts_df['id'].notna() ) & ( branc_thalosmount_ts_df['up'] == 1 )).sum())\n", - "print('total on', (( branc_thalosmount_ts_df['up'] == 1 )).sum())\n", + "print(\n", + " \"off, no vector\",\n", + " ((branc_thalosmount_ts_df[\"id\"].isna()) & (branc_thalosmount_ts_df[\"up\"] == 0)).sum(),\n", + ")\n", + "print(\n", + " \"off, vector ran somehow?\",\n", + " ((branc_thalosmount_ts_df[\"id\"].notna()) & (branc_thalosmount_ts_df[\"up\"] == 0)).sum(),\n", + ")\n", + "print(\n", + " \"on, no vector?\",\n", + " ((branc_thalosmount_ts_df[\"id\"].isna()) & (branc_thalosmount_ts_df[\"up\"] == 1)).sum(),\n", + ")\n", + "print(\n", + " \"on, vector worked.\",\n", + " ((branc_thalosmount_ts_df[\"id\"].notna()) & (branc_thalosmount_ts_df[\"up\"] == 1)).sum(),\n", + ")\n", + "print(\"total on\", (branc_thalosmount_ts_df[\"up\"] == 1).sum())\n", "\n", - "branc_thalosmount_ts_df['prevup'] = branc_thalosmount_ts_df['up'].shift(1)\n", - "display(branc_thalosmount_ts_df.loc[(( branc_thalosmount_ts_df['id'].isna() ) & (( branc_thalosmount_ts_df['up'] == 1 )) | ( branc_thalosmount_ts_df['up'] != branc_thalosmount_ts_df['prevup'] ) ) ])" + "branc_thalosmount_ts_df[\"prevup\"] = branc_thalosmount_ts_df[\"up\"].shift(1)\n", + "display(\n", + " branc_thalosmount_ts_df.loc[\n", + " (\n", + " (branc_thalosmount_ts_df[\"id\"].isna()) & (branc_thalosmount_ts_df[\"up\"] == 1)\n", + " | (branc_thalosmount_ts_df[\"up\"] != branc_thalosmount_ts_df[\"prevup\"])\n", + " )\n", + " ]\n", + ")" ] }, { @@ -5753,7 +5843,6 @@ "# stpat 627 during 5th-12th downtime\n", "\n", "\n", - "\n", "# off, no vector 7230\n", "# off, vector ran somehow? 167\n", "# on, no vector? 
1234\n", @@ -5772,13 +5861,17 @@ "metadata": {}, "outputs": [], "source": [ - "branc_gpstest_df = awswrangler.athena.read_sql_query(f\"SELECT * from brancol_v1_tests where vector_id = '2'\", database='tnc_edge')\n", - "branc_gpstest_df['datetime'] = pandas.to_datetime(branc_gpstest_df['datetime'], utc=True)\n", - "branc_gpstest_df = branc_gpstest_df.sort_values('datetime')\n", + "branc_gpstest_df = awswrangler.athena.read_sql_query(\n", + " \"SELECT * from brancol_v1_tests where vector_id = '2'\", database=\"tnc_edge\"\n", + ")\n", + "branc_gpstest_df[\"datetime\"] = pandas.to_datetime(branc_gpstest_df[\"datetime\"], utc=True)\n", + "branc_gpstest_df = branc_gpstest_df.sort_values(\"datetime\")\n", "\n", - "stpat_gpstest_df = awswrangler.athena.read_sql_query(f\"SELECT * from stpatrick_v1_tests where vector_id = '2'\", database='tnc_edge')\n", - "stpat_gpstest_df['datetime'] = pandas.to_datetime(stpat_gpstest_df['datetime'], utc=True)\n", - "stpat_gpstest_df = stpat_gpstest_df.sort_values('datetime')" + "stpat_gpstest_df = awswrangler.athena.read_sql_query(\n", + " \"SELECT * from stpatrick_v1_tests where vector_id = '2'\", database=\"tnc_edge\"\n", + ")\n", + "stpat_gpstest_df[\"datetime\"] = pandas.to_datetime(stpat_gpstest_df[\"datetime\"], utc=True)\n", + "stpat_gpstest_df = stpat_gpstest_df.sort_values(\"datetime\")" ] }, { @@ -5799,19 +5892,28 @@ } ], "source": [ - "branc_gpstest_df['diff'] = branc_gpstest_df['datetime'] - branc_gpstest_df.shift(1)['datetime']\n", - "stpat_gpstest_df['diff'] = stpat_gpstest_df['datetime'] - stpat_gpstest_df.shift(1)['datetime']\n", + "branc_gpstest_df[\"diff\"] = branc_gpstest_df[\"datetime\"] - branc_gpstest_df.shift(1)[\"datetime\"]\n", + "stpat_gpstest_df[\"diff\"] = stpat_gpstest_df[\"datetime\"] - stpat_gpstest_df.shift(1)[\"datetime\"]\n", "\n", "min_lateness = 29.995\n", - "bins = numpy.logspace(math.log(min_lateness,10), math.log(1+stpat_gpstest_df['diff'].map(lambda x: x.total_seconds()/60).max(),10), num=120)\n", - "bins = numpy.logspace(math.log(min_lateness,10), math.log(30.005,10), num=50)\n", + "bins = numpy.logspace(\n", + " math.log10(min_lateness),\n", + " math.log10(1 + stpat_gpstest_df[\"diff\"].map(lambda x: x.total_seconds() / 60).max()),\n", + " num=120,\n", + ")\n", + "bins = numpy.logspace(math.log10(min_lateness), math.log10(30.005), num=50)\n", "bins = list(map(lambda x: timedelta(minutes=x), bins))\n", "bins\n", - "stpat_gpstest_df['diffbucket'] = pandas.cut(stpat_gpstest_df.loc[stpat_gpstest_df['diff'] > timedelta(minutes=min_lateness)]['diff'], bins=bins)\n", - "stpat_gpstest_df.groupby('diffbucket')['diff'].count().plot.bar(figsize=(10,3))\n", - "branc_gpstest_df['diffbucket'] = pandas.cut(branc_gpstest_df.loc[branc_gpstest_df['diff'] > timedelta(minutes=min_lateness)]['diff'], bins=bins)\n", - "# branc_gpstest_df.groupby('diffbucket')['diff'].count().plot.bar(figsize=(10,3))\n", - "\n" + "stpat_gpstest_df[\"diffbucket\"] = pandas.cut(\n", + " stpat_gpstest_df.loc[stpat_gpstest_df[\"diff\"] > timedelta(minutes=min_lateness)][\"diff\"],\n", + " bins=bins,\n", + ")\n", + "stpat_gpstest_df.groupby(\"diffbucket\")[\"diff\"].count().plot.bar(figsize=(10, 3))\n", + "branc_gpstest_df[\"diffbucket\"] = pandas.cut(\n", + " branc_gpstest_df.loc[branc_gpstest_df[\"diff\"] > timedelta(minutes=min_lateness)][\"diff\"],\n", + " bins=bins,\n", + ")\n", + "# branc_gpstest_df.groupby('diffbucket')['diff'].count().plot.bar(figsize=(10,3))\n" ] }, { @@ -7765,30 +7867,49 @@ } ], "source": [ - "thaloslogs_brancol_isup = 
pandas.read_pickle('thaloslogs_brancol_uptime_ts_df.pickle')\n", + "thaloslogs_brancol_isup = pandas.read_pickle(\"thaloslogs_brancol_uptime_ts_df.pickle\")\n", "# thaloslogs_brancol_isup = thaloslogs_brancol_uptime_ts_df.loc[thaloslogs_brancol_uptime_ts_df['up'] == 1]\n", - "thaloslogs_brancol_isup = thaloslogs_brancol_isup.loc[thaloslogs_brancol_isup.index < '2024-04-08']\n", + "thaloslogs_brancol_isup = thaloslogs_brancol_isup.loc[thaloslogs_brancol_isup.index < \"2024-04-08\"]\n", "thaloslogs_brancol_isup.sort_index()\n", - "thaloslogs_brancol_isup = thaloslogs_brancol_isup.resample(timedelta(minutes=30))[['up']].max()\n", + "thaloslogs_brancol_isup = thaloslogs_brancol_isup.resample(timedelta(minutes=30))[[\"up\"]].max()\n", "thaloslogs_brancol_isup\n", "\n", - "branc_gpstest_ts_df = branc_gpstest_df.set_index('datetime')\n", + "branc_gpstest_ts_df = branc_gpstest_df.set_index(\"datetime\")\n", "# branc_gpstest_ts_df = branc_gpstest_ts_df.resample(timedelta(minutes=5)).last()\n", "branc_gpstest_ts_df = branc_gpstest_ts_df.resample(timedelta(minutes=30)).last()\n", "display()\n", - "branc_gpstest_ts_df = branc_gpstest_ts_df.loc[branc_gpstest_ts_df.index >= thaloslogs_brancol_isup.index[0]]\n", - "branc_gpstest_ts_df = branc_gpstest_ts_df.join(thaloslogs_brancol_isup[['up']] , how='outer')\n", + "branc_gpstest_ts_df = branc_gpstest_ts_df.loc[\n", + " branc_gpstest_ts_df.index >= thaloslogs_brancol_isup.index[0]\n", + "]\n", + "branc_gpstest_ts_df = branc_gpstest_ts_df.join(thaloslogs_brancol_isup[[\"up\"]], how=\"outer\")\n", "\n", "# branc_gpstest_ts_df.loc[ ( branc_gpstest_ts_df['id'].notna() ) & ( branc_gpstest_ts_df['up'] == 1 ) ]\n", "\n", - "print('off, no vector', (( branc_gpstest_ts_df['id'].isna() ) & ( branc_gpstest_ts_df['up'] == 0 )).sum())\n", - "print('off, vector ran somehow?', (( branc_gpstest_ts_df['id'].notna() ) & ( branc_gpstest_ts_df['up'] == 0 )).sum())\n", - "print('on, no vector?', (( branc_gpstest_ts_df['id'].isna() ) & ( branc_gpstest_ts_df['up'] == 1 )).sum())\n", - "print('on, vector worked.', (( branc_gpstest_ts_df['id'].notna() ) & ( branc_gpstest_ts_df['up'] == 1 )).sum())\n", - "print('total on', (( branc_gpstest_ts_df['up'] == 1 )).sum())\n", + "print(\n", + " \"off, no vector\", ((branc_gpstest_ts_df[\"id\"].isna()) & (branc_gpstest_ts_df[\"up\"] == 0)).sum()\n", + ")\n", + "print(\n", + " \"off, vector ran somehow?\",\n", + " ((branc_gpstest_ts_df[\"id\"].notna()) & (branc_gpstest_ts_df[\"up\"] == 0)).sum(),\n", + ")\n", + "print(\n", + " \"on, no vector?\", ((branc_gpstest_ts_df[\"id\"].isna()) & (branc_gpstest_ts_df[\"up\"] == 1)).sum()\n", + ")\n", + "print(\n", + " \"on, vector worked.\",\n", + " ((branc_gpstest_ts_df[\"id\"].notna()) & (branc_gpstest_ts_df[\"up\"] == 1)).sum(),\n", + ")\n", + "print(\"total on\", (branc_gpstest_ts_df[\"up\"] == 1).sum())\n", "\n", - "branc_gpstest_ts_df['prevup'] = branc_gpstest_ts_df['up'].shift(1)\n", - "display(branc_gpstest_ts_df.loc[(( branc_gpstest_ts_df['id'].isna() ) & (( branc_gpstest_ts_df['up'] == 1 )) | ( branc_gpstest_ts_df['up'] != branc_gpstest_ts_df['prevup'] ) ) ])" + "branc_gpstest_ts_df[\"prevup\"] = branc_gpstest_ts_df[\"up\"].shift(1)\n", + "display(\n", + " branc_gpstest_ts_df.loc[\n", + " (\n", + " (branc_gpstest_ts_df[\"id\"].isna()) & (branc_gpstest_ts_df[\"up\"] == 1)\n", + " | (branc_gpstest_ts_df[\"up\"] != branc_gpstest_ts_df[\"prevup\"])\n", + " )\n", + " ]\n", + ")" ] }, { @@ -9742,30 +9863,51 @@ } ], "source": [ - "thaloslogs_stpatrick_isup = 
pandas.read_pickle('thaloslogs_stpatrick_uptime_ts_df.pickle')\n", + "thaloslogs_stpatrick_isup = pandas.read_pickle(\"thaloslogs_stpatrick_uptime_ts_df.pickle\")\n", "# thaloslogs_stpatrick_isup = thaloslogs_stpatrick_uptime_ts_df.loc[thaloslogs_stpatrick_uptime_ts_df['up'] == 1]\n", - "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_isup.loc[thaloslogs_stpatrick_isup.index < '2024-04-08']\n", + "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_isup.loc[\n", + " thaloslogs_stpatrick_isup.index < \"2024-04-08\"\n", + "]\n", "thaloslogs_stpatrick_isup.sort_index()\n", - "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_isup.resample(timedelta(minutes=30))[['up']].min()\n", + "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_isup.resample(timedelta(minutes=30))[[\"up\"]].min()\n", "thaloslogs_stpatrick_isup\n", "\n", - "stpat_gpstest_ts_df = stpat_gpstest_df.set_index('datetime')\n", + "stpat_gpstest_ts_df = stpat_gpstest_df.set_index(\"datetime\")\n", "# stpat_gpstest_ts_df = stpat_gpstest_ts_df.resample(timedelta(minutes=5)).last()\n", "stpat_gpstest_ts_df = stpat_gpstest_ts_df.resample(timedelta(minutes=30)).last()\n", "display()\n", - "stpat_gpstest_ts_df = stpat_gpstest_ts_df.loc[stpat_gpstest_ts_df.index >= thaloslogs_stpatrick_isup.index[0]]\n", - "stpat_gpstest_ts_df = stpat_gpstest_ts_df.join(thaloslogs_stpatrick_isup[['up']] , how='outer')\n", + "stpat_gpstest_ts_df = stpat_gpstest_ts_df.loc[\n", + " stpat_gpstest_ts_df.index >= thaloslogs_stpatrick_isup.index[0]\n", + "]\n", + "stpat_gpstest_ts_df = stpat_gpstest_ts_df.join(thaloslogs_stpatrick_isup[[\"up\"]], how=\"outer\")\n", "\n", "# stpat_gpstest_ts_df.loc[ ( stpat_gpstest_ts_df['id'].notna() ) & ( stpat_gpstest_ts_df['up'] == 1 ) ]\n", "\n", - "print('off, no vector', (( stpat_gpstest_ts_df['id'].isna() ) & ( stpat_gpstest_ts_df['up'] == 0 )).sum())\n", - "print('off, vector ran somehow?', (( stpat_gpstest_ts_df['id'].notna() ) & ( stpat_gpstest_ts_df['up'] == 0 )).sum())\n", - "print('on, no vector?', (( stpat_gpstest_ts_df['id'].isna() ) & ( stpat_gpstest_ts_df['up'] == 1 )).sum())\n", - "print('on, vector worked.', (( stpat_gpstest_ts_df['id'].notna() ) & ( stpat_gpstest_ts_df['up'] == 1 )).sum())\n", - "print('total on', (( stpat_gpstest_ts_df['up'] == 1 )).sum())\n", + "print(\n", + " \"off, no vector\", ((stpat_gpstest_ts_df[\"id\"].isna()) & (stpat_gpstest_ts_df[\"up\"] == 0)).sum()\n", + ")\n", + "print(\n", + " \"off, vector ran somehow?\",\n", + " ((stpat_gpstest_ts_df[\"id\"].notna()) & (stpat_gpstest_ts_df[\"up\"] == 0)).sum(),\n", + ")\n", + "print(\n", + " \"on, no vector?\", ((stpat_gpstest_ts_df[\"id\"].isna()) & (stpat_gpstest_ts_df[\"up\"] == 1)).sum()\n", + ")\n", + "print(\n", + " \"on, vector worked.\",\n", + " ((stpat_gpstest_ts_df[\"id\"].notna()) & (stpat_gpstest_ts_df[\"up\"] == 1)).sum(),\n", + ")\n", + "print(\"total on\", (stpat_gpstest_ts_df[\"up\"] == 1).sum())\n", "\n", - "stpat_gpstest_ts_df['prevup'] = stpat_gpstest_ts_df['up'].shift(1)\n", - "display(stpat_gpstest_ts_df.loc[(( stpat_gpstest_ts_df['id'].isna() ) & (( stpat_gpstest_ts_df['up'] == 1 )) | ( stpat_gpstest_ts_df['up'] != stpat_gpstest_ts_df['prevup'] ) ) ])" + "stpat_gpstest_ts_df[\"prevup\"] = stpat_gpstest_ts_df[\"up\"].shift(1)\n", + "display(\n", + " stpat_gpstest_ts_df.loc[\n", + " (\n", + " (stpat_gpstest_ts_df[\"id\"].isna()) & (stpat_gpstest_ts_df[\"up\"] == 1)\n", + " | (stpat_gpstest_ts_df[\"up\"] != stpat_gpstest_ts_df[\"prevup\"])\n", + " )\n", + " ]\n", + ")" ] }, { @@ -9804,7 +9946,7 @@ "# 207 during 5th-12th 
downtime\n", "\n", "\n", - "(1776 + 1985 ) / ( 2148 - 336 + 2202 - 207)" + "(1776 + 1985) / (2148 - 336 + 2202 - 207)" ] }, { @@ -9814,14 +9956,18 @@ "metadata": {}, "outputs": [], "source": [ - "branc_inettest_df = awswrangler.athena.read_sql_query(f\"SELECT * from brancol_v1_tests where vector_id = '1'\", database='tnc_edge')\n", + "branc_inettest_df = awswrangler.athena.read_sql_query(\n", + " \"SELECT * from brancol_v1_tests where vector_id = '1'\", database=\"tnc_edge\"\n", + ")\n", "# branc_inettest_df\n", - "branc_inettest_df['datetime'] = pandas.to_datetime(branc_inettest_df['datetime'], utc=True)\n", - "branc_inettest_df = branc_inettest_df.sort_values('datetime')\n", + "branc_inettest_df[\"datetime\"] = pandas.to_datetime(branc_inettest_df[\"datetime\"], utc=True)\n", + "branc_inettest_df = branc_inettest_df.sort_values(\"datetime\")\n", "\n", - "stpat_inettest_df = awswrangler.athena.read_sql_query(f\"SELECT * from stpatrick_v1_tests where vector_id = '1'\", database='tnc_edge')\n", - "stpat_inettest_df['datetime'] = pandas.to_datetime(stpat_inettest_df['datetime'], utc=True)\n", - "stpat_inettest_df = stpat_inettest_df.sort_values('datetime')" + "stpat_inettest_df = awswrangler.athena.read_sql_query(\n", + " \"SELECT * from stpatrick_v1_tests where vector_id = '1'\", database=\"tnc_edge\"\n", + ")\n", + "stpat_inettest_df[\"datetime\"] = pandas.to_datetime(stpat_inettest_df[\"datetime\"], utc=True)\n", + "stpat_inettest_df = stpat_inettest_df.sort_values(\"datetime\")" ] }, { @@ -9842,19 +9988,28 @@ } ], "source": [ - "branc_inettest_df['diff'] = branc_inettest_df['datetime'] - branc_inettest_df.shift(1)['datetime']\n", - "stpat_inettest_df['diff'] = stpat_inettest_df['datetime'] - stpat_inettest_df.shift(1)['datetime']\n", + "branc_inettest_df[\"diff\"] = branc_inettest_df[\"datetime\"] - branc_inettest_df.shift(1)[\"datetime\"]\n", + "stpat_inettest_df[\"diff\"] = stpat_inettest_df[\"datetime\"] - stpat_inettest_df.shift(1)[\"datetime\"]\n", "\n", "min_lateness = 29.995\n", - "bins = numpy.logspace(math.log(min_lateness,10), math.log(1+stpat_inettest_df['diff'].map(lambda x: x.total_seconds()/60).max(),10), num=120)\n", - "bins = numpy.logspace(math.log(min_lateness,10), math.log(30.005,10), num=50)\n", + "bins = numpy.logspace(\n", + " math.log10(min_lateness),\n", + " math.log10(1 + stpat_inettest_df[\"diff\"].map(lambda x: x.total_seconds() / 60).max()),\n", + " num=120,\n", + ")\n", + "bins = numpy.logspace(math.log10(min_lateness), math.log10(30.005), num=50)\n", "bins = list(map(lambda x: timedelta(minutes=x), bins))\n", "bins\n", - "stpat_inettest_df['diffbucket'] = pandas.cut(stpat_inettest_df.loc[stpat_inettest_df['diff'] > timedelta(minutes=min_lateness)]['diff'], bins=bins)\n", - "stpat_inettest_df.groupby('diffbucket')['diff'].count().plot.bar(figsize=(10,3))\n", - "branc_inettest_df['diffbucket'] = pandas.cut(branc_inettest_df.loc[branc_inettest_df['diff'] > timedelta(minutes=min_lateness)]['diff'], bins=bins)\n", - "# branc_inettest_df.groupby('diffbucket')['diff'].count().plot.bar(figsize=(10,3))\n", - "\n" + "stpat_inettest_df[\"diffbucket\"] = pandas.cut(\n", + " stpat_inettest_df.loc[stpat_inettest_df[\"diff\"] > timedelta(minutes=min_lateness)][\"diff\"],\n", + " bins=bins,\n", + ")\n", + "stpat_inettest_df.groupby(\"diffbucket\")[\"diff\"].count().plot.bar(figsize=(10, 3))\n", + "branc_inettest_df[\"diffbucket\"] = pandas.cut(\n", + " branc_inettest_df.loc[branc_inettest_df[\"diff\"] > timedelta(minutes=min_lateness)][\"diff\"],\n", + " bins=bins,\n", + 
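"    # bins are right-closed intervals; gaps beyond the largest edge become NaN\n",
+ 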
")\n", + "# branc_inettest_df.groupby('diffbucket')['diff'].count().plot.bar(figsize=(10,3))\n" ] }, { @@ -11616,30 +11771,53 @@ } ], "source": [ - "thaloslogs_stpatrick_isup = pandas.read_pickle('thaloslogs_stpatrick_uptime_ts_df.pickle')\n", + "thaloslogs_stpatrick_isup = pandas.read_pickle(\"thaloslogs_stpatrick_uptime_ts_df.pickle\")\n", "# thaloslogs_stpatrick_isup = thaloslogs_stpatrick_uptime_ts_df.loc[thaloslogs_stpatrick_uptime_ts_df['up'] == 1]\n", - "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_isup.loc[thaloslogs_stpatrick_isup.index < '2024-04-08']\n", + "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_isup.loc[\n", + " thaloslogs_stpatrick_isup.index < \"2024-04-08\"\n", + "]\n", "thaloslogs_stpatrick_isup.sort_index()\n", - "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_isup.resample(timedelta(minutes=30))[['up']].min()\n", + "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_isup.resample(timedelta(minutes=30))[[\"up\"]].min()\n", "thaloslogs_stpatrick_isup\n", "\n", - "stpat_inettest_ts_df = stpat_inettest_df.set_index('datetime')\n", + "stpat_inettest_ts_df = stpat_inettest_df.set_index(\"datetime\")\n", "# stpat_inettest_ts_df = stpat_inettest_ts_df.resample(timedelta(minutes=5)).last()\n", "stpat_inettest_ts_df = stpat_inettest_ts_df.resample(timedelta(minutes=30)).last()\n", "display()\n", - "stpat_inettest_ts_df = stpat_inettest_ts_df.loc[stpat_inettest_ts_df.index >= thaloslogs_stpatrick_isup.index[0]]\n", - "stpat_inettest_ts_df = stpat_inettest_ts_df.join(thaloslogs_stpatrick_isup[['up']] , how='outer')\n", + "stpat_inettest_ts_df = stpat_inettest_ts_df.loc[\n", + " stpat_inettest_ts_df.index >= thaloslogs_stpatrick_isup.index[0]\n", + "]\n", + "stpat_inettest_ts_df = stpat_inettest_ts_df.join(thaloslogs_stpatrick_isup[[\"up\"]], how=\"outer\")\n", "\n", "# stpat_inettest_ts_df.loc[ ( stpat_inettest_ts_df['id'].notna() ) & ( stpat_inettest_ts_df['up'] == 1 ) ]\n", "\n", - "print('off, no vector', (( stpat_inettest_ts_df['id'].isna() ) & ( stpat_inettest_ts_df['up'] == 0 )).sum())\n", - "print('off, vector ran somehow?', (( stpat_inettest_ts_df['id'].notna() ) & ( stpat_inettest_ts_df['up'] == 0 )).sum())\n", - "print('on, no vector?', (( stpat_inettest_ts_df['id'].isna() ) & ( stpat_inettest_ts_df['up'] == 1 )).sum())\n", - "print('on, vector worked.', (( stpat_inettest_ts_df['id'].notna() ) & ( stpat_inettest_ts_df['up'] == 1 )).sum())\n", - "print('total on', (( stpat_inettest_ts_df['up'] == 1 )).sum())\n", + "print(\n", + " \"off, no vector\",\n", + " ((stpat_inettest_ts_df[\"id\"].isna()) & (stpat_inettest_ts_df[\"up\"] == 0)).sum(),\n", + ")\n", + "print(\n", + " \"off, vector ran somehow?\",\n", + " ((stpat_inettest_ts_df[\"id\"].notna()) & (stpat_inettest_ts_df[\"up\"] == 0)).sum(),\n", + ")\n", + "print(\n", + " \"on, no vector?\",\n", + " ((stpat_inettest_ts_df[\"id\"].isna()) & (stpat_inettest_ts_df[\"up\"] == 1)).sum(),\n", + ")\n", + "print(\n", + " \"on, vector worked.\",\n", + " ((stpat_inettest_ts_df[\"id\"].notna()) & (stpat_inettest_ts_df[\"up\"] == 1)).sum(),\n", + ")\n", + "print(\"total on\", (stpat_inettest_ts_df[\"up\"] == 1).sum())\n", "\n", - "stpat_inettest_ts_df['prevup'] = stpat_inettest_ts_df['up'].shift(1)\n", - "display(stpat_inettest_ts_df.loc[(( stpat_inettest_ts_df['id'].isna() ) & (( stpat_inettest_ts_df['up'] == 1 )) | ( stpat_inettest_ts_df['up'] != stpat_inettest_ts_df['prevup'] ) ) ])\n", + "stpat_inettest_ts_df[\"prevup\"] = stpat_inettest_ts_df[\"up\"].shift(1)\n", + "display(\n", + " stpat_inettest_ts_df.loc[\n", + " 
(\n", + " (stpat_inettest_ts_df[\"id\"].isna()) & (stpat_inettest_ts_df[\"up\"] == 1)\n", + " | (stpat_inettest_ts_df[\"up\"] != stpat_inettest_ts_df[\"prevup\"])\n", + " )\n", + " ]\n", + ")\n", "\n", "# 207" ] @@ -13467,32 +13645,53 @@ } ], "source": [ - "thaloslogs_brancol_isup = pandas.read_pickle('thaloslogs_brancol_uptime_ts_df.pickle')\n", + "thaloslogs_brancol_isup = pandas.read_pickle(\"thaloslogs_brancol_uptime_ts_df.pickle\")\n", "# thaloslogs_brancol_isup = thaloslogs_brancol_uptime_ts_df.loc[thaloslogs_brancol_uptime_ts_df['up'] == 1]\n", - "thaloslogs_brancol_isup = thaloslogs_brancol_isup.loc[thaloslogs_brancol_isup.index < '2024-04-08']\n", + "thaloslogs_brancol_isup = thaloslogs_brancol_isup.loc[thaloslogs_brancol_isup.index < \"2024-04-08\"]\n", "thaloslogs_brancol_isup.sort_index()\n", - "thaloslogs_brancol_isup = thaloslogs_brancol_isup.resample(timedelta(minutes=30))[['up']].min()\n", + "thaloslogs_brancol_isup = thaloslogs_brancol_isup.resample(timedelta(minutes=30))[[\"up\"]].min()\n", "thaloslogs_brancol_isup\n", "\n", - "branc_inettest_ts_df = branc_inettest_df.set_index('datetime')\n", + "branc_inettest_ts_df = branc_inettest_df.set_index(\"datetime\")\n", "# branc_inettest_ts_df = branc_inettest_ts_df.resample(timedelta(minutes=5)).last()\n", "branc_inettest_ts_df = branc_inettest_ts_df.resample(timedelta(minutes=30)).last()\n", "display()\n", - "branc_inettest_ts_df = branc_inettest_ts_df.loc[branc_inettest_ts_df.index >= thaloslogs_brancol_isup.index[0]]\n", - "branc_inettest_ts_df = branc_inettest_ts_df.join(thaloslogs_brancol_isup[['up']] , how='outer')\n", + "branc_inettest_ts_df = branc_inettest_ts_df.loc[\n", + " branc_inettest_ts_df.index >= thaloslogs_brancol_isup.index[0]\n", + "]\n", + "branc_inettest_ts_df = branc_inettest_ts_df.join(thaloslogs_brancol_isup[[\"up\"]], how=\"outer\")\n", "\n", "# branc_inettest_ts_df.loc[ ( branc_inettest_ts_df['id'].notna() ) & ( branc_inettest_ts_df['up'] == 1 ) ]\n", "\n", - "print('off, no vector', (( branc_inettest_ts_df['id'].isna() ) & ( branc_inettest_ts_df['up'] == 0 )).sum())\n", - "print('off, vector ran somehow?', (( branc_inettest_ts_df['id'].notna() ) & ( branc_inettest_ts_df['up'] == 0 )).sum())\n", - "print('on, no vector?', (( branc_inettest_ts_df['id'].isna() ) & ( branc_inettest_ts_df['up'] == 1 )).sum())\n", - "print('on, vector worked.', (( branc_inettest_ts_df['id'].notna() ) & ( branc_inettest_ts_df['up'] == 1 )).sum())\n", - "print('total on', (( branc_inettest_ts_df['up'] == 1 )).sum())\n", + "print(\n", + " \"off, no vector\",\n", + " ((branc_inettest_ts_df[\"id\"].isna()) & (branc_inettest_ts_df[\"up\"] == 0)).sum(),\n", + ")\n", + "print(\n", + " \"off, vector ran somehow?\",\n", + " ((branc_inettest_ts_df[\"id\"].notna()) & (branc_inettest_ts_df[\"up\"] == 0)).sum(),\n", + ")\n", + "print(\n", + " \"on, no vector?\",\n", + " ((branc_inettest_ts_df[\"id\"].isna()) & (branc_inettest_ts_df[\"up\"] == 1)).sum(),\n", + ")\n", + "print(\n", + " \"on, vector worked.\",\n", + " ((branc_inettest_ts_df[\"id\"].notna()) & (branc_inettest_ts_df[\"up\"] == 1)).sum(),\n", + ")\n", + "print(\"total on\", (branc_inettest_ts_df[\"up\"] == 1).sum())\n", "\n", - "branc_inettest_ts_df['prevup'] = branc_inettest_ts_df['up'].shift(1)\n", - "display(branc_inettest_ts_df.loc[(( branc_inettest_ts_df['id'].isna() ) & (( branc_inettest_ts_df['up'] == 1 )) | ( branc_inettest_ts_df['up'] != branc_inettest_ts_df['prevup'] ) ) ])\n", + "branc_inettest_ts_df[\"prevup\"] = branc_inettest_ts_df[\"up\"].shift(1)\n", + 
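"# flag anomalies: 30-min slots marked 'up' with no vector row, plus every up/down flip\n",
+ 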
"display(\n", + " branc_inettest_ts_df.loc[\n", + " (\n", + " (branc_inettest_ts_df[\"id\"].isna()) & (branc_inettest_ts_df[\"up\"] == 1)\n", + " | (branc_inettest_ts_df[\"up\"] != branc_inettest_ts_df[\"prevup\"])\n", + " )\n", + " ]\n", + ")\n", "\n", - "#336" + "# 336" ] }, { @@ -13535,12 +13734,11 @@ "import io\n", "import re\n", "\n", - "\n", "# brancol_fname = 'misc/data/thalos_uptime_logs/sessions_brancol.csv'\n", - "stpatrick_fname = 'misc/data/thalos_uptime_logs/sessions_saintpatrick.csv'\n", + "stpatrick_fname = \"misc/data/thalos_uptime_logs/sessions_saintpatrick.csv\"\n", "\n", "\n", - "r = re.compile('^\\d+(, .*){8}')\n", + "r = re.compile(r\"^\\d+(, .*){8}\")\n", "stpatrick_lines = []\n", "with open(stpatrick_fname) as stpatrick_f:\n", " for l in stpatrick_f.readlines():\n", @@ -13549,26 +13747,53 @@ "# print(type(brancol_f), dir(brancol_f))\n", "\n", "# display(brancol_lines)\n", - " \n", - "thalos_uptime_logs_stpatrick = pandas.read_csv(io.StringIO(''.join(stpatrick_lines)), names=['id', 'datetime', 'boat', 'certus', 'status', 'i', 'j', 'name', 'ip'])\n", - " \n", + "\n", + "thalos_uptime_logs_stpatrick = pandas.read_csv(\n", + " io.StringIO(\"\".join(stpatrick_lines)),\n", + " names=[\"id\", \"datetime\", \"boat\", \"certus\", \"status\", \"i\", \"j\", \"name\", \"ip\"],\n", + ")\n", + "\n", "# thalos_uptime_logs_stpatrick = pandas.read_csv()\n", "\n", - "thalos_uptime_logs_stpatrick['datetime'] = pandas.to_datetime(thalos_uptime_logs_stpatrick['datetime'], utc=True)\n", - "thalos_uptime_logs_stpatrick = thalos_uptime_logs_stpatrick.loc[thalos_uptime_logs_stpatrick['datetime'] > '2024-01-01']\n", + "thalos_uptime_logs_stpatrick[\"datetime\"] = pandas.to_datetime(\n", + " thalos_uptime_logs_stpatrick[\"datetime\"], utc=True\n", + ")\n", + "thalos_uptime_logs_stpatrick = thalos_uptime_logs_stpatrick.loc[\n", + " thalos_uptime_logs_stpatrick[\"datetime\"] > \"2024-01-01\"\n", + "]\n", "# thalos_uptime_logs_stpatrick.loc[666]\n", - "thalos_uptime_logs_stpatrick.loc[0] = [0,parser.parse('2024-01-01 00:00:00+00:00'),'saintpatrick','cer1',' down',46668,63663 ,'pop-prd-pthalos000','']\n", + "thalos_uptime_logs_stpatrick.loc[0] = [\n", + " 0,\n", + " parser.parse(\"2024-01-01 00:00:00+00:00\"),\n", + " \"saintpatrick\",\n", + " \"cer1\",\n", + " \" down\",\n", + " 46668,\n", + " 63663,\n", + " \"pop-prd-pthalos000\",\n", + " \"\",\n", + "]\n", "thalos_uptime_logs_stpatrick = thalos_uptime_logs_stpatrick.sort_index()\n", - "thalos_uptime_logs_stpatrick['prev_datetime'] = thalos_uptime_logs_stpatrick['datetime'].shift(1)\n", - "thalos_uptime_logs_stpatrick['diff'] = thalos_uptime_logs_stpatrick['datetime'] - thalos_uptime_logs_stpatrick['prev_datetime']\n", - "thalos_uptime_logs_stpatrick = thalos_uptime_logs_stpatrick.loc[thalos_uptime_logs_stpatrick['diff'].notna()]\n", - "thalos_uptime_logs_stpatrick['tenmin'] = (thalos_uptime_logs_stpatrick['diff'] / timedelta(minutes=10)).apply(math.floor)\n", - "thalos_uptime_logs_stpatrick['thirtymin'] = (thalos_uptime_logs_stpatrick['diff'] / timedelta(minutes=30)).apply(math.floor)\n", - "thalos_uptime_logs_stpatrick['fourhours'] = (thalos_uptime_logs_stpatrick['diff'] / timedelta(hours=4)).apply(math.floor)\n", - "thalos_uptime_logs_stpatrick\\\n", - ".loc[thalos_uptime_logs_stpatrick['status'] == ' up']\\\n", - ".loc[(thalos_uptime_logs_stpatrick['datetime'] < parser.parse('2024-01-05 12:30:00+00:00') ) | (thalos_uptime_logs_stpatrick['datetime'] > parser.parse('2024-01-12 21:30:00+00:00') )]\\\n", - ".sum()\n", + 
"thalos_uptime_logs_stpatrick[\"prev_datetime\"] = thalos_uptime_logs_stpatrick[\"datetime\"].shift(1)\n", + "thalos_uptime_logs_stpatrick[\"diff\"] = (\n", + " thalos_uptime_logs_stpatrick[\"datetime\"] - thalos_uptime_logs_stpatrick[\"prev_datetime\"]\n", + ")\n", + "thalos_uptime_logs_stpatrick = thalos_uptime_logs_stpatrick.loc[\n", + " thalos_uptime_logs_stpatrick[\"diff\"].notna()\n", + "]\n", + "thalos_uptime_logs_stpatrick[\"tenmin\"] = (\n", + " thalos_uptime_logs_stpatrick[\"diff\"] / timedelta(minutes=10)\n", + ").apply(math.floor)\n", + "thalos_uptime_logs_stpatrick[\"thirtymin\"] = (\n", + " thalos_uptime_logs_stpatrick[\"diff\"] / timedelta(minutes=30)\n", + ").apply(math.floor)\n", + "thalos_uptime_logs_stpatrick[\"fourhours\"] = (\n", + " thalos_uptime_logs_stpatrick[\"diff\"] / timedelta(hours=4)\n", + ").apply(math.floor)\n", + "thalos_uptime_logs_stpatrick.loc[thalos_uptime_logs_stpatrick[\"status\"] == \" up\"].loc[\n", + " (thalos_uptime_logs_stpatrick[\"datetime\"] < parser.parse(\"2024-01-05 12:30:00+00:00\"))\n", + " | (thalos_uptime_logs_stpatrick[\"datetime\"] > parser.parse(\"2024-01-12 21:30:00+00:00\"))\n", + "].sum()\n", "\n", "# I can't use this method, because enough vectors run outside of thalos's 'on' windows, such that the efficiency is over 100%\n" ] @@ -13580,14 +13805,18 @@ "metadata": {}, "outputs": [], "source": [ - "branc_eloggap_df = awswrangler.athena.read_sql_query(f\"SELECT * from brancol_v1_tests where vector_id = '6'\", database='tnc_edge')\n", + "branc_eloggap_df = awswrangler.athena.read_sql_query(\n", + " \"SELECT * from brancol_v1_tests where vector_id = '6'\", database=\"tnc_edge\"\n", + ")\n", "branc_eloggap_df\n", - "branc_eloggap_df['datetime'] = pandas.to_datetime(branc_eloggap_df['datetime'], utc=True)\n", - "branc_eloggap_df = branc_eloggap_df.sort_values('datetime')\n", + "branc_eloggap_df[\"datetime\"] = pandas.to_datetime(branc_eloggap_df[\"datetime\"], utc=True)\n", + "branc_eloggap_df = branc_eloggap_df.sort_values(\"datetime\")\n", "\n", - "stpat_eloggap_df = awswrangler.athena.read_sql_query(f\"SELECT * from stpatrick_v1_tests where vector_id = '6'\", database='tnc_edge')\n", - "stpat_eloggap_df['datetime'] = pandas.to_datetime(stpat_eloggap_df['datetime'], utc=True)\n", - "stpat_eloggap_df = stpat_eloggap_df.sort_values('datetime')" + "stpat_eloggap_df = awswrangler.athena.read_sql_query(\n", + " \"SELECT * from stpatrick_v1_tests where vector_id = '6'\", database=\"tnc_edge\"\n", + ")\n", + "stpat_eloggap_df[\"datetime\"] = pandas.to_datetime(stpat_eloggap_df[\"datetime\"], utc=True)\n", + "stpat_eloggap_df = stpat_eloggap_df.sort_values(\"datetime\")" ] }, { @@ -13618,19 +13847,28 @@ } ], "source": [ - "branc_eloggap_df['diff'] = branc_eloggap_df['datetime'] - branc_eloggap_df.shift(1)['datetime']\n", - "stpat_eloggap_df['diff'] = stpat_eloggap_df['datetime'] - stpat_eloggap_df.shift(1)['datetime']\n", + "branc_eloggap_df[\"diff\"] = branc_eloggap_df[\"datetime\"] - branc_eloggap_df.shift(1)[\"datetime\"]\n", + "stpat_eloggap_df[\"diff\"] = stpat_eloggap_df[\"datetime\"] - stpat_eloggap_df.shift(1)[\"datetime\"]\n", "\n", - "min_lateness = 3.995*60\n", - "bins = numpy.logspace(math.log(min_lateness,10), math.log(1+stpat_eloggap_df['diff'].map(lambda x: x.total_seconds()/60).max(),10), num=120)\n", - "bins = numpy.logspace(math.log(min_lateness,10), math.log(4.01*60,10), num=50)\n", + "min_lateness = 3.995 * 60\n", + "bins = numpy.logspace(\n", + " math.log10(min_lateness),\n", + " math.log10(1 + 
stpat_eloggap_df[\"diff\"].map(lambda x: x.total_seconds() / 60).max()),\n", + " num=120,\n", + ")\n", + "bins = numpy.logspace(math.log10(min_lateness), math.log10(4.01 * 60), num=50)\n", "bins = list(map(lambda x: timedelta(minutes=x), bins))\n", "bins\n", - "stpat_eloggap_df['diffbucket'] = pandas.cut(stpat_eloggap_df.loc[stpat_eloggap_df['diff'] > timedelta(minutes=min_lateness)]['diff'], bins=bins)\n", + "stpat_eloggap_df[\"diffbucket\"] = pandas.cut(\n", + " stpat_eloggap_df.loc[stpat_eloggap_df[\"diff\"] > timedelta(minutes=min_lateness)][\"diff\"],\n", + " bins=bins,\n", + ")\n", "# stpat_eloggap_df.groupby('diffbucket')['diff'].count().plot.bar(figsize=(10,3))\n", - "branc_eloggap_df['diffbucket'] = pandas.cut(branc_eloggap_df.loc[branc_eloggap_df['diff'] > timedelta(minutes=min_lateness)]['diff'], bins=bins)\n", - "branc_eloggap_df.groupby('diffbucket')['diff'].count().plot.bar(figsize=(10,3))\n", - "\n" + "branc_eloggap_df[\"diffbucket\"] = pandas.cut(\n", + " branc_eloggap_df.loc[branc_eloggap_df[\"diff\"] > timedelta(minutes=min_lateness)][\"diff\"],\n", + " bins=bins,\n", + ")\n", + "branc_eloggap_df.groupby(\"diffbucket\")[\"diff\"].count().plot.bar(figsize=(10, 3))\n" ] }, { @@ -15584,30 +15822,49 @@ } ], "source": [ - "thaloslogs_brancol_isup = pandas.read_pickle('thaloslogs_brancol_uptime_ts_df.pickle')\n", + "thaloslogs_brancol_isup = pandas.read_pickle(\"thaloslogs_brancol_uptime_ts_df.pickle\")\n", "# thaloslogs_brancol_isup = thaloslogs_brancol_uptime_ts_df.loc[thaloslogs_brancol_uptime_ts_df['up'] == 1]\n", - "thaloslogs_brancol_isup = thaloslogs_brancol_isup.loc[thaloslogs_brancol_isup.index < '2024-04-08']\n", + "thaloslogs_brancol_isup = thaloslogs_brancol_isup.loc[thaloslogs_brancol_isup.index < \"2024-04-08\"]\n", "thaloslogs_brancol_isup.sort_index()\n", - "thaloslogs_brancol_isup = thaloslogs_brancol_isup.resample(timedelta(hours=4))[['up']].sum() >= 48\n", + "thaloslogs_brancol_isup = thaloslogs_brancol_isup.resample(timedelta(hours=4))[[\"up\"]].sum() >= 48\n", "thaloslogs_brancol_isup\n", "\n", - "branc_eloggap_ts_df = branc_eloggap_df.set_index('datetime')\n", + "branc_eloggap_ts_df = branc_eloggap_df.set_index(\"datetime\")\n", "# branc_eloggap_ts_df = branc_eloggap_ts_df.resample(timedelta(minutes=5)).last()\n", "branc_eloggap_ts_df = branc_eloggap_ts_df.resample(timedelta(hours=4)).last()\n", "display()\n", - "branc_eloggap_ts_df = branc_eloggap_ts_df.loc[branc_eloggap_ts_df.index >= thaloslogs_brancol_isup.index[0]]\n", - "branc_eloggap_ts_df = branc_eloggap_ts_df.join(thaloslogs_brancol_isup[['up']] , how='outer')\n", + "branc_eloggap_ts_df = branc_eloggap_ts_df.loc[\n", + " branc_eloggap_ts_df.index >= thaloslogs_brancol_isup.index[0]\n", + "]\n", + "branc_eloggap_ts_df = branc_eloggap_ts_df.join(thaloslogs_brancol_isup[[\"up\"]], how=\"outer\")\n", "\n", "# branc_eloggap_ts_df.loc[ ( branc_eloggap_ts_df['id'].notna() ) & ( branc_eloggap_ts_df['up'] == 1 ) ]\n", "\n", - "print('off, no vector', (( branc_eloggap_ts_df['id'].isna() ) & ( branc_eloggap_ts_df['up'] == 0 )).sum())\n", - "print('off, vector ran somehow?', (( branc_eloggap_ts_df['id'].notna() ) & ( branc_eloggap_ts_df['up'] == 0 )).sum())\n", - "print('on, no vector?', (( branc_eloggap_ts_df['id'].isna() ) & ( branc_eloggap_ts_df['up'] == 1 )).sum())\n", - "print('on, vector worked.', (( branc_eloggap_ts_df['id'].notna() ) & ( branc_eloggap_ts_df['up'] == 1 )).sum())\n", - "print('total on', (( branc_eloggap_ts_df['up'] == 1 )).sum())\n", + "print(\n", + " \"off, no vector\", 
((branc_eloggap_ts_df[\"id\"].isna()) & (branc_eloggap_ts_df[\"up\"] == 0)).sum()\n", + ")\n", + "print(\n", + " \"off, vector ran somehow?\",\n", + " ((branc_eloggap_ts_df[\"id\"].notna()) & (branc_eloggap_ts_df[\"up\"] == 0)).sum(),\n", + ")\n", + "print(\n", + " \"on, no vector?\", ((branc_eloggap_ts_df[\"id\"].isna()) & (branc_eloggap_ts_df[\"up\"] == 1)).sum()\n", + ")\n", + "print(\n", + " \"on, vector worked.\",\n", + " ((branc_eloggap_ts_df[\"id\"].notna()) & (branc_eloggap_ts_df[\"up\"] == 1)).sum(),\n", + ")\n", + "print(\"total on\", (branc_eloggap_ts_df[\"up\"] == 1).sum())\n", "\n", - "branc_eloggap_ts_df['prevup'] = branc_eloggap_ts_df['up'].shift(1)\n", - "display(branc_eloggap_ts_df.loc[(( branc_eloggap_ts_df['id'].isna() ) & (( branc_eloggap_ts_df['up'] == 1 )) | ( branc_eloggap_ts_df['up'] != branc_eloggap_ts_df['prevup'] ) ) ])\n", + "branc_eloggap_ts_df[\"prevup\"] = branc_eloggap_ts_df[\"up\"].shift(1)\n", + "display(\n", + " branc_eloggap_ts_df.loc[\n", + " (\n", + " (branc_eloggap_ts_df[\"id\"].isna()) & (branc_eloggap_ts_df[\"up\"] == 1)\n", + " | (branc_eloggap_ts_df[\"up\"] != branc_eloggap_ts_df[\"prevup\"])\n", + " )\n", + " ]\n", + ")\n", "\n", "# 27" ] @@ -17563,30 +17820,51 @@ } ], "source": [ - "thaloslogs_stpatrick_isup = pandas.read_pickle('thaloslogs_stpatrick_uptime_ts_df.pickle')\n", + "thaloslogs_stpatrick_isup = pandas.read_pickle(\"thaloslogs_stpatrick_uptime_ts_df.pickle\")\n", "# thaloslogs_stpatrick_isup = thaloslogs_stpatrick_uptime_ts_df.loc[thaloslogs_stpatrick_uptime_ts_df['up'] == 1]\n", - "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_isup.loc[thaloslogs_stpatrick_isup.index < '2024-04-08']\n", + "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_isup.loc[\n", + " thaloslogs_stpatrick_isup.index < \"2024-04-08\"\n", + "]\n", "thaloslogs_stpatrick_isup.sort_index()\n", - "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_isup.resample(timedelta(hours=4))[['up']].min()\n", + "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_isup.resample(timedelta(hours=4))[[\"up\"]].min()\n", "thaloslogs_stpatrick_isup\n", "\n", - "stpat_eloggap_ts_df = stpat_eloggap_df.set_index('datetime')\n", + "stpat_eloggap_ts_df = stpat_eloggap_df.set_index(\"datetime\")\n", "# stpat_eloggap_ts_df = stpat_eloggap_ts_df.resample(timedelta(minutes=5)).last()\n", "stpat_eloggap_ts_df = stpat_eloggap_ts_df.resample(timedelta(hours=4)).last()\n", "display()\n", - "stpat_eloggap_ts_df = stpat_eloggap_ts_df.loc[stpat_eloggap_ts_df.index >= thaloslogs_stpatrick_isup.index[0]]\n", - "stpat_eloggap_ts_df = stpat_eloggap_ts_df.join(thaloslogs_stpatrick_isup[['up']] , how='outer')\n", + "stpat_eloggap_ts_df = stpat_eloggap_ts_df.loc[\n", + " stpat_eloggap_ts_df.index >= thaloslogs_stpatrick_isup.index[0]\n", + "]\n", + "stpat_eloggap_ts_df = stpat_eloggap_ts_df.join(thaloslogs_stpatrick_isup[[\"up\"]], how=\"outer\")\n", "\n", "# stpat_eloggap_ts_df.loc[ ( stpat_eloggap_ts_df['id'].notna() ) & ( stpat_eloggap_ts_df['up'] == 1 ) ]\n", "\n", - "print('off, no vector', (( stpat_eloggap_ts_df['id'].isna() ) & ( stpat_eloggap_ts_df['up'] == 0 )).sum())\n", - "print('off, vector ran somehow?', (( stpat_eloggap_ts_df['id'].notna() ) & ( stpat_eloggap_ts_df['up'] == 0 )).sum())\n", - "print('on, no vector?', (( stpat_eloggap_ts_df['id'].isna() ) & ( stpat_eloggap_ts_df['up'] == 1 )).sum())\n", - "print('on, vector worked.', (( stpat_eloggap_ts_df['id'].notna() ) & ( stpat_eloggap_ts_df['up'] == 1 )).sum())\n", - "print('total on', (( stpat_eloggap_ts_df['up'] == 1 )).sum())\n", + 
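The four print statements above reduce to four masks over the joined frame. A hedged, self-contained sketch of that tally (column names and values invented): "id" is non-null when the vector produced a row in a window, and "up" marks windows where the Thalos system was on. The later ratios such as (132 + 126) / (139 - 4 + 177 - 27) then divide vector-worked windows by on-windows net of transition windows — a reading of intent, not a documented formula.

import pandas

joined = pandas.DataFrame({"id": [None, 3.0, None, 7.0], "up": [0, 0, 1, 1]})
quadrants = {
    "off, no vector": joined["id"].isna() & (joined["up"] == 0),
    "off, vector ran somehow?": joined["id"].notna() & (joined["up"] == 0),
    "on, no vector?": joined["id"].isna() & (joined["up"] == 1),
    "on, vector worked.": joined["id"].notna() & (joined["up"] == 1),
}
for label, mask in quadrants.items():
    print(label, mask.sum())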
"print(\n", + " \"off, no vector\", ((stpat_eloggap_ts_df[\"id\"].isna()) & (stpat_eloggap_ts_df[\"up\"] == 0)).sum()\n", + ")\n", + "print(\n", + " \"off, vector ran somehow?\",\n", + " ((stpat_eloggap_ts_df[\"id\"].notna()) & (stpat_eloggap_ts_df[\"up\"] == 0)).sum(),\n", + ")\n", + "print(\n", + " \"on, no vector?\", ((stpat_eloggap_ts_df[\"id\"].isna()) & (stpat_eloggap_ts_df[\"up\"] == 1)).sum()\n", + ")\n", + "print(\n", + " \"on, vector worked.\",\n", + " ((stpat_eloggap_ts_df[\"id\"].notna()) & (stpat_eloggap_ts_df[\"up\"] == 1)).sum(),\n", + ")\n", + "print(\"total on\", (stpat_eloggap_ts_df[\"up\"] == 1).sum())\n", "\n", - "stpat_eloggap_ts_df['prevup'] = stpat_eloggap_ts_df['up'].shift(1)\n", - "display(stpat_eloggap_ts_df.loc[(( stpat_eloggap_ts_df['id'].isna() ) & (( stpat_eloggap_ts_df['up'] == 1 )) | ( stpat_eloggap_ts_df['up'] != stpat_eloggap_ts_df['prevup'] ) ) ])\n", + "stpat_eloggap_ts_df[\"prevup\"] = stpat_eloggap_ts_df[\"up\"].shift(1)\n", + "display(\n", + " stpat_eloggap_ts_df.loc[\n", + " (\n", + " (stpat_eloggap_ts_df[\"id\"].isna()) & (stpat_eloggap_ts_df[\"up\"] == 1)\n", + " | (stpat_eloggap_ts_df[\"up\"] != stpat_eloggap_ts_df[\"prevup\"])\n", + " )\n", + " ]\n", + ")\n", "\n", "# 4" ] @@ -17609,7 +17887,7 @@ } ], "source": [ - "(132+126) / (139-4+177-27)" + "(132 + 126) / (139 - 4 + 177 - 27)" ] }, { @@ -17619,14 +17897,18 @@ "metadata": {}, "outputs": [], "source": [ - "branc_cca_df = awswrangler.athena.read_sql_query(f\"SELECT * from brancol_v1_tests where vector_id = '7'\", database='tnc_edge')\n", + "branc_cca_df = awswrangler.athena.read_sql_query(\n", + " \"SELECT * from brancol_v1_tests where vector_id = '7'\", database=\"tnc_edge\"\n", + ")\n", "branc_cca_df\n", - "branc_cca_df['datetime'] = pandas.to_datetime(branc_cca_df['datetime'], utc=True)\n", - "branc_cca_df = branc_cca_df.sort_values('datetime')\n", + "branc_cca_df[\"datetime\"] = pandas.to_datetime(branc_cca_df[\"datetime\"], utc=True)\n", + "branc_cca_df = branc_cca_df.sort_values(\"datetime\")\n", "\n", - "stpat_cca_df = awswrangler.athena.read_sql_query(f\"SELECT * from stpatrick_v1_tests where vector_id = '7'\", database='tnc_edge')\n", - "stpat_cca_df['datetime'] = pandas.to_datetime(stpat_cca_df['datetime'], utc=True)\n", - "stpat_cca_df = stpat_cca_df.sort_values('datetime')" + "stpat_cca_df = awswrangler.athena.read_sql_query(\n", + " \"SELECT * from stpatrick_v1_tests where vector_id = '7'\", database=\"tnc_edge\"\n", + ")\n", + "stpat_cca_df[\"datetime\"] = pandas.to_datetime(stpat_cca_df[\"datetime\"], utc=True)\n", + "stpat_cca_df = stpat_cca_df.sort_values(\"datetime\")" ] }, { @@ -17657,19 +17939,26 @@ } ], "source": [ - "branc_cca_df['diff'] = branc_cca_df['datetime'] - branc_cca_df.shift(1)['datetime']\n", - "stpat_cca_df['diff'] = stpat_cca_df['datetime'] - stpat_cca_df.shift(1)['datetime']\n", + "branc_cca_df[\"diff\"] = branc_cca_df[\"datetime\"] - branc_cca_df.shift(1)[\"datetime\"]\n", + "stpat_cca_df[\"diff\"] = stpat_cca_df[\"datetime\"] - stpat_cca_df.shift(1)[\"datetime\"]\n", "\n", - "min_lateness = 0.995*60\n", - "bins = numpy.logspace(math.log(min_lateness,10), math.log(1+stpat_cca_df['diff'].map(lambda x: x.total_seconds()/60).max(),10), num=90)\n", - "bins = numpy.logspace(math.log(min_lateness,10), math.log(1.005*60,10), num=50)\n", + "min_lateness = 0.995 * 60\n", + "bins = numpy.logspace(\n", + " math.log10(min_lateness),\n", + " math.log10(1 + stpat_cca_df[\"diff\"].map(lambda x: x.total_seconds() / 60).max()),\n", + " num=90,\n", + ")\n", + "bins = 
numpy.logspace(math.log10(min_lateness), math.log10(1.005 * 60), num=50)\n", "bins = list(map(lambda x: timedelta(minutes=x), bins))\n", "bins\n", - "stpat_cca_df['diffbucket'] = pandas.cut(stpat_cca_df.loc[stpat_cca_df['diff'] > timedelta(minutes=min_lateness)]['diff'], bins=bins)\n", + "stpat_cca_df[\"diffbucket\"] = pandas.cut(\n", + " stpat_cca_df.loc[stpat_cca_df[\"diff\"] > timedelta(minutes=min_lateness)][\"diff\"], bins=bins\n", + ")\n", "# stpat_cca_df.groupby('diffbucket')['diff'].count().plot.bar(figsize=(10,3))\n", - "branc_cca_df['diffbucket'] = pandas.cut(branc_cca_df.loc[branc_cca_df['diff'] > timedelta(minutes=min_lateness)]['diff'], bins=bins)\n", - "branc_cca_df.groupby('diffbucket')['diff'].count().plot.bar(figsize=(10,3))\n", - "\n" + "branc_cca_df[\"diffbucket\"] = pandas.cut(\n", + " branc_cca_df.loc[branc_cca_df[\"diff\"] > timedelta(minutes=min_lateness)][\"diff\"], bins=bins\n", + ")\n", + "branc_cca_df.groupby(\"diffbucket\")[\"diff\"].count().plot.bar(figsize=(10, 3))\n" ] }, { @@ -19689,30 +19978,40 @@ } ], "source": [ - "thaloslogs_brancol_isup = pandas.read_pickle('thaloslogs_brancol_uptime_ts_df.pickle')\n", + "thaloslogs_brancol_isup = pandas.read_pickle(\"thaloslogs_brancol_uptime_ts_df.pickle\")\n", "# thaloslogs_brancol_isup = thaloslogs_brancol_uptime_ts_df.loc[thaloslogs_brancol_uptime_ts_df['up'] == 1]\n", - "thaloslogs_brancol_isup = thaloslogs_brancol_isup.loc[thaloslogs_brancol_isup.index < '2024-04-08']\n", + "thaloslogs_brancol_isup = thaloslogs_brancol_isup.loc[thaloslogs_brancol_isup.index < \"2024-04-08\"]\n", "thaloslogs_brancol_isup.sort_index()\n", - "thaloslogs_brancol_isup = thaloslogs_brancol_isup.resample(timedelta(hours=1))[['up']].sum() >= 10\n", + "thaloslogs_brancol_isup = thaloslogs_brancol_isup.resample(timedelta(hours=1))[[\"up\"]].sum() >= 10\n", "thaloslogs_brancol_isup\n", "\n", - "branc_cca_ts_df = branc_cca_df.set_index('datetime')\n", + "branc_cca_ts_df = branc_cca_df.set_index(\"datetime\")\n", "# branc_cca_ts_df = branc_cca_ts_df.resample(timedelta(minutes=5)).last()\n", "branc_cca_ts_df = branc_cca_ts_df.resample(timedelta(hours=1)).last()\n", "display()\n", "branc_cca_ts_df = branc_cca_ts_df.loc[branc_cca_ts_df.index >= thaloslogs_brancol_isup.index[0]]\n", - "branc_cca_ts_df = branc_cca_ts_df.join(thaloslogs_brancol_isup[['up']] , how='outer')\n", + "branc_cca_ts_df = branc_cca_ts_df.join(thaloslogs_brancol_isup[[\"up\"]], how=\"outer\")\n", "\n", "# branc_cca_ts_df.loc[ ( branc_cca_ts_df['id'].notna() ) & ( branc_cca_ts_df['up'] == 1 ) ]\n", "\n", - "print('off, no vector', (( branc_cca_ts_df['id'].isna() ) & ( branc_cca_ts_df['up'] == 0 )).sum())\n", - "print('off, vector ran somehow?', (( branc_cca_ts_df['id'].notna() ) & ( branc_cca_ts_df['up'] == 0 )).sum())\n", - "print('on, no vector?', (( branc_cca_ts_df['id'].isna() ) & ( branc_cca_ts_df['up'] == 1 )).sum())\n", - "print('on, vector worked.', (( branc_cca_ts_df['id'].notna() ) & ( branc_cca_ts_df['up'] == 1 )).sum())\n", - "print('total on', (( branc_cca_ts_df['up'] == 1 )).sum())\n", + "print(\"off, no vector\", ((branc_cca_ts_df[\"id\"].isna()) & (branc_cca_ts_df[\"up\"] == 0)).sum())\n", + "print(\n", + " \"off, vector ran somehow?\",\n", + " ((branc_cca_ts_df[\"id\"].notna()) & (branc_cca_ts_df[\"up\"] == 0)).sum(),\n", + ")\n", + "print(\"on, no vector?\", ((branc_cca_ts_df[\"id\"].isna()) & (branc_cca_ts_df[\"up\"] == 1)).sum())\n", + "print(\"on, vector worked.\", ((branc_cca_ts_df[\"id\"].notna()) & (branc_cca_ts_df[\"up\"] == 1)).sum())\n", + 
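A compact sketch of the log-spaced binning these histogram cells rely on, using the same bounds as the hourly CCA vector above (nominal 60-minute period, ±0.5% tolerance); the gap values are fabricated. numpy.logspace takes log10 endpoints, so the minute bounds pass through math.log10 and the resulting edges are converted back to timedeltas for pandas.cut.

import math
from datetime import timedelta

import numpy
import pandas

gaps = pandas.Series([timedelta(minutes=m) for m in (59.8, 60.05, 60.2, 60.4)])
edges = numpy.logspace(math.log10(0.995 * 60), math.log10(1.005 * 60), num=50)
bins = [timedelta(minutes=m) for m in edges]
# gaps outside [0.995h, 1.005h] fall out as NaN and are dropped by value_counts
print(pandas.cut(gaps, bins=bins).value_counts().sort_index().head())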
"print(\"total on\", (branc_cca_ts_df[\"up\"] == 1).sum())\n", "\n", - "branc_cca_ts_df['prevup'] = branc_cca_ts_df['up'].shift(1)\n", - "display(branc_cca_ts_df.loc[(( branc_cca_ts_df['id'].isna() ) & (( branc_cca_ts_df['up'] == 1 )) | ( branc_cca_ts_df['up'] != branc_cca_ts_df['prevup'] ) ) ])\n", + "branc_cca_ts_df[\"prevup\"] = branc_cca_ts_df[\"up\"].shift(1)\n", + "display(\n", + " branc_cca_ts_df.loc[\n", + " (\n", + " (branc_cca_ts_df[\"id\"].isna()) & (branc_cca_ts_df[\"up\"] == 1)\n", + " | (branc_cca_ts_df[\"up\"] != branc_cca_ts_df[\"prevup\"])\n", + " )\n", + " ]\n", + ")\n", "\n", "# 169" ] @@ -21732,30 +22031,44 @@ } ], "source": [ - "thaloslogs_stpatrick_isup = pandas.read_pickle('thaloslogs_stpatrick_uptime_ts_df.pickle')\n", + "thaloslogs_stpatrick_isup = pandas.read_pickle(\"thaloslogs_stpatrick_uptime_ts_df.pickle\")\n", "# thaloslogs_stpatrick_isup = thaloslogs_stpatrick_uptime_ts_df.loc[thaloslogs_stpatrick_uptime_ts_df['up'] == 1]\n", - "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_isup.loc[thaloslogs_stpatrick_isup.index < '2024-04-08']\n", + "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_isup.loc[\n", + " thaloslogs_stpatrick_isup.index < \"2024-04-08\"\n", + "]\n", "thaloslogs_stpatrick_isup.sort_index()\n", - "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_isup.resample(timedelta(hours=1))[['up']].sum() >= 10\n", + "thaloslogs_stpatrick_isup = (\n", + " thaloslogs_stpatrick_isup.resample(timedelta(hours=1))[[\"up\"]].sum() >= 10\n", + ")\n", "thaloslogs_stpatrick_isup\n", "\n", - "stpat_cca_ts_df = stpat_cca_df.set_index('datetime')\n", + "stpat_cca_ts_df = stpat_cca_df.set_index(\"datetime\")\n", "# stpat_cca_ts_df = stpat_cca_ts_df.resample(timedelta(minutes=5)).last()\n", "stpat_cca_ts_df = stpat_cca_ts_df.resample(timedelta(hours=1)).last()\n", "display()\n", "stpat_cca_ts_df = stpat_cca_ts_df.loc[stpat_cca_ts_df.index >= thaloslogs_stpatrick_isup.index[0]]\n", - "stpat_cca_ts_df = stpat_cca_ts_df.join(thaloslogs_stpatrick_isup[['up']] , how='outer')\n", + "stpat_cca_ts_df = stpat_cca_ts_df.join(thaloslogs_stpatrick_isup[[\"up\"]], how=\"outer\")\n", "\n", "# stpat_cca_ts_df.loc[ ( stpat_cca_ts_df['id'].notna() ) & ( stpat_cca_ts_df['up'] == 1 ) ]\n", "\n", - "print('off, no vector', (( stpat_cca_ts_df['id'].isna() ) & ( stpat_cca_ts_df['up'] == 0 )).sum())\n", - "print('off, vector ran somehow?', (( stpat_cca_ts_df['id'].notna() ) & ( stpat_cca_ts_df['up'] == 0 )).sum())\n", - "print('on, no vector?', (( stpat_cca_ts_df['id'].isna() ) & ( stpat_cca_ts_df['up'] == 1 )).sum())\n", - "print('on, vector worked.', (( stpat_cca_ts_df['id'].notna() ) & ( stpat_cca_ts_df['up'] == 1 )).sum())\n", - "print('total on', (( stpat_cca_ts_df['up'] == 1 )).sum())\n", + "print(\"off, no vector\", ((stpat_cca_ts_df[\"id\"].isna()) & (stpat_cca_ts_df[\"up\"] == 0)).sum())\n", + "print(\n", + " \"off, vector ran somehow?\",\n", + " ((stpat_cca_ts_df[\"id\"].notna()) & (stpat_cca_ts_df[\"up\"] == 0)).sum(),\n", + ")\n", + "print(\"on, no vector?\", ((stpat_cca_ts_df[\"id\"].isna()) & (stpat_cca_ts_df[\"up\"] == 1)).sum())\n", + "print(\"on, vector worked.\", ((stpat_cca_ts_df[\"id\"].notna()) & (stpat_cca_ts_df[\"up\"] == 1)).sum())\n", + "print(\"total on\", (stpat_cca_ts_df[\"up\"] == 1).sum())\n", "\n", - "stpat_cca_ts_df['prevup'] = stpat_cca_ts_df['up'].shift(1)\n", - "display(stpat_cca_ts_df.loc[(( stpat_cca_ts_df['id'].isna() ) & (( stpat_cca_ts_df['up'] == 1 )) | ( stpat_cca_ts_df['up'] != stpat_cca_ts_df['prevup'] ) ) ])\n", + "stpat_cca_ts_df[\"prevup\"] = 
stpat_cca_ts_df[\"up\"].shift(1)\n", + "display(\n", + " stpat_cca_ts_df.loc[\n", + " (\n", + " (stpat_cca_ts_df[\"id\"].isna()) & (stpat_cca_ts_df[\"up\"] == 1)\n", + " | (stpat_cca_ts_df[\"up\"] != stpat_cca_ts_df[\"prevup\"])\n", + " )\n", + " ]\n", + ")\n", "\n", "# 108" ] @@ -21795,7 +22108,7 @@ "# 108\n", "\n", "# (863+976)/(1186-169+1126-108)\n", - "(863+976)/(1086-169+1126-108)" + "(863 + 976) / (1086 - 169 + 1126 - 108)" ] }, { @@ -23677,7 +23990,7 @@ } ], "source": [ - "tmp = stpat_eloggap_df.set_index('datetime')\n", + "tmp = stpat_eloggap_df.set_index(\"datetime\")\n", "tmp.sort_index()" ] } diff --git a/notebooks/tnc_edge_bv_excel_parsing.ipynb b/notebooks/tnc_edge_bv_excel_parsing.ipynb index 03e9e2e..a391344 100644 --- a/notebooks/tnc_edge_bv_excel_parsing.ipynb +++ b/notebooks/tnc_edge_bv_excel_parsing.ipynb @@ -25,12 +25,12 @@ "metadata": {}, "outputs": [], "source": [ - "import pandas\n", - "import numpy as np\n", - "from datetime import datetime, date, time, timezone, timedelta\n", - "from dateutil.parser import parse as parse_datetime\n", "# help(np.argwhere)\n", - "import re" + "import re\n", + "from datetime import UTC, datetime, timedelta\n", + "\n", + "import pandas\n", + "from dateutil.parser import parse as parse_datetime" ] }, { @@ -52,14 +52,14 @@ "# dir(pandas)\n", "# help(pandas.read_excel)\n", "\n", - "fname = '20240308_SAINT PATRICK_FO6_FO7_FO8_FO9_FO10_FO11_FO12_FO13.xlsx'\n", + "fname = \"20240308_SAINT PATRICK_FO6_FO7_FO8_FO9_FO10_FO11_FO12_FO13.xlsx\"\n", "\n", "# boat = 'brancol'\n", - "boat = 'stpatrick'\n", + "boat = \"stpatrick\"\n", "\n", "all_sheets = pandas.read_excel(fname, sheet_name=None)\n", "\n", - "sheet_names = list(filter(lambda k: re.match('^FO \\d+', k), all_sheets.keys()))\n", + "sheet_names = list(filter(lambda k: re.match(r\"^FO \\d+\", k), all_sheets.keys()))\n", "\n", "curr_sheet = all_sheets[sheet_names[1]]\n" ] @@ -72,18 +72,18 @@ "outputs": [], "source": [ "def display_full(x):\n", - " pandas.set_option('display.max_rows', 1000)\n", - " pandas.set_option('display.min_rows', 400)\n", - " pandas.set_option('display.max_columns', None)\n", - " pandas.set_option('display.width', 2000)\n", - " pandas.set_option('display.float_format', '{:20,.2f}'.format)\n", - " pandas.set_option('display.max_colwidth', None)\n", + " pandas.set_option(\"display.max_rows\", 1000)\n", + " pandas.set_option(\"display.min_rows\", 400)\n", + " pandas.set_option(\"display.max_columns\", None)\n", + " pandas.set_option(\"display.width\", 2000)\n", + " pandas.set_option(\"display.float_format\", \"{:20,.2f}\".format)\n", + " pandas.set_option(\"display.max_colwidth\", None)\n", " display(x)\n", - " pandas.reset_option('display.max_rows')\n", - " pandas.reset_option('display.max_columns')\n", - " pandas.reset_option('display.width')\n", - " pandas.reset_option('display.float_format')\n", - " pandas.reset_option('display.max_colwidth')\n" + " pandas.reset_option(\"display.max_rows\")\n", + " pandas.reset_option(\"display.max_columns\")\n", + " pandas.reset_option(\"display.width\")\n", + " pandas.reset_option(\"display.float_format\")\n", + " pandas.reset_option(\"display.max_colwidth\")\n" ] }, { @@ -93,55 +93,78 @@ "metadata": {}, "outputs": [], "source": [ - "\n", - "\n", "def findcell(sheet, needle):\n", " for col_name in list(sheet.keys()):\n", " try:\n", " start_idx = sheet[col_name].to_list().index(needle)\n", - " return (col_name, start_idx+1)\n", + " return (col_name, start_idx + 1)\n", " except ValueError:\n", " pass\n", " return None\n", "\n", + 
"\n", "def set_haul_grid_fetch_one(curr_sheet, set_haul_title_cell, cell_str, cell_offset):\n", " cols_index = list(curr_sheet.keys())\n", - " \n", + "\n", " if cols_index.index(set_haul_title_cell[0]) + cell_offset[0] >= len(cols_index):\n", - " raise ValueError(f'index offset {cell_offset[0]} out of bounds in sheet {cols_index}')\n", - " \n", - "# print(set_haul_title_cell)\n", - "# print(cell_offset)\n", - "# print(cols_index.index(set_haul_title_cell[0]))\n", - "# print(cols_index[cols_index.index(set_haul_title_cell[0]) + cell_offset[0]])\n", - "# print(curr_sheet[cols_index[cols_index.index(set_haul_title_cell[0]) + cell_offset[0]]])\n", - "# print(curr_sheet[cols_index[cols_index.index(set_haul_title_cell[0]) + cell_offset[0]]][set_haul_title_cell[1] + cell_offset[1]])\n", - " \n", - " if curr_sheet[cols_index[cols_index.index(set_haul_title_cell[0]) + cell_offset[0]]][set_haul_title_cell[1] + cell_offset[1]] != cell_str:\n", + " raise ValueError(f\"index offset {cell_offset[0]} out of bounds in sheet {cols_index}\")\n", + "\n", + " # print(set_haul_title_cell)\n", + " # print(cell_offset)\n", + " # print(cols_index.index(set_haul_title_cell[0]))\n", + " # print(cols_index[cols_index.index(set_haul_title_cell[0]) + cell_offset[0]])\n", + " # print(curr_sheet[cols_index[cols_index.index(set_haul_title_cell[0]) + cell_offset[0]]])\n", + " # print(curr_sheet[cols_index[cols_index.index(set_haul_title_cell[0]) + cell_offset[0]]][set_haul_title_cell[1] + cell_offset[1]])\n", + "\n", + " if (\n", + " curr_sheet[cols_index[cols_index.index(set_haul_title_cell[0]) + cell_offset[0]]][\n", + " set_haul_title_cell[1] + cell_offset[1]\n", + " ]\n", + " != cell_str\n", + " ):\n", " raise ValueError(f\"can't find {cell_str}\")\n", - " return curr_sheet[cols_index[cols_index.index(set_haul_title_cell[0]) + cell_offset[0]]][set_haul_title_cell[1] + cell_offset[1]+1]\n", + " return curr_sheet[cols_index[cols_index.index(set_haul_title_cell[0]) + cell_offset[0]]][\n", + " set_haul_title_cell[1] + cell_offset[1] + 1\n", + " ]\n", "\n", "\n", "def set_haul_grid_fetch_all(curr_sheet, set_haul_title_cell):\n", - " start_date = set_haul_grid_fetch_one(curr_sheet, set_haul_title_cell, 'start date', (0, 0))\n", + " start_date = set_haul_grid_fetch_one(curr_sheet, set_haul_title_cell, \"start date\", (0, 0))\n", " try:\n", - " start_time = set_haul_grid_fetch_one(curr_sheet, set_haul_title_cell, 'start time (UTC)', (1, 0))\n", + " start_time = set_haul_grid_fetch_one(\n", + " curr_sheet, set_haul_title_cell, \"start time (UTC)\", (1, 0)\n", + " )\n", " except:\n", - " start_time = set_haul_grid_fetch_one(curr_sheet, set_haul_title_cell, 'start time', (1, 0))\n", - " start_lat = set_haul_grid_fetch_one(curr_sheet, set_haul_title_cell, 'latitude', (2, 0))\n", - " start_lon = set_haul_grid_fetch_one(curr_sheet, set_haul_title_cell, 'longitude', (3, 0))\n", - " finish_date = set_haul_grid_fetch_one(curr_sheet, set_haul_title_cell, 'finish date', (0, 2))\n", + " start_time = set_haul_grid_fetch_one(curr_sheet, set_haul_title_cell, \"start time\", (1, 0))\n", + " start_lat = set_haul_grid_fetch_one(curr_sheet, set_haul_title_cell, \"latitude\", (2, 0))\n", + " start_lon = set_haul_grid_fetch_one(curr_sheet, set_haul_title_cell, \"longitude\", (3, 0))\n", + " finish_date = set_haul_grid_fetch_one(curr_sheet, set_haul_title_cell, \"finish date\", (0, 2))\n", " try:\n", - " finish_time = set_haul_grid_fetch_one(curr_sheet, set_haul_title_cell, 'finish time (UTC)', (1, 2))\n", + " finish_time = 
set_haul_grid_fetch_one(\n", + " curr_sheet, set_haul_title_cell, \"finish time (UTC)\", (1, 2)\n", + " )\n", " except:\n", " try:\n", - " finish_time = set_haul_grid_fetch_one(curr_sheet, set_haul_title_cell, 'finish time (UTC', (1, 2))\n", + " finish_time = set_haul_grid_fetch_one(\n", + " curr_sheet, set_haul_title_cell, \"finish time (UTC\", (1, 2)\n", + " )\n", " except:\n", - " finish_time = set_haul_grid_fetch_one(curr_sheet, set_haul_title_cell, 'finish time', (1, 2))\n", - " finish_lat = set_haul_grid_fetch_one(curr_sheet, set_haul_title_cell, 'latitude', (2, 2))\n", - " finish_lon = set_haul_grid_fetch_one(curr_sheet, set_haul_title_cell, 'longitude', (3, 2))\n", + " finish_time = set_haul_grid_fetch_one(\n", + " curr_sheet, set_haul_title_cell, \"finish time\", (1, 2)\n", + " )\n", + " finish_lat = set_haul_grid_fetch_one(curr_sheet, set_haul_title_cell, \"latitude\", (2, 2))\n", + " finish_lon = set_haul_grid_fetch_one(curr_sheet, set_haul_title_cell, \"longitude\", (3, 2))\n", "\n", - " return (start_date, start_time, start_lat, start_lon, finish_date, finish_time, finish_lat, finish_lon)\n" + " return (\n", + " start_date,\n", + " start_time,\n", + " start_lat,\n", + " start_lon,\n", + " finish_date,\n", + " finish_time,\n", + " finish_lat,\n", + " finish_lon,\n", + " )\n" ] }, { @@ -2159,65 +2182,73 @@ } ], "source": [ - "\n", - "\n", "# trip info\n", "\n", "\n", - "if list(all_sheets.keys())[0] != 'TRIP':\n", - " raise ValueError('first sheet should be trip info')\n", + "if list(all_sheets.keys())[0] != \"TRIP\":\n", + " raise ValueError(\"first sheet should be trip info\")\n", "\n", - "trip_sheet = all_sheets['TRIP']\n", + "trip_sheet = all_sheets[\"TRIP\"]\n", "\n", - "if 'Fishing trip' in trip_sheet.keys():\n", - " trip_start_date_cell = ('Fishing trip', 0)\n", + "if \"Fishing trip\" in trip_sheet.keys():\n", + " trip_start_date_cell = (\"Fishing trip\", 0)\n", "else:\n", - " trip_start_date_cell = findcell(curr_sheet, 'Fishing trip')\n", + " trip_start_date_cell = findcell(curr_sheet, \"Fishing trip\")\n", " if not setting_cell:\n", " raise ValueError(\"no 'Fishing trip' block in sheet\")\n", - " \n", "\n", - "trip_notes = ''\n", + "\n", + "trip_notes = \"\"\n", "try:\n", - " trip_notes = set_haul_grid_fetch_one(trip_sheet, trip_start_date_cell, 'General notes ', (7,0)) \n", + " trip_notes = set_haul_grid_fetch_one(trip_sheet, trip_start_date_cell, \"General notes \", (7, 0))\n", "except:\n", " try:\n", - " trip_notes = set_haul_grid_fetch_one(trip_sheet, trip_start_date_cell, 'Notes', (7,0)) \n", + " trip_notes = set_haul_grid_fetch_one(trip_sheet, trip_start_date_cell, \"Notes\", (7, 0))\n", " except:\n", " try:\n", - " trip_notes = set_haul_grid_fetch_one(trip_sheet, trip_start_date_cell, 'Notes', (7,1)) \n", + " trip_notes = set_haul_grid_fetch_one(trip_sheet, trip_start_date_cell, \"Notes\", (7, 1))\n", " except:\n", " try:\n", - " trip_notes = set_haul_grid_fetch_one(trip_sheet, trip_start_date_cell, 'Note ', (8,0)) \n", + " trip_notes = set_haul_grid_fetch_one(\n", + " trip_sheet, trip_start_date_cell, \"Note \", (8, 0)\n", + " )\n", " except:\n", " pass\n", "try:\n", - " trip_start_date = set_haul_grid_fetch_one(trip_sheet, trip_start_date_cell, 'Start date', (0,1)).date()\n", + " trip_start_date = set_haul_grid_fetch_one(\n", + " trip_sheet, trip_start_date_cell, \"Start date\", (0, 1)\n", + " ).date()\n", "except:\n", - " trip_start_date = set_haul_grid_fetch_one(trip_sheet, trip_start_date_cell, 'Start date', (0,3)).date()\n", + " trip_start_date = 
set_haul_grid_fetch_one(\n", + " trip_sheet, trip_start_date_cell, \"Start date\", (0, 3)\n", + " ).date()\n", "\n", "try:\n", - " trip_end_date = set_haul_grid_fetch_one(trip_sheet, trip_start_date_cell, 'Finish date', (2,1)).date()\n", + " trip_end_date = set_haul_grid_fetch_one(\n", + " trip_sheet, trip_start_date_cell, \"Finish date\", (2, 1)\n", + " ).date()\n", "except:\n", - " trip_end_date = set_haul_grid_fetch_one(trip_sheet, trip_start_date_cell, 'Finish date', (2,3)).date()\n", + " trip_end_date = set_haul_grid_fetch_one(\n", + " trip_sheet, trip_start_date_cell, \"Finish date\", (2, 3)\n", + " ).date()\n", "trip_id = boat + \"_\" + str(trip_start_date)\n", "\n", "try:\n", - " obsv_name = set_haul_grid_fetch_one(trip_sheet, trip_start_date_cell, \"Observer's name\", (0,4))\n", - "except ValueError as e:\n", - " obsv_name = set_haul_grid_fetch_one(trip_sheet, trip_start_date_cell, \"Observer's name\", (0,6))\n", - " \n", + " obsv_name = set_haul_grid_fetch_one(trip_sheet, trip_start_date_cell, \"Observer's name\", (0, 4))\n", + "except ValueError:\n", + " obsv_name = set_haul_grid_fetch_one(trip_sheet, trip_start_date_cell, \"Observer's name\", (0, 6))\n", + "\n", "\n", "trip_data = dict(\n", - "trip_id = trip_id,\n", - "trip_start_date = trip_start_date,\n", - "trip_end_date = trip_end_date,\n", - "trip_notes = trip_notes,\n", - "obsv_name = obsv_name,\n", + " trip_id=trip_id,\n", + " trip_start_date=trip_start_date,\n", + " trip_end_date=trip_end_date,\n", + " trip_notes=trip_notes,\n", + " obsv_name=obsv_name,\n", ")\n", - "trip_data = {k:[v] for k,v in trip_data.items()}\n", + "trip_data = {k: [v] for k, v in trip_data.items()}\n", "trip_df = pandas.DataFrame(trip_data)\n", - "trip_df = trip_df.set_index('trip_id')\n", + "trip_df = trip_df.set_index(\"trip_id\")\n", "# print(trip_df)\n", "\n", "\n", @@ -2233,120 +2264,147 @@ " curr_sheet = all_sheets[sheet_name]\n", "\n", " # look for 'latitude' in this sheet. 
If found, it's probably a sheet with set/haul data\n", - " fao_code_cell = findcell(curr_sheet, 'FAO code')\n", + " fao_code_cell = findcell(curr_sheet, \"FAO code\")\n", " if not fao_code_cell:\n", " continue\n", - " catchcondition_cell = findcell(curr_sheet, 'catch condition')\n", + " catchcondition_cell = findcell(curr_sheet, \"catch condition\")\n", " if not catchcondition_cell:\n", " continue\n", - " discardreason_cell = findcell(curr_sheet, 'reason for discard')\n", + " discardreason_cell = findcell(curr_sheet, \"reason for discard\")\n", " if not discardreason_cell:\n", " continue\n", - " if discardreason_cell[1] != catchcondition_cell[1] or catchcondition_cell[1] != discardreason_cell[1]:\n", + " if (\n", + " discardreason_cell[1] != catchcondition_cell[1]\n", + " or catchcondition_cell[1] != discardreason_cell[1]\n", + " ):\n", " raise ValueError(f\"cannot find header row for fish data in sheet {sheet_name}\")\n", "\n", " # find other metadata values\n", "\n", " cols_index = list(curr_sheet.keys())\n", - " if 'Setting' in cols_index:\n", - " setting_cell = ('Setting', 0)\n", + " if \"Setting\" in cols_index:\n", + " setting_cell = (\"Setting\", 0)\n", " else:\n", - " setting_cell = findcell(curr_sheet, 'Setting')\n", + " setting_cell = findcell(curr_sheet, \"Setting\")\n", " if not setting_cell:\n", " raise ValueError(\"no 'Setting' block in sheet\")\n", "\n", - " (set_start_date, set_start_time, set_start_lat ,\n", - " set_start_lon ,set_end_date, set_end_time,set_end_lat ,\n", - " set_end_lon,) = set_haul_grid_fetch_all(curr_sheet, setting_cell)\n", + " (\n", + " set_start_date,\n", + " set_start_time,\n", + " set_start_lat,\n", + " set_start_lon,\n", + " set_end_date,\n", + " set_end_time,\n", + " set_end_lat,\n", + " set_end_lon,\n", + " ) = set_haul_grid_fetch_all(curr_sheet, setting_cell)\n", "\n", - "\n", - " if 'Hauling' in cols_index:\n", - " hauling_cell = ('Hauling', 0)\n", + " if \"Hauling\" in cols_index:\n", + " hauling_cell = (\"Hauling\", 0)\n", " else:\n", - " hauling_cell = findcell(curr_sheet, 'Hauling')\n", + " hauling_cell = findcell(curr_sheet, \"Hauling\")\n", " if not hauling_cell:\n", " raise ValueError(\"no 'Hauling' block in sheet\")\n", "\n", - " (haul_start_date, haul_start_time, haul_start_lat ,\n", - " haul_start_lon ,haul_end_date, haul_end_time,haul_end_lat ,\n", - " haul_end_lon,) = set_haul_grid_fetch_all(curr_sheet, hauling_cell)\n", + " (\n", + " haul_start_date,\n", + " haul_start_time,\n", + " haul_start_lat,\n", + " haul_start_lon,\n", + " haul_end_date,\n", + " haul_end_time,\n", + " haul_end_lat,\n", + " haul_end_lon,\n", + " ) = set_haul_grid_fetch_all(curr_sheet, hauling_cell)\n", "\n", " set_number += 1\n", - " set_id = trip_id + \"_set_\"+str(set_number).zfill(2)\n", - " # print(set_id)\n", + " set_id = trip_id + \"_set_\" + str(set_number).zfill(2)\n", + " # print(set_id)\n", " set_row = dict(\n", " set_id=set_id,\n", " trip_id=trip_id,\n", " set_number=set_number,\n", - "\n", - " set_start_datetime = datetime.combine(set_start_date.date(), set_start_time).replace(tzinfo=timezone.utc),\n", - " set_start_lat = set_start_lat ,\n", - " set_start_lon = set_start_lon ,\n", - "\n", - " set_end_datetime = datetime.combine(set_end_date.date(), set_end_time).replace(tzinfo=timezone.utc),\n", - " set_end_lat = set_end_lat ,\n", - " set_end_lon = set_end_lon ,\n", - "\n", - " haul_start_datetime = datetime.combine(haul_start_date.date(), haul_start_time).replace(tzinfo=timezone.utc),\n", - " haul_start_lat = haul_start_lat,\n", - " 
haul_start_lon = haul_start_lon ,\n", - "\n", - " haul_end_datetime = datetime.combine(haul_end_date.date(), haul_end_time).replace(tzinfo=timezone.utc),\n", - " haul_end_lat = haul_end_lat,\n", - " haul_end_lon = haul_end_lon\n", + " set_start_datetime=datetime.combine(set_start_date.date(), set_start_time).replace(\n", + " tzinfo=UTC\n", + " ),\n", + " set_start_lat=set_start_lat,\n", + " set_start_lon=set_start_lon,\n", + " set_end_datetime=datetime.combine(set_end_date.date(), set_end_time).replace(\n", + " tzinfo=UTC\n", + " ),\n", + " set_end_lat=set_end_lat,\n", + " set_end_lon=set_end_lon,\n", + " haul_start_datetime=datetime.combine(haul_start_date.date(), haul_start_time).replace(\n", + " tzinfo=UTC\n", + " ),\n", + " haul_start_lat=haul_start_lat,\n", + " haul_start_lon=haul_start_lon,\n", + " haul_end_datetime=datetime.combine(haul_end_date.date(), haul_end_time).replace(\n", + " tzinfo=UTC\n", + " ),\n", + " haul_end_lat=haul_end_lat,\n", + " haul_end_lon=haul_end_lon,\n", " )\n", - " # print({k:v for k,v in set_row.items()})\n", - " set_row = {k:[v] for k,v in set_row.items()}\n", + " # print({k:v for k,v in set_row.items()})\n", + " set_row = {k: [v] for k, v in set_row.items()}\n", "\n", - " # print(set_row)\n", + " # print(set_row)\n", " set_df = pandas.DataFrame(set_row)\n", - " set_df = set_df.set_index('set_id')\n", - " # print(set_df)\n", + " set_df = set_df.set_index(\"set_id\")\n", + " # print(set_df)\n", " if sets_df is None:\n", " sets_df = set_df\n", " else:\n", " sets_df = sets_df.append(set_df)\n", "\n", - " # print(sheet_name, fao_code_cell[1])\n", + " # print(sheet_name, fao_code_cell[1])\n", "\n", " reimport_sheet = pandas.read_excel(fname, sheet_name=sheet_name, skiprows=fao_code_cell[1])\n", - " reimport_sheet = reimport_sheet.loc[:, ~reimport_sheet.columns.str.contains('^Unnamed: ')]\n", + " reimport_sheet = reimport_sheet.loc[:, ~reimport_sheet.columns.str.contains(\"^Unnamed: \")]\n", + "\n", " def replace_catch_hour(catch_hour):\n", " if type(catch_hour) == str:\n", - " # print(catch_hour)\n", + " # print(catch_hour)\n", " catch_hour = parse_datetime(catch_hour).time()\n", - " haul_datetime = set_row['haul_start_datetime'][0]\n", + " haul_datetime = set_row[\"haul_start_datetime\"][0]\n", " catch_datetime = haul_datetime.replace(hour=catch_hour.hour, minute=catch_hour.minute)\n", " if haul_datetime - catch_datetime > timedelta(hours=2):\n", " # the catch_datetime is somehow smaller than the start of the haul\n", " # this is outside of the haul window\n", " # this is probably because the haul started just before midnight, and continued to the next day\n", " catch_datetime += timedelta(days=1)\n", - " end_datetime = set_row['haul_end_datetime'][0]\n", + " end_datetime = set_row[\"haul_end_datetime\"][0]\n", " if catch_datetime - end_datetime > timedelta(hours=2):\n", " # adding a day didn't work, now it's outside of the haul window on the other side\n", - " raise ValueError(f'catch time {catch_hour} cannot fit between haul times {haul_datetime} - {end_datetime}')\n", + " raise ValueError(\n", + " f\"catch time {catch_hour} cannot fit between haul times {haul_datetime} - {end_datetime}\"\n", + " )\n", " return catch_datetime\n", "\n", " try:\n", - " reimport_sheet['catch_datetime'] = reimport_sheet['hour'].map(replace_catch_hour)\n", + " reimport_sheet[\"catch_datetime\"] = reimport_sheet[\"hour\"].map(replace_catch_hour)\n", " except BaseException as e:\n", " print(\"error on sheetname\", sheet_name)\n", " raise e\n", "\n", - " # print(reimport_sheet)\n", 
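The replace_catch_hour logic above guards against midnight rollover: catch rows carry only a wall-clock hour, so a catch landing more than two hours before the haul start is pushed to the next day, and one that then overshoots the haul end by more than two hours raises. A standalone sketch of the rule with invented times:

from datetime import UTC, datetime, time, timedelta

haul_start = datetime(2024, 3, 1, 23, 30, tzinfo=UTC)
catch_hour = time(0, 15)  # logged with no date, only a wall-clock time
catch = haul_start.replace(hour=catch_hour.hour, minute=catch_hour.minute)
if haul_start - catch > timedelta(hours=2):
    catch += timedelta(days=1)  # the haul crossed midnight
print(catch)  # 2024-03-02 00:15:00+00:00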
+ " # print(reimport_sheet)\n", "\n", - " reimport_sheet['set_id'] = set_id\n", - " reimport_sheet.insert(0, 'set_id', reimport_sheet.pop('set_id'))\n", - " reimport_sheet['fish_id'] = reimport_sheet['set_id'] + \"_fish_\" + pandas.Series(map(lambda i: str(i).zfill(3), reimport_sheet.index.values))\n", - " reimport_sheet = reimport_sheet.set_index('fish_id')\n", + " reimport_sheet[\"set_id\"] = set_id\n", + " reimport_sheet.insert(0, \"set_id\", reimport_sheet.pop(\"set_id\"))\n", + " reimport_sheet[\"fish_id\"] = (\n", + " reimport_sheet[\"set_id\"]\n", + " + \"_fish_\"\n", + " + pandas.Series(map(lambda i: str(i).zfill(3), reimport_sheet.index.values))\n", + " )\n", + " reimport_sheet = reimport_sheet.set_index(\"fish_id\")\n", "\n", " if fish_df is None:\n", " fish_df = reimport_sheet\n", " else:\n", " fish_df = fish_df.append(reimport_sheet)\n", " except BaseException as e:\n", - " print('debug - in sheet', sheet_name)\n", + " print(\"debug - in sheet\", sheet_name)\n", " raise e\n", "\n", "\n", @@ -2358,8 +2416,7 @@ "\n", "# fish_df.pop('fish_id')\n", "\n", - "display(fish_df)\n", - "\n" + "display(fish_df)\n" ] }, { @@ -2381,7 +2438,8 @@ "source": [ "import awswrangler as wr\n", "import boto3\n", - "boto3.setup_default_session(profile_name='XXXXXXXX')" + "\n", + "boto3.setup_default_session(profile_name=\"XXXXXXXX\")" ] }, { @@ -2393,15 +2451,15 @@ "source": [ "# dir(wr.s3)\n", "# wr.s3.list_buckets()\n", - "bucket='51-gema-dev-dp-raw'\n", + "bucket = \"51-gema-dev-dp-raw\"\n", "# wr.s3.list_directories(f's3://{bucket}/tnc_edge/')\n", "# help(wr.s3.to_csv)\n", "\n", "\n", "print(\n", - " wr.s3.to_csv(trip_df, f's3://{bucket}/tnc_edge/{boat}_v1_bv_trips/{trip_id}.csv'),\n", - " wr.s3.to_csv(sets_df, f's3://{bucket}/tnc_edge/{boat}_v1_bv_sets/{trip_id}.csv'),\n", - " wr.s3.to_csv(fish_df, f's3://{bucket}/tnc_edge/{boat}_v1_bv_fish/{trip_id}.csv')\n", + " wr.s3.to_csv(trip_df, f\"s3://{bucket}/tnc_edge/{boat}_v1_bv_trips/{trip_id}.csv\"),\n", + " wr.s3.to_csv(sets_df, f\"s3://{bucket}/tnc_edge/{boat}_v1_bv_sets/{trip_id}.csv\"),\n", + " wr.s3.to_csv(fish_df, f\"s3://{bucket}/tnc_edge/{boat}_v1_bv_fish/{trip_id}.csv\"),\n", ")\n" ] }, diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..5eef5ff --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,234 @@ +[project] +name = "tnc-edge-service" +version = "0.1.0" +description = "Codebase to manage Jetson edge device configuration and data retrieving and uploading to cloud." +readme = "README.md" +requires-python = ">=3.12" +dependencies = [ + "alembic>=1.13.3", + "boto3>=1.35.46", + "click>=8.1.7", + "flask>=3.0.3", + "flask-admin>=1.6.1", + "flask-sqlalchemy>=3.1.1", + "geographiclib>=2.0", + "nmeasim>=1.1.1.0", + "psycopg2-binary>=2.9.10", + "pynmeagps>=1.0.43", + "requests>=2.32.3", + "schedule>=1.2.2", + "sqlalchemy>=2.0.36", + "wheel>=0.44.0", +] + +[tool.uv] +dev-dependencies = [ + "detect-secrets>=1.5.0", + "docstr-coverage>=2.3.2", + "ipykernel>=6.29.5", + "pandas>=2.2.3", + "polars>=1.10.0", + "pre-commit>=4.0.1", + "pytest>=8.3.3", + "pytest-cov>=5.0.0", + "ruff>=0.7.0", +] + +[tool.pytest.ini_options] +# Adding the current directory to the Python path so that pytest can find the modules to test. +# Directories will be added to the head of sys.path +# Docs relevant to this step: https://docs.pytest.org/en/7.1.x/reference/reference.html#confval-pythonpath +pythonpath = "." 
+testpaths = ["tests"] + +[tool.coverage.report] +exclude_also = [ + # Don't complain about missing debug-only code: + "def __repr__", + "def __str__", + "if self\\.debug", + + # Don't complain if tests don't hit defensive assertion code: + "raise AssertionError", + "raise NotImplementedError", + + # Don't complain if non-runnable code isn't run: + "if 0:", + "if __name__ == .__main__.:", + + # Don't complain about abstract methods, they aren't run: + "@(abc\\.)?abstractmethod", +] +omit = ["**/tests/*", "*/__init__.py"] +include_namespace_packages = "true" + +[tool.ruff] +line-length = 100 +extend-exclude = [".venv"] + +[tool.pyright] +typeCheckingMode = "strict" +exclude = [".venv"] +reportMissingTypeStubs = false +reportUnusedExpression = false +reportUnknownVariableType = false +reportUnknownMemberType = false +reportUnknownArgumentType = false +reportUnknownParameterType = false +reportUnknownLambdaType = false +reportUntypedFunctionDecorator = false +reportMissingTypeArgument = false +reportMissingParameterType = false +reportArgumentType = false + +[tool.ruff.lint.extend-per-file-ignores] +"**/tests/**/*.py" = [ + "S101", # S101 - Checks for uses of the assert keyword. - necessary for testing. + "B018", # B018 - Found useless expression - necessary for testing exceptions are raised. + "D100", # D100 - Module dostrings not required in test files. + "D104", # D104 - Package dostrings not required in test files. + "ARG", # ARG - Unused args are common in tests with mock patches and mock functions. +] +"**/notebooks/**/*.py" = ["B018", "ANN"] + +[tool.ruff.lint] +select = [ + "F", # Pyflakes + "E", # pycodestyle (error) + "W", # pycodestyle (warning) + "D", # pydocstyle - docstring style checker + "C901", # Checks for functions with a high McCabe complexity. + "I", # isort + "N", # pep8-naming - Check PEP-8 naming conventions + "UP", # pyupgade - automatically upgrade syntax for newer versions. + "YTT", # Checks for misuse of `sys.version` or `sys.version_info` + "ANN", # flake8-annotations + "ASYNC", # flake8 plugin for Trio-AnyIO-asyncio related problems. + "S", # Automated security testing with bandit and flake8. + "BLE", # flake8-blind-except - checks for blind except: statements + "FBT", # flake8-boolean-trap - forbids boolean positional arguments + "B", # flake8-bugbear - find likely bugs and design problems in your program + "A", # flake8-builtins - Check for python builtins being used as variables or parameters + "COM", # flake8 lint for trailing commas. + "C4", # flake8-comprehensions - help you write better list/set/dict comprehensions. + #"DTZ", # flake8 - ban the usage of unsafe naive datetime class + "T10", # ipdb/pdb statement checker plugin for flake8 + "DJ", # Catch bad style specific to Django Projects. + "EM", # Flake checker for raw literals inside raises + "EXE", # checking executable permissions and shebangs. + "ISC", # Encourage correct string literal concatenation. + "ICN", # An opinionated plugin for Flake8 on how certain packages should be imported or aliased. + "LOG", # Checks for issues using the standard library logging module. + "G", # Validate (lack of) logging format strings + "PIE", # flake8-pie implements misc lints + #"T20", # flake8-print - print statement checker + "PYI", # flake8-pyi - enable linting .pyi stub files. + "PT", # checking common style issues or inconsistencies with pytest-based tests. + "Q", # Flake8 lint for quotes. + "RSE", # flake8-raise - finds improvements for raise statements. 
+ "SLF", # flake8-self - Private member access linting + "SIM", # flake8-simplify - checks for code that can be simplified + "TID", # flake8-tidy-imports - helps you write tidier imports. + "TCH", # managing type-checking imports & forward references + "INT", # flake-8 gettext + "ARG", # flake8-unused-arguments - warn on unused function arguments + #"PTH", # finding use of functions that can be replaced by pathlib module. + "TD", # check TODOs in the project. + "FIX", # flake8-fixme - Check for FIXME, TODO and other temporary developer notes. + "ERA", # eradicate - Removes commented-out code. + "PD", # plugin to lint pandas in an opinionated way. + "PGH", # pygrep-hooks + "PL", # Pylint - python code static checker + "TRY", # tryceratops - Prevent Exception Handling AntiPatterns + "FLY", # convert a python project's %-formatted strings to f-strings. + "NPY", # NumPy-specific rules + "AIR", # airflow specific rules + "PERF", # perflint - prevent with performance anti-patterns + "FURB", # refurbish and modernize Python codebases + #"CPY", # adds copyright checks + # "INP", # Ban PEP-420 implicit namespace packages. + #"RET", # flake8-return - checks return values + #"SLOT", # Plugin to require __slots__ to be defined for subclasses of immutable types. +] + +unfixable = [ + "PIE794", # duplicate class field name - the fix is to delete one entry, which is unlikely to be the correct fix. +] + +ignore = [ + "ANN101", # Don't require type annotations for self. + "ANN102", # Don't require type annotations for cls. + "ANN401", # Any is allowed. + "D418", # overload-with-docstring; Overloads should have docstrings when they alter the API/functionality. + "FIX002", # Allow TODO tags but no others. + "TRY003", # Makes no sense for builtin exceptions. + "RSE102", # This is a stylistic choice, but the check doesn't verify the item being raised is an object preventing a function from returning an error to raise. + "PERF203", # No try...except in for loop - No clear action to take when this is flagged. + "RUF013", # This is a very basic check prone to false positives, let pyright handle it + "E203", # Whitespace before ':' - Don't lint for style beyond what black cares about. + "PLR6301", # no-self-use - Switching to static has functionality considerations and may not be appropriate. + "FBT003", # Boolean positional value in function call - this flags an issue twice and forces per use suppression. + "SIM108", # Use ternary; this can be good, but the check is overzealous and suggests changes that involve line breaks. + "SIM117", # Use single with; like SIM108 this can be good, but it isn't appropriate in all contexts and could easily result in line breaks. + "A003", # Class attribute shadowing builtin; doesn't matter for classes. + "PIE790", # Unnecessary "..." literal; conflicts with pyright for protocols. Removing ... pyright expects a return statement. + "D100", # Missing docstring in public module + "D101", # Missing docstring in public class + "D102", # Missing docstring in public method + "D103", # Missing docstring in public function + "D104", # Missing docstring in public package + "D106", # Missing docstring in public nested class + "D107", # Missing docstring in `__init__` + # The settings below are suggested to be disabled when formatting with Ruff. + "W191", # tab indentation; conflicts with formatting. + "E111", # indentation with with invalid multiple; conflicts with formatting. + "E114", # indentation with invalid comment; conflicts with formatting. 
+ "E117", # over indented; conflicts with formatting. + "D206", # indent with spaces; conflicts with formatting. + "D300", # triple single quotes; conflicts with formatting. + "Q000", # bad quotes inline string; conflicts with formatting. + "Q001", # bad quotes multiline string; conflicts with formatting. + "Q002", # bad quotes docstring; conflicts with formatting. + "Q003", # avoidable escpaped quote; conflicts with formatting. + "COM812", # missing trailing comma; conflicts with formatting. + "COM819", # prohibited trailing comma; conflicts with formatting. + "ISC001", # single line implicit string concatenation; conflicts with formatting. + "ISC002", # multi line implicit string concatenation; conflicts with formatting. + "G004", # f-string formatting; conflicts with formatting. + "PLR0913", # too many arguments in function definition + "S311", # Standard pseudo-random generators are not suitable for cryptographic purposes + "E501", # Line too long + "ERA001", # Found commented-out code + "ANN001", # "Missing type annotation" + "ANN201", # "Missing return type" +] + +# Exclude a variety of commonly ignored directories. +exclude = [ + ".bzr", + ".direnv", + ".eggs", + ".git", + ".git-rewrite", + ".hg", + ".mypy_cache", + ".nox", + ".pants.d", + ".pytype", + ".ruff_cache", + ".svn", + ".tox", + ".venv", + "__pypackages__", + "_build", + "buck-out", + "build", + "dist", + "node_modules", + "venv", + "notebooks", +] + +# Settings for google standard. +[tool.ruff.lint.pydocstyle] +convention = "google" diff --git a/reencode.py b/reencode.py index 98991ab..441fa79 100644 --- a/reencode.py +++ b/reencode.py @@ -1,62 +1,70 @@ - -import shutil -import click -import json import os -from pathlib import Path -import re -import schedule import subprocess -from subprocess import CompletedProcess import time +from pathlib import Path +from subprocess import CompletedProcess -from model import Base as ModelBase, VideoFile, OndeckData +import click +import schedule import sqlalchemy as sa -from sqlalchemy.orm import sessionmaker as SessionMaker, Query +from flask.config import Config as FlaskConfig +from sqlalchemy.orm import Query +from sqlalchemy.orm import sessionmaker as SessionMaker from sqlalchemy.orm.session import Session -from flask.config import Config as FlaskConfig -flaskconfig = FlaskConfig(root_path='') +from model import Base as ModelBase +from model import VideoFile + +flaskconfig = FlaskConfig(root_path="") + +flaskconfig.from_object("config.defaults") +if "ENVIRONMENT" in os.environ: + flaskconfig.from_envvar("ENVIRONMENT") -flaskconfig.from_object('config.defaults') -if 'ENVIRONMENT' in os.environ: - flaskconfig.from_envvar('ENVIRONMENT') def system_gst_check() -> str: # nvidia hw encoder - p: CompletedProcess[str] = subprocess.run("gst-inspect-1.0 | grep -q 'nvv4l2h265enc:'", shell=True, capture_output=False) + p: CompletedProcess[str] = subprocess.run( + "gst-inspect-1.0 | grep -q 'nvv4l2h265enc:'", shell=True, capture_output=False, check=False + ) if p.returncode == 0: - return ' nvv4l2decoder mjpeg=true ! nvv4l2h265enc bitrate=2000000 ' - + return " nvv4l2decoder mjpeg=true ! nvv4l2h265enc bitrate=2000000 " + # osx hw encoder - p: CompletedProcess[str] = subprocess.run("gst-inspect-1.0 | grep -q 'vtenc_h265_hw:'", shell=True, capture_output=False) + p: CompletedProcess[str] = subprocess.run( + "gst-inspect-1.0 | grep -q 'vtenc_h265_hw:'", shell=True, capture_output=False, check=False + ) if p.returncode == 0: - return ' jpegdec ! vtenc_h265_hw bitrate=2000 ' + return " jpegdec ! 
vtenc_h265_hw bitrate=2000 " raise Exception("unknown gst plugins") + gst_internal_plugins = system_gst_check() + def next_videos(session: Session): - results: Query[VideoFile] = session.query(VideoFile).from_statement(sa.text( - """ - select video_files.* from video_files + results: Query[VideoFile] = session.query(VideoFile).from_statement( + sa.text( + """ + select video_files.* from video_files cross join ( - select coalesce(max(start_datetime), to_timestamp(0)) as latest_reencoded - from video_files + select coalesce(max(start_datetime), to_timestamp(0)) as latest_reencoded + from video_files where video_files.reencoded_stdout is not null or video_files.reencoded_stderr is not null ) latest_reencoded - where video_files.decrypted_path is not null + where video_files.decrypted_path is not null and video_files.reencoded_stdout is null and video_files.reencoded_stderr is null and video_files.start_datetime >= latest_reencoded.latest_reencoded order by video_files.start_datetime asc; - """)) - return list(results) + """ + ) + ) + return list(results) def run_reencode(output_dir: Path, sessionmaker: SessionMaker): - video_files: list[VideoFile] = [] with sessionmaker() as session: @@ -67,25 +75,30 @@ def run_reencode(output_dir: Path, sessionmaker: SessionMaker): video_file: VideoFile = video_files.pop(0) # print(video_file) decrypted_path = Path(video_file.decrypted_path) - last_dot_index: int = decrypted_path.name.index('.') + last_dot_index: int = decrypted_path.name.index(".") if last_dot_index < 0: last_dot_index = None mkv_out_file: Path = output_dir / Path(decrypted_path.name[0:last_dot_index] + "_reenc.mkv") - - cmd: str = "gst-launch-1.0 filesrc location='%s' ! avidemux ! \ + + cmd: str = ( + "gst-launch-1.0 filesrc location='%s' ! avidemux ! \ %s ! \ - h265parse ! matroskamux ! filesink location='%s'"%( - str(decrypted_path.absolute()), - gst_internal_plugins, - str(mkv_out_file.absolute()) - ) - + h265parse ! matroskamux ! filesink location='%s'" + % (str(decrypted_path.absolute()), gst_internal_plugins, str(mkv_out_file.absolute())) + ) + update_reencoded_path = None - p: CompletedProcess[str] = subprocess.run(cmd, shell=True, capture_output=True, text=True) - if p.returncode == 0 and p.stderr.find("No such file") < 0 and p.stderr.find("Failed to start") < 0: + p: CompletedProcess[str] = subprocess.run( + cmd, shell=True, capture_output=True, text=True, check=False + ) + if ( + p.returncode == 0 + and p.stderr.find("No such file") < 0 + and p.stderr.find("Failed to start") < 0 + ): update_reencoded_path = str(mkv_out_file.absolute()) - + try: # shutil.copy(mkv_out_file, Path('/usbdrive/') / mkv_out_file.name ) pass @@ -93,43 +106,40 @@ def run_reencode(output_dir: Path, sessionmaker: SessionMaker): # FileNotFoundError or some other permissions error. Drive must not be inserted. Ignore. 
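# For concreteness, a sketch (invented paths) of the command string assembled
# above when system_gst_check selects the NVIDIA hardware encoder:
#
#   gst-launch-1.0 filesrc location='/videos/cam1_0800.avi' ! avidemux ! \
#     nvv4l2decoder mjpeg=true ! nvv4l2h265enc bitrate=2000000 ! \
#     h265parse ! matroskamux ! filesink location='/videos/cam1_0800_reenc.mkv'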
pass - with sessionmaker() as session: - session.execute(sa.text("update video_files set \ + session.execute( + sa.text( + "update video_files set \ reencoded_path = :reencoded_path, reencoded_datetime = current_timestamp, \ reencoded_stdout = :reencoded_stdout, reencoded_stderr = :reencoded_stderr \ - where decrypted_path = :decrypted_path;"), { - "reencoded_path": update_reencoded_path, - "reencoded_stdout":p.stdout, - "reencoded_stderr":p.stderr, - "decrypted_path": str(decrypted_path.absolute()), - } - ) + where decrypted_path = :decrypted_path;" + ), + { + "reencoded_path": update_reencoded_path, + "reencoded_stdout": p.stdout, + "reencoded_stderr": p.stderr, + "decrypted_path": str(decrypted_path.absolute()), + }, + ) session.commit() - + with sessionmaker() as session: video_files = next_videos(session) - - @click.command() -@click.option('--dbname', default=flaskconfig.get('DBNAME')) -@click.option('--dbuser', default=flaskconfig.get('DBUSER')) -@click.option('--output_dir', default=flaskconfig.get('VIDEO_OUTPUT_DIR')) -@click.option('--print_queue', is_flag=True) +@click.option("--dbname", default=flaskconfig.get("DBNAME")) +@click.option("--dbuser", default=flaskconfig.get("DBUSER")) +@click.option("--output_dir", default=flaskconfig.get("VIDEO_OUTPUT_DIR")) +@click.option("--print_queue", is_flag=True) def main(dbname, dbuser, output_dir, print_queue): - output_dir = Path(output_dir) - - - sa_engine = sa.create_engine("postgresql+psycopg2://%s@/%s"%(dbuser, dbname), echo=True) + sa_engine = sa.create_engine(f"postgresql+psycopg2://{dbuser}@/{dbname}", echo=True) sessionmaker = SessionMaker(sa_engine) ModelBase.metadata.create_all(sa_engine) - if print_queue: with sessionmaker() as session: video_files = next_videos(session) @@ -140,10 +150,10 @@ def main(dbname, dbuser, output_dir, print_queue): def runonce(output_dir, sessionmaker): run_reencode(output_dir, sessionmaker) return schedule.CancelJob - + schedule.every(1).seconds.do(runonce, output_dir, sessionmaker) - schedule.every(5).minutes.do(run_reencode, output_dir, sessionmaker ) + schedule.every(5).minutes.do(run_reencode, output_dir, sessionmaker) while 1: n = schedule.idle_seconds() @@ -152,10 +162,10 @@ def runonce(output_dir, sessionmaker): break elif n > 0: # sleep exactly the right amount of time - click.echo(f'sleeping for: {n}') + click.echo(f"sleeping for: {n}") time.sleep(n) schedule.run_pending() -if __name__ == '__main__': - main() +if __name__ == "__main__": + main() diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 441acca..0000000 --- a/requirements.txt +++ /dev/null @@ -1,14 +0,0 @@ -flask -flask_admin -sqlalchemy<2.0 -click -nmeasim -geographiclib -pynmeagps -wheel -psycopg2-binary -Flask-SQLAlchemy==3.0.3 -alembic -requests -schedule -boto3 diff --git a/run_aifish.py b/run_aifish.py index f6a6fce..3f28ff5 100644 --- a/run_aifish.py +++ b/run_aifish.py @@ -1,104 +1,122 @@ - -from datetime import datetime, timezone, timedelta -from dateutil import parser -import click -from collections import defaultdict import json import os -from pathlib import Path -import re -import requests -from requests import Response -import schedule import shutil import subprocess -from subprocess import CompletedProcess import sys import time +from collections import defaultdict +from datetime import UTC, datetime, timedelta +from pathlib import Path -from model import Base as ModelBase, VideoFile, AifishData, Track +import click +import requests +import schedule import sqlalchemy as sa -from 
 
     while 1:
         n = schedule.idle_seconds()
@@ -152,10 +162,10 @@ def runonce(output_dir, sessionmaker):
             break
         elif n > 0:
             # sleep exactly the right amount of time
-            click.echo(f'sleeping for: {n}')
+            click.echo(f"sleeping for: {n}")
             time.sleep(n)
         schedule.run_pending()
 
-if __name__ == '__main__':
-    main()
+if __name__ == "__main__":
+    main()
diff --git a/requirements.txt b/requirements.txt
deleted file mode 100644
index 441acca..0000000
--- a/requirements.txt
+++ /dev/null
@@ -1,14 +0,0 @@
-flask
-flask_admin
-sqlalchemy<2.0
-click
-nmeasim
-geographiclib
-pynmeagps
-wheel
-psycopg2-binary
-Flask-SQLAlchemy==3.0.3
-alembic
-requests
-schedule
-boto3
diff --git a/run_aifish.py b/run_aifish.py
index f6a6fce..3f28ff5 100644
--- a/run_aifish.py
+++ b/run_aifish.py
@@ -1,104 +1,122 @@
-
-from datetime import datetime, timezone, timedelta
-from dateutil import parser
-import click
-from collections import defaultdict
 import json
 import os
-from pathlib import Path
-import re
-import requests
-from requests import Response
-import schedule
 import shutil
 import subprocess
-from subprocess import CompletedProcess
 import sys
 import time
+from collections import defaultdict
+from datetime import UTC, datetime, timedelta
+from pathlib import Path
 
-from model import Base as ModelBase, VideoFile, AifishData, Track
+import click
+import requests
+import schedule
 import sqlalchemy as sa
-from sqlalchemy.orm import sessionmaker as SessionMaker, Query
+from dateutil import parser
+from flask.config import Config as FlaskConfig
+from requests import Response
+from sqlalchemy.orm import Query
+from sqlalchemy.orm import sessionmaker as SessionMaker
 from sqlalchemy.orm.session import Session
 
-from flask.config import Config as FlaskConfig
-flaskconfig = FlaskConfig(root_path='')
+from model import AifishData, Track, VideoFile
+from model import Base as ModelBase
+
+flaskconfig = FlaskConfig(root_path="")
 
-flaskconfig.from_object('config.defaults')
-if 'ENVIRONMENT' in os.environ:
-    flaskconfig.from_envvar('ENVIRONMENT')
+flaskconfig.from_object("config.defaults")
+if "ENVIRONMENT" in os.environ:
+    flaskconfig.from_envvar("ENVIRONMENT")
 
-# select video_files.* from video_files
+# select video_files.* from video_files
 # join (
-#     select COALESCE(max(workday_counts.workday), '1970-01-01') most_recent_active_workday
+#     select COALESCE(max(workday_counts.workday), '1970-01-01') most_recent_active_workday
 #     from (
#         select date(start_datetime AT TIME ZONE 'utc' - interval '8 hours' ) as workday,
-#             count(*) as count
-#             from video_files
-#             where decrypted_path is not null
+#             count(*) as count
+#             from video_files
+#             where decrypted_path is not null
 #             group by workday
-#     ) workday_counts
+#     ) workday_counts
 #     where workday_counts.count > 4
-# ) workdays
+# ) workdays
 # on video_files.start_datetime >= workdays.most_recent_active_workday + time with time zone '08:00Z'
-# left join aifishdata
-# on video_files.decrypted_path = aifishdata.video_uri
-# where video_files.decrypted_path is not null
+# left join aifishdata
+# on video_files.decrypted_path = aifishdata.video_uri
+# where video_files.decrypted_path is not null
 # and aifishdata.video_uri is null
 # and video_files.cam_name = 'cam1'
 # order by video_files.decrypted_datetime asc;
+
 
 def next_videos(session: Session, thalos_cam_name):
-    workday_start_hour_at_utc_interval = '8 hours';
-    workday_start_hour_at_utc_timestr = '08:00Z';
-    num_vids_required = 4;
-    results: Query[VideoFile] = session.query(VideoFile).from_statement(sa.text(
-        """
-        select video_files.* from video_files
+    workday_start_hour_at_utc_interval = "8 hours"
+    workday_start_hour_at_utc_timestr = "08:00Z"
+    num_vids_required = 4
+    results: Query[VideoFile] = (
+        session.query(VideoFile)
+        .from_statement(
+            sa.text(
+                """
+        select video_files.* from video_files
         join (
-            select COALESCE(max(workday_counts.workday), '1970-01-01') most_recent_active_workday
+            select COALESCE(max(workday_counts.workday), '1970-01-01') most_recent_active_workday
             from (
                 select date(start_datetime AT TIME ZONE 'utc' - interval :timei ) as workday,
-                    count(*) as count
-                    from video_files
-                    where decrypted_path is not null
+                    count(*) as count
+                    from video_files
+                    where decrypted_path is not null
                    group by workday
-            ) workday_counts
+            ) workday_counts
             where workday_counts.count > :numvids
-        ) workdays
-        on video_files.start_datetime >= workdays.most_recent_active_workday + time with time zone :times
-        left join aifishdata
-        on video_files.decrypted_path = aifishdata.video_uri
-        where video_files.decrypted_path is not null
+        ) workdays
+        on video_files.start_datetime >= workdays.most_recent_active_workday + time with time zone :times
+        left join aifishdata
+        on video_files.decrypted_path = aifishdata.video_uri
+        where video_files.decrypted_path is not null
         and aifishdata.video_uri is null
         and video_files.cam_name = :cam_name
         order by video_files.decrypted_datetime asc;
-        """)).params(
-        {
-            "timei": workday_start_hour_at_utc_interval,
-            "times": workday_start_hour_at_utc_timestr,
-            "numvids": num_vids_required,
-            "cam_name": thalos_cam_name,
-        })
-    return list(results)
+        """
+            )
+        )
+        .params(
+            {
+                "timei": workday_start_hour_at_utc_interval,
+                "times": workday_start_hour_at_utc_timestr,
+                "numvids": num_vids_required,
+                "cam_name": thalos_cam_name,
+            }
+        )
+    )
+    return list(results)
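+
+
+# note (worked example): with the 8-hour offset a "workday" starts at 08:00 UTC,
+# so e.g. a video stamped 2023-07-21T06:30Z counts against the 2023-07-20 workday;
+# a workday only becomes the "most recent active" one once it has more than
+# :numvids (4) decrypted videos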
+
+
 def v2_next_videos(session: Session, thalos_cam_name):
-    results: Query[VideoFile] = session.query(VideoFile).from_statement(sa.text(
-        """
-        select video_files.* from video_files
-        left join aifishdata
-        on video_files.decrypted_path = aifishdata.video_uri
-        where video_files.decrypted_path is not null
+    results: Query[VideoFile] = (
+        session.query(VideoFile)
+        .from_statement(
+            sa.text(
+                """
+        select video_files.* from video_files
+        left join aifishdata
+        on video_files.decrypted_path = aifishdata.video_uri
+        where video_files.decrypted_path is not null
         and video_files.start_datetime is not null
         and aifishdata.video_uri is null
         and video_files.cam_name = :cam_name
         order by video_files.start_datetime asc;
-        """)).params(
-        {
-            "cam_name": thalos_cam_name,
-        })
-    return list(results)
+        """
+            )
+        )
+        .params(
+            {
+                "cam_name": thalos_cam_name,
+            }
+        )
+    )
+    return list(results)
 
 
 MAGIC_VALUE_5_MiB = 5 * 1024 * 1024
 
@@ -111,47 +129,53 @@ def parse_json(session: Session, decrypted_path: Path, json_out_file: Path, only
     if len(detections) == 0:
         # error handling here
         pass
-
-    fish_detections = list(filter(lambda d: d.get('class_name') == 'fish', detections))
+
+    fish_detections = list(filter(lambda d: d.get("class_name") == "fish", detections))
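+    # each detection dict is expected to look roughly like
+    #   {"frame": 12, "class_name": "fish", "object_confidence": 0.87, "track": 3}
+    # (shape inferred from the accesses below; the exact schema comes from the model output)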
     if len(fish_detections) == 0:
         # error handling here
         if only_tracks:
             return
-        session.execute(sa.text("""insert into aifishdata ( video_uri, output_uri,
-                                    count, detection_confidence )
-                values ( :decrypted_path, :json_out_file , :cnt, :mean_c)
-                on conflict (video_uri) do update set
+        session.execute(
+            sa.text("""insert into aifishdata ( video_uri, output_uri,
+                                    count, detection_confidence )
+                values ( :decrypted_path, :json_out_file , :cnt, :mean_c)
+                on conflict (video_uri) do update set
                     output_uri = :json_out_file,
                     count = :cnt,
                     detection_confidence = :mean_c
-                ;"""), {
-                "decrypted_path": str(decrypted_path.absolute()),
-                "json_out_file":str(json_out_file.absolute()),
-                "cnt": 0,
-                "mean_c": 0,
-            }
-        )
+                ;"""),
+            {
+                "decrypted_path": str(decrypted_path.absolute()),
+                "json_out_file": str(json_out_file.absolute()),
+                "cnt": 0,
+                "mean_c": 0,
+            },
+        )
         session.commit()
         return
 
-    last_frame = max(map(lambda d: d.get('frame'), detections))
+    last_frame = max(map(lambda d: d.get("frame"), detections))
 
     frames = []
-    detectionconfidences = list(filter(lambda x: x is not None, map(lambda d: d.get('object_confidence'), fish_detections)))
+    detectionconfidences = list(
+        filter(
+            lambda x: x is not None, map(lambda d: d.get("object_confidence"), fish_detections)
+        )
+    )
     # = max(map(lambda detection: detection.get('object_confidence'), detections))
     # trackedconfidences = []
 
     tracks = defaultdict(list)
     for d in fish_detections:
-        tracks[d.get('track')].append(d)
-
+        tracks[d.get("track")].append(d)
+
     cnt = len(tracks.keys())
 
     done_tracks = []
     for track_id, detections in tracks.items():
-        frame_nums = list(map(lambda d: d.get('frame'), detections))
+        frame_nums = list(map(lambda d: d.get("frame"), detections))
         min_frame = min(frame_nums)
         max_frame = max(frame_nums)
@@ -163,41 +187,45 @@ def parse_json(session: Session, decrypted_path: Path, json_out_file: Path, only
         t.last_framenum = max_frame
         t.confidences = [0 for i in range(1 + max_frame - min_frame)]
         for d in detections:
-            t.confidences[d.get('frame') - min_frame] = d.get('object_confidence') or 0
+            t.confidences[d.get("frame") - min_frame] = d.get("object_confidence") or 0
         done_tracks.append(t)
 
     session.add_all(done_tracks)
     session.commit()
     if only_tracks:
         return
-
+
     if len(detectionconfidences) > 0:
-        meandetectionconfidence = float(sum(detectionconfidences)) / float(len(detectionconfidences))
+        meandetectionconfidence = float(sum(detectionconfidences)) / float(
+            len(detectionconfidences)
+        )
     else:
         meandetectionconfidence = 0
 
-    # with sessionmaker() as session:
-    session.execute(sa.text("""insert into aifishdata ( video_uri, output_uri,
-                                count, detection_confidence )
-            values ( :decrypted_path, :json_out_file , :cnt, :mean_c)
-            on conflict (video_uri) do update set
+    session.execute(
+        sa.text("""insert into aifishdata ( video_uri, output_uri,
+                                count, detection_confidence )
+            values ( :decrypted_path, :json_out_file , :cnt, :mean_c)
+            on conflict (video_uri) do update set
                output_uri = :json_out_file,
                count = :cnt,
                detection_confidence = :mean_c
-            ;"""), {
-            "decrypted_path": str(decrypted_path.absolute()),
-            "json_out_file":str(json_out_file.absolute()),
-            "cnt":cnt,
-            "mean_c":meandetectionconfidence,
-        }
-    )
+            ;"""),
+        {
+            "decrypted_path": str(decrypted_path.absolute()),
+            "json_out_file": str(json_out_file.absolute()),
+            "cnt": cnt,
+            "mean_c": meandetectionconfidence,
+        },
+    )
     session.commit()
 
-VIDEO_TOO_SMALL = 1024*1024
+
+VIDEO_TOO_SMALL = 1024 * 1024
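+# i.e. anything under 1 MiB is treated as a truncated or failed recording below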
skipping video" + ) continue rname = v_source_name[::-1] - last_dot_index: int = rname.find('.') + last_dot_index: int = rname.find(".") if last_dot_index < 0: json_out_file: Path = output_dir / Path(v_source_name + ".json") else: - json_out_file: Path = output_dir / Path(v_source_name[0:-last_dot_index-1] + ".json") + json_out_file: Path = output_dir / Path( + v_source_name[0 : -last_dot_index - 1] + ".json" + ) - aifish_processing_path = decrypted_path.parent / 'processing' / v_source_name + aifish_processing_path = decrypted_path.parent / "processing" / v_source_name # decrypted_path.rename(aifish_processing_path) @@ -243,42 +285,44 @@ def enqueue(output_dir: Path, sessionmaker: SessionMaker, thalos_cam_name: str): shutil.copy(v_source_path, aifish_processing_path) with sessionmaker() as session: - session.execute(sa.text("""insert into aifishdata ( video_uri, processing_uri, output_uri, status ) + session.execute( + sa.text("""insert into aifishdata ( video_uri, processing_uri, output_uri, status ) values ( :video_uri, :processing_uri, :output_uri, :status ) - on conflict (video_uri) DO UPDATE SET status = :status ;"""), { - "video_uri": str(decrypted_path.absolute()), - "processing_uri": str(aifish_processing_path.absolute()), - "output_uri": str(json_out_file.absolute()), - "status": "queued" - } + on conflict (video_uri) DO UPDATE SET status = :status ;"""), + { + "video_uri": str(decrypted_path.absolute()), + "processing_uri": str(aifish_processing_path.absolute()), + "output_uri": str(json_out_file.absolute()), + "status": "queued", + }, ) session.commit() + MAGIC_VALUE_1_MINUTE = 60 + def parse(output_dir: Path, sessionmaker: SessionMaker): # only pick files that end with .json - a = filter(lambda x: x.is_file() and x.name.endswith('.json'), output_dir.iterdir()) + a = filter(lambda x: x.is_file() and x.name.endswith(".json"), output_dir.iterdir()) epoch_now = int(time.time()) # only pick files that haven't been modified in the last minute b = filter(lambda x: x.stat().st_mtime + MAGIC_VALUE_1_MINUTE < epoch_now, a) # get the filenames - c = map(lambda x: str(x.absolute()) , b) + c = map(lambda x: str(x.absolute()), b) found_aifish_files = list(c) - click.echo("found {} .json files".format(str(len(found_aifish_files)))) + click.echo(f"found {str(len(found_aifish_files))} .json files") with sessionmaker() as session: - results: Query[AifishData] = session.query(AifishData).where( AifishData.status == 'queued' ) + results: Query[AifishData] = session.query(AifishData).where(AifishData.status == "queued") for pending_aifishdata in results: - # click.echo("found {} queued row".format(str(pending_aifishdata))) if pending_aifishdata.output_uri in found_aifish_files: - video = Path(pending_aifishdata.video_uri) processing = Path(pending_aifishdata.processing_uri) output = Path(pending_aifishdata.output_uri) @@ -293,80 +337,90 @@ def parse(output_dir: Path, sessionmaker: SessionMaker): if processing.exists(): processing.unlink() - + parse_json(session, video, output) pending_aifishdata.status = "done" session.commit() - - def errors(sessionmaker: SessionMaker): try: - r: Response = requests.get('http://127.0.0.1:5000/errors') + r: Response = requests.get("http://127.0.0.1:5000/errors") - click.echo("errors resp: {} body: {}".format(repr(r), repr(r.json()))) + click.echo(f"errors resp: {repr(r)} body: {repr(r.json())}") for error in r.json(): - input_path = error.get('input_path') - error_message = error.get('error_message') + input_path = error.get("input_path") + error_message = 
error.get("error_message") - if error_message.startswith('Task performance mode set to SKIP'): + if error_message.startswith("Task performance mode set to SKIP"): with sessionmaker() as session: - session.execute(sa.text("""insert into ondeckdata ( video_uri, status ) - values ( :decrypted_path, :skiphalfstatus ) - on conflict (video_uri) do update set + session.execute( + sa.text("""insert into ondeckdata ( video_uri, status ) + values ( :decrypted_path, :skiphalfstatus ) + on conflict (video_uri) do update set status = :skiphalfstatus - ;"""), { - "decrypted_path": input_path, - "skiphalfstatus": "runningskiphalf" - } + ;"""), + {"decrypted_path": input_path, "skiphalfstatus": "runningskiphalf"}, ) session.commit() continue with sessionmaker() as session: - session.execute(sa.text("""insert into ondeckdata ( video_uri, cocoannotations_uri ) - values ( :decrypted_path, :error_str ) - on conflict (video_uri) do update set + session.execute( + sa.text("""insert into ondeckdata ( video_uri, cocoannotations_uri ) + values ( :decrypted_path, :error_str ) + on conflict (video_uri) do update set status = 'errored', cocoannotations_uri = :error_str - ;"""), { - "decrypted_path": input_path, - "error_str": "ondeck model failure. stdout, stderr: " + error_message - } + ;"""), + { + "decrypted_path": input_path, + "error_str": "ondeck model failure. stdout, stderr: " + error_message, + }, ) session.commit() except requests.exceptions.RequestException as e: - click.echo("ondeck model errors request exception: {}".format(e)) + click.echo(f"ondeck model errors request exception: {e}") return + LOST_TIME_BUFFER = timedelta(minutes=30) + def lost_inprogress(sessionmaker: SessionMaker, aifish_processing_dir: Path): - last_start_time_s = subprocess.run('journalctl -o short-iso -u aifish_model.service | grep systemd | grep Started | tail -n 1 | sed "s/edge.*//"', shell=True, text=True, capture_output=True) + last_start_time_s = subprocess.run( + 'journalctl -o short-iso -u aifish_model.service | grep systemd | grep Started | tail -n 1 | sed "s/edge.*//"', + shell=True, + text=True, + capture_output=True, + check=False, + ) last_start_time_dt = parser.parse(last_start_time_s.stdout) - - check_these = list(filter( - lambda f: f.is_file() - and (f.name.endswith('.avi') - or f.name.endswith('.mkv')) - and datetime.fromtimestamp(f.stat().st_mtime, tz=timezone.utc) + LOST_TIME_BUFFER < last_start_time_dt, - aifish_processing_dir.iterdir() - )) + check_these = list( + filter( + lambda f: f.is_file() + and (f.name.endswith(".avi") or f.name.endswith(".mkv")) + and datetime.fromtimestamp(f.stat().st_mtime, tz=UTC) + LOST_TIME_BUFFER + < last_start_time_dt, + aifish_processing_dir.iterdir(), + ) + ) if len(check_these) > 0: abs_names = list(map(lambda f: str(f.absolute()), check_these)) with sessionmaker() as session: - rows: Query = session.query(AifishData) \ - .filter(AifishData.processing_uri.in_(abs_names)) \ - .filter(AifishData.status == 'queued') + rows: Query = ( + session.query(AifishData) + .filter(AifishData.processing_uri.in_(abs_names)) + .filter(AifishData.status == "queued") + ) for lost_file in rows.all(): - click.echo(f'found lost file in progress - deleting: {lost_file.processing_uri}') + click.echo(f"found lost file in progress - deleting: {lost_file.processing_uri}") Path(lost_file.processing_uri).unlink() - lost_file.status = 'errored' - session.commit() + lost_file.status = "errored" + session.commit() def ensure_is_dir(p: Path): @@ -383,21 +437,31 @@ def ensure_is_dir(p: Path): 
click.echo(f"Could not create folder {a}. Exiting") sys.exit(1) -@click.command() -@click.option('--dbname', default=flaskconfig.get('DBNAME')) -@click.option('--dbuser', default=flaskconfig.get('DBUSER')) -@click.option('--output_dir', default=flaskconfig.get('VIDEO_OUTPUT_DIR')) -@click.option('--engine', default=flaskconfig.get('ONDECK_MODEL_ENGINE')) -@click.option('--thalos_cam_name', default=flaskconfig.get('THALOS_CAM_NAME')) -@click.option('--print_queue', is_flag=True) -@click.option('--parsetesta') -@click.option('--parsetestb') -@click.option('--testlostinprogress', is_flag=True) -def main(dbname, dbuser, output_dir, engine, thalos_cam_name, print_queue, parsetesta, parsetestb, testlostinprogress): +@click.command() +@click.option("--dbname", default=flaskconfig.get("DBNAME")) +@click.option("--dbuser", default=flaskconfig.get("DBUSER")) +@click.option("--output_dir", default=flaskconfig.get("VIDEO_OUTPUT_DIR")) +@click.option("--engine", default=flaskconfig.get("ONDECK_MODEL_ENGINE")) +@click.option("--thalos_cam_name", default=flaskconfig.get("THALOS_CAM_NAME")) +@click.option("--print_queue", is_flag=True) +@click.option("--parsetesta") +@click.option("--parsetestb") +@click.option("--testlostinprogress", is_flag=True) +def main( + dbname, + dbuser, + output_dir, + engine, + thalos_cam_name, + print_queue, + parsetesta, + parsetestb, + testlostinprogress, +): video_output_dir = Path(output_dir) - aifish_processing_dir = video_output_dir / 'processing' - aifish_output_dir = video_output_dir / 'output' + aifish_processing_dir = video_output_dir / "processing" + aifish_output_dir = video_output_dir / "output" ensure_is_dir(aifish_processing_dir) ensure_is_dir(aifish_output_dir) @@ -405,8 +469,7 @@ def main(dbname, dbuser, output_dir, engine, thalos_cam_name, print_queue, parse if engine: engine = Path(engine) - - sa_engine = sa.create_engine("postgresql+psycopg2://%s@/%s"%(dbuser, dbname), echo=True) + sa_engine = sa.create_engine(f"postgresql+psycopg2://{dbuser}@/{dbname}", echo=True) sessionmaker = SessionMaker(sa_engine) ModelBase.metadata.create_all(sa_engine) @@ -427,31 +490,29 @@ def main(dbname, dbuser, output_dir, engine, thalos_cam_name, print_queue, parse lost_inprogress(sessionmaker, aifish_processing_dir) return - def runonce_enqueue(aifish_output_dir, sessionmaker, thalos_cam_name): enqueue(aifish_output_dir, sessionmaker, thalos_cam_name) return schedule.CancelJob - + schedule.every(1).seconds.do(runonce_enqueue, aifish_output_dir, sessionmaker, thalos_cam_name) - schedule.every(5).minutes.do(enqueue, aifish_output_dir, sessionmaker, thalos_cam_name ) + schedule.every(5).minutes.do(enqueue, aifish_output_dir, sessionmaker, thalos_cam_name) def runonce_errors(sessionmaker): errors(sessionmaker) return schedule.CancelJob - - schedule.every(1).seconds.do(runonce_errors, sessionmaker) + + schedule.every(1).seconds.do(runonce_errors, sessionmaker) schedule.every(1).minutes.do(errors, sessionmaker) def runonce_parse(aifish_output_dir, sessionmaker): parse(aifish_output_dir, sessionmaker) return schedule.CancelJob - + schedule.every(1).seconds.do(runonce_parse, aifish_output_dir, sessionmaker) - schedule.every(1).minutes.do(parse, aifish_output_dir, sessionmaker ) - + schedule.every(1).minutes.do(parse, aifish_output_dir, sessionmaker) # def runonce_lost_inprogress(sessionmaker, aifish_processing_dir): # lost_inprogress(sessionmaker, aifish_processing_dir) @@ -459,8 +520,6 @@ def runonce_parse(aifish_output_dir, sessionmaker): # 
 #     schedule.every(1).seconds.do(runonce_lost_inprogress, sessionmaker, aifish_processing_dir)
 #     schedule.every(5).minutes.do(lost_inprogress, sessionmaker, aifish_processing_dir )
-
-
     while 1:
         n = schedule.idle_seconds()
@@ -468,9 +527,10 @@ def runonce_parse(aifish_output_dir, sessionmaker):
             break
         elif n > 0:
             # sleep exactly the right amount of time
-            click.echo("sleeping for: {}".format(n))
+            click.echo(f"sleeping for: {n}")
             time.sleep(n)
         schedule.run_pending()
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     main()
diff --git a/run_ondeck.py b/run_ondeck.py
index 3401ff8..8a3f2cb 100644
--- a/run_ondeck.py
+++ b/run_ondeck.py
@@ -1,67 +1,78 @@
-
-from datetime import datetime, timezone
-import click
 import json
 import os
-from pathlib import Path
-import re
-import requests
-from requests import Response
-import schedule
 import subprocess
-from subprocess import CompletedProcess
 import time
+from datetime import UTC
+from pathlib import Path
+from subprocess import CompletedProcess
 
-from model import Base as ModelBase, VideoFile, OndeckData, Track
+import click
+import requests
+import schedule
 import sqlalchemy as sa
-from sqlalchemy.orm import sessionmaker as SessionMaker, Query
+from flask.config import Config as FlaskConfig
+from requests import Response
+from sqlalchemy.orm import Query
+from sqlalchemy.orm import sessionmaker as SessionMaker
 from sqlalchemy.orm.session import Session
 
-from flask.config import Config as FlaskConfig
-flaskconfig = FlaskConfig(root_path='')
+from model import Base as ModelBase
+from model import OndeckData, Track, VideoFile
+
+flaskconfig = FlaskConfig(root_path="")
+
+flaskconfig.from_object("config.defaults")
+if "ENVIRONMENT" in os.environ:
+    flaskconfig.from_envvar("ENVIRONMENT")
 
-flaskconfig.from_object('config.defaults')
-if 'ENVIRONMENT' in os.environ:
-    flaskconfig.from_envvar('ENVIRONMENT')
 
 def next_videos(session: Session, thalos_cam_name):
-    workday_start_hour_at_utc_interval = '8 hours';
-    workday_start_hour_at_utc_timestr = '08:00Z';
-    num_vids_required = 4;
-    results: Query[VideoFile] = session.query(VideoFile).from_statement(sa.text(
-        """
-        select video_files.* from video_files
+    workday_start_hour_at_utc_interval = "8 hours"
+    workday_start_hour_at_utc_timestr = "08:00Z"
+    num_vids_required = 4
+    results: Query[VideoFile] = (
+        session.query(VideoFile)
+        .from_statement(
+            sa.text(
+                """
+        select video_files.* from video_files
         join (
-            select COALESCE(max(workday_counts.workday), '1970-01-01') most_recent_active_workday
+            select COALESCE(max(workday_counts.workday), '1970-01-01') most_recent_active_workday
             from (
                 select date(start_datetime AT TIME ZONE 'utc' - interval :timei ) as workday,
-                    count(*) as count
-                    from video_files
-                    where decrypted_path is not null
+                    count(*) as count
+                    from video_files
+                    where decrypted_path is not null
                    group by workday
-            ) workday_counts
+            ) workday_counts
             where workday_counts.count > :numvids
-        ) workdays
-        on video_files.start_datetime >= workdays.most_recent_active_workday + time with time zone :times
-        left join ondeckdata
-        on video_files.decrypted_path = ondeckdata.video_uri
-        where video_files.decrypted_path is not null
+        ) workdays
+        on video_files.start_datetime >= workdays.most_recent_active_workday + time with time zone :times
+        left join ondeckdata
+        on video_files.decrypted_path = ondeckdata.video_uri
+        where video_files.decrypted_path is not null
         and ondeckdata.video_uri is null
         and video_files.cam_name = :cam_name
        order by video_files.decrypted_datetime asc;
""")).params( - { - "timei": workday_start_hour_at_utc_interval, - "times": workday_start_hour_at_utc_timestr, - "numvids": num_vids_required, - "cam_name": thalos_cam_name, - }) - return list(results) + """ + ) + ) + .params( + { + "timei": workday_start_hour_at_utc_interval, + "times": workday_start_hour_at_utc_timestr, + "numvids": num_vids_required, + "cam_name": thalos_cam_name, + } + ) + ) + return list(results) + MAGIC_VALUE_5_MiB = 5 * 1024 * 1024 + def run_ondeck(output_dir: Path, engine: Path, sessionmaker: SessionMaker, thalos_cam_name): - video_files: list[VideoFile] = [] with sessionmaker() as session: @@ -72,10 +83,12 @@ def run_ondeck(output_dir: Path, engine: Path, sessionmaker: SessionMaker, thalo video_file: VideoFile = video_files.pop(0) # click.echo(video_file) decrypted_path = Path(video_file.decrypted_path) - last_dot_index: int = decrypted_path.name.index('.') + last_dot_index: int = decrypted_path.name.index(".") if last_dot_index < 0: last_dot_index = None - json_out_file: Path = output_dir / Path(decrypted_path.name[0:last_dot_index] + "_ondeck.json") + json_out_file: Path = output_dir / Path( + decrypted_path.name[0:last_dot_index] + "_ondeck.json" + ) ondeck_input = str(decrypted_path.absolute()) try: @@ -86,177 +99,195 @@ def run_ondeck(output_dir: Path, engine: Path, sessionmaker: SessionMaker, thalo pass # sudo /usr/bin/docker run --rm -v /videos:/videos --runtime=nvidia --network none gcr.io/edge-gcr/edge-service-image:latest --output /videos --input /videos/21-07-2023-09-55.avi - cmd: str = "sudo /usr/bin/docker run --rm -v /videos:/videos --runtime=nvidia --network none \ + cmd: str = ( + "sudo /usr/bin/docker run --rm -v /videos:/videos --runtime=nvidia --network none \ gcr.io/edge-gcr/edge-service-image:latest \ - --output %s --input %s"%( - str(json_out_file.absolute()), - ondeck_input - ) + --output %s --input %s" + % (str(json_out_file.absolute()), ondeck_input) + ) if engine: - cmd += " --model %s"%( str(engine.absolute()), ) - p: CompletedProcess[str] = subprocess.run(cmd, shell=True, capture_output=True, text=True) + cmd += " --model %s" % (str(engine.absolute()),) + p: CompletedProcess[str] = subprocess.run( + cmd, shell=True, capture_output=True, text=True, check=False + ) if p.returncode == 0: - with sessionmaker() as session: parse_json(session, decrypted_path, json_out_file) else: # click.echo("ondeck model failure. stdout, stderr: {} {}".format( p.stdout, p.stderr)) with sessionmaker() as session: - session.execute(sa.text("insert into ondeckdata ( video_uri, cocoannotations_uri ) \ - values ( :decrypted_path, :error_str ) ;"), { - "decrypted_path": str(decrypted_path.absolute()), - "error_str": "ondeck model failure. stdout, stderr: " + p.stdout + p.stderr - } + session.execute( + sa.text( + "insert into ondeckdata ( video_uri, cocoannotations_uri ) \ + values ( :decrypted_path, :error_str ) ;" + ), + { + "decrypted_path": str(decrypted_path.absolute()), + "error_str": "ondeck model failure. 
stdout, stderr: " + p.stdout + p.stderr, + }, ) session.commit() with sessionmaker() as session: video_files = next_videos(session, thalos_cam_name) + def parse_json(session: Session, decrypted_path: Path, json_out_file: Path, only_tracks=False): with json_out_file.open() as f: o: dict = json.load(f) - if 'overallRuntimeMs' in o.keys(): + if "overallRuntimeMs" in o: if only_tracks: return v1_parse_json(session, decrypted_path, json_out_file, o) - elif 'overallRuntimeSeconds' in o.keys(): + elif "overallRuntimeSeconds" in o: v2_parse_json(session, decrypted_path, json_out_file, o, only_tracks=only_tracks) + def v1_parse_json(session: Session, decrypted_path: Path, json_out_file: Path, o: dict): - cnt = o.get('overallCount') - runtime = o.get('overallRuntimeMs') - frames = o.get('frames', []) + cnt = o.get("overallCount") + runtime = o.get("overallRuntimeMs") + frames = o.get("frames", []) - ## stats - trackedframes = filter(lambda frame: len(frame.get('trackingIds'))>0, frames) - confidencesarrs = map(lambda frame: frame.get('confidence'), trackedframes) + ## stats + trackedframes = filter(lambda frame: len(frame.get("trackingIds")) > 0, frames) + confidencesarrs = map(lambda frame: frame.get("confidence"), trackedframes) confidences = [c for confidencesarr in confidencesarrs for c in confidencesarr] if len(confidences) > 0: meanconf = float(sum(confidences)) / float(len(confidences)) else: meanconf = 0 - ## tracks + ## tracks tracks = {} for f in frames: - frame_confidences = f.get('confidence') + frame_confidences = f.get("confidence") i = 0 - for trackid in f.get('trackingIds'): + for trackid in f.get("trackingIds"): if trackid not in tracks: t = { - "first_frame": f.get('frameNum'), - "first_timestamp": f.get('timestamp'), - "confidences": [] - } + "first_frame": f.get("frameNum"), + "first_timestamp": f.get("timestamp"), + "confidences": [], + } tracks[trackid] = t t = tracks[trackid] if len(frame_confidences) > i: - t['confidences'].append(frame_confidences[i]) + t["confidences"].append(frame_confidences[i]) else: - t['confidences'].append(0) + t["confidences"].append(0) i += 1 - # with sessionmaker() as session: - session.execute(sa.text("insert into ondeckdata ( video_uri, cocoannotations_uri, \ + session.execute( + sa.text( + "insert into ondeckdata ( video_uri, cocoannotations_uri, \ overallcount, overallruntimems, tracked_confidence ) \ - values ( :decrypted_path, :json_out_file , :cnt, :runt, :mean_c) ;"), { - "decrypted_path": str(decrypted_path.absolute()), - "json_out_file":str(json_out_file.absolute()), - "cnt":cnt, - "runt":runtime, - "mean_c":meanconf, - } - ) + values ( :decrypted_path, :json_out_file , :cnt, :runt, :mean_c) ;" + ), + { + "decrypted_path": str(decrypted_path.absolute()), + "json_out_file": str(json_out_file.absolute()), + "cnt": cnt, + "runt": runtime, + "mean_c": meanconf, + }, + ) session.commit() -def v2_parse_json(session: Session, decrypted_path: Path, json_out_file: Path, o: dict, only_tracks=False): - - cnt = o.get('overallCount') - catches = o.get('overallCatches') - discards = o.get('overallDiscards') - runtime = o.get('overallRuntimeSeconds') - frames = o.get('frames', []) - +def v2_parse_json( + session: Session, decrypted_path: Path, json_out_file: Path, o: dict, only_tracks=False +): + cnt = o.get("overallCount") + catches = o.get("overallCatches") + discards = o.get("overallDiscards") + runtime = o.get("overallRuntimeSeconds") + frames = o.get("frames", []) detectionconfidences = [] # trackedconfidences = [] active_tracks = {} done_tracks: 
-
     for frame in frames:
-        detectionconfidences.extend(frame.get('confidence'))
-
+        detectionconfidences.extend(frame.get("confidence"))
+
         # idx = 0
         # for trackingId in frame.get('trackingIds'):
         #     if trackingId in frame.get('allActiveTrackingIds'):
         #         trackedconfidences.append(frame.get('confidence')[idx])
         #     idx += 1
-
-        if 'allActiveTrackingIds' not in frame:
+
+        if "allActiveTrackingIds" not in frame:
             continue
 
-        for activeTrackingId_str in frame['allActiveTrackingIds']:
+        for activeTrackingId_str in frame["allActiveTrackingIds"]:
             activeTrackingId = int(activeTrackingId_str)
-            if activeTrackingId not in active_tracks.keys():
+            if activeTrackingId not in active_tracks:
                 active_tracks[activeTrackingId] = Track()
                 active_tracks[activeTrackingId].video_uri = str(decrypted_path.absolute())
                 active_tracks[activeTrackingId].cocoannotations_uri = str(json_out_file.absolute())
                 active_tracks[activeTrackingId].track_id = activeTrackingId
-                active_tracks[activeTrackingId].first_framenum = frame['frameNum']
+                active_tracks[activeTrackingId].first_framenum = frame["frameNum"]
                 active_tracks[activeTrackingId].confidences = []
             t = active_tracks[activeTrackingId]
-            try:
-                idx = frame['trackingIds'].index(activeTrackingId_str)
-                t.confidences.append(frame['confidence'][idx])
+            try:
+                idx = frame["trackingIds"].index(activeTrackingId_str)
+                t.confidences.append(frame["confidence"][idx])
             except:
                 t.confidences.append(0.0)
 
         for track_id in list(active_tracks.keys()):
             track = active_tracks[track_id]
-            if str(track_id) not in frame['allActiveTrackingIds']:
+            if str(track_id) not in frame["allActiveTrackingIds"]:
                 # the confidences will probably have a long trail of 0s at the end, which are not useful
                 # cut them out
                 track.confidences.reverse()
-                last_nonzero_index = next((i for (i,x) in enumerate(track.confidences) if x), None)
+                last_nonzero_index = next((i for (i, x) in enumerate(track.confidences) if x), None)
                track.confidences.reverse()
                if last_nonzero_index:
                    track.confidences = track.confidences[:-last_nonzero_index]
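+                    # e.g. confidences [0.9, 0.0, 0.7, 0.0, 0.0]: reversed, the first
+                    # truthy value sits at index 2, so the slice drops the two trailing
+                    # zeros and keeps [0.9, 0.0, 0.7]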
-                track.last_framenum = frame['frameNum']
+                track.last_framenum = frame["frameNum"]
                 done_tracks.append(track)
                 active_tracks.pop(track_id)
 
-    session.add_all(done_tracks)
-    session.commit()
+    session.add_all(done_tracks)
+    session.commit()
 
     if only_tracks:
         return
-
+
     if len(detectionconfidences) > 0:
-        meandetectionconfidence = float(sum(detectionconfidences)) / float(len(detectionconfidences))
+        meandetectionconfidence = float(sum(detectionconfidences)) / float(
+            len(detectionconfidences)
+        )
     else:
         meandetectionconfidence = 0
 
     if len(done_tracks) > 0:
-        tracks_avg_conf = list(map(lambda t: float(sum(t.confidences)) / float(len(t.confidences)) if len(t.confidences) else 0.0, done_tracks))
-        meantrackedconfidence = float(sum(tracks_avg_conf)) / float(len(tracks_avg_conf)) if len(tracks_avg_conf) else 0.0
+        tracks_avg_conf = list(
+            map(
+                lambda t: float(sum(t.confidences)) / float(len(t.confidences))
+                if len(t.confidences)
+                else 0.0,
+                done_tracks,
+            )
+        )
+        meantrackedconfidence = (
+            float(sum(tracks_avg_conf)) / float(len(tracks_avg_conf))
+            if len(tracks_avg_conf)
+            else 0.0
+        )
     else:
         meantrackedconfidence = 0
 
-    # with sessionmaker() as session:
-    session.execute(sa.text("""insert into ondeckdata ( video_uri, cocoannotations_uri,
-            overallcount, overallcatches, overalldiscards, overallruntimems, detection_confidence, tracked_confidence )
-            values ( :decrypted_path, :json_out_file , :cnt, :catches, :discards, :runt, :mean_c, :mean_t)
-            on conflict (video_uri) do update set
+    session.execute(
+        sa.text("""insert into ondeckdata ( video_uri, cocoannotations_uri,
+            overallcount, overallcatches, overalldiscards, overallruntimems, detection_confidence, tracked_confidence )
+            values ( :decrypted_path, :json_out_file , :cnt, :catches, :discards, :runt, :mean_c, :mean_t)
+            on conflict (video_uri) do update set
                 cocoannotations_uri = :json_out_file,
                 overallcount = :cnt,
                 overallruntimems = :runt,
@@ -264,38 +295,48 @@ def v2_parse_json(session: Session, decrypted_path: Path, json_out_file: Path, o
                 overallcatches = :catches,
                 overalldiscards = :discards,
                 detection_confidence = :mean_c
-            ;"""), {
-            "decrypted_path": str(decrypted_path.absolute()),
-            "json_out_file":str(json_out_file.absolute()),
-            "cnt":cnt,
-            "catches":catches,
-            "discards":discards,
-            "runt":runtime,
-            "mean_c":meandetectionconfidence,
-            "mean_t":meantrackedconfidence,
-        }
-    )
+            ;"""),
+        {
+            "decrypted_path": str(decrypted_path.absolute()),
+            "json_out_file": str(json_out_file.absolute()),
+            "cnt": cnt,
+            "catches": catches,
+            "discards": discards,
+            "runt": runtime,
+            "mean_c": meandetectionconfidence,
+            "mean_t": meantrackedconfidence,
+        },
+    )
     session.commit()
 
+
 def v2_next_videos(session: Session, thalos_cam_name):
-    results: Query[VideoFile] = session.query(VideoFile).from_statement(sa.text(
-        """
-        select video_files.* from video_files
-        left join ondeckdata
-        on video_files.decrypted_path = ondeckdata.video_uri
-        where video_files.decrypted_path is not null
+    results: Query[VideoFile] = (
+        session.query(VideoFile)
+        .from_statement(
+            sa.text(
+                """
+        select video_files.* from video_files
+        left join ondeckdata
+        on video_files.decrypted_path = ondeckdata.video_uri
+        where video_files.decrypted_path is not null
         and video_files.start_datetime is not null
         and ondeckdata.video_uri is null
         and video_files.cam_name = :cam_name
         order by video_files.start_datetime asc;
-        """)).params(
-        {
-            "cam_name": thalos_cam_name,
-        })
-    return list(results)
+        """
+            )
+        )
+        .params(
+            {
+                "cam_name": thalos_cam_name,
+            }
+        )
+    )
+    return list(results)
 
+
 def v2_enqueue(output_dir: Path, sessionmaker: SessionMaker, thalos_cam_name: str):
-
     video_files: list[VideoFile] = []
 
     with sessionmaker() as session:
@@ -306,10 +347,12 @@ def v2_enqueue(output_dir: Path, sessionmaker: SessionMaker, thalos_cam_name: st
         video_file: VideoFile = video_files.pop(0)
         # print(video_file)
         decrypted_path = Path(video_file.decrypted_path)
-        last_dot_index: int = decrypted_path.name.index('.')
+        # find() returns -1 when there is no dot; index() would raise ValueError here
+        last_dot_index: int = decrypted_path.name.find(".")
         if last_dot_index < 0:
             last_dot_index = None
-        json_out_file: Path = output_dir / Path(decrypted_path.name[0:last_dot_index] + "_ondeck.json")
+        json_out_file: Path = output_dir / Path(
+            decrypted_path.name[0:last_dot_index] + "_ondeck.json"
+        )
 
         ondeck_input = str(decrypted_path.absolute())
         # try:
@@ -320,121 +363,145 @@ def v2_enqueue(output_dir: Path, sessionmaker: SessionMaker, thalos_cam_name: st
         #     pass
 
         try:
-            r: Response = requests.post('http://127.0.0.1:5000/inference', json={
-                "input_path":ondeck_input,
-                "output_path":str(json_out_file.absolute()),
-                "current_timestamp": video_file.start_datetime.astimezone(timezone.utc).replace(tzinfo=None).isoformat() + ".00Z"
-            })
+            r: Response = requests.post(
+                "http://127.0.0.1:5000/inference",
+                json={
+                    "input_path": ondeck_input,
+                    "output_path": str(json_out_file.absolute()),
+                    "current_timestamp": video_file.start_datetime.astimezone(UTC)
+                    .replace(tzinfo=None)
+                    .isoformat()
+                    + ".00Z",
+                },
+            )
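+            # the JSON payload comes out roughly as
+            #   {"input_path": "/videos/in.avi",
+            #    "output_path": "<output_dir>/in_ondeck.json",
+            #    "current_timestamp": "2023-07-21T09:55:00.00Z"}
+            # (paths illustrative; the local inference service defines the contract)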
{repr(r.json())}") with sessionmaker() as session: - session.execute(sa.text("""insert into ondeckdata ( video_uri, cocoannotations_uri, status ) + session.execute( + sa.text("""insert into ondeckdata ( video_uri, cocoannotations_uri, status ) values ( :ondeck_input, :ondeck_output, :status ) - on conflict (video_uri) DO UPDATE SET status = :status ;"""), { - "ondeck_input": ondeck_input, - "ondeck_output": str(json_out_file.absolute()), - "status": "queued" - } + on conflict (video_uri) DO UPDATE SET status = :status ;"""), + { + "ondeck_input": ondeck_input, + "ondeck_output": str(json_out_file.absolute()), + "status": "queued", + }, ) session.commit() except requests.exceptions.RequestException as e: - click.echo("ondeck model request exception: {}".format(e)) + click.echo(f"ondeck model request exception: {e}") return + MAGIC_VALUE_1_MINUTE = 60 + def v2_parse(output_dir: Path, sessionmaker: SessionMaker): # only pick files that end with _ondeck.json - a = filter(lambda x: x.is_file() and x.name.endswith('_ondeck.json'), output_dir.iterdir()) + a = filter(lambda x: x.is_file() and x.name.endswith("_ondeck.json"), output_dir.iterdir()) epoch_now = int(time.time()) # only pick files that haven't been modified in the last minute b = filter(lambda x: x.stat().st_mtime + MAGIC_VALUE_1_MINUTE < epoch_now, a) # get the filenames - c = map(lambda x: str(x.absolute()) , b) + c = map(lambda x: str(x.absolute()), b) found_ondeck_files = list(c) - click.echo("found {} _ondeck.json files".format(str(len(found_ondeck_files)))) + click.echo(f"found {str(len(found_ondeck_files))} _ondeck.json files") with sessionmaker() as session: - results: Query[OndeckData] = session.query(OndeckData).where( sa.or_( OndeckData.status == 'queued' , OndeckData.status == 'runningskiphalf' )) + results: Query[OndeckData] = session.query(OndeckData).where( + sa.or_(OndeckData.status == "queued", OndeckData.status == "runningskiphalf") + ) for pending_ondeckdata in results: is_skiphalf = pending_ondeckdata.status == "runningskiphalf" # click.echo("found {} queued row".format(str(pending_ondeckdata))) if pending_ondeckdata.cocoannotations_uri in found_ondeck_files: pending_ondeckdata.status = "parsing" session.commit() - - parse_json(session, Path(pending_ondeckdata.video_uri), Path(pending_ondeckdata.cocoannotations_uri)) + + parse_json( + session, + Path(pending_ondeckdata.video_uri), + Path(pending_ondeckdata.cocoannotations_uri), + ) pending_ondeckdata.status = "doneskiphalf" if is_skiphalf else "done" session.commit() - - def v2_errors(sessionmaker: SessionMaker): try: - r: Response = requests.get('http://127.0.0.1:5000/errors') + r: Response = requests.get("http://127.0.0.1:5000/errors") - click.echo("errors resp: {} body: {}".format(repr(r), repr(r.json()))) + click.echo(f"errors resp: {repr(r)} body: {repr(r.json())}") for error in r.json(): - input_path = error.get('input_path') - error_message = error.get('error_message') + input_path = error.get("input_path") + error_message = error.get("error_message") - if error_message.startswith('Task performance mode set to SKIP'): + if error_message.startswith("Task performance mode set to SKIP"): with sessionmaker() as session: - session.execute(sa.text("""insert into ondeckdata ( video_uri, status ) - values ( :decrypted_path, :skiphalfstatus ) - on conflict (video_uri) do update set + session.execute( + sa.text("""insert into ondeckdata ( video_uri, status ) + values ( :decrypted_path, :skiphalfstatus ) + on conflict (video_uri) do update set status = :skiphalfstatus - 
;"""), { - "decrypted_path": input_path, - "skiphalfstatus": "runningskiphalf" - } + ;"""), + {"decrypted_path": input_path, "skiphalfstatus": "runningskiphalf"}, ) session.commit() continue with sessionmaker() as session: - session.execute(sa.text("""insert into ondeckdata ( video_uri, cocoannotations_uri ) - values ( :decrypted_path, :error_str ) - on conflict (video_uri) do update set + session.execute( + sa.text("""insert into ondeckdata ( video_uri, cocoannotations_uri ) + values ( :decrypted_path, :error_str ) + on conflict (video_uri) do update set status = 'errored', cocoannotations_uri = :error_str - ;"""), { - "decrypted_path": input_path, - "error_str": "ondeck model failure. stdout, stderr: " + error_message - } + ;"""), + { + "decrypted_path": input_path, + "error_str": "ondeck model failure. stdout, stderr: " + error_message, + }, ) session.commit() except requests.exceptions.RequestException as e: - click.echo("ondeck model errors request exception: {}".format(e)) + click.echo(f"ondeck model errors request exception: {e}") return -@click.command() -@click.option('--dbname', default=flaskconfig.get('DBNAME')) -@click.option('--dbuser', default=flaskconfig.get('DBUSER')) -@click.option('--output_dir', default=flaskconfig.get('VIDEO_OUTPUT_DIR')) -@click.option('--engine', default=flaskconfig.get('ONDECK_MODEL_ENGINE')) -@click.option('--thalos_cam_name', default=flaskconfig.get('THALOS_CAM_NAME')) -@click.option('--print_queue', is_flag=True) -@click.option('--parsetesta') -@click.option('--parsetestb') -@click.option('--force_v2', is_flag=True) -def main(dbname, dbuser, output_dir, engine, thalos_cam_name, print_queue, parsetesta, parsetestb, force_v2: bool): +@click.command() +@click.option("--dbname", default=flaskconfig.get("DBNAME")) +@click.option("--dbuser", default=flaskconfig.get("DBUSER")) +@click.option("--output_dir", default=flaskconfig.get("VIDEO_OUTPUT_DIR")) +@click.option("--engine", default=flaskconfig.get("ONDECK_MODEL_ENGINE")) +@click.option("--thalos_cam_name", default=flaskconfig.get("THALOS_CAM_NAME")) +@click.option("--print_queue", is_flag=True) +@click.option("--parsetesta") +@click.option("--parsetestb") +@click.option("--force_v2", is_flag=True) +def main( + dbname, + dbuser, + output_dir, + engine, + thalos_cam_name, + print_queue, + parsetesta, + parsetestb, + force_v2: bool, +): output_dir = Path(output_dir) if engine: engine = Path(engine) - - sa_engine = sa.create_engine("postgresql+psycopg2://%s@/%s"%(dbuser, dbname), echo=True) + sa_engine = sa.create_engine(f"postgresql+psycopg2://{dbuser}@/{dbname}", echo=True) sessionmaker = SessionMaker(sa_engine) ModelBase.metadata.create_all(sa_engine) @@ -452,46 +519,46 @@ def main(dbname, dbuser, output_dir, engine, thalos_cam_name, print_queue, parse use_v2 = False try: - r: Response = requests.get('http://127.0.0.1:5000/queueSummary') + r: Response = requests.get("http://127.0.0.1:5000/queueSummary") use_v2 = r.status_code == 200 - click.echo("resp: {} body: {}".format(repr(r), repr(r.json()))) + click.echo(f"resp: {repr(r)} body: {repr(r.json())}") except requests.exceptions.RequestException as e: - click.echo("ondeck model request exception: {}".format(e)) + click.echo(f"ondeck model request exception: {e}") if force_v2 or use_v2: - + def runonce_enqueue(output_dir, sessionmaker, thalos_cam_name): v2_enqueue(output_dir, sessionmaker, thalos_cam_name) return schedule.CancelJob - + schedule.every(1).seconds.do(runonce_enqueue, output_dir, sessionmaker, thalos_cam_name) - 
-        schedule.every(5).minutes.do(v2_enqueue, output_dir, sessionmaker, thalos_cam_name )
+        schedule.every(5).minutes.do(v2_enqueue, output_dir, sessionmaker, thalos_cam_name)
 
         def runonce_errors(sessionmaker):
             v2_errors(sessionmaker)
             return schedule.CancelJob
-
-        schedule.every(1).seconds.do(runonce_errors, sessionmaker)
+
+        schedule.every(1).seconds.do(runonce_errors, sessionmaker)
         schedule.every(1).minutes.do(v2_errors, sessionmaker)
 
         def runonce_parse(output_dir, sessionmaker):
             v2_parse(output_dir, sessionmaker)
             return schedule.CancelJob
-
+
         schedule.every(1).seconds.do(runonce_parse, output_dir, sessionmaker)
-        schedule.every(1).minutes.do(v2_parse, output_dir, sessionmaker )
+        schedule.every(1).minutes.do(v2_parse, output_dir, sessionmaker)
     else:
 
         def runonce(output_dir, engine, sessionmaker, thalos_cam_name):
             run_ondeck(output_dir, engine, sessionmaker, thalos_cam_name)
             return schedule.CancelJob
-
+
         schedule.every(1).seconds.do(runonce, output_dir, engine, sessionmaker, thalos_cam_name)
-        schedule.every(5).minutes.do(run_ondeck, output_dir, engine, sessionmaker, thalos_cam_name )
+        schedule.every(5).minutes.do(run_ondeck, output_dir, engine, sessionmaker, thalos_cam_name)
 
     while 1:
         n = schedule.idle_seconds()
@@ -500,10 +567,10 @@ def runonce(output_dir, engine, sessionmaker, thalos_cam_name):
             break
         elif n > 0:
             # sleep exactly the right amount of time
-            click.echo("sleeping for: {}".format(n))
+            click.echo(f"sleeping for: {n}")
             time.sleep(n)
         schedule.run_pending()
 
-if __name__ == '__main__':
-    main()
+if __name__ == "__main__":
+    main()
diff --git a/s3_uploader.py b/s3_uploader.py
index 77cdbdb..6ce5325 100644
--- a/s3_uploader.py
+++ b/s3_uploader.py
@@ -1,196 +1,266 @@
-import json
 import io
-
-from flask import Flask
-from flask_admin import Admin
-
-from sqlalchemy import create_engine
-from sqlalchemy.orm import sessionmaker, Session
-import psycopg2
-from psycopg2.pool import SimpleConnectionPool
 import os
+import string
+import time
+from datetime import UTC, datetime, timedelta
 
-from model import Base as ModelBase, RiskVector, RiskVectorModelView, Test, TestModelView
-from vector import GpsVector, FishAiEventsComeInFourHourBurstsVector, InternetVector, EquipmentOutageAggVector
-
-import sqlite3
-from datetime import datetime, timedelta, timezone
-
+import boto3
 import click
-
+import psycopg2
 import schedule
-import re
-import time
-import string
+from flask.config import Config as FlaskConfig
+from psycopg2.pool import SimpleConnectionPool
+from sqlalchemy.orm import Session
 
+from model import Test
 
-from flask.config import Config as FlaskConfig
-flaskconfig = FlaskConfig(root_path='')
+flaskconfig = FlaskConfig(root_path="")
 
-flaskconfig.from_object('config.defaults')
-if 'ENVIRONMENT' in os.environ:
-    flaskconfig.from_envvar('ENVIRONMENT')
+flaskconfig.from_object("config.defaults")
+if "ENVIRONMENT" in os.environ:
+    flaskconfig.from_envvar("ENVIRONMENT")
 
-import boto3
+s3 = boto3.resource("s3")
+bucket = s3.Bucket("51-gema-dev-dp-raw")
 
-s3 = boto3.resource('s3')
-bucket = s3.Bucket('51-gema-dev-dp-raw')
+
+csvprintable = string.printable
+csvprintable = csvprintable[0 : 1 + csvprintable.index("\t")]
+csvprintable = csvprintable.replace(",", "")
 
-csvprintable=string.printable
-csvprintable = csvprintable[0:1+csvprintable.index("\t")]
-csvprintable = csvprintable.replace(',', '')
 
 def csvfilter(s):
-    return ''.join(filter(lambda c: c in csvprintable, s))
+    return "".join(filter(lambda c: c in csvprintable, s))
 
-def DEPRECATED_export_method_with_sqlalchemy_models(session: Session):
+def DEPRECATED_export_method_with_sqlalchemy_models(session: Session):
     try:
-        now = datetime.now().astimezone(timezone.utc)
-
-        result = session.query(Test)\
-            .where(Test.datetime_from > now - timedelta(days=13), Test.vector_id == 2)\
-            .order_by(Test.datetime.desc())\
-            .limit(1).all()
+        now = datetime.now().astimezone(UTC)
+
+        result = (
+            session.query(Test)
+            .where(Test.datetime_from > now - timedelta(days=13), Test.vector_id == 2)
+            .order_by(Test.datetime.desc())
+            .limit(1)
+            .all()
+        )
         rows = list(result)
         if len(rows) > 0:
-
             partition = str(now.year) + "/" + str(now.month) + "/" + str(now.day)
-
+
             body = io.BytesIO()
-            body.write((','.join([column.name for column in Test.__mapper__.columns]) + '\n').encode())
-            [body.write((','.join([str(getattr(row, column.name)) for column in Test.__mapper__.columns]) + '\n').encode()) for row in rows]
-            bucket.put_object(Key="tnc_edge/"+Test.__tablename__+"/"+partition+"/"+str(int(now.timestamp()))+".csv", Body=body.getvalue())
+            body.write(
+                (",".join([column.name for column in Test.__mapper__.columns]) + "\n").encode()
+            )
+            [
+                body.write(
+                    (
+                        ",".join(
+                            [str(getattr(row, column.name)) for column in Test.__mapper__.columns]
+                        )
+                        + "\n"
+                    ).encode()
+                )
+                for row in rows
+            ]
+            bucket.put_object(
+                Key="tnc_edge/"
+                + Test.__tablename__
+                + "/"
+                + partition
+                + "/"
+                + str(int(now.timestamp()))
+                + ".csv",
+                Body=body.getvalue(),
+            )
     except Exception as e:
         print("Error: exception in s3 uploader", e)
 
-def DEPRECATED_s3uploader(cpool: SimpleConnectionPool, boat, ver):
+
+def DEPRECATED_s3uploader(cpool: SimpleConnectionPool, boat, ver):
     DEPRECATED_tables = [
-        'deckhandevents',
-        'gpsdata',
-        'internetdata',
-        'deckhandevents_mostrecentlonglineevent_jsonextracted',
-        'tests',
-        'video_files',
-        'tracks',
-        'ondeckdata',
-        'aifishdata',
+        "deckhandevents",
+        "gpsdata",
+        "internetdata",
+        "deckhandevents_mostrecentlonglineevent_jsonextracted",
+        "tests",
+        "video_files",
+        "tracks",
+        "ondeckdata",
+        "aifishdata",
     ]
     conn: psycopg2.connection = cpool.getconn()
     try:
         with conn.cursor() as cur:
             for table in DEPRECATED_tables:
-                # print(table)
-                cur.execute("SELECT column_name FROM information_schema.columns \
-                        WHERE table_name = %s order by ordinal_position;", (table,))
+                cur.execute(
+                    "SELECT column_name FROM information_schema.columns \
+                        WHERE table_name = %s order by ordinal_position;",
+                    (table,),
+                )
                 columns = cur.fetchall()
-
-                cur.execute("select max(a.max), CURRENT_TIMESTAMP from ( \
+
+                cur.execute(
+                    "select max(a.max), CURRENT_TIMESTAMP from ( \
                         select max(s3uploads.datetime), CURRENT_TIMESTAMP \
                         from s3uploads where tablename = %s group by tablename \
                        union select timestamp with time zone '1970-01-01' as max, CURRENT_TIMESTAMP \
-                    ) a;", (table,))
+                    ) a;",
+                    (table,),
+                )
                 dates = cur.fetchone()
-
-
-                if table == 'video_files':
-                    cur.execute('select * from video_files where start_datetime > %s and start_datetime <= %s;', (dates[0], dates[1]))
+                if table == "video_files":
+                    cur.execute(
+                        "select * from video_files where start_datetime > %s and start_datetime <= %s;",
+                        (dates[0], dates[1]),
+                    )
                 else:
-                    cur.execute('select * from '+table+' where datetime > %s and datetime <= %s;', (dates[0], dates[1]))
+                    cur.execute(
+                        "select * from " + table + " where datetime > %s and datetime <= %s;",
+                        (dates[0], dates[1]),
+                    )
 
-                now = datetime.now().astimezone(timezone.utc)
+                now = datetime.now().astimezone(UTC)
                 partition = str(now.year) + "/" + str(now.month) + "/" + str(now.day)
 
                 rows = list(cur.fetchall())
                 if len(rows) > 0:
                     body = io.BytesIO()
-                    body.write((','.join([column[0] for column in columns]) + '\n').encode())
-                    [body.write((','.join([csvfilter(str(value)) for value in row]) + '\n').encode()) for row in rows]
-
-                    bucket.put_object(Key="tnc_edge/"+boat+"_"+ver+"_"+table+"/"+partition+"/"+str(int(dates[1].timestamp()))+".csv", Body=body.getvalue())
-
-                    cur.execute('insert into s3uploads (datetime, tablename) values (%s, %s)', (dates[1], table,))
+                    body.write((",".join([column[0] for column in columns]) + "\n").encode())
+                    [
+                        body.write(
+                            (",".join([csvfilter(str(value)) for value in row]) + "\n").encode()
+                        )
+                        for row in rows
+                    ]
+
+                    bucket.put_object(
+                        Key="tnc_edge/"
+                        + boat
+                        + "_"
+                        + ver
+                        + "_"
+                        + table
+                        + "/"
+                        + partition
+                        + "/"
+                        + str(int(dates[1].timestamp()))
+                        + ".csv",
+                        Body=body.getvalue(),
+                    )
+
+                    cur.execute(
+                        "insert into s3uploads (datetime, tablename) values (%s, %s)",
+                        (
+                            dates[1],
+                            table,
+                        ),
+                    )
                     conn.commit()
     finally:
         cpool.putconn(conn)
 
-def s3psqlcopyer(cpool: SimpleConnectionPool, boat, ver):
+
+def s3psqlcopyer(cpool: SimpleConnectionPool, boat, ver):
     tables = [
-        'deckhandevents',
-        'gpsdata',
-        'internetdata',
-        'deckhandevents_mostrecentlonglineevent_jsonextracted',
-        'tests',
-        'video_files',
-        'tracks',
-        'ondeckdata',
-        'aifishdata',
+        "deckhandevents",
+        "gpsdata",
+        "internetdata",
+        "deckhandevents_mostrecentlonglineevent_jsonextracted",
+        "tests",
+        "video_files",
+        "tracks",
+        "ondeckdata",
+        "aifishdata",
     ]
     conn: psycopg2.connection = cpool.getconn()
-
+
     try:
         with conn.cursor() as cur:
             for table in tables:
-                # print(table)
-                cur.execute("SELECT column_name FROM information_schema.columns \
-                        WHERE table_name = %s order by ordinal_position;", (table,))
+                cur.execute(
+                    "SELECT column_name FROM information_schema.columns \
+                        WHERE table_name = %s order by ordinal_position;",
+                    (table,),
+                )
                 columns = cur.fetchall()
-
-                cur.execute("select max(a.max), CURRENT_TIMESTAMP from ( \
+
+                cur.execute(
+                    "select max(a.max), CURRENT_TIMESTAMP from ( \
                        select max(s3uploads.datetime), CURRENT_TIMESTAMP \
                        from s3uploads where tablename = %s group by tablename \
                       union select timestamp with time zone '1970-01-01' as max, CURRENT_TIMESTAMP \
-                    ) a;", (table,))
+                    ) a;",
+                    (table,),
+                )
                 dates = cur.fetchone()
 
                 cur.execute(f"CREATE TEMP TABLE t as SELECT * from {table} where false;")
-                if table == 'video_files':
-                    cur.execute(f"insert into t (select * from video_files where start_datetime > '{dates[0]}' and start_datetime <= '{dates[1]}');")
+                if table == "video_files":
+                    cur.execute(
+                        f"insert into t (select * from video_files where start_datetime > '{dates[0]}' and start_datetime <= '{dates[1]}');"
+                    )
                 else:
-                    cur.execute(f"insert into t (select * from {table} where datetime > '{dates[0]}' and datetime <= '{dates[1]}');")
-                copy_sql = f'COPY t TO STDOUT WITH CSV HEADER;'
-                now = datetime.now().astimezone(timezone.utc)
+                    cur.execute(
+                        f"insert into t (select * from {table} where datetime > '{dates[0]}' and datetime <= '{dates[1]}');"
+                    )
+                copy_sql = "COPY t TO STDOUT WITH CSV HEADER;"
+                now = datetime.now().astimezone(UTC)
                 partition = str(now.year) + "/" + str(now.month) + "/" + str(now.day)
                 f = io.BytesIO()
                 cur.copy_expert(copy_sql, f)
                 f.seek(0)
-                f.readline()  # csv header line
-                if len(f.readline()) > 0:  # first line of data. If it exists, write to bucket
+                f.readline()  # csv header line
+                if len(f.readline()) > 0:  # first line of data. If it exists, write to bucket
                     f.seek(0)
-                    key = "tnc_edge/"+boat+"_"+ver+"_"+table+"/"+partition+"/"+str(int(dates[1].timestamp()))+".csv"
-                    click.echo(f'uploading {key}')
+                    key = (
+                        "tnc_edge/"
+                        + boat
+                        + "_"
+                        + ver
+                        + "_"
+                        + table
+                        + "/"
+                        + partition
+                        + "/"
+                        + str(int(dates[1].timestamp()))
+                        + ".csv"
+                    )
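+                    # keys come out as tnc_edge/<boat>_<ver>_<table>/<YYYY>/<M>/<D>/<epoch>.csv,
+                    # e.g. tnc_edge/myboat_v1_gpsdata/2023/7/21/1689930000.csv (illustrative values)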
+                    click.echo(f"uploading {key}")
                     bucket.put_object(Key=key, Body=f.getvalue())
-                    cur.execute('insert into s3uploads (datetime, tablename) values (%s, %s)', (dates[1], table,))
-                cur.execute('drop table t;')
+                    cur.execute(
+                        "insert into s3uploads (datetime, tablename) values (%s, %s)",
+                        (
+                            dates[1],
+                            table,
+                        ),
+                    )
+                cur.execute("drop table t;")
                 conn.commit()
     finally:
         cpool.putconn(conn)
 
+
 @click.command()
-@click.option('--dbname', default=flaskconfig.get('DBNAME'))
-@click.option('--dbuser', default=flaskconfig.get('DBUSER'))
-@click.option('--boatname', default=flaskconfig.get('BOAT_NAME'))
-@click.option('--dbtablesversion', default=flaskconfig.get('DB_TABLES_VERSION'))
-@click.option('--test', is_flag=True)
+@click.option("--dbname", default=flaskconfig.get("DBNAME"))
+@click.option("--dbuser", default=flaskconfig.get("DBUSER"))
+@click.option("--boatname", default=flaskconfig.get("BOAT_NAME"))
+@click.option("--dbtablesversion", default=flaskconfig.get("DB_TABLES_VERSION"))
+@click.option("--test", is_flag=True)
 def main(dbname, dbuser, boatname, dbtablesversion, test):
-
     # engine = create_engine("postgresql+psycopg2://%s@/%s"%(dbuser, dbname), echo=True)
     # SessionMaker = sessionmaker(engine)
-    # ModelBase.metadata.create_all(engine)
-
     cpool = SimpleConnectionPool(1, 1, database=dbname, user=dbuser)
-
+
     if test:
         # s3psqlcopyer(cpool, boatname, dbtablesversion)
@@ -210,9 +280,10 @@ def runonce(cpool, boatname, dbtablesversion):
             break
         elif n > 0:
             # sleep exactly the right amount of time
-            click.echo(f'sleeping for: {n}')
+            click.echo(f"sleeping for: {n}")
             time.sleep(n)
         schedule.run_pending()
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     main()
diff --git a/scripts/adduser_aifish.sh b/scripts/adduser_aifish.sh
index 598a9ff..da284b9 100644
--- a/scripts/adduser_aifish.sh
+++ b/scripts/adduser_aifish.sh
@@ -1,14 +1,13 @@
 #!/bin/bash
+scriptdir="$(dirname -- "$(readlink -f -- "$0")")"
 
-scriptdir="$(dirname -- "$( readlink -f -- "$0")")"
-
-if [ "$UID" -lt 1000 ] ; then
+if [ "$UID" -lt 1000 ]; then
     echo "This script should be run as a non-root user with 'sudo' access"
     exit 1
 fi
 
-if ! [ -e "$scriptdir/secret_adduser_aifish.txt" ] ; then
+if ! [ -e "$scriptdir/secret_adduser_aifish.txt" ]; then
     echo "Cannot adduser without secrets file containing password"
     exit 1
 fi
@@ -46,4 +45,3 @@ EOF
 
 # on prod machines, user can only run docker commands
 # aifish ALL=NOPASSWD: /usr/bin/docker *
-
diff --git a/scripts/adduser_ondeck.sh b/scripts/adduser_ondeck.sh
index da48305..3d5c47d 100644
--- a/scripts/adduser_ondeck.sh
+++ b/scripts/adduser_ondeck.sh
@@ -1,14 +1,13 @@
 #!/bin/bash
+scriptdir="$(dirname -- "$(readlink -f -- "$0")")"
 
-scriptdir="$(dirname -- "$( readlink -f -- "$0")")"
-
-if [ "$UID" -lt 1000 ] ; then
+if [ "$UID" -lt 1000 ]; then
     echo "This script should be run as a non-root user with 'sudo' access"
     exit 1
 fi
 
-if ! [ -e "$scriptdir/secret_adduser_ondeck.txt" ] ; then
[ -e "$scriptdir/secret_adduser_ondeck.txt" ]; then echo "Cannot adduser without secrets file containing password" exit 1 fi @@ -43,10 +42,9 @@ EOF # ondeck ALL=NOPASSWD: /usr/bin/docker * - gapp_creds_config_line=$(sudo grep -E '^export GOOGLE_APPLICATION_CREDENTIALS=' "$USERHOME/.bashrc") -if [ $? -eq 0 ] && [ "x$gapp_creds_config_line" != "x" ] ; then +if [ $? -eq 0 ] && [ "x$gapp_creds_config_line" != "x" ]; then # eval to make this value available in this script eval "$gapp_creds_config_line" else @@ -60,8 +58,8 @@ EOF GOOGLE_APPLICATION_CREDENTIALS="$USERHOME/google_application_credentials.json" fi -if ! [ -e "$GOOGLE_APPLICATION_CREDENTIALS" ] ; then - if ! [ -e "$scriptdir/secret_ondeck_gcr_token.json" ] ; then +if ! [ -e "$GOOGLE_APPLICATION_CREDENTIALS" ]; then + if ! [ -e "$scriptdir/secret_ondeck_gcr_token.json" ]; then echo "cannot find and cannot install google app creds json file!" echo "make the creds available in this scripts dir and rerun this script" exit 1 @@ -69,5 +67,3 @@ if ! [ -e "$GOOGLE_APPLICATION_CREDENTIALS" ] ; then sudo cp "$scriptdir/secret_ondeck_gcr_token.json" "$GOOGLE_APPLICATION_CREDENTIALS" sudo chown ondeck:ondeck "$GOOGLE_APPLICATION_CREDENTIALS" fi - - diff --git a/scripts/app-install.sh b/scripts/app-install.sh index b1c62e3..12818bd 100644 --- a/scripts/app-install.sh +++ b/scripts/app-install.sh @@ -1,6 +1,6 @@ #!/bin/bash -scriptdir="$(dirname -- "$( readlink -f -- "$0")")" +scriptdir="$(dirname -- "$(readlink -f -- "$0")")" cd "$scriptdir/.." || exit @@ -10,14 +10,12 @@ WORKINGDIR="$USERHOME/tnc-edge-service" cd "$WORKINGDIR" || exit - -if ! [ -e ./venv/bin/activate ] ; then +if ! [ -e ./venv/bin/activate ]; then python3 -m venv venv fi - -if [ "$VIRTUAL_ENV" != "$(pwd)/venv" ] ; then - if [ "x$VIRTUAL_ENV" != "x" ] ; then +if [ "$VIRTUAL_ENV" != "$(pwd)/venv" ]; then + if [ "x$VIRTUAL_ENV" != "x" ]; then deactivate fi source ./venv/bin/activate @@ -25,15 +23,13 @@ fi pip install -r requirements.txt - PROD_CONF_FILE="$WORKINGDIR/config/prod.py" -if ! [ -e "$PROD_CONF_FILE" ] ; then - echo "DEBUG=False" >> "$PROD_CONF_FILE" +if ! [ -e "$PROD_CONF_FILE" ]; then + echo "DEBUG=False" >>"$PROD_CONF_FILE" fi -if ! grep -q -E "^SECRET_KEY=" "$PROD_CONF_FILE" ; then +if ! 
grep -q -E "^SECRET_KEY=" "$PROD_CONF_FILE"; then echo "creating secret_key in prod config" - echo "SECRET_KEY='$(dd if=/dev/urandom count=1 | base64 | tr -d '+/Il10O' | fold -w 32 | head -n 1)'" >> "$PROD_CONF_FILE" + echo "SECRET_KEY='$(dd if=/dev/urandom count=1 | base64 | tr -d '+/Il10O' | fold -w 32 | head -n 1)'" >>"$PROD_CONF_FILE" fi - diff --git a/scripts/box_dot_com/box_reupload.py b/scripts/box_dot_com/box_reupload.py index 94e3e4e..1468a3f 100644 --- a/scripts/box_dot_com/box_reupload.py +++ b/scripts/box_dot_com/box_reupload.py @@ -1,139 +1,121 @@ -import requests - -from datetime import datetime, timezone, timedelta - -import boto3 -import click -import sys +import concurrent.futures import json import re -import concurrent.futures +import sys +from datetime import UTC, datetime, timedelta -from dateutil.parser import parse as datetimeparse, isoparse as iso8601parse +import boto3 +import click +import requests +(client_id, client_secret) = json.load(open("secret_box_creds.json")) -(client_id, client_secret) = json.load(open('secret_box_creds.json')) class Token: def __init__(self): self.token_str = None self.token_exp = datetime.now() - + def __call__(self): if self.token_str and self.token_exp > datetime.now(): return self.token_str - + resp = requests.post( - 'https://api.box.com/oauth2/token', + "https://api.box.com/oauth2/token", data={ - 'client_id': client_id, - 'client_secret': client_secret, - 'grant_type': "client_credentials", - 'box_subject_type': "enterprise", - 'box_subject_id': "994495604", - } + "client_id": client_id, + "client_secret": client_secret, + "grant_type": "client_credentials", + "box_subject_type": "enterprise", + "box_subject_id": "994495604", + }, ) if resp.status_code > 299: - click.echo(f'{resp.status_code} {resp.headers} {resp.content}') + click.echo(f"{resp.status_code} {resp.headers} {resp.content}") j = resp.json() - self.token_str = j['access_token'] - self.token_exp = datetime.now() + timedelta(seconds=j['expires_in']-200) + self.token_str = j["access_token"] + self.token_exp = datetime.now() + timedelta(seconds=j["expires_in"] - 200) return self.token_str + token = Token() -s = boto3.Session(profile_name='AWSAdministratorAccess-867800856651') +s = boto3.Session(profile_name="AWSAdministratorAccess-867800856651") try: from secret_aws_s3_creds import AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY + s = boto3.Session( - aws_access_key_id=AWS_ACCESS_KEY_ID, - aws_secret_access_key=AWS_SECRET_ACCESS_KEY, - region_name='us-east-1') + aws_access_key_id=AWS_ACCESS_KEY_ID, + aws_secret_access_key=AWS_SECRET_ACCESS_KEY, + region_name="us-east-1", + ) except: pass -s3 = s.resource('s3') +s3 = s.resource("s3") + def box_folder_get_items(folder_id, offset): - url='https://api.box.com/2.0/folders/{}/items'.format(folder_id) + url = f"https://api.box.com/2.0/folders/{folder_id}/items" params = {} if offset: - params['offset'] = offset - headers={"Authorization": "Bearer "+token()} + params["offset"] = offset + headers = {"Authorization": "Bearer " + token()} # print(url, params, headers) - resp = requests.get( - url, - params, - headers=headers - ) + resp = requests.get(url, params, headers=headers) if resp.status_code > 299: - click.echo(f'{resp.status_code} {resp.headers} {resp.content}') + click.echo(f"{resp.status_code} {resp.headers} {resp.content}") return resp.json() + def box_folder_upload_item(folder_id, fname, f): - url = 'https://upload.box.com/api/2.0/files/content' - - headers={"Authorization": "Bearer "+token()} - attrs = { - 'name': fname, - 
'parent': { - 'id': folder_id - } - } - fdict= { - 'attributes': (None, json.dumps(attrs)), - 'file': (fname, f), + url = "https://upload.box.com/api/2.0/files/content" + + headers = {"Authorization": "Bearer " + token()} + attrs = {"name": fname, "parent": {"id": folder_id}} + fdict = { + "attributes": (None, json.dumps(attrs)), + "file": (fname, f), } # click.echo(f'{url} {attrs} {headers}') - resp = requests.post( - url, - files=fdict, - headers=headers - ) + resp = requests.post(url, files=fdict, headers=headers) if resp.status_code > 299: - click.echo(f'{resp.status_code} {resp.headers} {resp.content}') + click.echo(f"{resp.status_code} {resp.headers} {resp.content}") # click.echo(f'{resp.status_code} {resp.headers} {resp.content}') return resp -def box_create_folder(folder_id, fname ): - url = 'https://api.box.com/2.0/folders' + +def box_create_folder(folder_id, fname): + url = "https://api.box.com/2.0/folders" # url = 'http://localhost:50001/2.0/folders' - - headers={"Authorization": "Bearer "+token(), - 'User-Agent':'python-requests/2.32.3', - } - data = { - 'name': fname, - 'parent': { - 'id': folder_id - } + + headers = { + "Authorization": "Bearer " + token(), + "User-Agent": "python-requests/2.32.3", } + data = {"name": fname, "parent": {"id": folder_id}} # click.echo(f'{url} {attrs} {headers}') - resp = requests.post( - url, - json=data, - headers=headers - ) + resp = requests.post(url, json=data, headers=headers) if resp.status_code > 299: - click.echo(f'{resp.status_code} {resp.headers} {resp.content}') + click.echo(f"{resp.status_code} {resp.headers} {resp.content}") # click.echo(f'{resp.status_code} {resp.headers} {resp.content}') return resp.json() + @click.group() def main(): pass + @main.command() def iter_box_folder_copy_to_s3(): - files_done = [] - with open('box_reupload.done', 'r') as f: + with open("box_reupload.done") as f: for line in f.readlines(): r = line.strip() if len(r) > 0: files_done.append(r) - folder_ids = [ # ('220398416437', 'TNC EDGE Trip Video Files', 0), # ("220396095950", "Brancol", 0), @@ -159,276 +141,291 @@ def iter_box_folder_copy_to_s3(): files = [] - root = box_folder_get_items('0', 0) - for f in filter(lambda x: x['name'] == 'TNC EDGE Trip Video Files', root['entries']): - folder_ids.append((f['id'], f['name'], 0,)) - - with open('box_reupload.done', 'a') as new_done: + root = box_folder_get_items("0", 0) + for f in filter(lambda x: x["name"] == "TNC EDGE Trip Video Files", root["entries"]): + folder_ids.append( + ( + f["id"], + f["name"], + 0, + ) + ) + with open("box_reupload.done", "a") as new_done: while len(folder_ids) > 0: (folder_id, folder_name, offset) = folder_ids.pop(0) - if folder_name.endswith('/gps'): + if folder_name.endswith("/gps"): continue - j = box_folder_get_items(folder_id, offset) - if j['total_count'] - offset > j['limit']: - print("Warning: folder {} has too many items".format(folder_name)) - folder_ids.append((folder_id, folder_name , offset+100)) - for f in filter(lambda x: x['type'] == 'folder', j['entries']): - folder_ids.append((f['id'], folder_name + "/" + f['name'], 0)) - for f in filter(lambda x: x['type'] == 'file', j['entries']): - if f['id'] in files_done: + if j["total_count"] - offset > j["limit"]: + print(f"Warning: folder {folder_name} has too many items") + folder_ids.append((folder_id, folder_name, offset + 100)) + for f in filter(lambda x: x["type"] == "folder", j["entries"]): + folder_ids.append((f["id"], folder_name + "/" + f["name"], 0)) + for f in filter(lambda x: x["type"] == "file", 
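
The Token class above turns the OAuth token into a callable cache: it stores the bearer string and re-fetches it 200 seconds before Box's advertised expiry, so every helper can simply call token() per request. Illustrative use (names exactly as defined above):

    token = Token()
    headers = {"Authorization": "Bearer " + token()}  # first call POSTs to /oauth2/token
    headers = {"Authorization": "Bearer " + token()}  # later calls reuse the cached string
    # After expires_in - 200 seconds, the next token() call refreshes transparently.
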
j["entries"]): + if f["id"] in files_done: continue - files.append((f['id'], folder_name + "/" + f['name'],)) - url='https://api.box.com/2.0/files/{}/content'.format(f['id']) - print(url) - resp = requests.get( - url, - headers={"Authorization": "Bearer "+token()} + files.append( + ( + f["id"], + folder_name + "/" + f["name"], + ) ) - with open('tmpfile', 'wb') as tmp: + url = "https://api.box.com/2.0/files/{}/content".format(f["id"]) + print(url) + resp = requests.get(url, headers={"Authorization": "Bearer " + token()}) + with open("tmpfile", "wb") as tmp: tmp.write(resp.content) - s3.Object('dp.riskedge.fish', folder_name + "/" + f['name']).put(Body=open('tmpfile', 'rb')) - new_done.write(f['id'] + '\n') - - + s3.Object("dp.riskedge.fish", folder_name + "/" + f["name"]).put( + Body=open("tmpfile", "rb") + ) + new_done.write(f["id"] + "\n") for i in files: print(files) def box_navigate_path(path, all_box_folders): - """ - Use the box.com api to find the folder associated with the path - + """Use the box.com api to find the folder associated with the path + :param path: the path to find - + :param all_box_folders: a dict of box api return values. Key is box folder id. Dict is modified by method - + :return: box folder object. """ - - if not re.match('^/([\w]([\w \.-]*\w)?/)*$', path): - click.echo(f'bad path: {path}') + if not re.match(r"^/([\w]([\w \.-]*\w)?/)*$", path): + click.echo(f"bad path: {path}") sys.exit(1) - - fnames = path.split('/')[1:-1] - + + fnames = path.split("/")[1:-1] + def box_get_all(folder_id): offset = 0 ret = box_folder_get_items(folder_id, offset) last_req = ret - while last_req.get('total_count') > last_req.get('limit') + offset: - offset += last_req.get('limit') + while last_req.get("total_count") > last_req.get("limit") + offset: + offset += last_req.get("limit") last_req = box_folder_get_items(folder_id, offset) - ret.get('entries').append(last_req.get('entries')) + ret.get("entries").append(last_req.get("entries")) return ret - + if 0 not in all_box_folders.keys(): - root = box_get_all('0') - root.update({'name': "", 'id': "0"}) + root = box_get_all("0") + root.update({"name": "", "id": "0"}) all_box_folders[0] = root - + curr = all_box_folders[0] for fname in fnames: - if fname in map(lambda x: x.get('name'), curr.get('entries')): - f = next(filter(lambda x: x.get('name') == fname, curr.get('entries'))) - f_id = f.get('id') + if fname in map(lambda x: x.get("name"), curr.get("entries")): + f = next(filter(lambda x: x.get("name") == fname, curr.get("entries"))) + f_id = f.get("id") else: # have to create the folder in box - resp = box_create_folder(curr.get('id'), fname) - f_id = resp.get('id') - curr.get('entries').append({'type': 'folder', 'id': f_id, 'name': fname}) + resp = box_create_folder(curr.get("id"), fname) + f_id = resp.get("id") + curr.get("entries").append({"type": "folder", "id": f_id, "name": fname}) if f_id in all_box_folders.keys(): curr = all_box_folders[f_id] else: # have to populate the folder curr = box_get_all(f_id) - curr.update({'name': fname, 'id':f_id}) + curr.update({"name": fname, "id": f_id}) all_box_folders[f_id] = curr - - return curr + return curr # python3 box_reupload.py s3-uri-to-box dp.riskedge.fish 'TNC EDGE Trip Video Files/Saint Patrick/alt_hd_upload/' # python3 box_reupload.py s3-uri-to-box dp.riskedge.fish 'TNC EDGE Trip Video Files/Brancol/alt_hd_upload/' + def byte_range_gen(total_bytes, chunksize): - offset=0 + offset = 0 while offset + chunksize < total_bytes: r = str(offset) + "-" + str(offset + chunksize - 1) # 
print(f'chunk {r}') - yield "bytes="+r + yield "bytes=" + r offset += chunksize - r = str(offset) + '-' + str(total_bytes - 1) + r = str(offset) + "-" + str(total_bytes - 1) # print(f'chunk {r} (last)') - yield "bytes="+r + yield "bytes=" + r + + +S3_CHUNK_S = int(2 * 1024 * 1024) -S3_CHUNK_S = int(2*1024*1024) @main.command() -@click.option('--dry-run', is_flag=True) -@click.option('--done-filename', default='s3_to_box.done') -@click.option('--max-workers', default=25) -@click.argument('s3_bucket') -@click.argument('s3_path_prefix') -@click.argument('box_name', default='/uncompressed-video/') +@click.option("--dry-run", is_flag=True) +@click.option("--done-filename", default="s3_to_box.done") +@click.option("--max-workers", default=25) +@click.argument("s3_bucket") +@click.argument("s3_path_prefix") +@click.argument("box_name", default="/uncompressed-video/") def s3_uri_to_box(s3_bucket, s3_path_prefix, box_name, dry_run, done_filename, max_workers): print(s3_bucket, s3_path_prefix) all_box_folders = dict() box_folder = box_navigate_path(box_name, all_box_folders) - + # box_boat_folders_req = box_folder_get_items(m[0].get('id'), 0) # box_boat_folders = list(filter(lambda f: f.get('type') == 'folder', box_boat_folders_req.get('entries'))) - already_done=[] + already_done = [] with open(done_filename) as f: already_done.extend(map(lambda s: s.strip(), f.readlines())) # print(already_done) - s3c = s.client('s3') + s3c = s.client("s3") def iter_s3(): - paginator = s3c.get_paginator('list_objects_v2') + paginator = s3c.get_paginator("list_objects_v2") for page in paginator.paginate( - Bucket=s3_bucket, - Prefix=s3_path_prefix, + Bucket=s3_bucket, + Prefix=s3_path_prefix, ): - for c in page.get('Contents'): - yield c.get('Key') - - with open(done_filename, 'a') as f: + for c in page.get("Contents"): + yield c.get("Key") + + with open(done_filename, "a") as f: with concurrent.futures.ThreadPoolExecutor(max_workers=10) as exe1: with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as exe2: + def doit(k): if k in already_done: # print('skipping') return - - k_split = k.split('/') + + k_split = k.split("/") k_path = k_split[:-1] k_fname = k_split[-1] - box_target_path = box_name + '/'.join(k_path) + "/" + box_target_path = box_name + "/".join(k_path) + "/" # print("box_target_path", box_target_path) - + if not dry_run: o = s3.Object(s3_bucket, k) - + def dl_chunk(range): # print(f"dl chunk {range} start") resp = o.get(Range=range) - body = resp.get('Body') + body = resp.get("Body") b = body.read() # print(f"dl chunk {range} done") return b - + bs = exe2.map(dl_chunk, byte_range_gen(o.content_length, S3_CHUNK_S)) - b = b''.join(bs) + b = b"".join(bs) # resp = o.get() # with resp.get('Body') as streamingBytes: - # b = streamingBytes.read() - click.echo(f'downloaded {k}') + # b = streamingBytes.read() + click.echo(f"downloaded {k}") box_target_f = box_navigate_path(box_target_path, all_box_folders) - resp = box_folder_upload_item(box_target_f.get('id'), k_fname, b) + resp = box_folder_upload_item(box_target_f.get("id"), k_fname, b) if resp.status_code < 400: - click.echo(f'uploaded {k}') + click.echo(f"uploaded {k}") f.write(k + "\n") else: - click.echo('failed to upload') + click.echo("failed to upload") # print("all s3") # print(list(iter_s3())) results = list(exe1.map(doit, iter_s3())) - print('done') + print("done") @main.command() # @click.argument() def list_box(): - root = box_folder_get_items('0', 0) + root = box_folder_get_items("0", 0) print(root) @main.command() 
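
byte_range_gen drives the chunked, parallel S3 download in s3_uri_to_box above: it tiles [0, total_bytes) into HTTP Range headers with no gaps or overlap, and the final chunk absorbs the remainder. Worked out by hand from the generator above:

    list(byte_range_gen(10, 4))
    # -> ["bytes=0-3", "bytes=4-7", "bytes=8-9"]
    list(byte_range_gen(8, 4))
    # -> ["bytes=0-3", "bytes=4-7"]  (an exact multiple still ends cleanly)

Because S3 range GETs are independent, exe2.map can fetch the chunks concurrently, and b"".join(bs) reassembles them correctly since ThreadPoolExecutor.map preserves input order.
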
 
 
 @main.command()
-@click.option('--dry-run', is_flag=True)
-@click.option('--max-workers', default=25)
-@click.option('--done-filename', default='box_reupload2.done')
-@click.argument('s3_bucket', default='dp.riskedge.fish')
-@click.argument('s3_path_prefix', default='TNC EDGE Trip Video Files/Brancol/alt_hd_upload/')
-@click.argument('box_name', default='uncompressed-video')
+@click.option("--dry-run", is_flag=True)
+@click.option("--max-workers", default=25)
+@click.option("--done-filename", default="box_reupload2.done")
+@click.argument("s3_bucket", default="dp.riskedge.fish")
+@click.argument("s3_path_prefix", default="TNC EDGE Trip Video Files/Brancol/alt_hd_upload/")
+@click.argument("box_name", default="uncompressed-video")
 def hq_s3_to_box(dry_run, s3_bucket, s3_path_prefix, box_name, max_workers, done_filename):
-    root = box_folder_get_items('0', 0)
-    m = list(filter(lambda f: f.get('name') == box_name and f.get('type') == 'folder', root.get('entries')))
+    root = box_folder_get_items("0", 0)
+    m = list(
+        filter(
+            lambda f: f.get("name") == box_name and f.get("type") == "folder", root.get("entries")
+        )
+    )
     if len(m) < 1:
-        click.echo('box folder not found')
+        click.echo("box folder not found")
         sys.exit(1)
     if len(m) > 1:
-        click.echo('too many box folders with name')
+        click.echo("too many box folders with name")
         sys.exit(1)
-
-    box_boat_folders_req = box_folder_get_items(m[0].get('id'), 0)
-    box_boat_folders = list(filter(lambda f: f.get('type') == 'folder', box_boat_folders_req.get('entries')))
 
-    already_done=[]
+    box_boat_folders_req = box_folder_get_items(m[0].get("id"), 0)
+    box_boat_folders = list(
+        filter(lambda f: f.get("type") == "folder", box_boat_folders_req.get("entries"))
+    )
+
+    already_done = []
     with open(done_filename) as f:
         already_done.extend(map(lambda s: s.strip(), f.readlines()))
     # print(already_done)
 
-    s3c = s.client('s3')
+    s3c = s.client("s3")
 
     def iter_s3():
-        paginator = s3c.get_paginator('list_objects_v2')
+        paginator = s3c.get_paginator("list_objects_v2")
         for page in paginator.paginate(
-                Bucket=s3_bucket,
-                Prefix=s3_path_prefix,
+            Bucket=s3_bucket,
+            Prefix=s3_path_prefix,
         ):
-            for c in page.get('Contents'):
-                yield c.get('Key')
-
-    with open(done_filename, 'a') as f:
+            for c in page.get("Contents"):
+                yield c.get("Key")
+
+    with open(done_filename, "a") as f:
+
         def doit(k):
             if k in already_done:
                 # print('skipping')
                 return
-            ksplit= k.split('/')
+            ksplit = k.split("/")
             boat = ksplit[1]
             fname = ksplit[3]
-            m = list(filter(lambda f: f.get('name') == boat , box_boat_folders))
+            m = list(filter(lambda f: f.get("name") == boat, box_boat_folders))
             if len(m) < 1:
                 click.echo(f"boat folder not found {boat}")
                 return
-            box_boat_id = m[0].get('id')
+            box_boat_id = m[0].get("id")
 
-            m = re.match('^(\d+T\d+Z_cam[12].avi)(.done)?$', fname)
+            m = re.match(r"^(\d+T\d+Z_cam[12].avi)(.done)?$", fname)
             if m:
                 fname = m[1]
             else:
-                m = re.match('^(\d+T\d+Z_cam[12])_reenc.mkv$', fname)
+                m = re.match(r"^(\d+T\d+Z_cam[12])_reenc.mkv$", fname)
                 if m:
-                    fname = m[1] + '.mkv'
+                    fname = m[1] + ".mkv"
                 else:
-                    m = re.match('^(cam[12])_(\d\d-\d\d-\d\d\d\d-\d\d-\d\d).avi(.done)?$', fname)
+                    m = re.match(r"^(cam[12])_(\d\d-\d\d-\d\d\d\d-\d\d-\d\d).avi(.done)?$", fname)
                     if m:
-                        dt = datetime.strptime(m[2], '%d-%m-%Y-%H-%M')
-                        dt = dt.replace(tzinfo=timezone.utc)
-                        dt_str = dt.isoformat().replace('-', '').replace(':', '').replace('+0000', 'Z')
-                        fname = dt_str + "_" + m[1] + '.avi'
+                        dt = datetime.strptime(m[2], "%d-%m-%Y-%H-%M")
+                        dt = dt.replace(tzinfo=UTC)
+                        dt_str = (
+                            dt.isoformat().replace("-", "").replace(":", "").replace("+0000", "Z")
+                        )
+                        fname = dt_str + "_" + m[1] + ".avi"
                         print(k + "\n" + fname)
                     else:
-
-                        click.echo(f'no match for fname {fname}')
+                        click.echo(f"no match for fname {fname}")
                         sys.exit(1)
 
             if fname in already_done:
@@ -436,204 +433,190 @@ def doit(k):
                 return
             if not dry_run:
                 resp = s3.Object(s3_bucket, k).get()
-                streamingBytes = resp.get('Body')
+                streamingBytes = resp.get("Body")
                 b = streamingBytes.read()
-                click.echo(f'downloaded {fname}')
+                click.echo(f"downloaded {fname}")
                 resp = box_folder_upload_item(box_boat_id, fname, b)
                 if resp.status_code < 400:
-                    click.echo(f'uploaded {fname}')
+                    click.echo(f"uploaded {fname}")
                     f.write(k + "\n")
                 else:
-                    click.echo('failed to upload')
+                    click.echo("failed to upload")
 
         with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
             results = list(executor.map(doit, iter_s3()))
-    print('done')
-
-
+    print("done")
+
 
 @main.command()
-@click.argument('s3_bucket', default='dp.riskedge.fish')
-@click.argument('s3_path_prefix', default='TNC EDGE Trip Video Files/Saint Patrick/alt_hd_upload/')
-def list_s3(s3_bucket, s3_path_prefix ):
-
-    s3c = s.client('s3')
+@click.argument("s3_bucket", default="dp.riskedge.fish")
+@click.argument("s3_path_prefix", default="TNC EDGE Trip Video Files/Saint Patrick/alt_hd_upload/")
+def list_s3(s3_bucket, s3_path_prefix):
+    s3c = s.client("s3")
 
     def iter_s3():
-        paginator = s3c.get_paginator('list_objects_v2')
+        paginator = s3c.get_paginator("list_objects_v2")
         for page in paginator.paginate(
-                Bucket=s3_bucket,
-                Prefix=s3_path_prefix,
+            Bucket=s3_bucket,
+            Prefix=s3_path_prefix,
         ):
-            for c in page.get('Contents'):
-                yield c.get('Key')
-
+            for c in page.get("Contents"):
+                yield c.get("Key")
+
     for k in iter_s3():
-        ksplit= k.split('/')
+        ksplit = k.split("/")
         fname = ksplit[-1]
-        m = re.match('^(\d+T\d+Z_cam[12].avi)(.done)?$', fname)
+        m = re.match(r"^(\d+T\d+Z_cam[12].avi)(.done)?$", fname)
         if m:
             fname = m[1]
         else:
-            m = re.match('^(\d+T\d+Z_cam[12])_reenc.mkv$', fname)
+            m = re.match(r"^(\d+T\d+Z_cam[12])_reenc.mkv$", fname)
             if m:
-                fname = m[1] + '.mkv'
+                fname = m[1] + ".mkv"
             else:
-                m = re.match('^(cam[12])_(\d\d-\d\d-\d\d\d\d-\d\d-\d\d).avi(.done)?$', fname)
+                m = re.match(r"^(cam[12])_(\d\d-\d\d-\d\d\d\d-\d\d-\d\d).avi(.done)?$", fname)
                 if m:
-                    dt = datetime.strptime(m[2], '%d-%m-%Y-%H-%M')
-                    dt = dt.replace(tzinfo=timezone.utc)
-                    dt_str = dt.isoformat().replace('-', '').replace(':', '').replace('+0000', 'Z')
-                    fname = dt_str + "_" + m[1] + '.avi'
+                    dt = datetime.strptime(m[2], "%d-%m-%Y-%H-%M")
+                    dt = dt.replace(tzinfo=UTC)
+                    dt_str = dt.isoformat().replace("-", "").replace(":", "").replace("+0000", "Z")
+                    fname = dt_str + "_" + m[1] + ".avi"
                     # print(k + "\n" + fname)
                 else:
-
-                    click.echo(f'no match for fname {fname}')
+                    click.echo(f"no match for fname {fname}")
                     sys.exit(1)
         print(fname)
== "folder", all_box): + all_box.extend(list_box_fid(f.get("id"), recurse)) + return all_box - + @main.command() -@click.argument('box_name', default='uncompressed-video/Saint Patrick') -@click.option('-r', 'recurse', is_flag=True) +@click.argument("box_name", default="uncompressed-video/Saint Patrick") +@click.option("-r", "recurse", is_flag=True) def list_box(box_name, recurse): # box_name = 'uncompressed-video' - box_folder_id = '0' - for fname in box_name.split('/'): + box_folder_id = "0" + for fname in box_name.split("/"): res = box_folder_get_items(box_folder_id, 0) - m = list(filter(lambda f: f.get('name') == fname and f.get('type') == 'folder', res.get('entries'))) + m = list( + filter( + lambda f: f.get("name") == fname and f.get("type") == "folder", res.get("entries") + ) + ) if len(m) < 1: - click.echo(f'box folder not found {fname}') + click.echo(f"box folder not found {fname}") sys.exit(1) if len(m) > 1: - click.echo(f'too many box folders with name {fname}') + click.echo(f"too many box folders with name {fname}") sys.exit(1) - box_folder_id = m[0].get('id') - + box_folder_id = m[0].get("id") + all_box = list_box_fid(box_folder_id, recurse) for f in all_box: - print(f.get('name')) - + print(f.get("name")) + @main.command() -@click.argument('box_name', default='uncompressed-video/Saint Patrick') +@click.argument("box_name", default="uncompressed-video/Saint Patrick") def move_box(box_name): # box_name = 'uncompressed-video' - box_folder_id = '0' - for fname in box_name.split('/'): + box_folder_id = "0" + for fname in box_name.split("/"): res = box_folder_get_items(box_folder_id, 0) - m = list(filter(lambda f: f.get('name') == fname and f.get('type') == 'folder', res.get('entries'))) + m = list( + filter( + lambda f: f.get("name") == fname and f.get("type") == "folder", res.get("entries") + ) + ) if len(m) < 1: - click.echo(f'box folder not found {fname}') + click.echo(f"box folder not found {fname}") sys.exit(1) if len(m) > 1: - click.echo(f'too many box folders with name {fname}') + click.echo(f"too many box folders with name {fname}") sys.exit(1) - box_folder_id = m[0].get('id') - + box_folder_id = m[0].get("id") + def iter_files(): offset = 0 res = box_folder_get_items(box_folder_id, offset) - for entry in res.get('entries'): + for entry in res.get("entries"): yield entry - - while res.get('total_count') > len(res.get('entries')) + res.get('offset'): + + while res.get("total_count") > len(res.get("entries")) + res.get("offset"): offset += 100 res = box_folder_get_items(box_folder_id, offset) - for entry in res.get('entries'): + for entry in res.get("entries"): yield entry dayfolders = {} filestomove = [] - - for f in iter_files(): - fname = f.get('name') + fname = f.get("name") ftype = f.get("type") - if ftype == 'file': + if ftype == "file": filestomove.append(f) - elif ftype == 'folder': + elif ftype == "folder": try: - day_str = fname.split('T')[0] - if day_str not in dayfolders.keys(): + day_str = fname.split("T")[0] + if day_str not in dayfolders: dayfolders.update({day_str: f}) - except ValueError as e: - click.echo(f'unknownfolder {fname}') + except ValueError: + click.echo(f"unknownfolder {fname}") else: - click.echo(f'unknowntype {ftype} on {fname}') + click.echo(f"unknowntype {ftype} on {fname}") # print(dayfolders) # print(filestomove[0:2]) def add_day_folder(parent_id, foldername): - url= 'https://api.box.com/2.0/folders' - j = { - "name": foldername, - "parent": { - "id": parent_id - } - } + url = "https://api.box.com/2.0/folders" + j = {"name": foldername, 
"parent": {"id": parent_id}} print("new folder", url, j) - resp = requests.post( - url, - headers={"Authorization": "Bearer "+token()}, - json=j - ) + resp = requests.post(url, headers={"Authorization": "Bearer " + token()}, json=j) return resp.json() - + def move_file_to_folder(box_file_id, box_parent_id): - url= f'https://api.box.com/2.0/files/{box_file_id}' - j = { - "parent": { - "id": box_parent_id - } - } + url = f"https://api.box.com/2.0/files/{box_file_id}" + j = {"parent": {"id": box_parent_id}} print("moving file", url, j) # sys.exit(1) - resp = requests.put( - url, - headers={"Authorization": "Bearer "+token()}, - json=j - ) + resp = requests.put(url, headers={"Authorization": "Bearer " + token()}, json=j) return resp.json() for f in filestomove: - fname = f.get('name') + fname = f.get("name") try: - day_str = fname.split('T')[0] - if day_str not in dayfolders.keys(): + day_str = fname.split("T")[0] + if day_str not in dayfolders: resp = add_day_folder(box_folder_id, day_str) dayfolders.update({day_str: resp}) - - parent = dayfolders.get(day_str) - move_file_to_folder(f.get('id'), parent.get('id')) - except ValueError as e: - click.echo(f'unparsable filedate, cannot move {fname}') + parent = dayfolders.get(day_str) + move_file_to_folder(f.get("id"), parent.get("id")) + except ValueError: + click.echo(f"unparsable filedate, cannot move {fname}") -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/scripts/box_dot_com/box_reupload.sh b/scripts/box_dot_com/box_reupload.sh index 978e5cf..b6cf780 100644 --- a/scripts/box_dot_com/box_reupload.sh +++ b/scripts/box_dot_com/box_reupload.sh @@ -2,7 +2,7 @@ LEN="$(wc -l box_reupload2.done | awk '{print $1}')" -while [ "$LEN" -lt 3497 ] ; do +while [ "$LEN" -lt 3497 ]; do python3 box_reupload.py hq-s3-to-box sleep 1 LEN="$(wc -l box_reupload2.done | awk '{print $1}')" diff --git a/scripts/box_dot_com/box_reupload2.sh b/scripts/box_dot_com/box_reupload2.sh index 906d63f..05df132 100644 --- a/scripts/box_dot_com/box_reupload2.sh +++ b/scripts/box_dot_com/box_reupload2.sh @@ -2,8 +2,8 @@ LEN="$(wc -l box_reupload3.done | awk '{print $1}')" -while [ "$LEN" -lt 4837 ] ; do - python3 box_reupload.py hq-s3-to-box --done-filename box_reupload3.done dp.riskedge.fish 'TNC EDGE Trip Video Files/Saint Patrick/alt_hd_upload/' +while [ "$LEN" -lt 4837 ]; do + python3 box_reupload.py hq-s3-to-box --done-filename box_reupload3.done dp.riskedge.fish 'TNC EDGE Trip Video Files/Saint Patrick/alt_hd_upload/' sleep 1 LEN="$(wc -l box_reupload3.done | awk '{print $1}')" echo "restarting, $LEN" diff --git a/scripts/box_dot_com/boxapiexamples.sh b/scripts/box_dot_com/boxapiexamples.sh index 7a83ac0..100c524 100644 --- a/scripts/box_dot_com/boxapiexamples.sh +++ b/scripts/box_dot_com/boxapiexamples.sh @@ -15,7 +15,7 @@ curl -i -X POST "https://api.box.com/oauth2/token" \ -d "client_id=$CLIENT_ID" \ -d "client_secret=$CLIENT_SECRET" \ -d "grant_type=client_credentials" \ - -d "box_subject_type=enterprise" \ + -d "box_subject_type=enterprise" \ -d "box_subject_id=15290560022" curl -i -X POST "https://api.box.com/oauth2/token" \ @@ -23,7 +23,7 @@ curl -i -X POST "https://api.box.com/oauth2/token" \ -d "client_id=$CLIENT_ID" \ -d "client_secret=$CLIENT_SECRET" \ -d "grant_type=client_credentials" \ - -d "box_subject_type=enterprise" \ + -d "box_subject_type=enterprise" \ -d "box_subject_id=994495604" curl -i -X POST "https://api.box.com/oauth2/token" \ @@ -31,8 +31,7 @@ curl -i -X POST "https://api.box.com/oauth2/token" \ -d 
"client_id=$CLIENT_ID" \ -d "client_secret=$CLIENT_SECRET" \ -d "grant_type=client_credentials" \ - -d "box_subject_type=user" \ + -d "box_subject_type=user" \ -d "box_subject_id=15290560022" - curl -v -H 'Authorization: Bearer wYjFjTpWbjNVpnQmADaMnjU9vNJECoXF' 'https://api.box.com/2.0/folders/231635673007/items' diff --git a/scripts/gh_setup.sh b/scripts/gh_setup.sh index a9f9bfe..ce6ffa0 100644 --- a/scripts/gh_setup.sh +++ b/scripts/gh_setup.sh @@ -1,4 +1,4 @@ - +#!/bin/bash # visit https://github.com/productOps/tnc-edge-service/settings/actions/runners/new?arch=arm64&os=linux # replace this entire script with the contents of that webpage! @@ -6,16 +6,19 @@ # don't commit the secret token to this file! - # Create a folder - mkdir actions-runner - cd actions-runner +# Create a folder +mkdir actions-runner +cd actions-runner + # Download the latest runner package - curl -o actions-runner-linux-arm64-2.304.0.tar.gz -L https://github.com/actions/runner/releases/download/v2.304.0/actions-runner-linux-arm64-2.304.0.tar.gz +curl -o actions-runner-linux-arm64-2.304.0.tar.gz -L https://github.com/actions/runner/releases/download/v2.304.0/actions-runner-linux-arm64-2.304.0.tar.gz + # Optional: Validate the hash - echo "34c49bd0e294abce6e4a073627ed60dc2f31eee970c13d389b704697724b31c6 actions-runner-linux-arm64-2.304.0.tar.gz" | shasum -a 256 -c +echo "34c49bd0e294abce6e4a073627ed60dc2f31eee970c13d389b704697724b31c6 actions-runner-linux-arm64-2.304.0.tar.gz" | shasum -a 256 -c + # Extract the installer - tar xzf ./actions-runner-linux-arm64-2.304.0.tar.gz -#Configure -# Create the runner and start the configuration experience - ./config.sh --url https://github.com/productOps/tnc-edge-service --token XXXXXXXX +tar xzf ./actions-runner-linux-arm64-2.304.0.tar.gz +# Configure +# Create the runner and start the configuration experience +./config.sh --url https://github.com/productOps/tnc-edge-service --token XXXXXXXX diff --git a/scripts/netplan-autoswitcher.sh b/scripts/netplan-autoswitcher.sh index e47d2ff..99b8814 100644 --- a/scripts/netplan-autoswitcher.sh +++ b/scripts/netplan-autoswitcher.sh @@ -2,37 +2,33 @@ # netplan auto switcher! - -if [ "$UID" -gt 0 ] ; then +if [ "$UID" -gt 0 ]; then echo "this script must be run as root" exit 1 fi - - FOUND="" -for file in /etc/netplan/01_eth0_dhcp.yaml* ; do - if [ -e "$file" ] ; then +for file in /etc/netplan/01_eth0_dhcp.yaml*; do + if [ -e "$file" ]; then FOUND="y" fi done -if ! [ "$FOUND" ] ; then +if ! [ "$FOUND" ]; then echo "could not find /etc/netplan/01_eth0_dhcp.yaml*" exit 1 fi FOUND="" -for file in /etc/netplan/01_eth0_static.yaml* ; do - if [ -e "$file" ] ; then +for file in /etc/netplan/01_eth0_static.yaml*; do + if [ -e "$file" ]; then FOUND="y" fi done -if ! [ "$FOUND" ] ; then +if ! 
[ "$FOUND" ]; then echo "could not find /etc/netplan/01_eth0_static.yaml*" exit 1 fi - function switch_to_dhcp { echo "switching netplan to dhcp" mv /etc/netplan/01_eth0_dhcp.yaml* /etc/netplan/01_eth0_dhcp.yaml @@ -49,23 +45,22 @@ function switch_to_static { systemctl try-restart openvpn-client@tnc-edge.service github-actions-runner.service } - -if grep -q "method=manual" /run/NetworkManager/system-connections/netplan-eth0.nmconnection ; then - ROUTE="$(grep -e "route.*=0.0.0.0/0," /run/NetworkManager/system-connections/netplan-eth0.nmconnection )" +if grep -q "method=manual" /run/NetworkManager/system-connections/netplan-eth0.nmconnection; then + ROUTE="$(grep -e "route.*=0.0.0.0/0," /run/NetworkManager/system-connections/netplan-eth0.nmconnection)" GATEWAYIP="${ROUTE##*,}" - if ! ping "$GATEWAYIP" -c 3 >/dev/null 2>&1 ; then + if ! ping "$GATEWAYIP" -c 3 >/dev/null 2>&1; then switch_to_dhcp exit 0 fi - -elif grep -q "method=auto" /run/NetworkManager/system-connections/netplan-eth0.nmconnection ; then + +elif grep -q "method=auto" /run/NetworkManager/system-connections/netplan-eth0.nmconnection; then GATEWAYIP="$(nmcli d show eth0 | grep IP4.GATEWAY | awk '{print $2;}')" STATICGWIP="$(grep "via:" /etc/netplan/01_eth0_static.yaml* | awk '{print $2;}')" - - if [ "$GATEWAYIP" == "$STATICGWIP" ] ; then - if ping "$STATICGWIP" -c 3 >/dev/null 2>&1 ; then - if ping "api.oceanbox2.com" -c 3 >/dev/null 2>&1 ; then + + if [ "$GATEWAYIP" == "$STATICGWIP" ]; then + if ping "$STATICGWIP" -c 3 >/dev/null 2>&1; then + if ping "api.oceanbox2.com" -c 3 >/dev/null 2>&1; then switch_to_static exit 0 fi @@ -76,4 +71,3 @@ else echo "something is wrong with the NetworkManager config that netplan generated" exit 1 fi - diff --git a/scripts/purge-video.sh b/scripts/purge-video.sh index fc18710..71e8ec8 100644 --- a/scripts/purge-video.sh +++ b/scripts/purge-video.sh @@ -1,9 +1,9 @@ #!/bin/bash -while [ "$(du -s /videos/*.avi | awk '{total += $1 }; END { print total}')" -gt 50000000 ] ; do +while [ "$(du -s /videos/*.avi | awk '{total += $1 }; END { print total}')" -gt 50000000 ]; do ls -tr /videos/*.avi | head | xargs rm done -while [ "$(du -s /videos/*_reenc.mkv | awk '{total += $1 }; END {print total}')" -gt 150000000 ] ; do +while [ "$(du -s /videos/*_reenc.mkv | awk '{total += $1 }; END {print total}')" -gt 150000000 ]; do ls -tr /videos/*_reenc.mkv | head | xargs rm done diff --git a/scripts/system-install.sh b/scripts/system-install.sh index 208e011..0fce722 100644 --- a/scripts/system-install.sh +++ b/scripts/system-install.sh @@ -1,19 +1,19 @@ #!/bin/bash SCRIPTNAME="$0" -scriptdir="$(dirname -- "$( readlink -f -- "$0")")" +scriptdir="$(dirname -- "$(readlink -f -- "$0")")" USERNAME="$(whoami)" USERHOME="/home/$USERNAME" cd "$USERHOME" || exit -if [ "$UID" -lt 1000 ] ; then +if [ "$UID" -lt 1000 ]; then echo "This script should be run as a non-root user with 'sudo' access" exit 1 fi -if [ "$ENVIRONMENT" == "" ] || ! [ -e "$ENVIRONMENT" ] ; then +if [ "$ENVIRONMENT" == "" ] || ! [ -e "$ENVIRONMENT" ]; then echo "No ENVIRONMENT specified. 
Please add an export ENVIRONMENT line to .bashrc and restart" exit 1 fi @@ -23,42 +23,42 @@ function help { exit 1 } -while (( "$#" )); do - case $1 in - --do-github) - DO_GITHUB="y" - ;; - --do-copy-numpy) - DO_COPY_PY_PANDAS_TO_VENV="y" - ;; - --do-ondeck) - DO_ONDECK="y" - ;; - --do-aifish) - DO_AIFISH="y" - ;; - *) - help - ;; - esac - shift +while (("$#")); do + case $1 in + --do-github) + DO_GITHUB="y" + ;; + --do-copy-numpy) + DO_COPY_PY_PANDAS_TO_VENV="y" + ;; + --do-ondeck) + DO_ONDECK="y" + ;; + --do-aifish) + DO_AIFISH="y" + ;; + *) + help + ;; + esac + shift done -if ! which iftop ; then sudo apt -y install iftop ; fi -if ! which traceroute ; then sudo apt -y install traceroute ; fi -if ! which jq ; then sudo apt -y install jq ; fi -if ! which curl ; then sudo apt -y install curl ; fi -if ! which mount.cifs ; then sudo apt -y install cifs-utils ; fi -if ! dpkg -s python3-pip | grep "Status: install ok installed" ; then sudo apt -y install python3-pip ; fi -if ! dpkg -s python3-venv | grep "Status: install ok installed" ; then sudo apt -y install python3-venv ; fi -if ! dpkg -s python3-dev | grep "Status: install ok installed" ; then sudo apt -y install python3-dev ; fi -if ! which netplan ; then sudo apt -y install netplan.io ; fi -if ! which rsync ; then sudo apt -y install rsync ; fi -if ! which tmux ; then sudo apt -y install tmux ; fi -if ! which parallel ; then sudo apt -y install parallel ; fi -if ! which par2 ; then sudo apt -y install par2 ; fi -if ! which nmap ; then sudo apt -y install nmap ; fi -if ! which at ; then sudo apt -y install at ; fi +if ! which iftop; then sudo apt -y install iftop; fi +if ! which traceroute; then sudo apt -y install traceroute; fi +if ! which jq; then sudo apt -y install jq; fi +if ! which curl; then sudo apt -y install curl; fi +if ! which mount.cifs; then sudo apt -y install cifs-utils; fi +if ! dpkg -s python3-pip | grep "Status: install ok installed"; then sudo apt -y install python3-pip; fi +if ! dpkg -s python3-venv | grep "Status: install ok installed"; then sudo apt -y install python3-venv; fi +if ! dpkg -s python3-dev | grep "Status: install ok installed"; then sudo apt -y install python3-dev; fi +if ! which netplan; then sudo apt -y install netplan.io; fi +if ! which rsync; then sudo apt -y install rsync; fi +if ! which tmux; then sudo apt -y install tmux; fi +if ! which parallel; then sudo apt -y install parallel; fi +if ! which par2; then sudo apt -y install par2; fi +if ! which nmap; then sudo apt -y install nmap; fi +if ! which at; then sudo apt -y install at; fi WRITE_RTC_UDEV_RULE=0 @@ -66,72 +66,67 @@ RTC_UDEV_RULE_FILE="/etc/udev/rules.d/60-rtc-custom.rules" # RTC_UDEV_RULE_FILE="arst.txt" RTC_UDEV_RULE_STR='ACTION=="add", SUBSYSTEM=="rtc", ATTRS{hctosys}=="0", RUN+="/usr/sbin/hwclock -s --utc"' -if [ -e "$RTC_UDEV_RULE_FILE" ] ; then - if grep -q "$RTC_UDEV_RULE_STR" "$RTC_UDEV_RULE_FILE" ; then +if [ -e "$RTC_UDEV_RULE_FILE" ]; then + if grep -q "$RTC_UDEV_RULE_STR" "$RTC_UDEV_RULE_FILE"; then # no need to write udev rule WRITE_RTC_UDEV_RULE=1 fi fi - -if [ $WRITE_RTC_UDEV_RULE ] ; then - sudo /bin/bash < $RTC_UDEV_RULE_FILE echo '$RTC_UDEV_RULE_STR' >> $RTC_UDEV_RULE_FILE EOF fi - -if journalctl -u systemd-timesyncd.service | tail -n 1 | grep -q -E 'synchroniz.*ntp\.org' ; then - echo "synchronization with online ntp server looks good." - echo "Running hwclock to set hw time and update drift" - sudo /bin/bash <' /etc/nvpmodel.conf ; then - echo "setting new default power level" +if ! 
grep -e '^< PM_CONFIG DEFAULT='"$NEW_PM_ID"' >' /etc/nvpmodel.conf; then + echo "setting new default power level" sudo sed -i"" 's/^< PM_CONFIG DEFAULT=.* >/< PM_CONFIG DEFAULT='"$NEW_PM_ID"' >/' /etc/nvpmodel.conf fi -if ! ( sudo nvpmodel -q | grep -e '^'"$NEW_PM_ID"'$' ) ; then - echo "setting new power level" +if ! (sudo nvpmodel -q | grep -e '^'"$NEW_PM_ID"'$'); then + echo "setting new power level" sudo nvpmodel -m "$NEW_PM_ID" fi - -if ! (hostname | grep -e '^edge[a-z0-9][a-z0-9]*$' ) ; then +if ! (hostname | grep -e '^edge[a-z0-9][a-z0-9]*$'); then echo "set the hostname to 'edgeX'!" echo "be sure to use the command 'sudo hostnamectl set-hostname '" exit 1 fi -if ! grep -E "^127\.[0-9\.]*\s*$(hostname)" /etc/hosts ; then - if ! grep -E "^127\.[0-9\.]*\s*ubuntu$" /etc/hosts ; then +if ! grep -E "^127\.[0-9\.]*\s*$(hostname)" /etc/hosts; then + if ! grep -E "^127\.[0-9\.]*\s*ubuntu$" /etc/hosts; then echo "aah I assumed the old hostname was 'ubuntu', but it's not in /etc/hosts! exiting!" exit 1 fi sudo sed -i"" 's/^127\.\([0-9\.\t ]*\)ubuntu.*$/127.\1'"$(hostname)"'/' /etc/hosts fi - NVFANCONTROL_FILE=/etc/nvfancontrol.conf # NVFANCONTROL_FILE=arst.txt -if [ -e "$NVFANCONTROL_FILE" ] ; then - if ! grep -q -E "FAN_DEFAULT_PROFILE\s*cool" "$NVFANCONTROL_FILE" ; then +if [ -e "$NVFANCONTROL_FILE" ]; then + if ! grep -q -E "FAN_DEFAULT_PROFILE\s*cool" "$NVFANCONTROL_FILE"; then sudo /bin/bash < ./github-actions-runner.service << EOF + if ! [ -e "/etc/systemd/system/github-actions-runner.service" ]; then + cat >./github-actions-runner.service < "$TMP_FILE" << EOF +cat >"$TMP_FILE" </dev/null; then sudo cp "$TMP_FILE" /etc/systemd/system/tnc-edge-http.service - sudo systemctl daemon-reload + sudo systemctl daemon-reload sudo systemctl restart "tnc-edge-http.service" fi rm "$TMP_FILE" - -if ! systemctl status postgresql ; then +if ! systemctl status postgresql; then sudo apt -y install postgresql fi -if [ -z "$(find /usr/include/ -name libpq-fe.h)" ] ; then +if [ -z "$(find /usr/include/ -name libpq-fe.h)" ]; then sudo apt -y install libpq-dev fi -if ! systemctl is-enabled postgresql ; then - sudo systemctl daemon-reload +if ! systemctl is-enabled postgresql; then + sudo systemctl daemon-reload sudo systemctl enable postgresql fi -if ! systemctl is-active postgresql ; then +if ! systemctl is-active postgresql; then sudo systemctl start postgresql sleep 2 - if ! systemctl is-active postgresql ; then - echo "fatal error with postgresql server" - echo "fix and rerun this script" - exit 1 + if ! systemctl is-active postgresql; then + echo "fatal error with postgresql server" + echo "fix and rerun this script" + exit 1 fi fi -if ! ( echo "select 1;" | psql postgres ) ; then +if ! (echo "select 1;" | psql postgres); then sudo -u postgres psql <> "$USERHOME/.ssh/authorized_keys" + if ! 
grep -q "$k" "$USERHOME/.ssh/authorized_keys"; then + echo "$k" >>"$USERHOME/.ssh/authorized_keys" fi done <"$scriptdir"/edge_authorized_keys.txt - - # turn off Ubuntu screen off events gsettings set org.gnome.desktop.session idle-delay 0 gsettings set org.gnome.desktop.screensaver lock-enabled false gsettings set org.gnome.desktop.screensaver ubuntu-lock-on-suspend false - # turn off Ubuntu auto apt updates sudo sed -i"" -e 's/^APT::Periodic::Update-Package-Lists "\?1"\?;/APT::Periodic::Update-Package-Lists "0";/' /etc/apt/apt.conf.d/10periodic sudo sed -i"" -e 's/^APT::Periodic::Download-Upgradeable-Packages "\?1"\?;/APT::Periodic::Download-Upgradeable-Packages "0";/' /etc/apt/apt.conf.d/10periodic @@ -325,14 +312,14 @@ sudo systemctl stop fwupd sudo systemctl disable fwupd # disable internet-connectivity polls -if ! [ -e /etc/NetworkManager/conf.d/20-connectivity-ubuntu.conf ] ; then - # writing to this file overwrites default internet checking behavior. +if ! [ -e /etc/NetworkManager/conf.d/20-connectivity-ubuntu.conf ]; then + # writing to this file overwrites default internet checking behavior. # Empty file means no internet polling # see https://askubuntu.com/a/1094558 sudo touch /etc/NetworkManager/conf.d/20-connectivity-ubuntu.conf fi -if ! which docker-credential-gcr ; then +if ! which docker-credential-gcr; then # rm ./docker-credential-gcr ./docker-credential-gcr.tar.gz # curl -L 'https://github.com/GoogleCloudPlatform/docker-credential-gcr/releases/download/v2.1.8/docker-credential-gcr_linux_arm64-2.1.8.tar.gz' -o docker-credential-gcr.tar.gz # tar xzf docker-credential-gcr.tar.gz @@ -341,39 +328,38 @@ if ! which docker-credential-gcr ; then # actually, I'm going to copy the script from google's docs: VERSION=2.1.8 - OS=linux # or "darwin" for OSX, "windows" for Windows. + OS=linux # or "darwin" for OSX, "windows" for Windows. # shellcheck disable=SC2268 - if [ "x$(uname -p)" == 'xaarch64' ] ; then - ARCH="arm64" # or "386" for 32-bit OSs - elif [ "x$(uname -p)" == 'xx86_64' ] ; then - ARCH="amd64" # or "386" for 32-bit OSs + if [ "x$(uname -p)" == 'xaarch64' ]; then + ARCH="arm64" # or "386" for 32-bit OSs + elif [ "x$(uname -p)" == 'xx86_64' ]; then + ARCH="amd64" # or "386" for 32-bit OSs else echo "unknown system architecture" exit 1 fi - curl -fsSL "https://github.com/GoogleCloudPlatform/docker-credential-gcr/releases/download/v${VERSION}/docker-credential-gcr_${OS}_${ARCH}-${VERSION}.tar.gz" \ - | tar xz docker-credential-gcr \ - && chmod +x docker-credential-gcr \ - && sudo mv docker-credential-gcr /usr/local/bin/ + curl -fsSL "https://github.com/GoogleCloudPlatform/docker-credential-gcr/releases/download/v${VERSION}/docker-credential-gcr_${OS}_${ARCH}-${VERSION}.tar.gz" | + tar xz docker-credential-gcr && + chmod +x docker-credential-gcr && + sudo mv docker-credential-gcr /usr/local/bin/ fi -if ! [ -e "$USERHOME/.config/gcloud/docker_credential_gcr_config.json" ] ; then +if ! [ -e "$USERHOME/.config/gcloud/docker_credential_gcr_config.json" ]; then docker-credential-gcr config --token-source="env, store" fi -if ! grep -E '^export GOOGLE_APPLICATION_CREDENTIALS=' "$USERHOME/.bashrc" ; then - echo "export GOOGLE_APPLICATION_CREDENTIALS=$scriptdir/secret_ondeck_gcr_token.json" >> "$USERHOME/.bashrc" +if ! grep -E '^export GOOGLE_APPLICATION_CREDENTIALS=' "$USERHOME/.bashrc"; then + echo "export GOOGLE_APPLICATION_CREDENTIALS=$scriptdir/secret_ondeck_gcr_token.json" >>"$USERHOME/.bashrc" fi gsettings set org.gnome.Vino require-encryption false - -if ! 
[ -d "$USERHOME/.aws" ] ; then +if ! [ -d "$USERHOME/.aws" ]; then mkdir "$USERHOME/.aws" fi -if ! [ -e "$USERHOME/.aws/credentials" ] ; then - if ! [ -e "$scriptdir/secret_aws_creds.txt" ] ; then +if ! [ -e "$USERHOME/.aws/credentials" ]; then + if ! [ -e "$scriptdir/secret_aws_creds.txt" ]; then echo "aws secret keys file not found! please add the secret and rerun this script" exit 1 fi @@ -383,19 +369,18 @@ if ! [ -e "$USERHOME/.aws/credentials" ] ; then chmod go-rwx "$USERHOME/.aws/credentials" fi - -if [ -e "$USERHOME/.gnupg/pubring.kbx" ] && [ "x$USERNAME:$USERNAME" != "x$(stat --format '%U:%G' "$USERHOME/.gnupg/pubring.kbx")" ] ; then +if [ -e "$USERHOME/.gnupg/pubring.kbx" ] && [ "x$USERNAME:$USERNAME" != "x$(stat --format '%U:%G' "$USERHOME/.gnupg/pubring.kbx")" ]; then sudo chown "$USERNAME":"$USERNAME" "$USERHOME/.gnupg/pubring.kbx" fi FOUND="" -for file in /etc/netplan/01_eth0_dhcp.yaml* ; do - if [ -e "$file" ] ; then +for file in /etc/netplan/01_eth0_dhcp.yaml*; do + if [ -e "$file" ]; then FOUND="y" fi done -if ! [ "$FOUND" ] ; then - cat > ./01_eth0_dhcp.yaml <./01_eth0_dhcp.yaml < ./01_eth0_static.yaml <./01_eth0_static.yaml < ./netplan-autoswitcher.service << EOF + cat >./netplan-autoswitcher.service < "$TMP_FILE" << EOF +cat >"$TMP_FILE" </dev/null; then sudo cp "$TMP_FILE" /etc/systemd/system/thalos-video-autodecrypt.service - sudo systemctl daemon-reload + sudo systemctl daemon-reload sudo systemctl restart "thalos-video-autodecrypt.service" fi rm "$TMP_FILE" -if ! [ -d "/thalos" ] ; then +if ! [ -d "/thalos" ]; then sudo mkdir /thalos sudo chmod go+rwx /thalos fi -if ! [ -d "/videos" ] ; then +if ! [ -d "/videos" ]; then sudo mkdir /videos sudo chmod go+rwx /videos fi -if ! [ -d "/videos/processing" ] ; then +if ! [ -d "/videos/processing" ]; then sudo mkdir /videos/processing sudo chmod go+rwx /videos/processing fi -if ! [ -d "/videos/output" ] ; then +if ! [ -d "/videos/output" ]; then sudo mkdir /videos/output sudo chmod go+rwx /videos/output fi -if ! [ -d "/usbdrive" ] ; then +if ! [ -d "/usbdrive" ]; then sudo mkdir /usbdrive sudo chmod go+rwx /usbdrive fi -if ! [ -e "/etc/systemd/system/thalos.mount" ] ; then - cat > ./thalos.mount << EOF +if ! [ -e "/etc/systemd/system/thalos.mount" ]; then + cat >./thalos.mount < ./thalos.automount << EOF +if ! [ -e "/etc/systemd/system/thalos.automount" ]; then + cat >./thalos.automount </dev/null ; then +if ! sudo test -e "/root/purge-video.sh" || ! sudo diff "$scriptdir/purge-video.sh" /root/purge-video.sh >/dev/null; then sudo cp "$scriptdir/purge-video.sh" /root/purge-video.sh fi +if ! [ -e "/etc/systemd/system/purge-video.service" ]; then -if ! 
[ -e "/etc/systemd/system/purge-video.service" ] ; then - - cat > ./purge-video.service << EOF + cat >./purge-video.service < "$TMP_FILE" << EOF + cat >"$TMP_FILE" </dev/null; then sudo cp "$TMP_FILE" /etc/systemd/system/ondeck-runner.service - sudo systemctl daemon-reload + sudo systemctl daemon-reload sudo systemctl restart "ondeck-runner.service" fi rm "$TMP_FILE" fi - TMP_FILE="$(mktemp)" -cat > "$TMP_FILE" << EOF +cat >"$TMP_FILE" </dev/null; then sudo cp "$TMP_FILE" /etc/systemd/system/gps_fetch.service - sudo systemctl daemon-reload + sudo systemctl daemon-reload sudo systemctl restart "gps_fetch.service" fi rm "$TMP_FILE" -if [ "$DO_COPY_PY_PANDAS_TO_VENV" ] ; then +if [ "$DO_COPY_PY_PANDAS_TO_VENV" ]; then cp -r /usr/lib/python3/dist-packages/pytz* "$USERHOME"/tnc-edge-service/venv/lib/python3.8/site-packages/ cp -r /usr/lib/python3/dist-packages/tzdata* "$USERHOME"/tnc-edge-service/venv/lib/python3.8/site-packages/ cp -r /usr/lib/python3/dist-packages/numpy* "$USERHOME"/tnc-edge-service/venv/lib/python3.8/site-packages/ cp -r /usr/lib/python3/dist-packages/pandas* "$USERHOME"/tnc-edge-service/venv/lib/python3.8/site-packages/ fi - - TMP_FILE="$(mktemp)" -cat > "$TMP_FILE" << EOF +cat >"$TMP_FILE" </dev/null; then sudo cp "$TMP_FILE" /etc/systemd/system/s3_uploader.service - sudo systemctl daemon-reload + sudo systemctl daemon-reload sudo systemctl restart "s3_uploader.service" fi rm "$TMP_FILE" - - - TMP_FILE="$(mktemp)" -cat > "$TMP_FILE" << EOF +cat >"$TMP_FILE" </dev/null; then sudo cp "$TMP_FILE" /etc/systemd/system/reencode_video_tnc.service - sudo systemctl daemon-reload + sudo systemctl daemon-reload sudo systemctl restart "reencode_video_tnc.service" fi rm "$TMP_FILE" +if [ "$DO_ONDECK" ]; then - -if [ "$DO_ONDECK" ] ; then - - - if sudo jetson_clocks --show | grep -q "Xavier NX" ; then + if sudo jetson_clocks --show | grep -q "Xavier NX"; then DEVICE_STR="xavier" fi - if sudo jetson_clocks --show | grep -q "Orin NX" ; then + if sudo jetson_clocks --show | grep -q "Orin NX"; then DEVICE_STR="orin" fi - if [ -z "$DEVICE_STR" ] ; then + if [ -z "$DEVICE_STR" ]; then echo "cannot determine jetson device. Please check the output of 'sudo jetson_clocks --show'" exit 1 fi - if ! [ -s "$ENVIRONMENT" ] ; then + if ! 
[ -s "$ENVIRONMENT" ]; then echo "this script needs access to the ENVIRONMENT file" exit 1 - fi + fi export "$(grep ONDECK_MODEL_ENGINE "$ENVIRONMENT")" export "$(grep ONDECK_POLYGON_STR "$ENVIRONMENT")" - if [ -z "$ONDECK_MODEL_ENGINE" ] ; then + if [ -z "$ONDECK_MODEL_ENGINE" ]; then ONDECK_ENGINE_OVERRIDE="" else ONDECK_ENGINE_OVERRIDE="-e ENGINE_OVERRIDE=$ONDECK_MODEL_ENGINE" fi - if [ -z "$ONDECK_POLYGON_STR" ] ; then + if [ -z "$ONDECK_POLYGON_STR" ]; then echo "please set ONDECK_POLYGON_STR in ENVIRONMENT file" exit 1 fi TMP_FILE="$(mktemp)" - cat > "$TMP_FILE" << EOF + cat >"$TMP_FILE" </dev/null; then sudo cp "$TMP_FILE" /etc/systemd/system/ondeck_model.service - sudo systemctl daemon-reload + sudo systemctl daemon-reload sudo systemctl restart "ondeck_model.service" fi rm "$TMP_FILE" fi - -if [ "$DO_AIFISH" ] ; then - +if [ "$DO_AIFISH" ]; then TMP_FILE="$(mktemp)" - cat > "$TMP_FILE" << EOF + cat >"$TMP_FILE" </dev/null; then sudo cp "$TMP_FILE" /etc/systemd/system/aifish-runner.service - sudo systemctl daemon-reload + sudo systemctl daemon-reload sudo systemctl restart "aifish-runner.service" fi rm "$TMP_FILE" - TMP_FILE="$(mktemp)" - cat > "$TMP_FILE" << EOF + cat >"$TMP_FILE" </dev/null; then sudo cp "$TMP_FILE" /etc/systemd/system/aifish_model.service - sudo systemctl daemon-reload + sudo systemctl daemon-reload sudo systemctl restart "aifish_model.service" fi rm "$TMP_FILE" - fi - - - TMP_FILE="$(mktemp)" -cat > "$TMP_FILE" << EOF +cat >"$TMP_FILE" </dev/null; then sudo cp "$TMP_FILE" /etc/systemd/system/vector_schedule.service - sudo systemctl daemon-reload + sudo systemctl daemon-reload sudo systemctl restart "vector_schedule.service" fi rm "$TMP_FILE" - - diff --git a/scripts/video_bulk_copy/local_to_s3_upload.sh b/scripts/video_bulk_copy/local_to_s3_upload.sh index e5fbf57..1d8b828 100644 --- a/scripts/video_bulk_copy/local_to_s3_upload.sh +++ b/scripts/video_bulk_copy/local_to_s3_upload.sh @@ -1,39 +1,39 @@ #!/bin/bash # SCRIPTNAME="$0" -SCRIPTDIR="$(dirname -- "$( readlink -f -- "$0")")" +SCRIPTDIR="$(dirname -- "$(readlink -f -- "$0")")" EXTHDPATH="/Volumes/Expansion 1" cd "$EXTHDPATH" || exit 1 -for i in *.avi.enc *.avi.done.enc ; do - if grep -q "$i" "$SCRIPTDIR/done.txt" ; then +for i in *.avi.enc *.avi.done.enc; do + if grep -q "$i" "$SCRIPTDIR/done.txt"; then continue fi bname="$(basename "$i")" cname="${bname%*.enc}" boatname="${cname%%_*}" dtname="${cname#*_}" - if [ 'saintpatrick' == "$boatname" ] ; then + if [ 'saintpatrick' == "$boatname" ]; then echo "$cname,Saint Patrick,$dtname" else echo "$cname,${boatname^},$dtname" fi -done | parallel -v -r --eta --jobs 6 --colsep "," 'flock '$SCRIPTDIR'/usb2.lock cp {1}.enc '$SCRIPTDIR'/{1}.enc && gpg -d --batch -o '$SCRIPTDIR'/{1} '$SCRIPTDIR'/{1}.enc && aws s3 --profile AWSAdministratorAccess-867800856651 cp '$SCRIPTDIR'/{1} "s3://dp.riskedge.fish/TNC EDGE Trip Video Files/"{2}"/alt_hd_upload/{3}" && flock '$SCRIPTDIR'/done.lock echo {1}.enc >> '$SCRIPTDIR'/done.txt && rm '$SCRIPTDIR'/{1} '$SCRIPTDIR'/{1}.enc' 2>&1 +done | parallel -v -r --eta --jobs 6 --colsep "," 'flock '$SCRIPTDIR'/usb2.lock cp {1}.enc '$SCRIPTDIR'/{1}.enc && gpg -d --batch -o '$SCRIPTDIR'/{1} '$SCRIPTDIR'/{1}.enc && aws s3 --profile AWSAdministratorAccess-867800856651 cp '$SCRIPTDIR'/{1} "s3://dp.riskedge.fish/TNC EDGE Trip Video Files/"{2}"/alt_hd_upload/{3}" && flock '$SCRIPTDIR'/done.lock echo {1}.enc >> '$SCRIPTDIR'/done.txt && rm '$SCRIPTDIR'/{1} '$SCRIPTDIR'/{1}.enc' 2>&1 -for i in *.avi *.avi.done *.mkv ; do - if grep -q "$i" 
"$SCRIPTDIR/done.txt" ; then +for i in *.avi *.avi.done *.mkv; do + if grep -q "$i" "$SCRIPTDIR/done.txt"; then continue fi bname="$(basename "$i")" boatname="${bname%%_*}" dtname="${bname#*_}" - if [ 'saintpatrick' == "$boatname" ] ; then + if [ 'saintpatrick' == "$boatname" ]; then echo "$bname,Saint Patrick,$dtname" else echo "$bname,${boatname^},$dtname" fi -done | parallel -v -r --eta --jobs 6 --colsep "," 'flock '$SCRIPTDIR'/usb2.lock cp {1} '$SCRIPTDIR'/{1} && aws s3 --profile AWSAdministratorAccess-867800856651 cp '$SCRIPTDIR'/{1} "s3://dp.riskedge.fish/TNC EDGE Trip Video Files/"{2}"/alt_hd_upload/{3}" && flock '$SCRIPTDIR'/done.lock echo {1} >> '$SCRIPTDIR'/done.txt && rm '$SCRIPTDIR'/{1}' 2>&1 +done | parallel -v -r --eta --jobs 6 --colsep "," 'flock '$SCRIPTDIR'/usb2.lock cp {1} '$SCRIPTDIR'/{1} && aws s3 --profile AWSAdministratorAccess-867800856651 cp '$SCRIPTDIR'/{1} "s3://dp.riskedge.fish/TNC EDGE Trip Video Files/"{2}"/alt_hd_upload/{3}" && flock '$SCRIPTDIR'/done.lock echo {1} >> '$SCRIPTDIR'/done.txt && rm '$SCRIPTDIR'/{1}' 2>&1 exit 0 diff --git a/scripts/video_bulk_copy/local_to_usbstick.sh b/scripts/video_bulk_copy/local_to_usbstick.sh index bb87b2d..67e2dde 100644 --- a/scripts/video_bulk_copy/local_to_usbstick.sh +++ b/scripts/video_bulk_copy/local_to_usbstick.sh @@ -1,15 +1,12 @@ #!/bin/bash # SCRIPTNAME="$0" -SCRIPTDIR="$(dirname -- "$( readlink -f -- "$0")")" +SCRIPTDIR="$(dirname -- "$(readlink -f -- "$0")")" EXTHDPATH="/Volumes/Expansion 1" cd "$EXTHDPATH" || exit 1 - ls *.avi *.mkv | parallel -v -r --eta --jobs 2 ' flock '$SCRIPTDIR'/usb.lock cp {} '$SCRIPTDIR'/{} && && rm '$SCRIPTDIR'/{}' 2>&1 | tee mainsh.stdout_and_stderr.txt ls *.enc | parallel -v -r --eta --jobs 2 ' flock '$SCRIPTDIR'/usb.lock cp {} '$SCRIPTDIR'/{} && gpg -e -z 0 --batch -r edgedevice -o /tmp/{}.enc /tmp/{} && flock /tmp/usb.lock cp /tmp/{}.enc /usbdrive/{}.enc && rm /tmp/{} /tmp/{}.enc ; fi ' 2>&1 | tee /home/edge/enc_from_usb_to_usb.stdout_and_stderr.txt - - diff --git a/scripts/video_bulk_copy/thalos_all_files_psql_to_usbstick.sh b/scripts/video_bulk_copy/thalos_all_files_psql_to_usbstick.sh index 1b3a1a8..c313556 100644 --- a/scripts/video_bulk_copy/thalos_all_files_psql_to_usbstick.sh +++ b/scripts/video_bulk_copy/thalos_all_files_psql_to_usbstick.sh @@ -1,10 +1,8 @@ #!/bin/bash SCRIPTNAME="$0" -SCRIPTDIR="$(dirname -- "$( readlink -f -- "$0")")" +SCRIPTDIR="$(dirname -- "$(readlink -f -- "$0")")" touch /tmp/usb.lock /tmp/network.lock - echo "select original_path from video_files where start_datetime > '2023-10-16 16:45:00Z' order by start_datetime asc;" | psql -t | awk 'NF' | python3 -c 'from datetime import datetime; import sys; from pathlib import Path; [print( line.strip(), Path(line.strip()).parents[4].name+"_"+datetime.strptime(Path(line.strip()).name[0:16], "%d-%m-%Y-%H-%M").strftime("%Y%m%dT%H%M00Z")+"_"+Path(line.strip()).parents[2].name+".avi") for line in sys.stdin.readlines() ]' | parallel -v -r --eta --jobs 4 --colsep " " 'if [ ! 
-e "/usbdrive/{2}.enc" ] ; then flock /tmp/network.lock cp {1} /tmp/{2} && gpg -e -z 0 --batch -r edgedevice -o /tmp/{2}.enc /tmp/{2} && flock /tmp/usb.lock cp /tmp/{2}.enc /usbdrive/{2}.enc && rm /tmp/{2} /tmp/{2}.enc ; fi ' - diff --git a/scripts/video_bulk_copy/thalos_specific_files_to_usbstick.sh b/scripts/video_bulk_copy/thalos_specific_files_to_usbstick.sh index a1def5f..c77967b 100644 --- a/scripts/video_bulk_copy/thalos_specific_files_to_usbstick.sh +++ b/scripts/video_bulk_copy/thalos_specific_files_to_usbstick.sh @@ -5,6 +5,4 @@ touch /tmp/usb.lock /tmp/network.lock - find /thalos/brancol/videos/cam{1,2}/{15,16,17,18,19,20,21,22,23,24}-10-2023 -name '*.avi.done' | python3 -c 'from datetime import datetime; import sys; from pathlib import Path; [print( line.strip(), Path(line.strip()).parents[4].name+"_"+datetime.strptime(Path(line.strip()).name[0:16], "%d-%m-%Y-%H-%M").strftime("%Y%m%dT%H%M00Z")+"_"+Path(line.strip()).parents[2].name+".avi") for line in sys.stdin.readlines() ]' | parallel -v -r --eta --jobs 4 --colsep " " 'if [ ! -e "/usbdrive/{2}.enc" ] ; then flock /tmp/network.lock cp {1} /tmp/{2} && gpg -e -z 0 --batch -r edgedevice -o /tmp/{2}.enc /tmp/{2} && flock /tmp/usb.lock cp /tmp/{2}.enc /usbdrive/{2}.enc && rm /tmp/{2} /tmp/{2}.enc ; fi ' - diff --git a/scripts/video_bulk_copy/usbdrive_ensure_encrypted.sh b/scripts/video_bulk_copy/usbdrive_ensure_encrypted.sh index b250c9e..aecc569 100644 --- a/scripts/video_bulk_copy/usbdrive_ensure_encrypted.sh +++ b/scripts/video_bulk_copy/usbdrive_ensure_encrypted.sh @@ -5,7 +5,6 @@ touch /tmp/usb.lock - cd /usbdrive || exit 1 ls | grep -e 'avi' -e 'mkv' | grep -v '.enc' | parallel -v -r --eta --jobs 2 'if [ ! -e "/usbdrive/{}.enc" ] ; then flock /tmp/usb.lock cp {} /tmp/{} && gpg -e -z 0 --batch -r edgedevice -o /tmp/{}.enc /tmp/{} && flock /tmp/usb.lock cp /tmp/{}.enc /usbdrive/{}.enc && rm /tmp/{} /tmp/{}.enc ; fi ' 2>&1 | tee /home/edge/enc_from_usb_to_usb.stdout_and_stderr.txt diff --git a/scripts/vpn-install.sh b/scripts/vpn-install.sh index da16d29..3fbc8d7 100644 --- a/scripts/vpn-install.sh +++ b/scripts/vpn-install.sh @@ -7,8 +7,8 @@ echo "Before running:" echo " 1. ssh into vpn.riskedge.fish" echo " 2. stay in the home dir" -echo " 3. run `./easyrsa/easyrsa --pki-dir=tnc-edge-vpn-pki build-client-full nopass`" -echo " 4. cat the files `tnc-edge-vpn-pki/private/edgeX.key` and `tnc-edge-vpn-pki/issued/edgeX.crt `" +echo " 3. run $(./easyrsa/easyrsa --pki-dir=tnc-edge-vpn-pki build-client-full nopass)" +echo " 4. cat the files $(tnc-edge-vpn-pki/private/edgeX.key) and $(tnc-edge-vpn-pki/issued/edgeX.crt)" echo " 5. edit an existing edgeX.ovpn and paste the key+cert" function usage { @@ -16,20 +16,18 @@ function usage { echo " vpn-install.sh " } -if [ "$1" == "" ] || ! [ -e "$1" ] ; then +if [ "$1" == "" ] || ! [ -e "$1" ]; then echo "no OpenVPN config file" echo "" usage exit 1 fi - -if ! which openvpn ; then +if ! 
-if ! which openvpn ; then
+if ! which openvpn; then
     echo "installing openvpn"
-    sudo apt -y install openvpn 
+    sudo apt -y install openvpn
 fi
 sudo cp "$1" /etc/openvpn/client/tnc-edge.conf
 sudo systemctl enable openvpn-client@tnc-edge
 sudo systemctl restart openvpn-client@tnc-edge
-
diff --git a/tests/ondeck_json_to_tracks.py b/tests/ondeck_json_to_tracks.py
index 163a906..4f38075 100644
--- a/tests/ondeck_json_to_tracks.py
+++ b/tests/ondeck_json_to_tracks.py
@@ -1,92 +1,95 @@
-
-from model import Base, Track, OndeckData
-
 import os
+
+import click
+import sqlalchemy as sa
 from flask.config import Config as FlaskConfig
-flaskconfig = FlaskConfig(root_path='')
+from sqlalchemy.orm import Session
+from sqlalchemy.orm import sessionmaker as SessionMaker
-flaskconfig.from_object('config.defaults')
-if 'ENVIRONMENT' in os.environ:
-    flaskconfig.from_envvar('ENVIRONMENT')
+from model import Base, Track
+
+flaskconfig = FlaskConfig(root_path="")
-import sqlalchemy as sa
-from sqlalchemy.orm import sessionmaker as SessionMaker, Session
+flaskconfig.from_object("config.defaults")
+if "ENVIRONMENT" in os.environ:
+    flaskconfig.from_envvar("ENVIRONMENT")
-import click
 @click.group(invoke_without_command=True)
 @click.pass_context
-@click.option('--dbname', default=flaskconfig.get('DBNAME'))
-@click.option('--dbuser', default=flaskconfig.get('DBUSER'))
+@click.option("--dbname", default=flaskconfig.get("DBNAME"))
+@click.option("--dbuser", default=flaskconfig.get("DBUSER"))
 def main(ctx, dbname, dbuser):
-
-    sa_engine = sa.create_engine("postgresql+psycopg2://%s@/%s"%(dbuser, dbname), echo=True)
+    sa_engine = sa.create_engine(f"postgresql+psycopg2://{dbuser}@/{dbname}", echo=True)
     sessionmaker = SessionMaker(sa_engine)
     Base.metadata.create_all(sa_engine)
     ctx.ensure_object(dict)
-    ctx.obj['sessionmaker'] = sessionmaker
+    ctx.obj["sessionmaker"] = sessionmaker
     if ctx.invoked_subcommand is None:
-        click.echo('I was invoked without subcommand')
+        click.echo("I was invoked without subcommand")
         with sessionmaker() as session:
             active_tracks = {}
             done_tracks = []
-            fname = '/Users/ericfultz/Documents/pops/TNC/tnc-edge-service/tests/data/20231205T212500Z_cam1_ondeck.json'
+            fname = "/Users/ericfultz/Documents/pops/TNC/tnc-edge-service/tests/data/20231205T212500Z_cam1_ondeck.json"
             import json
-            with open(fname) as f: 
+            with open(fname) as f:
                 j = json.load(f)
-            for frame in j['frames']:
-                if 'allActiveTrackingIds' not in frame:
+            for frame in j["frames"]:
+                if "allActiveTrackingIds" not in frame:
                     continue
-                for activeTrackingId_str in frame['allActiveTrackingIds']:
+                for activeTrackingId_str in frame["allActiveTrackingIds"]:
                     activeTrackingId = int(activeTrackingId_str)
-                    if activeTrackingId not in active_tracks.keys():
+                    if activeTrackingId not in active_tracks:
                         active_tracks[activeTrackingId] = Track()
                         active_tracks[activeTrackingId].cocoannotations_uri = fname
                         active_tracks[activeTrackingId].track_id = activeTrackingId
-                        active_tracks[activeTrackingId].first_framenum = frame['frameNum']
+                        active_tracks[activeTrackingId].first_framenum = frame["frameNum"]
                         active_tracks[activeTrackingId].confidences = []
                     t = active_tracks[activeTrackingId]
-                    try: 
-                        idx = frame['trackingIds'].index(activeTrackingId_str)
-                        t.confidences.append(frame['confidence'][idx])
+                    try:
+                        idx = frame["trackingIds"].index(activeTrackingId_str)
+                        t.confidences.append(frame["confidence"][idx])
                     except:
                         t.confidences.append(0.0)
                 for track_id in list(active_tracks.keys()):
                     track = active_tracks[track_id]
-                    if str(track_id) not in frame['allActiveTrackingIds']:
+                    if str(track_id) not in frame["allActiveTrackingIds"]:
-                        track.last_framenum = frame['frameNum']
+                        track.last_framenum = frame["frameNum"]
                         done_tracks.append(track)
                         active_tracks.pop(track_id)
-
-
-            session.add_all(done_tracks)
+            session.add_all(done_tracks)
             session.commit()
-
+
 @main.command()
 @click.pass_context
 def archive(ctx):
-    import run_ondeck
     from pathlib import Path
-    sessionmaker = ctx.obj['sessionmaker']
+    import run_ondeck
+
+    sessionmaker = ctx.obj["sessionmaker"]
     session: Session = sessionmaker()
     with session:
-        res = session.execute(sa.text("select ondeckdata.video_uri, ondeckdata.cocoannotations_uri from ondeckdata \
+        res = session.execute(
+            sa.text(
+                "select ondeckdata.video_uri, ondeckdata.cocoannotations_uri from ondeckdata \
             left join tracks on ondeckdata.cocoannotations_uri = tracks.cocoannotations_uri \
-            where tracks.id is null and ondeckdata.cocoannotations_uri like '/videos/%ondeck.json';"))
-        for (video_uri, json_uri) in res:
+            where tracks.id is null and ondeckdata.cocoannotations_uri like '/videos/%ondeck.json';"
+            )
+        )
+        for video_uri, json_uri in res:
             json_path = Path(json_uri)
             if json_path.is_file():
                 run_ondeck.parse_json(session, Path(video_uri), json_path, only_tracks=True)
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     main()
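Note: the track bookkeeping in the refactored test above is easier to follow without the diff markers. The following self-contained sketch restates the same logic as a reading aid, not as part of this change: plain dicts stand in for the SQLAlchemy Track model, the file name is a parameter, and the bare except is narrowed to the exceptions the lookup can actually raise.

# track_sketch.py (hypothetical, for illustration): one pass over the OnDeck
# JSON frames. A track opens on the first frame whose allActiveTrackingIds
# lists the id, records one confidence per frame (0.0 when the id has no
# detection in that frame), and closes on the first frame where the id is
# no longer active. Tracks still active at end of file stay open, as above.
import json


def tracks_from_ondeck_json(fname):
    active, done = {}, []
    with open(fname) as f:
        frames = json.load(f)["frames"]
    for frame in frames:
        if "allActiveTrackingIds" not in frame:
            continue
        for tid_str in frame["allActiveTrackingIds"]:
            tid = int(tid_str)
            if tid not in active:
                active[tid] = {
                    "track_id": tid,
                    "cocoannotations_uri": fname,
                    "first_framenum": frame["frameNum"],
                    "confidences": [],
                }
            try:
                idx = frame["trackingIds"].index(tid_str)
                active[tid]["confidences"].append(frame["confidence"][idx])
            except (KeyError, ValueError, IndexError):
                active[tid]["confidences"].append(0.0)
        for tid in list(active):
            if str(tid) not in frame["allActiveTrackingIds"]:
                track = active.pop(tid)
                track["last_framenum"] = frame["frameNum"]
                done.append(track)
    return done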
diff --git a/tests/onetimetests/test.py b/tests/onetimetests/test.py
index c5d17b9..c64bbfa 100644
--- a/tests/onetimetests/test.py
+++ b/tests/onetimetests/test.py
@@ -1,15 +1,19 @@
 import os
+
+import sqlalchemy as sa
 from flask.config import Config as FlaskConfig
-flaskconfig = FlaskConfig(root_path='')
+from sqlalchemy.orm import sessionmaker as SessionMaker
-flaskconfig.from_object('config.defaults')
-if 'ENVIRONMENT' in os.environ:
-    flaskconfig.from_envvar('ENVIRONMENT')
+from model import Base as ModelBase
+from model import DeckhandEventView
+
+flaskconfig = FlaskConfig(root_path="")
+
+flaskconfig.from_object("config.defaults")
+if "ENVIRONMENT" in os.environ:
+    flaskconfig.from_envvar("ENVIRONMENT")
-import sqlalchemy as sa
-from model import Base as ModelBase, DeckhandEventView
-from sqlalchemy.orm import sessionmaker as SessionMaker
 sa_engine = sa.create_engine("postgresql+psycopg2://ericfultz@/edge", echo=True)
 sessionmaker = SessionMaker(sa_engine)
 ModelBase.metadata.create_all(sa_engine)
diff --git a/tests/onetimetests/test2.py b/tests/onetimetests/test2.py
index e768e56..a327a82 100644
--- a/tests/onetimetests/test2.py
+++ b/tests/onetimetests/test2.py
@@ -1,18 +1,16 @@
 from datetime import datetime
-with open('brancol_usb_files') as f:
+with open("brancol_usb_files") as f:
     for l in f.readlines():
-        l = l.strip()
-        if l.find(' ') < 0:
-            print(l[:-4])
-            continue
-        n = l.split(' ')[-1]
-        # print(n)
-        # continue
-        try:
-            (_, c, r) = l.split("_")
-            fd = r.split(".")[0]
-            d = datetime.strptime(fd, "%d-%m-%Y-%H-%M").strftime("%Y%m%dT%H%M%SZ")
-            print(d+"_"+c)
-        except:
-            continue
\ No newline at end of file
+        l = l.strip()
+        if l.find(" ") < 0:
+            print(l[:-4])
+            continue
+        n = l.split(" ")[-1]
+        try:
+            (_, c, r) = l.split("_")
+            fd = r.split(".")[0]
+            d = datetime.strptime(fd, "%d-%m-%Y-%H-%M").strftime("%Y%m%dT%H%M%SZ")
+            print(d + "_" + c)
+        except:
+            continue
diff --git a/tests/onetimetests/test3.py b/tests/onetimetests/test3.py
index a185608..e99239e 100644
--- a/tests/onetimetests/test3.py
+++ b/tests/onetimetests/test3.py
@@ -1,1607 +1,1616 @@
-a = ['/thalos/saintpatrick/videos/cam1/03-10-2023/20/03-10-2023-20-25.avi.done',
-'/thalos/saintpatrick/videos/cam1/03-10-2023/20/03-10-2023-20-30.avi.done',
-'/thalos/saintpatrick/videos/cam1/03-10-2023/20/03-10-2023-20-35.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/20/03-10-2023-20-40.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/20/03-10-2023-20-45.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/20/03-10-2023-20-50.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/20/03-10-2023-20-55.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-00.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-05.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-10.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-15.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-20.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-25.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-30.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-35.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-40.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-45.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-50.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-55.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-00.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-05.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-10.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-15.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-20.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-25.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-30.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-35.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-40.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-45.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-50.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-55.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-00.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-05.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-10.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-15.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-20.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-25.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-30.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-35.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-40.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-45.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-50.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-55.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/00/04-10-2023-00-00.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/00/04-10-2023-00-05.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/00/04-10-2023-00-10.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/00/04-10-2023-00-15.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/00/04-10-2023-00-20.avi.done', 
-'/thalos/saintpatrick/videos/cam1/04-10-2023/11/04-10-2023-11-35.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/11/04-10-2023-11-40.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/11/04-10-2023-11-45.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/11/04-10-2023-11-50.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/11/04-10-2023-11-55.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-05.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-10.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-15.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-20.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-25.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-30.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-35.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-40.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-45.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-50.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-55.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-00.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-05.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-10.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-15.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-20.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-25.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-30.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-35.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-40.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-45.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-50.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-55.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-00.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-05.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-10.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-15.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-20.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-25.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-30.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-35.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-40.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-45.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-50.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-55.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-00.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-05.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-10.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-15.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-20.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-25.avi.done', 
-'/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-30.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-35.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-40.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-45.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-50.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-55.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-00.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-05.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-10.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-15.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-20.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-25.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-30.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-35.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-40.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-45.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-50.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-55.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-00.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-05.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-10.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-15.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-20.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-25.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-30.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-35.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-40.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-45.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-50.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-55.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-00.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-05.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-10.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-15.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-20.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-25.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-30.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-35.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-40.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-45.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-50.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-55.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-00.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-05.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-10.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-15.avi.done', 
-'/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-20.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-25.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-30.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-35.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-40.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-45.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-50.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-55.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/20/04-10-2023-20-00.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/20/04-10-2023-20-05.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/11/05-10-2023-11-15.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/11/05-10-2023-11-20.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/11/05-10-2023-11-25.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/11/05-10-2023-11-30.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/11/05-10-2023-11-35.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/11/05-10-2023-11-40.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/11/05-10-2023-11-45.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/11/05-10-2023-11-50.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/11/05-10-2023-11-55.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-00.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-05.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-10.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-15.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-20.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-25.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-30.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-35.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-40.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-45.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-50.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-55.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-00.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-05.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-10.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-15.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-20.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-25.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-30.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-35.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-40.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-45.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-50.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-55.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-00.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-05.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-10.avi.done', 
-'/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-15.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-20.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-25.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-30.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-35.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-40.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-45.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-50.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-55.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-00.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-05.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-10.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-15.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-20.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-25.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-30.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-35.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-40.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-45.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-50.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-55.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-00.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-05.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-10.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-15.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-20.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-25.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-30.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-35.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-40.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-45.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-50.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-55.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-00.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-05.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-10.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-15.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-20.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-25.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-30.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-35.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-40.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-45.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-50.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-55.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-00.avi.done', 
-'/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-05.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-10.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-15.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-20.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-25.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-30.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-35.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-40.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-45.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-50.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-55.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-00.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-05.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-10.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-15.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-20.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-25.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-30.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-35.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-40.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-45.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-50.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-55.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/20/05-10-2023-20-00.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/20/05-10-2023-20-05.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/20/05-10-2023-20-10.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/20/05-10-2023-20-15.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/20/05-10-2023-20-20.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/20/05-10-2023-20-25.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/20/05-10-2023-20-30.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/23/05-10-2023-23-40.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/23/05-10-2023-23-45.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/23/05-10-2023-23-50.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/23/05-10-2023-23-55.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/00/06-10-2023-00-10.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/00/06-10-2023-00-15.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/00/06-10-2023-00-20.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/00/06-10-2023-00-25.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/00/06-10-2023-00-35.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/00/06-10-2023-00-40.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/00/06-10-2023-00-45.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/00/06-10-2023-00-50.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/11/06-10-2023-11-50.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/11/06-10-2023-11-55.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-00.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-05.avi.done', 
-'/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-10.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-15.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-20.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-25.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-30.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-35.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-40.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-45.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-50.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-55.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/13/06-10-2023-13-00.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/13/06-10-2023-13-10.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/13/06-10-2023-13-15.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/13/06-10-2023-13-20.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/13/06-10-2023-13-30.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/13/06-10-2023-13-40.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/14/06-10-2023-14-45.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/14/06-10-2023-14-50.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/14/06-10-2023-14-55.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/15/06-10-2023-15-00.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/15/06-10-2023-15-40.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/15/06-10-2023-15-45.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/15/06-10-2023-15-50.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/15/06-10-2023-15-55.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/16/06-10-2023-16-00.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/16/06-10-2023-16-05.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/16/06-10-2023-16-15.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/16/06-10-2023-16-20.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/16/06-10-2023-16-25.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/16/06-10-2023-16-30.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/16/06-10-2023-16-35.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/16/06-10-2023-16-40.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/16/06-10-2023-16-45.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/16/06-10-2023-16-55.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/18/06-10-2023-18-15.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/18/06-10-2023-18-20.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/18/06-10-2023-18-25.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/20/06-10-2023-20-25.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/20/06-10-2023-20-30.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/11/07-10-2023-11-35.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/11/07-10-2023-11-40.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/11/07-10-2023-11-45.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/11/07-10-2023-11-50.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/11/07-10-2023-11-55.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-00.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-05.avi.done', 
-'/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-10.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-15.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-20.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-25.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-30.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-35.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-40.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-45.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-50.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-55.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-00.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-05.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-10.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-15.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-20.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-25.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-30.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-35.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-40.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-45.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-50.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-55.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-00.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-05.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-10.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-15.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-20.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-25.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-30.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-35.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-40.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-45.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-50.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-55.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-00.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-05.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-10.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-15.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-20.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-25.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-30.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-35.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-40.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-45.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-50.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-55.avi.done', 
-'/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-00.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-05.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-10.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-15.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-20.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-25.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-30.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-35.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-40.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-45.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-50.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-55.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/17/07-10-2023-17-00.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/17/07-10-2023-17-05.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/17/07-10-2023-17-10.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/17/07-10-2023-17-15.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/17/07-10-2023-17-20.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/17/07-10-2023-17-25.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/05/08-10-2023-05-50.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/05/08-10-2023-05-55.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-00.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-05.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-10.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-15.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-20.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-25.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-30.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-35.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-40.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-45.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-50.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-55.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-00.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-05.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-10.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-15.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-20.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-25.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-30.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-35.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-40.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-45.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-50.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-55.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/08/08-10-2023-08-00.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/08/08-10-2023-08-05.avi.done', 
-'/thalos/saintpatrick/videos/cam1/08-10-2023/08/08-10-2023-08-10.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/11/08-10-2023-11-00.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/11/08-10-2023-11-05.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/11/08-10-2023-11-10.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/11/08-10-2023-11-15.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/11/08-10-2023-11-20.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/11/08-10-2023-11-25.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/11/08-10-2023-11-30.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/11/08-10-2023-11-35.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/11/08-10-2023-11-40.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/11/08-10-2023-11-45.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-00.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-05.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-10.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-15.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-20.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-25.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-30.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-35.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-40.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-45.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-50.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-55.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-00.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-05.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-10.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-15.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-20.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-25.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-30.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-35.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-40.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-45.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-50.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-55.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-00.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-05.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-10.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-15.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-20.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-25.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-30.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-35.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-40.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-45.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-50.avi.done', 
-'/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-55.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-00.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-05.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-10.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-15.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-20.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-25.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-30.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-35.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-40.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-45.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-50.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-55.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-00.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-05.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-10.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-15.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-20.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-25.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-30.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-35.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-40.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-45.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-50.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-55.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-00.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-05.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-10.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-15.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-20.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-25.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-30.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-35.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-40.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-45.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-50.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-55.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/18/08-10-2023-18-00.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/18/08-10-2023-18-05.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/18/08-10-2023-18-10.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/11/09-10-2023-11-20.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/11/09-10-2023-11-25.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/11/09-10-2023-11-30.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/11/09-10-2023-11-35.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/11/09-10-2023-11-40.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/11/09-10-2023-11-45.avi.done', 
-'/thalos/saintpatrick/videos/cam1/09-10-2023/11/09-10-2023-11-50.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/11/09-10-2023-11-55.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-00.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-05.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-10.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-15.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-20.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-25.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-30.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-35.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-40.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-45.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-50.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-55.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-00.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-05.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-10.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-15.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-20.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-25.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-30.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-35.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-40.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-45.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-50.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-55.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-00.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-05.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-10.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-15.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-20.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-25.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-30.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-35.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-40.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-45.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-50.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-55.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-00.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-05.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-10.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-15.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-20.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-25.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-30.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-35.avi.done', 
-'/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-40.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-45.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-50.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-55.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/16/09-10-2023-16-00.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/16/09-10-2023-16-05.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/16/09-10-2023-16-10.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/16/09-10-2023-16-15.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/16/09-10-2023-16-20.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/16/09-10-2023-16-25.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/16/09-10-2023-16-30.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/16/09-10-2023-16-35.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-00.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-05.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-10.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-15.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-20.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-25.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-30.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-35.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-40.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-45.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-50.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-55.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/07/10-10-2023-07-00.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/07/10-10-2023-07-05.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/07/10-10-2023-07-10.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/07/10-10-2023-07-15.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/07/10-10-2023-07-20.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/07/10-10-2023-07-25.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/07/10-10-2023-07-30.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/07/10-10-2023-07-35.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/07/10-10-2023-07-40.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/10/10-10-2023-10-40.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/10/10-10-2023-10-45.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/10/10-10-2023-10-50.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/10/10-10-2023-10-55.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-00.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-05.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-10.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-15.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-20.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-25.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-30.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-35.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-40.avi.done', 
-'/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-45.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-50.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-55.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/12/10-10-2023-12-00.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/12/10-10-2023-12-05.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/12/10-10-2023-12-10.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/12/10-10-2023-12-15.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/12/10-10-2023-12-20.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/12/10-10-2023-12-25.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/12/10-10-2023-12-30.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/12/10-10-2023-12-35.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/12/10-10-2023-12-40.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/12/10-10-2023-12-45.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/13/10-10-2023-13-10.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/13/10-10-2023-13-15.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/13/10-10-2023-13-20.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/13/10-10-2023-13-25.avi.done', -'/thalos/saintpatrick/videos/cam1/11-10-2023/12/11-10-2023-12-05.avi.done', -'/thalos/saintpatrick/videos/cam1/11-10-2023/12/11-10-2023-12-10.avi.done', -'/thalos/saintpatrick/videos/cam1/11-10-2023/12/11-10-2023-12-15.avi.done', -'/thalos/saintpatrick/videos/cam1/11-10-2023/12/11-10-2023-12-20.avi.done', -'/thalos/saintpatrick/videos/cam1/11-10-2023/12/11-10-2023-12-25.avi.done', -'/thalos/saintpatrick/videos/cam1/11-10-2023/12/11-10-2023-12-30.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/11/12-10-2023-11-30.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/11/12-10-2023-11-35.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/11/12-10-2023-11-40.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/11/12-10-2023-11-45.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/11/12-10-2023-11-50.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/11/12-10-2023-11-55.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-00.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-05.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-10.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-15.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-20.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-25.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-30.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-35.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-40.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-45.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-50.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-55.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-00.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-05.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-10.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-15.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-20.avi.done', 
-'/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-25.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-30.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-35.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-40.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-45.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-50.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-55.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/14/12-10-2023-14-00.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/14/12-10-2023-14-05.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/14/12-10-2023-14-10.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/14/12-10-2023-14-15.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/14/12-10-2023-14-20.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/14/12-10-2023-14-25.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/14/12-10-2023-14-35.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/14/12-10-2023-14-45.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/14/12-10-2023-14-50.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/14/12-10-2023-14-55.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-00.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-05.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-10.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-15.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-20.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-25.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-30.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-35.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-40.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-45.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-50.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-55.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-00.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-05.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-10.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-15.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-20.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-25.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-30.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-35.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-40.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-45.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-50.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-55.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-00.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-05.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-10.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-15.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-20.avi.done', 
-'/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-25.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-30.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-35.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-40.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-45.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-50.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-55.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/18/12-10-2023-18-00.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/06/13-10-2023-06-15.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/06/13-10-2023-06-20.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/06/13-10-2023-06-25.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/06/13-10-2023-06-30.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/06/13-10-2023-06-35.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/06/13-10-2023-06-40.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/06/13-10-2023-06-45.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/06/13-10-2023-06-50.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/06/13-10-2023-06-55.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/07/13-10-2023-07-00.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/07/13-10-2023-07-05.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/07/13-10-2023-07-10.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/07/13-10-2023-07-15.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/07/13-10-2023-07-20.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/07/13-10-2023-07-25.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/10/13-10-2023-10-50.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/10/13-10-2023-10-55.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-00.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-05.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-10.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-15.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-20.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-25.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-30.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-35.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-40.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-45.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-50.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-55.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-00.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-05.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-10.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-15.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-20.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-25.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-30.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-35.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-40.avi.done', 
-'/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-45.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-50.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-55.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-00.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-05.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-10.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-15.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-20.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-25.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-30.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-35.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-40.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-45.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-50.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-55.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-00.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-05.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-10.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-15.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-20.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-25.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-30.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-35.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-40.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-45.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-50.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-55.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-00.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-05.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-10.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-15.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-20.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-25.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-30.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-35.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-40.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-45.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-50.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-55.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-00.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-05.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-10.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-15.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-20.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-25.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-30.avi.done', 
-'/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-35.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-40.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-45.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-50.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-55.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/17/13-10-2023-17-00.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/17/13-10-2023-17-05.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/17/13-10-2023-17-10.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/17/13-10-2023-17-15.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/17/13-10-2023-17-20.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/17/13-10-2023-17-25.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/17/13-10-2023-17-30.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/17/13-10-2023-17-35.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/17/13-10-2023-17-40.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/17/13-10-2023-17-45.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/20/03-10-2023-20-25.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/20/03-10-2023-20-30.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/20/03-10-2023-20-35.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/20/03-10-2023-20-40.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/20/03-10-2023-20-45.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/20/03-10-2023-20-50.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/20/03-10-2023-20-55.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-00.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-05.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-10.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-15.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-20.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-25.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-30.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-35.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-40.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-45.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-50.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-55.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-00.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-05.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-10.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-15.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-20.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-25.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-30.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-35.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-40.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-45.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-50.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-55.avi.done', 
-'/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-00.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-05.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-10.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-15.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-20.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-25.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-30.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-35.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-40.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-45.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-50.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-55.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/00/04-10-2023-00-00.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/00/04-10-2023-00-05.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/00/04-10-2023-00-10.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/00/04-10-2023-00-15.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/00/04-10-2023-00-20.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/11/04-10-2023-11-35.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/11/04-10-2023-11-40.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/11/04-10-2023-11-45.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/11/04-10-2023-11-50.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/11/04-10-2023-11-55.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-05.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-10.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-15.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-20.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-25.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-30.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-35.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-40.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-45.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-50.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-55.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-00.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-05.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-10.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-15.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-20.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-25.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-30.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-35.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-40.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-45.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-50.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-55.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-00.avi.done', 
-'/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-05.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-10.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-15.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-20.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-25.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-30.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-35.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-40.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-45.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-50.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-55.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-00.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-05.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-10.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-15.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-20.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-25.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-30.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-35.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-40.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-45.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-50.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-55.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-00.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-05.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-10.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-15.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-20.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-25.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-30.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-35.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-40.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-45.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-50.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-55.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-00.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-05.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-10.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-15.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-20.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-25.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-30.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-35.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-40.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-45.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-50.avi.done', 
-'/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-55.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-00.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-05.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-10.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-15.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-20.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-25.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-30.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-35.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-40.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-45.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-50.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-55.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-00.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-05.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-10.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-15.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-20.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-25.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-30.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-35.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-40.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-45.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-50.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-55.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/20/04-10-2023-20-00.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/20/04-10-2023-20-05.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/11/05-10-2023-11-15.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/11/05-10-2023-11-20.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/11/05-10-2023-11-25.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/11/05-10-2023-11-30.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/11/05-10-2023-11-35.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/11/05-10-2023-11-40.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/11/05-10-2023-11-45.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/11/05-10-2023-11-50.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/11/05-10-2023-11-55.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-00.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-05.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-10.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-15.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-20.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-25.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-30.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-35.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-40.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-45.avi.done', 
-'/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-50.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-55.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-00.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-05.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-10.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-15.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-20.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-25.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-30.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-35.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-40.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-45.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-50.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-55.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-00.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-05.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-10.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-15.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-20.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-25.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-30.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-35.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-40.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-45.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-50.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-55.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-00.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-05.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-10.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-15.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-20.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-25.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-35.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-40.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-45.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-50.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-55.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-00.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-05.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-10.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-15.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-20.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-25.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-30.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-35.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-40.avi.done', 
-'/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-45.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-50.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-55.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-00.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-05.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-10.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-15.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-20.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-25.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-30.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-35.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-40.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-45.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-50.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-55.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-00.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-05.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-10.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-15.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-20.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-25.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-30.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-35.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-40.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-45.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-50.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-55.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-00.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-05.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-10.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-15.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-20.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-25.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-30.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-35.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-40.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-45.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-50.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-55.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/20/05-10-2023-20-00.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/20/05-10-2023-20-05.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/20/05-10-2023-20-10.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/20/05-10-2023-20-15.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/20/05-10-2023-20-20.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/20/05-10-2023-20-25.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/20/05-10-2023-20-30.avi.done', 
-'/thalos/saintpatrick/videos/cam2/05-10-2023/23/05-10-2023-23-40.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/23/05-10-2023-23-45.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/23/05-10-2023-23-50.avi.done', -'/thalos/saintpatrick/videos/cam2/05-10-2023/23/05-10-2023-23-55.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/00/06-10-2023-00-10.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/00/06-10-2023-00-15.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/00/06-10-2023-00-20.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/00/06-10-2023-00-25.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/00/06-10-2023-00-35.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/00/06-10-2023-00-40.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/00/06-10-2023-00-45.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/00/06-10-2023-00-50.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/11/06-10-2023-11-50.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/11/06-10-2023-11-55.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-00.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-05.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-10.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-15.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-20.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-25.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-30.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-35.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-40.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-45.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-50.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-55.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/13/06-10-2023-13-00.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/13/06-10-2023-13-10.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/13/06-10-2023-13-15.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/13/06-10-2023-13-20.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/13/06-10-2023-13-30.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/13/06-10-2023-13-40.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/14/06-10-2023-14-45.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/14/06-10-2023-14-50.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/14/06-10-2023-14-55.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/15/06-10-2023-15-00.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/15/06-10-2023-15-40.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/15/06-10-2023-15-45.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/15/06-10-2023-15-50.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/15/06-10-2023-15-55.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/16/06-10-2023-16-00.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/16/06-10-2023-16-05.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/16/06-10-2023-16-15.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/16/06-10-2023-16-20.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/16/06-10-2023-16-25.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/16/06-10-2023-16-30.avi.done', 
-'/thalos/saintpatrick/videos/cam2/06-10-2023/16/06-10-2023-16-35.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/16/06-10-2023-16-40.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/16/06-10-2023-16-45.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/16/06-10-2023-16-55.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/18/06-10-2023-18-15.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/18/06-10-2023-18-20.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/18/06-10-2023-18-25.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/20/06-10-2023-20-25.avi.done', -'/thalos/saintpatrick/videos/cam2/06-10-2023/20/06-10-2023-20-30.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/11/07-10-2023-11-35.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/11/07-10-2023-11-40.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/11/07-10-2023-11-45.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/11/07-10-2023-11-50.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/11/07-10-2023-11-55.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-00.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-05.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-10.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-15.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-20.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-25.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-30.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-35.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-40.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-45.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-50.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-55.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-00.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-05.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-10.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-15.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-20.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-25.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-30.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-35.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-40.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-45.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-50.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-55.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-00.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-05.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-10.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-15.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-20.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-25.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-30.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-35.avi.done', 
-'/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-40.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-45.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-50.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-55.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-00.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-05.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-10.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-15.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-20.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-25.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-30.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-35.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-40.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-45.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-50.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-55.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-00.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-05.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-10.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-15.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-20.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-25.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-30.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-35.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-40.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-45.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-50.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-55.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/17/07-10-2023-17-00.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/17/07-10-2023-17-05.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/17/07-10-2023-17-10.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/17/07-10-2023-17-15.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/17/07-10-2023-17-20.avi.done', -'/thalos/saintpatrick/videos/cam2/07-10-2023/17/07-10-2023-17-25.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/05/08-10-2023-05-50.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/05/08-10-2023-05-55.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-00.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-05.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-10.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-15.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-20.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-25.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-30.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-35.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-40.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-45.avi.done', 
-'/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-50.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-55.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-00.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-05.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-10.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-15.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-20.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-25.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-30.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-35.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-40.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-45.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-50.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-55.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/08/08-10-2023-08-00.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/08/08-10-2023-08-05.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/08/08-10-2023-08-10.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/11/08-10-2023-11-00.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/11/08-10-2023-11-05.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/11/08-10-2023-11-10.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/11/08-10-2023-11-15.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/11/08-10-2023-11-20.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/11/08-10-2023-11-25.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/11/08-10-2023-11-30.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/11/08-10-2023-11-35.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/11/08-10-2023-11-40.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/11/08-10-2023-11-45.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-00.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-05.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-10.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-15.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-20.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-25.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-30.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-35.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-40.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-45.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-50.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-55.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-00.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-05.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-10.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-15.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-20.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-25.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-30.avi.done', 
-'/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-35.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-40.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-45.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-50.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-55.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-00.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-05.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-10.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-15.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-20.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-25.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-30.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-35.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-40.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-45.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-50.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-55.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-00.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-05.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-10.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-15.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-20.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-25.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-30.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-35.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-40.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-45.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-50.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-55.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-00.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-05.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-10.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-15.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-20.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-25.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-30.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-35.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-40.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-45.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-50.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-55.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-00.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-05.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-10.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-15.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-20.avi.done', 
-'/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-25.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-30.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-35.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-40.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-45.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-50.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-55.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/18/08-10-2023-18-00.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/18/08-10-2023-18-05.avi.done', -'/thalos/saintpatrick/videos/cam2/08-10-2023/18/08-10-2023-18-10.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/11/09-10-2023-11-20.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/11/09-10-2023-11-25.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/11/09-10-2023-11-30.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/11/09-10-2023-11-35.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/11/09-10-2023-11-40.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/11/09-10-2023-11-45.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/11/09-10-2023-11-50.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/11/09-10-2023-11-55.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-00.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-05.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-10.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-15.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-20.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-25.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-30.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-35.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-40.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-45.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-50.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-55.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-00.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-05.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-10.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-15.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-20.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-25.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-30.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-35.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-40.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-45.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-50.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-55.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-00.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-05.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-10.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-15.avi.done', 
-'/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-20.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-25.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-30.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-35.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-40.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-45.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-50.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-55.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-00.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-05.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-10.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-15.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-20.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-25.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-30.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-35.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-40.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-45.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-50.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-55.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/16/09-10-2023-16-00.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/16/09-10-2023-16-05.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/16/09-10-2023-16-10.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/16/09-10-2023-16-15.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/16/09-10-2023-16-20.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/16/09-10-2023-16-25.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/16/09-10-2023-16-30.avi.done', -'/thalos/saintpatrick/videos/cam2/09-10-2023/16/09-10-2023-16-35.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-00.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-05.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-10.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-15.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-20.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-25.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-30.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-35.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-40.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-45.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-50.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-55.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/07/10-10-2023-07-00.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/07/10-10-2023-07-05.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/07/10-10-2023-07-10.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/07/10-10-2023-07-15.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/07/10-10-2023-07-20.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/07/10-10-2023-07-25.avi.done', 
-'/thalos/saintpatrick/videos/cam2/10-10-2023/07/10-10-2023-07-30.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/07/10-10-2023-07-35.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/07/10-10-2023-07-40.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/10/10-10-2023-10-40.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/10/10-10-2023-10-45.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/10/10-10-2023-10-50.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/10/10-10-2023-10-55.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-00.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-05.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-10.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-15.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-20.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-25.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-30.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-35.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-40.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-45.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-50.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-55.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/12/10-10-2023-12-00.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/12/10-10-2023-12-05.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/12/10-10-2023-12-10.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/12/10-10-2023-12-15.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/12/10-10-2023-12-20.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/12/10-10-2023-12-25.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/12/10-10-2023-12-30.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/12/10-10-2023-12-35.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/12/10-10-2023-12-40.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/12/10-10-2023-12-45.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/13/10-10-2023-13-10.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/13/10-10-2023-13-15.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/13/10-10-2023-13-20.avi.done', -'/thalos/saintpatrick/videos/cam2/10-10-2023/13/10-10-2023-13-25.avi.done', -'/thalos/saintpatrick/videos/cam2/11-10-2023/12/11-10-2023-12-05.avi.done', -'/thalos/saintpatrick/videos/cam2/11-10-2023/12/11-10-2023-12-10.avi.done', -'/thalos/saintpatrick/videos/cam2/11-10-2023/12/11-10-2023-12-15.avi.done', -'/thalos/saintpatrick/videos/cam2/11-10-2023/12/11-10-2023-12-20.avi.done', -'/thalos/saintpatrick/videos/cam2/11-10-2023/12/11-10-2023-12-25.avi.done', -'/thalos/saintpatrick/videos/cam2/11-10-2023/12/11-10-2023-12-30.avi.done', -'/thalos/saintpatrick/videos/cam2/12-10-2023/11/12-10-2023-11-30.avi.done', -'/thalos/saintpatrick/videos/cam2/12-10-2023/11/12-10-2023-11-35.avi.done', -'/thalos/saintpatrick/videos/cam2/12-10-2023/11/12-10-2023-11-40.avi.done', -'/thalos/saintpatrick/videos/cam2/12-10-2023/11/12-10-2023-11-45.avi.done', -'/thalos/saintpatrick/videos/cam2/12-10-2023/11/12-10-2023-11-50.avi.done', -'/thalos/saintpatrick/videos/cam2/12-10-2023/11/12-10-2023-11-55.avi.done', -'/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-00.avi.done', 
-'/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/14/12-10-2023-14-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/14/12-10-2023-14-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/14/12-10-2023-14-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/14/12-10-2023-14-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/14/12-10-2023-14-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/14/12-10-2023-14-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/14/12-10-2023-14-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/14/12-10-2023-14-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/14/12-10-2023-14-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/14/12-10-2023-14-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/18/12-10-2023-18-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/06/13-10-2023-06-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/06/13-10-2023-06-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/06/13-10-2023-06-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/06/13-10-2023-06-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/06/13-10-2023-06-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/06/13-10-2023-06-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/06/13-10-2023-06-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/06/13-10-2023-06-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/06/13-10-2023-06-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/07/13-10-2023-07-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/07/13-10-2023-07-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/07/13-10-2023-07-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/07/13-10-2023-07-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/07/13-10-2023-07-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/07/13-10-2023-07-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/10/13-10-2023-10-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/10/13-10-2023-10-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/17/13-10-2023-17-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/17/13-10-2023-17-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/17/13-10-2023-17-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/17/13-10-2023-17-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/17/13-10-2023-17-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/17/13-10-2023-17-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/17/13-10-2023-17-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/17/13-10-2023-17-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/17/13-10-2023-17-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/17/13-10-2023-17-45.avi.done',
+from datetime import datetime
+
+video_paths = [
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/20/03-10-2023-20-25.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/20/03-10-2023-20-30.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/20/03-10-2023-20-35.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/20/03-10-2023-20-40.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/20/03-10-2023-20-45.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/20/03-10-2023-20-50.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/20/03-10-2023-20-55.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-00.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-05.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-10.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-15.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-20.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-25.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-30.avi.done",
"/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-35.avi.done", + "/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-40.avi.done", + "/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-45.avi.done", + "/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-50.avi.done", + "/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-55.avi.done", + "/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-00.avi.done", + "/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-05.avi.done", + "/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-10.avi.done", + "/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-15.avi.done", + "/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-20.avi.done", + "/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-25.avi.done", + "/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-30.avi.done", + "/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-35.avi.done", + "/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-40.avi.done", + "/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-45.avi.done", + "/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-50.avi.done", + "/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-55.avi.done", + "/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-00.avi.done", + "/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-05.avi.done", + "/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-10.avi.done", + "/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-15.avi.done", + "/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-20.avi.done", + "/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-25.avi.done", + "/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-30.avi.done", + "/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-35.avi.done", + "/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-40.avi.done", + "/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-45.avi.done", + "/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-50.avi.done", + "/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-55.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/00/04-10-2023-00-00.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/00/04-10-2023-00-05.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/00/04-10-2023-00-10.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/00/04-10-2023-00-15.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/00/04-10-2023-00-20.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/11/04-10-2023-11-35.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/11/04-10-2023-11-40.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/11/04-10-2023-11-45.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/11/04-10-2023-11-50.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/11/04-10-2023-11-55.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-05.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-10.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-15.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-20.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-25.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-30.avi.done", + 
"/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-35.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-40.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-45.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-50.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-55.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-00.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-05.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-10.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-15.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-20.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-25.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-30.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-35.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-40.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-45.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-50.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-55.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-00.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-05.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-10.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-15.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-20.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-25.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-30.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-35.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-40.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-45.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-50.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-55.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-00.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-05.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-10.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-15.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-20.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-25.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-30.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-35.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-40.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-45.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-50.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-55.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-00.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-05.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-10.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-15.avi.done", + 
"/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-20.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-25.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-30.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-35.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-40.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-45.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-50.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-55.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-00.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-05.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-10.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-15.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-20.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-25.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-30.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-35.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-40.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-45.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-50.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-55.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-00.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-05.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-10.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-15.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-20.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-25.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-30.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-35.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-40.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-45.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-50.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-55.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-00.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-05.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-10.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-15.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-20.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-25.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-30.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-35.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-40.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-45.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-50.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-55.avi.done", + "/thalos/saintpatrick/videos/cam1/04-10-2023/20/04-10-2023-20-00.avi.done", + 
"/thalos/saintpatrick/videos/cam1/04-10-2023/20/04-10-2023-20-05.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/11/05-10-2023-11-15.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/11/05-10-2023-11-20.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/11/05-10-2023-11-25.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/11/05-10-2023-11-30.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/11/05-10-2023-11-35.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/11/05-10-2023-11-40.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/11/05-10-2023-11-45.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/11/05-10-2023-11-50.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/11/05-10-2023-11-55.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-00.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-05.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-10.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-15.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-20.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-25.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-30.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-35.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-40.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-45.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-50.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-55.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-00.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-05.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-10.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-15.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-20.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-25.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-30.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-35.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-40.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-45.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-50.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-55.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-00.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-05.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-10.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-15.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-20.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-25.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-30.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-35.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-40.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-45.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-50.avi.done", + 
"/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-55.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-00.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-05.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-10.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-15.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-20.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-25.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-30.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-35.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-40.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-45.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-50.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-55.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-00.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-05.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-10.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-15.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-20.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-25.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-30.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-35.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-40.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-45.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-50.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-55.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-00.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-05.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-10.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-15.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-20.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-25.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-30.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-35.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-40.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-45.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-50.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-55.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-00.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-05.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-10.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-15.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-20.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-25.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-30.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-35.avi.done", + 
"/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-40.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-45.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-50.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-55.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-00.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-05.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-10.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-15.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-20.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-25.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-30.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-35.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-40.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-45.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-50.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-55.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/20/05-10-2023-20-00.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/20/05-10-2023-20-05.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/20/05-10-2023-20-10.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/20/05-10-2023-20-15.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/20/05-10-2023-20-20.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/20/05-10-2023-20-25.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/20/05-10-2023-20-30.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/23/05-10-2023-23-40.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/23/05-10-2023-23-45.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/23/05-10-2023-23-50.avi.done", + "/thalos/saintpatrick/videos/cam1/05-10-2023/23/05-10-2023-23-55.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/00/06-10-2023-00-10.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/00/06-10-2023-00-15.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/00/06-10-2023-00-20.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/00/06-10-2023-00-25.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/00/06-10-2023-00-35.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/00/06-10-2023-00-40.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/00/06-10-2023-00-45.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/00/06-10-2023-00-50.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/11/06-10-2023-11-50.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/11/06-10-2023-11-55.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-00.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-05.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-10.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-15.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-20.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-25.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-30.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-35.avi.done", + 
"/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-40.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-45.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-50.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-55.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/13/06-10-2023-13-00.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/13/06-10-2023-13-10.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/13/06-10-2023-13-15.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/13/06-10-2023-13-20.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/13/06-10-2023-13-30.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/13/06-10-2023-13-40.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/14/06-10-2023-14-45.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/14/06-10-2023-14-50.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/14/06-10-2023-14-55.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/15/06-10-2023-15-00.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/15/06-10-2023-15-40.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/15/06-10-2023-15-45.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/15/06-10-2023-15-50.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/15/06-10-2023-15-55.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/16/06-10-2023-16-00.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/16/06-10-2023-16-05.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/16/06-10-2023-16-15.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/16/06-10-2023-16-20.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/16/06-10-2023-16-25.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/16/06-10-2023-16-30.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/16/06-10-2023-16-35.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/16/06-10-2023-16-40.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/16/06-10-2023-16-45.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/16/06-10-2023-16-55.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/18/06-10-2023-18-15.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/18/06-10-2023-18-20.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/18/06-10-2023-18-25.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/20/06-10-2023-20-25.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/20/06-10-2023-20-30.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/11/07-10-2023-11-35.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/11/07-10-2023-11-40.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/11/07-10-2023-11-45.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/11/07-10-2023-11-50.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/11/07-10-2023-11-55.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-00.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-05.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-10.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-15.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-20.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-25.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-30.avi.done", + 
"/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-35.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-40.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-45.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-50.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-55.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-00.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-05.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-10.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-15.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-20.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-25.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-30.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-35.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-40.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-45.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-50.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-55.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-00.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-05.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-10.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-15.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-20.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-25.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-30.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-35.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-40.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-45.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-50.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-55.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-00.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-05.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-10.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-15.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-20.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-25.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-30.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-35.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-40.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-45.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-50.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-55.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-00.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-05.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-10.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-15.avi.done", + 
"/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-20.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-25.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-30.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-35.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-40.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-45.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-50.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-55.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/17/07-10-2023-17-00.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/17/07-10-2023-17-05.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/17/07-10-2023-17-10.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/17/07-10-2023-17-15.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/17/07-10-2023-17-20.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/17/07-10-2023-17-25.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/05/08-10-2023-05-50.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/05/08-10-2023-05-55.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-00.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-05.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-10.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-15.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-20.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-25.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-30.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-35.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-40.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-45.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-50.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-55.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-00.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-05.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-10.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-15.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-20.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-25.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-30.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-35.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-40.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-45.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-50.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-55.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/08/08-10-2023-08-00.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/08/08-10-2023-08-05.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/08/08-10-2023-08-10.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/11/08-10-2023-11-00.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/11/08-10-2023-11-05.avi.done", + 
"/thalos/saintpatrick/videos/cam1/08-10-2023/11/08-10-2023-11-10.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/11/08-10-2023-11-15.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/11/08-10-2023-11-20.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/11/08-10-2023-11-25.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/11/08-10-2023-11-30.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/11/08-10-2023-11-35.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/11/08-10-2023-11-40.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/11/08-10-2023-11-45.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-00.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-05.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-10.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-15.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-20.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-25.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-30.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-35.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-40.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-45.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-50.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-55.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-00.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-05.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-10.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-15.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-20.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-25.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-30.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-35.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-40.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-45.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-50.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-55.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-00.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-05.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-10.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-15.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-20.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-25.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-30.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-35.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-40.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-45.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-50.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-55.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-00.avi.done", + 
"/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-05.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-10.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-15.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-20.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-25.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-30.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-35.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-40.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-45.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-50.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-55.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-00.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-05.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-10.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-15.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-20.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-25.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-30.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-35.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-40.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-45.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-50.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-55.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-00.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-05.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-10.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-15.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-20.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-25.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-30.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-35.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-40.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-45.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-50.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-55.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/18/08-10-2023-18-00.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/18/08-10-2023-18-05.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/18/08-10-2023-18-10.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/11/09-10-2023-11-20.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/11/09-10-2023-11-25.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/11/09-10-2023-11-30.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/11/09-10-2023-11-35.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/11/09-10-2023-11-40.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/11/09-10-2023-11-45.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/11/09-10-2023-11-50.avi.done", + 
"/thalos/saintpatrick/videos/cam1/09-10-2023/11/09-10-2023-11-55.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-00.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-05.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-10.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-15.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-20.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-25.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-30.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-35.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-40.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-45.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-50.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-55.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-00.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-05.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-10.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-15.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-20.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-25.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-30.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-35.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-40.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-45.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-50.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-55.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-00.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-05.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-10.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-15.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-20.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-25.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-30.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-35.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-40.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-45.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-50.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-55.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-00.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-05.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-10.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-15.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-20.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-25.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-30.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-35.avi.done", + 
"/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-40.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-45.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-50.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-55.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/16/09-10-2023-16-00.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/16/09-10-2023-16-05.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/16/09-10-2023-16-10.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/16/09-10-2023-16-15.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/16/09-10-2023-16-20.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/16/09-10-2023-16-25.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/16/09-10-2023-16-30.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/16/09-10-2023-16-35.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-00.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-05.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-10.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-15.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-20.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-25.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-30.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-35.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-40.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-45.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-50.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-55.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/07/10-10-2023-07-00.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/07/10-10-2023-07-05.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/07/10-10-2023-07-10.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/07/10-10-2023-07-15.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/07/10-10-2023-07-20.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/07/10-10-2023-07-25.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/07/10-10-2023-07-30.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/07/10-10-2023-07-35.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/07/10-10-2023-07-40.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/10/10-10-2023-10-40.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/10/10-10-2023-10-45.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/10/10-10-2023-10-50.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/10/10-10-2023-10-55.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-00.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-05.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-10.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-15.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-20.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-25.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-30.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-35.avi.done", + 
"/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-40.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-45.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-50.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-55.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/12/10-10-2023-12-00.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/12/10-10-2023-12-05.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/12/10-10-2023-12-10.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/12/10-10-2023-12-15.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/12/10-10-2023-12-20.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/12/10-10-2023-12-25.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/12/10-10-2023-12-30.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/12/10-10-2023-12-35.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/12/10-10-2023-12-40.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/12/10-10-2023-12-45.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/13/10-10-2023-13-10.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/13/10-10-2023-13-15.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/13/10-10-2023-13-20.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/13/10-10-2023-13-25.avi.done", + "/thalos/saintpatrick/videos/cam1/11-10-2023/12/11-10-2023-12-05.avi.done", + "/thalos/saintpatrick/videos/cam1/11-10-2023/12/11-10-2023-12-10.avi.done", + "/thalos/saintpatrick/videos/cam1/11-10-2023/12/11-10-2023-12-15.avi.done", + "/thalos/saintpatrick/videos/cam1/11-10-2023/12/11-10-2023-12-20.avi.done", + "/thalos/saintpatrick/videos/cam1/11-10-2023/12/11-10-2023-12-25.avi.done", + "/thalos/saintpatrick/videos/cam1/11-10-2023/12/11-10-2023-12-30.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/11/12-10-2023-11-30.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/11/12-10-2023-11-35.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/11/12-10-2023-11-40.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/11/12-10-2023-11-45.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/11/12-10-2023-11-50.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/11/12-10-2023-11-55.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-00.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-05.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-10.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-15.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-20.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-25.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-30.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-35.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-40.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-45.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-50.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-55.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-00.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-05.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-10.avi.done", + 
"/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-15.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-20.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-25.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-30.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-35.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-40.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-45.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-50.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-55.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/14/12-10-2023-14-00.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/14/12-10-2023-14-05.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/14/12-10-2023-14-10.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/14/12-10-2023-14-15.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/14/12-10-2023-14-20.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/14/12-10-2023-14-25.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/14/12-10-2023-14-35.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/14/12-10-2023-14-45.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/14/12-10-2023-14-50.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/14/12-10-2023-14-55.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-00.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-05.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-10.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-15.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-20.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-25.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-30.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-35.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-40.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-45.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-50.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-55.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-00.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-05.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-10.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-15.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-20.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-25.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-30.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-35.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-40.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-45.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-50.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-55.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-00.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-05.avi.done", + 
"/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-10.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-15.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-20.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-25.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-30.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-35.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-40.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-45.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-50.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-55.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/18/12-10-2023-18-00.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/06/13-10-2023-06-15.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/06/13-10-2023-06-20.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/06/13-10-2023-06-25.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/06/13-10-2023-06-30.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/06/13-10-2023-06-35.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/06/13-10-2023-06-40.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/06/13-10-2023-06-45.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/06/13-10-2023-06-50.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/06/13-10-2023-06-55.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/07/13-10-2023-07-00.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/07/13-10-2023-07-05.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/07/13-10-2023-07-10.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/07/13-10-2023-07-15.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/07/13-10-2023-07-20.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/07/13-10-2023-07-25.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/10/13-10-2023-10-50.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/10/13-10-2023-10-55.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-00.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-05.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-10.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-15.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-20.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-25.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-30.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-35.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-40.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-45.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-50.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-55.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-00.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-05.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-10.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-15.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-20.avi.done", + 
"/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-25.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-30.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-35.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-40.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-45.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-50.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-55.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-00.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-05.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-10.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-15.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-20.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-25.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-30.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-35.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-40.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-45.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-50.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-55.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-00.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-05.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-10.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-15.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-20.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-25.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-30.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-35.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-40.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-45.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-50.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-55.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-00.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-05.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-10.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-15.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-20.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-25.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-30.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-35.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-40.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-45.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-50.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-55.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-00.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-05.avi.done", + 
"/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-10.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-15.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-20.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-25.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-30.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-35.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-40.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-45.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-50.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-55.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/17/13-10-2023-17-00.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/17/13-10-2023-17-05.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/17/13-10-2023-17-10.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/17/13-10-2023-17-15.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/17/13-10-2023-17-20.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/17/13-10-2023-17-25.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/17/13-10-2023-17-30.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/17/13-10-2023-17-35.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/17/13-10-2023-17-40.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/17/13-10-2023-17-45.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/20/03-10-2023-20-25.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/20/03-10-2023-20-30.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/20/03-10-2023-20-35.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/20/03-10-2023-20-40.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/20/03-10-2023-20-45.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/20/03-10-2023-20-50.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/20/03-10-2023-20-55.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-00.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-05.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-10.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-15.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-20.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-25.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-30.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-35.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-40.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-45.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-50.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-55.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-00.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-05.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-10.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-15.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-20.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-25.avi.done", + 
"/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-30.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-35.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-40.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-45.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-50.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-55.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-00.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-05.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-10.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-15.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-20.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-25.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-30.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-35.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-40.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-45.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-50.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-55.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/00/04-10-2023-00-00.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/00/04-10-2023-00-05.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/00/04-10-2023-00-10.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/00/04-10-2023-00-15.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/00/04-10-2023-00-20.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/11/04-10-2023-11-35.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/11/04-10-2023-11-40.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/11/04-10-2023-11-45.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/11/04-10-2023-11-50.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/11/04-10-2023-11-55.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-05.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-10.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-15.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-20.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-25.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-30.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-35.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-40.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-45.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-50.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-55.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-00.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-05.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-10.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-15.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-20.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-25.avi.done", + 
"/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-30.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-35.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-40.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-45.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-50.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-55.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-00.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-05.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-10.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-15.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-20.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-25.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-30.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-35.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-40.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-45.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-50.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-55.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-00.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-05.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-10.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-15.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-20.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-25.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-30.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-35.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-40.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-45.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-50.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-55.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-00.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-05.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-10.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-15.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-20.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-25.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-30.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-35.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-40.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-45.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-50.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-55.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-00.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-05.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-10.avi.done", + 
"/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-15.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-20.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-25.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-30.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-35.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-40.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-45.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-50.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-55.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-00.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-05.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-10.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-15.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-20.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-25.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-30.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-35.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-40.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-45.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-50.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-55.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-00.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-05.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-10.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-15.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-20.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-25.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-30.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-35.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-40.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-45.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-50.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-55.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/20/04-10-2023-20-00.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/20/04-10-2023-20-05.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/11/05-10-2023-11-15.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/11/05-10-2023-11-20.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/11/05-10-2023-11-25.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/11/05-10-2023-11-30.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/11/05-10-2023-11-35.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/11/05-10-2023-11-40.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/11/05-10-2023-11-45.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/11/05-10-2023-11-50.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/11/05-10-2023-11-55.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-00.avi.done", + 
"/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-05.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-10.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-15.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-20.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-25.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-30.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-35.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-40.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-45.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-50.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-55.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-00.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-05.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-10.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-15.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-20.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-25.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-30.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-35.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-40.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-45.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-50.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-55.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-00.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-05.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-10.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-15.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-20.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-25.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-30.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-35.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-40.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-45.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-50.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-55.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-00.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-05.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-10.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-15.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-20.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-25.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-35.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-40.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-45.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-50.avi.done", + 
"/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-55.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-00.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-05.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-10.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-15.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-20.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-25.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-30.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-35.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-40.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-45.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-50.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-55.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-00.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-05.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-10.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-15.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-20.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-25.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-30.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-35.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-40.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-45.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-50.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-55.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-00.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-05.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-10.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-15.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-20.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-25.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-30.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-35.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-40.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-45.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-50.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-55.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-00.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-05.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-10.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-15.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-20.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-25.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-30.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-35.avi.done", + 
"/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-40.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-45.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-50.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-55.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/20/05-10-2023-20-00.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/20/05-10-2023-20-05.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/20/05-10-2023-20-10.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/20/05-10-2023-20-15.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/20/05-10-2023-20-20.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/20/05-10-2023-20-25.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/20/05-10-2023-20-30.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/23/05-10-2023-23-40.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/23/05-10-2023-23-45.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/23/05-10-2023-23-50.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/23/05-10-2023-23-55.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/00/06-10-2023-00-10.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/00/06-10-2023-00-15.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/00/06-10-2023-00-20.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/00/06-10-2023-00-25.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/00/06-10-2023-00-35.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/00/06-10-2023-00-40.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/00/06-10-2023-00-45.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/00/06-10-2023-00-50.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/11/06-10-2023-11-50.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/11/06-10-2023-11-55.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-00.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-05.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-10.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-15.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-20.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-25.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-30.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-35.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-40.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-45.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-50.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-55.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/13/06-10-2023-13-00.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/13/06-10-2023-13-10.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/13/06-10-2023-13-15.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/13/06-10-2023-13-20.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/13/06-10-2023-13-30.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/13/06-10-2023-13-40.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/14/06-10-2023-14-45.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/14/06-10-2023-14-50.avi.done", + 
"/thalos/saintpatrick/videos/cam2/06-10-2023/14/06-10-2023-14-55.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/15/06-10-2023-15-00.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/15/06-10-2023-15-40.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/15/06-10-2023-15-45.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/15/06-10-2023-15-50.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/15/06-10-2023-15-55.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/16/06-10-2023-16-00.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/16/06-10-2023-16-05.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/16/06-10-2023-16-15.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/16/06-10-2023-16-20.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/16/06-10-2023-16-25.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/16/06-10-2023-16-30.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/16/06-10-2023-16-35.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/16/06-10-2023-16-40.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/16/06-10-2023-16-45.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/16/06-10-2023-16-55.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/18/06-10-2023-18-15.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/18/06-10-2023-18-20.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/18/06-10-2023-18-25.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/20/06-10-2023-20-25.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/20/06-10-2023-20-30.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/11/07-10-2023-11-35.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/11/07-10-2023-11-40.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/11/07-10-2023-11-45.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/11/07-10-2023-11-50.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/11/07-10-2023-11-55.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-00.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-05.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-10.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-15.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-20.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-25.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-30.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-35.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-40.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-45.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-50.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-55.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-00.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-05.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-10.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-15.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-20.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-25.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-30.avi.done", + 
"/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-35.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-40.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-45.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-50.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-55.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-00.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-05.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-10.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-15.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-20.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-25.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-30.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-35.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-40.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-45.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-50.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-55.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-00.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-05.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-10.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-15.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-20.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-25.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-30.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-35.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-40.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-45.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-50.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-55.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-00.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-05.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-10.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-15.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-20.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-25.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-30.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-35.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-40.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-45.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-50.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-55.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/17/07-10-2023-17-00.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/17/07-10-2023-17-05.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/17/07-10-2023-17-10.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/17/07-10-2023-17-15.avi.done", + 
"/thalos/saintpatrick/videos/cam2/07-10-2023/17/07-10-2023-17-20.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/17/07-10-2023-17-25.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/05/08-10-2023-05-50.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/05/08-10-2023-05-55.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-00.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-05.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-10.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-15.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-20.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-25.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-30.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-35.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-40.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-45.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-50.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-55.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-00.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-05.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-10.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-15.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-20.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-25.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-30.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-35.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-40.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-45.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-50.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-55.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/08/08-10-2023-08-00.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/08/08-10-2023-08-05.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/08/08-10-2023-08-10.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/11/08-10-2023-11-00.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/11/08-10-2023-11-05.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/11/08-10-2023-11-10.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/11/08-10-2023-11-15.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/11/08-10-2023-11-20.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/11/08-10-2023-11-25.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/11/08-10-2023-11-30.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/11/08-10-2023-11-35.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/11/08-10-2023-11-40.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/11/08-10-2023-11-45.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-00.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-05.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-10.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-15.avi.done", + 
"/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-20.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-25.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-30.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-35.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-40.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-45.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-50.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-55.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-00.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-05.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-10.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-15.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-20.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-25.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-30.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-35.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-40.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-45.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-50.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-55.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-00.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-05.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-10.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-15.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-20.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-25.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-30.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-35.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-40.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-45.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-50.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-55.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-00.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-05.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-10.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-15.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-20.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-25.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-30.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-35.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-40.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-45.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-50.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-55.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-00.avi.done", + 
"/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-05.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-10.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-15.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-20.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-25.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-30.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-35.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-40.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-45.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-50.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-55.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-00.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-05.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-10.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-15.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-20.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-25.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-30.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-35.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-40.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-45.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-50.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-55.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/18/08-10-2023-18-00.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/18/08-10-2023-18-05.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/18/08-10-2023-18-10.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/11/09-10-2023-11-20.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/11/09-10-2023-11-25.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/11/09-10-2023-11-30.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/11/09-10-2023-11-35.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/11/09-10-2023-11-40.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/11/09-10-2023-11-45.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/11/09-10-2023-11-50.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/11/09-10-2023-11-55.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-00.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-05.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-10.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-15.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-20.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-25.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-30.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-35.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-40.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-45.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-50.avi.done", + 
"/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-55.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-00.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-05.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-10.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-15.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-20.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-25.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-30.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-35.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-40.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-45.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-50.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-55.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-00.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-05.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-10.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-15.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-20.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-25.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-30.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-35.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-40.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-45.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-50.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-55.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-00.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-05.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-10.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-15.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-20.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-25.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-30.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-35.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-40.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-45.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-50.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-55.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/16/09-10-2023-16-00.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/16/09-10-2023-16-05.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/16/09-10-2023-16-10.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/16/09-10-2023-16-15.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/16/09-10-2023-16-20.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/16/09-10-2023-16-25.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/16/09-10-2023-16-30.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/16/09-10-2023-16-35.avi.done", + 
"/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-00.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-05.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-10.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-15.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-20.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-25.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-30.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-35.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-40.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-45.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-50.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-55.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/07/10-10-2023-07-00.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/07/10-10-2023-07-05.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/07/10-10-2023-07-10.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/07/10-10-2023-07-15.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/07/10-10-2023-07-20.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/07/10-10-2023-07-25.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/07/10-10-2023-07-30.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/07/10-10-2023-07-35.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/07/10-10-2023-07-40.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/10/10-10-2023-10-40.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/10/10-10-2023-10-45.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/10/10-10-2023-10-50.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/10/10-10-2023-10-55.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-00.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-05.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-10.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-15.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-20.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-25.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-30.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-35.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-40.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-45.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-50.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-55.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/12/10-10-2023-12-00.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/12/10-10-2023-12-05.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/12/10-10-2023-12-10.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/12/10-10-2023-12-15.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/12/10-10-2023-12-20.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/12/10-10-2023-12-25.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/12/10-10-2023-12-30.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/12/10-10-2023-12-35.avi.done", + 
"/thalos/saintpatrick/videos/cam2/10-10-2023/12/10-10-2023-12-40.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/12/10-10-2023-12-45.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/13/10-10-2023-13-10.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/13/10-10-2023-13-15.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/13/10-10-2023-13-20.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/13/10-10-2023-13-25.avi.done", + "/thalos/saintpatrick/videos/cam2/11-10-2023/12/11-10-2023-12-05.avi.done", + "/thalos/saintpatrick/videos/cam2/11-10-2023/12/11-10-2023-12-10.avi.done", + "/thalos/saintpatrick/videos/cam2/11-10-2023/12/11-10-2023-12-15.avi.done", + "/thalos/saintpatrick/videos/cam2/11-10-2023/12/11-10-2023-12-20.avi.done", + "/thalos/saintpatrick/videos/cam2/11-10-2023/12/11-10-2023-12-25.avi.done", + "/thalos/saintpatrick/videos/cam2/11-10-2023/12/11-10-2023-12-30.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/11/12-10-2023-11-30.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/11/12-10-2023-11-35.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/11/12-10-2023-11-40.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/11/12-10-2023-11-45.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/11/12-10-2023-11-50.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/11/12-10-2023-11-55.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-00.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-05.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-10.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-15.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-20.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-25.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-30.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-35.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-40.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-45.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-50.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-55.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-00.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-05.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-10.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-15.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-20.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-25.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-30.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-35.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-40.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-45.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-50.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-55.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/14/12-10-2023-14-00.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/14/12-10-2023-14-05.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/14/12-10-2023-14-10.avi.done", + 
"/thalos/saintpatrick/videos/cam2/12-10-2023/14/12-10-2023-14-15.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/14/12-10-2023-14-20.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/14/12-10-2023-14-25.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/14/12-10-2023-14-35.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/14/12-10-2023-14-45.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/14/12-10-2023-14-50.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/14/12-10-2023-14-55.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-00.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-05.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-10.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-15.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-20.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-25.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-30.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-35.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-40.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-45.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-50.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-55.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-00.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-05.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-10.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-15.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-20.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-25.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-30.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-35.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-40.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-45.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-50.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-55.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-00.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-05.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-10.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-15.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-20.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-25.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-30.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-35.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-40.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-45.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-50.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-55.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/18/12-10-2023-18-00.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/06/13-10-2023-06-15.avi.done", + 
"/thalos/saintpatrick/videos/cam2/13-10-2023/06/13-10-2023-06-20.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/06/13-10-2023-06-25.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/06/13-10-2023-06-30.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/06/13-10-2023-06-35.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/06/13-10-2023-06-40.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/06/13-10-2023-06-45.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/06/13-10-2023-06-50.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/06/13-10-2023-06-55.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/07/13-10-2023-07-00.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/07/13-10-2023-07-05.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/07/13-10-2023-07-10.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/07/13-10-2023-07-15.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/07/13-10-2023-07-20.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/07/13-10-2023-07-25.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/10/13-10-2023-10-50.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/10/13-10-2023-10-55.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-00.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-05.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-10.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-15.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-20.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-25.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-30.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-35.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-40.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-45.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-50.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-55.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-00.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-05.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-10.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-15.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-20.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-25.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-30.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-35.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-40.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-45.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-50.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-55.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-00.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-05.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-10.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-15.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-20.avi.done", + 
"/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-25.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-30.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-35.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-40.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-45.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-50.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-55.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-00.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-05.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-10.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-15.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-20.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-25.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-30.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-35.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-40.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-45.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-50.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-55.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-00.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-05.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-10.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-15.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-20.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-25.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-30.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-35.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-40.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-45.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-50.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-55.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-00.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-05.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-10.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-15.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-20.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-25.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-30.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-35.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-40.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-45.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-50.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-55.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/17/13-10-2023-17-00.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/17/13-10-2023-17-05.avi.done", + 
"/thalos/saintpatrick/videos/cam2/13-10-2023/17/13-10-2023-17-10.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/17/13-10-2023-17-15.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/17/13-10-2023-17-20.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/17/13-10-2023-17-25.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/17/13-10-2023-17-30.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/17/13-10-2023-17-35.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/17/13-10-2023-17-40.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/17/13-10-2023-17-45.avi.done", ] -from datetime import datetime -a.sort(key=lambda x: datetime.strptime(x.split('/')[-1].split('.')[0], "%d-%m-%Y-%H-%M").strftime("%Y%m%dT%H%M%SZ")) -for i in a: - (_, _, b, _, c, _, _, f) = i.split('/') - t = "{}_{}_{}".format(b, c, f) +video_paths.sort( + key=lambda x: datetime.strptime(x.split("/")[-1].split(".")[0], "%d-%m-%Y-%H-%M").strftime( + "%Y%m%dT%H%M%SZ" + ) +) + +for i in video_paths: + (_, _, b, _, c, _, _, f) = i.split("/") + t = f"{b}_{c}_{f}" e = t + ".enc" - print("cp {} /tmp/{} ; gpg -e --batch -z 0 --trust-model always -r edgedevice --output /tmp/{} /tmp/{} ; flock /tmp/lock cp /tmp/{} /usbdrive/{} ; rm /tmp/{} /tmp/{}".format(i, t, e, t, e, e, t, e)) + print( + f"cp {i} /tmp/{t} ; gpg -e --batch -z 0 --trust-model always -r edgedevice --output /tmp/{e} /tmp/{t} ; flock /tmp/lock cp /tmp/{e} /usbdrive/{e} ; rm /tmp/{t} /tmp/{e}" + ) diff --git a/tests/onetimetests/test4.py b/tests/onetimetests/test4.py index 5966fda..96d77ac 100644 --- a/tests/onetimetests/test4.py +++ b/tests/onetimetests/test4.py @@ -1,1813 +1,1814 @@ -a=['18-10-2023-00-00.ndx', -'18-10-2023-00-00.pcm', -'18-10-2023-00-01.ndx', -'18-10-2023-00-01.pcm', -'18-10-2023-00-02.ndx', -'18-10-2023-00-02.pcm', -'18-10-2023-00-03.ndx', -'18-10-2023-00-03.pcm', -'18-10-2023-00-05.ndx', -'18-10-2023-00-05.pcm', -'18-10-2023-00-06.ndx', -'18-10-2023-00-06.pcm', -'18-10-2023-00-07.ndx', -'18-10-2023-00-07.pcm', -'18-10-2023-00-08.ndx', -'18-10-2023-00-08.pcm', -'18-10-2023-00-09.ndx', -'18-10-2023-00-09.pcm', -'18-10-2023-00-10.ndx', -'18-10-2023-00-10.pcm', -'18-10-2023-00-11.ndx', -'18-10-2023-00-11.pcm', -'18-10-2023-00-12.ndx', -'18-10-2023-00-12.pcm', -'18-10-2023-00-13.ndx', -'18-10-2023-00-13.pcm', -'18-10-2023-00-14.ndx', -'18-10-2023-00-14.pcm', -'18-10-2023-00-15.ndx', -'18-10-2023-00-15.pcm', -'18-10-2023-00-16.ndx', -'18-10-2023-00-16.pcm', -'18-10-2023-00-17.ndx', -'18-10-2023-00-17.pcm', -'18-10-2023-00-18.ndx', -'18-10-2023-00-18.pcm', -'18-10-2023-00-19.ndx', -'18-10-2023-00-19.pcm', -'18-10-2023-00-20.ndx', -'18-10-2023-00-20.pcm', -'18-10-2023-00-21.ndx', -'18-10-2023-00-21.pcm', -'18-10-2023-00-22.ndx', -'18-10-2023-00-22.pcm', -'18-10-2023-00-23.ndx', -'18-10-2023-00-23.pcm', -'18-10-2023-00-24.ndx', -'18-10-2023-00-24.pcm', -'18-10-2023-00-25.ndx', -'18-10-2023-00-25.pcm', -'18-10-2023-00-26.ndx', -'18-10-2023-00-26.pcm', -'18-10-2023-00-27.ndx', -'18-10-2023-00-27.pcm', -'18-10-2023-00-28.ndx', -'18-10-2023-00-28.pcm', -'18-10-2023-00-29.ndx', -'18-10-2023-00-29.pcm', -'18-10-2023-00-30.ndx', -'18-10-2023-00-30.pcm', -'18-10-2023-00-31.ndx', -'18-10-2023-00-31.pcm', -'18-10-2023-00-32.ndx', -'18-10-2023-00-32.pcm', -'18-10-2023-00-33.ndx', -'18-10-2023-00-33.pcm', -'18-10-2023-00-34.ndx', -'18-10-2023-00-34.pcm', -'18-10-2023-00-35.ndx', -'18-10-2023-00-35.pcm', -'18-10-2023-00-36.ndx', -'18-10-2023-00-36.pcm', -'18-10-2023-00-37.ndx', -'18-10-2023-00-37.pcm', 
-'18-10-2023-00-38.ndx', -'18-10-2023-00-38.pcm', -'18-10-2023-00-39.ndx', -'18-10-2023-00-39.pcm', -'18-10-2023-00-40.ndx', -'18-10-2023-00-40.pcm', -'18-10-2023-00-41.ndx', -'18-10-2023-00-41.pcm', -'18-10-2023-00-42.ndx', -'18-10-2023-00-42.pcm', -'18-10-2023-00-43.ndx', -'18-10-2023-00-43.pcm', -'18-10-2023-00-45.ndx', -'18-10-2023-00-45.pcm', -'18-10-2023-00-46.ndx', -'18-10-2023-00-46.pcm', -'18-10-2023-00-47.ndx', -'18-10-2023-00-47.pcm', -'18-10-2023-00-48.ndx', -'18-10-2023-00-48.pcm', -'18-10-2023-00-49.ndx', -'18-10-2023-00-49.pcm', -'18-10-2023-00-50.ndx', -'18-10-2023-00-50.pcm', -'18-10-2023-00-51.ndx', -'18-10-2023-00-51.pcm', -'18-10-2023-00-52.ndx', -'18-10-2023-00-52.pcm', -'18-10-2023-00-53.ndx', -'18-10-2023-00-53.pcm', -'18-10-2023-00-54.ndx', -'18-10-2023-00-54.pcm', -'18-10-2023-00-55.ndx', -'18-10-2023-00-55.pcm', -'18-10-2023-00-56.ndx', -'18-10-2023-00-56.pcm', -'18-10-2023-00-57.ndx', -'18-10-2023-00-57.pcm', -'18-10-2023-00-58.ndx', -'18-10-2023-00-58.pcm', -'18-10-2023-00-59.ndx', -'18-10-2023-00-59.pcm', -'18-10-2023-01-00.ndx', -'18-10-2023-01-00.pcm', -'18-10-2023-01-01.ndx', -'18-10-2023-01-01.pcm', -'18-10-2023-01-02.ndx', -'18-10-2023-01-02.pcm', -'18-10-2023-01-03.ndx', -'18-10-2023-01-03.pcm', -'18-10-2023-01-05.ndx', -'18-10-2023-01-05.pcm', -'18-10-2023-01-06.ndx', -'18-10-2023-01-06.pcm', -'18-10-2023-01-07.ndx', -'18-10-2023-01-07.pcm', -'18-10-2023-01-08.ndx', -'18-10-2023-01-08.pcm', -'18-10-2023-01-09.ndx', -'18-10-2023-01-09.pcm', -'18-10-2023-01-10.ndx', -'18-10-2023-01-10.pcm', -'18-10-2023-01-11.ndx', -'18-10-2023-01-11.pcm', -'18-10-2023-01-12.ndx', -'18-10-2023-01-12.pcm', -'18-10-2023-01-13.ndx', -'18-10-2023-01-13.pcm', -'18-10-2023-01-14.ndx', -'18-10-2023-01-14.pcm', -'18-10-2023-01-15.ndx', -'18-10-2023-01-15.pcm', -'18-10-2023-01-16.ndx', -'18-10-2023-01-16.pcm', -'18-10-2023-01-17.ndx', -'18-10-2023-01-17.pcm', -'18-10-2023-01-18.ndx', -'18-10-2023-01-18.pcm', -'18-10-2023-01-19.mjp', -'18-10-2023-01-19.ndx', -'18-10-2023-01-19.pcm', -'18-10-2023-11-04.ndx', -'18-10-2023-11-04.pcm', -'18-10-2023-11-05.avi.done', -'18-10-2023-11-05.gpg', -'18-10-2023-11-05.mp4.done', -'18-10-2023-11-10.avi.done', -'18-10-2023-11-10.gpg', -'18-10-2023-11-10.mp4.done', -'18-10-2023-11-15.avi.done', -'18-10-2023-11-15.gpg', -'18-10-2023-11-15.mp4.done', -'18-10-2023-11-20.avi.done', -'18-10-2023-11-20.gpg', -'18-10-2023-11-20.mp4.done', -'18-10-2023-11-25.avi.done', -'18-10-2023-11-25.gpg', -'18-10-2023-11-25.mp4.done', -'18-10-2023-11-30.avi.done', -'18-10-2023-11-30.gpg', -'18-10-2023-11-30.mp4.done', -'18-10-2023-11-35.avi.done', -'18-10-2023-11-35.gpg', -'18-10-2023-11-35.mp4.done', -'18-10-2023-11-40.mjp', -'18-10-2023-11-40.ndx', -'18-10-2023-11-40.pcm', -'18-10-2023-11-41.mjp', -'18-10-2023-11-41.ndx', -'18-10-2023-11-41.pcm', -'18-10-2023-11-42.mjp', -'18-10-2023-11-42.ndx', -'18-10-2023-11-42.pcm', -'18-10-2023-11-43.mjp', -'18-10-2023-11-43.ndx', -'18-10-2023-11-43.pcm', -'18-10-2023-11-45.avi.done', -'18-10-2023-11-45.gpg', -'18-10-2023-11-45.mp4.done', -'18-10-2023-11-46.ndx', -'18-10-2023-11-46.pcm', -'18-10-2023-11-47.ndx', -'18-10-2023-11-47.pcm', -'18-10-2023-11-50.avi.done', -'18-10-2023-11-50.gpg', -'18-10-2023-11-50.mp4.done', -'18-10-2023-11-55.avi.done', -'18-10-2023-11-55.gpg', -'18-10-2023-11-55.mp4.done', -'18-10-2023-12-00.avi.done', -'18-10-2023-12-00.gpg', -'18-10-2023-12-00.mp4.done', -'18-10-2023-12-05.avi.done', -'18-10-2023-12-05.gpg', -'18-10-2023-12-05.mp4.done', -'18-10-2023-12-10.avi.done', -'18-10-2023-12-10.gpg', 
-'18-10-2023-12-10.mp4.done', -'18-10-2023-12-15.avi.done', -'18-10-2023-12-15.gpg', -'18-10-2023-12-15.mp4.done', -'18-10-2023-12-20.avi.done', -'18-10-2023-12-20.gpg', -'18-10-2023-12-20.mp4.done', -'18-10-2023-12-25.avi.done', -'18-10-2023-12-25.gpg', -'18-10-2023-12-25.mp4.done', -'18-10-2023-12-30.avi.done', -'18-10-2023-12-30.gpg', -'18-10-2023-12-30.mp4.done', -'18-10-2023-12-35.avi.done', -'18-10-2023-12-35.gpg', -'18-10-2023-12-35.mp4.done', -'18-10-2023-12-40.avi.done', -'18-10-2023-12-40.gpg', -'18-10-2023-12-40.mp4.done', -'18-10-2023-12-45.avi.done', -'18-10-2023-12-45.gpg', -'18-10-2023-12-45.mp4.done', -'18-10-2023-12-50.avi.done', -'18-10-2023-12-50.gpg', -'18-10-2023-12-50.mp4.done', -'18-10-2023-12-55.avi.done', -'18-10-2023-12-55.gpg', -'18-10-2023-12-55.mp4.done', -'18-10-2023-13-00.avi.done', -'18-10-2023-13-00.gpg', -'18-10-2023-13-00.mp4.done', -'18-10-2023-13-05.avi.done', -'18-10-2023-13-05.gpg', -'18-10-2023-13-05.mp4.done', -'18-10-2023-13-10.avi.done', -'18-10-2023-13-10.gpg', -'18-10-2023-13-10.mp4.done', -'18-10-2023-13-15.avi.done', -'18-10-2023-13-15.gpg', -'18-10-2023-13-15.mp4.done', -'18-10-2023-13-20.avi.done', -'18-10-2023-13-20.gpg', -'18-10-2023-13-20.mp4.done', -'18-10-2023-13-25.avi.done', -'18-10-2023-13-25.gpg', -'18-10-2023-13-25.mp4.done', -'18-10-2023-13-30.avi.done', -'18-10-2023-13-30.gpg', -'18-10-2023-13-30.mp4.done', -'18-10-2023-13-35.avi.done', -'18-10-2023-13-35.gpg', -'18-10-2023-13-35.mp4.done', -'18-10-2023-13-40.avi.done', -'18-10-2023-13-40.gpg', -'18-10-2023-13-40.mp4.done', -'18-10-2023-13-45.avi.done', -'18-10-2023-13-45.gpg', -'18-10-2023-13-45.mp4.done', -'18-10-2023-13-50.avi.done', -'18-10-2023-13-50.gpg', -'18-10-2023-13-50.mp4.done', -'18-10-2023-13-55.avi.done', -'18-10-2023-13-55.gpg', -'18-10-2023-13-55.mp4.done', -'18-10-2023-14-00.avi.done', -'18-10-2023-14-00.gpg', -'18-10-2023-14-00.mp4.done', -'18-10-2023-14-05.avi.done', -'18-10-2023-14-05.gpg', -'18-10-2023-14-05.mp4.done', -'18-10-2023-14-10.avi.done', -'18-10-2023-14-10.gpg', -'18-10-2023-14-10.mp4.done', -'18-10-2023-14-15.avi.done', -'18-10-2023-14-15.gpg', -'18-10-2023-14-15.mp4.done', -'18-10-2023-14-20.avi.done', -'18-10-2023-14-20.gpg', -'18-10-2023-14-20.mp4.done', -'18-10-2023-14-25.avi.done', -'18-10-2023-14-25.gpg', -'18-10-2023-14-25.mp4.done', -'18-10-2023-14-30.avi.done', -'18-10-2023-14-30.gpg', -'18-10-2023-14-30.mp4.done', -'18-10-2023-14-35.avi.done', -'18-10-2023-14-35.gpg', -'18-10-2023-14-35.mp4.done', -'18-10-2023-14-40.avi.done', -'18-10-2023-14-40.gpg', -'18-10-2023-14-40.mp4.done', -'18-10-2023-14-41.ndx', -'18-10-2023-14-41.pcm', -'18-10-2023-14-42.ndx', -'18-10-2023-14-42.pcm', -'18-10-2023-14-43.ndx', -'18-10-2023-14-43.pcm', -'18-10-2023-14-44.ndx', -'18-10-2023-14-44.pcm', -'18-10-2023-14-45.ndx', -'18-10-2023-14-45.pcm', -'18-10-2023-14-46.ndx', -'18-10-2023-14-46.pcm', -'18-10-2023-14-47.ndx', -'18-10-2023-14-47.pcm', -'18-10-2023-14-48.ndx', -'18-10-2023-14-48.pcm', -'18-10-2023-14-49.ndx', -'18-10-2023-14-49.pcm', -'18-10-2023-14-50.ndx', -'18-10-2023-14-50.pcm', -'18-10-2023-14-51.ndx', -'18-10-2023-14-51.pcm', -'18-10-2023-14-52.ndx', -'18-10-2023-14-52.pcm', -'18-10-2023-14-53.ndx', -'18-10-2023-14-53.pcm', -'18-10-2023-14-54.ndx', -'18-10-2023-14-54.pcm', -'18-10-2023-14-55.ndx', -'18-10-2023-14-55.pcm', -'18-10-2023-14-56.ndx', -'18-10-2023-14-56.pcm', -'18-10-2023-14-57.ndx', -'18-10-2023-14-57.pcm', -'18-10-2023-14-58.ndx', -'18-10-2023-14-58.pcm', -'18-10-2023-14-59.ndx', -'18-10-2023-14-59.pcm', -'18-10-2023-15-00.ndx', 
-'18-10-2023-15-00.pcm', -'18-10-2023-15-01.ndx', -'18-10-2023-15-01.pcm', -'18-10-2023-15-02.ndx', -'18-10-2023-15-02.pcm', -'18-10-2023-15-03.ndx', -'18-10-2023-15-03.pcm', -'18-10-2023-15-04.ndx', -'18-10-2023-15-04.pcm', -'18-10-2023-15-05.ndx', -'18-10-2023-15-05.pcm', -'18-10-2023-15-06.ndx', -'18-10-2023-15-06.pcm', -'18-10-2023-15-07.ndx', -'18-10-2023-15-07.pcm', -'18-10-2023-15-08.ndx', -'18-10-2023-15-08.pcm', -'18-10-2023-15-09.ndx', -'18-10-2023-15-09.pcm', -'18-10-2023-15-10.ndx', -'18-10-2023-15-10.pcm', -'18-10-2023-15-11.ndx', -'18-10-2023-15-11.pcm', -'18-10-2023-15-12.ndx', -'18-10-2023-15-12.pcm', -'18-10-2023-15-13.ndx', -'18-10-2023-15-13.pcm', -'18-10-2023-15-14.ndx', -'18-10-2023-15-14.pcm', -'18-10-2023-15-15.ndx', -'18-10-2023-15-15.pcm', -'18-10-2023-15-16.ndx', -'18-10-2023-15-16.pcm', -'18-10-2023-15-17.ndx', -'18-10-2023-15-17.pcm', -'18-10-2023-15-18.ndx', -'18-10-2023-15-18.pcm', -'18-10-2023-15-19.ndx', -'18-10-2023-15-19.pcm', -'18-10-2023-15-20.ndx', -'18-10-2023-15-20.pcm', -'18-10-2023-15-21.ndx', -'18-10-2023-15-21.pcm', -'18-10-2023-15-22.ndx', -'18-10-2023-15-22.pcm', -'18-10-2023-15-23.ndx', -'18-10-2023-15-23.pcm', -'18-10-2023-15-24.ndx', -'18-10-2023-15-24.pcm', -'18-10-2023-15-25.ndx', -'18-10-2023-15-25.pcm', -'18-10-2023-15-26.ndx', -'18-10-2023-15-26.pcm', -'18-10-2023-15-27.ndx', -'18-10-2023-15-27.pcm', -'18-10-2023-15-28.ndx', -'18-10-2023-15-28.pcm', -'18-10-2023-15-30.ndx', -'18-10-2023-15-30.pcm', -'18-10-2023-15-31.ndx', -'18-10-2023-15-31.pcm', -'18-10-2023-15-32.ndx', -'18-10-2023-15-32.pcm', -'18-10-2023-15-33.ndx', -'18-10-2023-15-33.pcm', -'18-10-2023-15-35.ndx', -'18-10-2023-15-35.pcm', -'18-10-2023-15-36.ndx', -'18-10-2023-15-36.pcm', -'18-10-2023-15-37.ndx', -'18-10-2023-15-37.pcm', -'18-10-2023-15-38.ndx', -'18-10-2023-15-38.pcm', -'18-10-2023-15-39.ndx', -'18-10-2023-15-39.pcm', -'18-10-2023-15-40.ndx', -'18-10-2023-15-40.pcm', -'18-10-2023-15-41.ndx', -'18-10-2023-15-41.pcm', -'18-10-2023-15-42.ndx', -'18-10-2023-15-42.pcm', -'18-10-2023-15-43.ndx', -'18-10-2023-15-43.pcm', -'18-10-2023-15-44.ndx', -'18-10-2023-15-44.pcm', -'18-10-2023-15-45.ndx', -'18-10-2023-15-45.pcm', -'18-10-2023-15-46.ndx', -'18-10-2023-15-46.pcm', -'18-10-2023-15-47.ndx', -'18-10-2023-15-47.pcm', -'18-10-2023-15-48.ndx', -'18-10-2023-15-48.pcm', -'18-10-2023-15-50.ndx', -'18-10-2023-15-50.pcm', -'18-10-2023-15-51.ndx', -'18-10-2023-15-51.pcm', -'18-10-2023-15-52.ndx', -'18-10-2023-15-52.pcm', -'18-10-2023-15-53.ndx', -'18-10-2023-15-53.pcm', -'18-10-2023-15-54.ndx', -'18-10-2023-15-54.pcm', -'18-10-2023-15-55.ndx', -'18-10-2023-15-55.pcm', -'18-10-2023-15-56.ndx', -'18-10-2023-15-56.pcm', -'18-10-2023-15-57.ndx', -'18-10-2023-15-57.pcm', -'18-10-2023-15-58.ndx', -'18-10-2023-15-58.pcm', -'18-10-2023-15-59.ndx', -'18-10-2023-15-59.pcm', -'18-10-2023-16-00.ndx', -'18-10-2023-16-00.pcm', -'18-10-2023-16-01.ndx', -'18-10-2023-16-01.pcm', -'18-10-2023-16-02.ndx', -'18-10-2023-16-02.pcm', -'18-10-2023-16-03.ndx', -'18-10-2023-16-03.pcm', -'18-10-2023-16-04.ndx', -'18-10-2023-16-04.pcm', -'18-10-2023-16-05.ndx', -'18-10-2023-16-05.pcm', -'18-10-2023-16-06.ndx', -'18-10-2023-16-06.pcm', -'18-10-2023-16-07.ndx', -'18-10-2023-16-07.pcm', -'18-10-2023-16-08.ndx', -'18-10-2023-16-08.pcm', -'18-10-2023-16-09.ndx', -'18-10-2023-16-09.pcm', -'18-10-2023-16-10.ndx', -'18-10-2023-16-10.pcm', -'18-10-2023-16-11.ndx', -'18-10-2023-16-11.pcm', -'18-10-2023-16-12.ndx', -'18-10-2023-16-12.pcm', -'18-10-2023-16-13.ndx', -'18-10-2023-16-13.pcm', -'18-10-2023-16-14.ndx', 
-'18-10-2023-16-14.pcm', -'18-10-2023-16-15.ndx', -'18-10-2023-16-15.pcm', -'18-10-2023-16-16.ndx', -'18-10-2023-16-16.pcm', -'18-10-2023-16-17.ndx', -'18-10-2023-16-17.pcm', -'18-10-2023-16-18.ndx', -'18-10-2023-16-18.pcm', -'18-10-2023-16-19.ndx', -'18-10-2023-16-19.pcm', -'18-10-2023-16-20.ndx', -'18-10-2023-16-20.pcm', -'18-10-2023-16-21.ndx', -'18-10-2023-16-21.pcm', -'18-10-2023-16-22.ndx', -'18-10-2023-16-22.pcm', -'18-10-2023-16-23.ndx', -'18-10-2023-16-23.pcm', -'18-10-2023-16-24.ndx', -'18-10-2023-16-24.pcm', -'18-10-2023-16-25.ndx', -'18-10-2023-16-25.pcm', -'18-10-2023-16-26.ndx', -'18-10-2023-16-26.pcm', -'18-10-2023-16-27.ndx', -'18-10-2023-16-27.pcm', -'18-10-2023-16-28.ndx', -'18-10-2023-16-28.pcm', -'18-10-2023-16-29.ndx', -'18-10-2023-16-29.pcm', -'18-10-2023-16-30.ndx', -'18-10-2023-16-30.pcm', -'18-10-2023-16-31.ndx', -'18-10-2023-16-31.pcm', -'18-10-2023-16-32.ndx', -'18-10-2023-16-32.pcm', -'18-10-2023-16-33.ndx', -'18-10-2023-16-33.pcm', -'18-10-2023-16-34.ndx', -'18-10-2023-16-34.pcm', -'18-10-2023-16-35.ndx', -'18-10-2023-16-35.pcm', -'18-10-2023-16-36.ndx', -'18-10-2023-16-36.pcm', -'18-10-2023-16-37.ndx', -'18-10-2023-16-37.pcm', -'18-10-2023-16-38.ndx', -'18-10-2023-16-38.pcm', -'18-10-2023-16-39.ndx', -'18-10-2023-16-39.pcm', -'18-10-2023-16-40.ndx', -'18-10-2023-16-40.pcm', -'18-10-2023-16-41.ndx', -'18-10-2023-16-41.pcm', -'18-10-2023-16-42.ndx', -'18-10-2023-16-42.pcm', -'18-10-2023-16-43.ndx', -'18-10-2023-16-43.pcm', -'18-10-2023-16-45.ndx', -'18-10-2023-16-45.pcm', -'18-10-2023-16-46.ndx', -'18-10-2023-16-46.pcm', -'18-10-2023-16-47.ndx', -'18-10-2023-16-47.pcm', -'18-10-2023-16-48.ndx', -'18-10-2023-16-48.pcm', -'18-10-2023-16-49.ndx', -'18-10-2023-16-49.pcm', -'18-10-2023-16-50.ndx', -'18-10-2023-16-50.pcm', -'18-10-2023-16-51.ndx', -'18-10-2023-16-51.pcm', -'18-10-2023-16-52.ndx', -'18-10-2023-16-52.pcm', -'18-10-2023-16-53.ndx', -'18-10-2023-16-53.pcm', -'18-10-2023-16-55.ndx', -'18-10-2023-16-55.pcm', -'18-10-2023-16-56.ndx', -'18-10-2023-16-56.pcm', -'18-10-2023-16-57.ndx', -'18-10-2023-16-57.pcm', -'18-10-2023-16-58.ndx', -'18-10-2023-16-58.pcm', -'18-10-2023-16-59.ndx', -'18-10-2023-16-59.pcm', -'18-10-2023-17-00.ndx', -'18-10-2023-17-00.pcm', -'18-10-2023-17-01.ndx', -'18-10-2023-17-01.pcm', -'18-10-2023-17-02.ndx', -'18-10-2023-17-02.pcm', -'18-10-2023-17-03.ndx', -'18-10-2023-17-03.pcm', -'18-10-2023-17-04.ndx', -'18-10-2023-17-04.pcm', -'18-10-2023-17-05.ndx', -'18-10-2023-17-05.pcm', -'18-10-2023-17-06.ndx', -'18-10-2023-17-06.pcm', -'18-10-2023-17-07.ndx', -'18-10-2023-17-07.pcm', -'18-10-2023-17-08.ndx', -'18-10-2023-17-08.pcm', -'18-10-2023-17-09.ndx', -'18-10-2023-17-09.pcm', -'18-10-2023-17-10.ndx', -'18-10-2023-17-10.pcm', -'18-10-2023-17-11.ndx', -'18-10-2023-17-11.pcm', -'18-10-2023-17-12.ndx', -'18-10-2023-17-12.pcm', -'18-10-2023-17-13.ndx', -'18-10-2023-17-13.pcm', -'18-10-2023-17-14.ndx', -'18-10-2023-17-14.pcm', -'18-10-2023-17-15.ndx', -'18-10-2023-17-15.pcm', -'18-10-2023-17-16.ndx', -'18-10-2023-17-16.pcm', -'18-10-2023-17-17.ndx', -'18-10-2023-17-17.pcm', -'18-10-2023-17-18.ndx', -'18-10-2023-17-18.pcm', -'18-10-2023-17-19.ndx', -'18-10-2023-17-19.pcm', -'18-10-2023-17-20.ndx', -'18-10-2023-17-20.pcm', -'18-10-2023-17-21.ndx', -'18-10-2023-17-21.pcm', -'18-10-2023-17-22.ndx', -'18-10-2023-17-22.pcm', -'18-10-2023-17-23.ndx', -'18-10-2023-17-23.pcm', -'18-10-2023-17-25.ndx', -'18-10-2023-17-25.pcm', -'18-10-2023-17-26.ndx', -'18-10-2023-17-26.pcm', -'18-10-2023-17-27.ndx', -'18-10-2023-17-27.pcm', -'18-10-2023-17-28.ndx', 
-'18-10-2023-17-28.pcm', -'18-10-2023-17-29.ndx', -'18-10-2023-17-29.pcm', -'18-10-2023-17-30.ndx', -'18-10-2023-17-30.pcm', -'18-10-2023-17-31.ndx', -'18-10-2023-17-31.pcm', -'18-10-2023-17-32.ndx', -'18-10-2023-17-32.pcm', -'18-10-2023-17-33.ndx', -'18-10-2023-17-33.pcm', -'18-10-2023-17-34.ndx', -'18-10-2023-17-34.pcm', -'18-10-2023-17-35.ndx', -'18-10-2023-17-35.pcm', -'18-10-2023-17-36.ndx', -'18-10-2023-17-36.pcm', -'18-10-2023-17-37.ndx', -'18-10-2023-17-37.pcm', -'18-10-2023-17-38.ndx', -'18-10-2023-17-38.pcm', -'18-10-2023-17-39.ndx', -'18-10-2023-17-39.pcm', -'18-10-2023-17-40.ndx', -'18-10-2023-17-40.pcm', -'18-10-2023-17-41.ndx', -'18-10-2023-17-41.pcm', -'18-10-2023-17-42.ndx', -'18-10-2023-17-42.pcm', -'18-10-2023-17-43.ndx', -'18-10-2023-17-43.pcm', -'18-10-2023-17-44.ndx', -'18-10-2023-17-44.pcm', -'18-10-2023-17-45.ndx', -'18-10-2023-17-45.pcm', -'18-10-2023-17-46.ndx', -'18-10-2023-17-46.pcm', -'18-10-2023-17-47.ndx', -'18-10-2023-17-47.pcm', -'18-10-2023-17-48.ndx', -'18-10-2023-17-48.pcm', -'18-10-2023-17-50.ndx', -'18-10-2023-17-50.pcm', -'18-10-2023-17-51.ndx', -'18-10-2023-17-51.pcm', -'18-10-2023-17-52.ndx', -'18-10-2023-17-52.pcm', -'18-10-2023-17-53.ndx', -'18-10-2023-17-53.pcm', -'18-10-2023-17-54.ndx', -'18-10-2023-17-54.pcm', -'18-10-2023-17-55.ndx', -'18-10-2023-17-55.pcm', -'18-10-2023-17-56.ndx', -'18-10-2023-17-56.pcm', -'18-10-2023-17-57.ndx', -'18-10-2023-17-57.pcm', -'18-10-2023-17-58.mjp', -'18-10-2023-17-58.ndx', -'18-10-2023-17-58.pcm', -'18-10-2023-20-01.ndx', -'18-10-2023-20-01.pcm', -'18-10-2023-20-02.ndx', -'18-10-2023-20-02.pcm', -'18-10-2023-20-03.ndx', -'18-10-2023-20-03.pcm', -'18-10-2023-20-04.ndx', -'18-10-2023-20-04.pcm', -'18-10-2023-20-05.ndx', -'18-10-2023-20-05.pcm', -'18-10-2023-20-06.ndx', -'18-10-2023-20-06.pcm', -'18-10-2023-20-07.ndx', -'18-10-2023-20-07.pcm', -'18-10-2023-20-08.ndx', -'18-10-2023-20-08.pcm', -'18-10-2023-20-09.ndx', -'18-10-2023-20-09.pcm', -'18-10-2023-20-10.ndx', -'18-10-2023-20-10.pcm', -'18-10-2023-20-11.ndx', -'18-10-2023-20-11.pcm', -'18-10-2023-20-12.ndx', -'18-10-2023-20-12.pcm', -'18-10-2023-20-13.ndx', -'18-10-2023-20-13.pcm', -'18-10-2023-20-14.ndx', -'18-10-2023-20-14.pcm', -'18-10-2023-20-15.ndx', -'18-10-2023-20-15.pcm', -'18-10-2023-20-16.ndx', -'18-10-2023-20-16.pcm', -'18-10-2023-20-17.ndx', -'18-10-2023-20-17.pcm', -'18-10-2023-20-18.ndx', -'18-10-2023-20-18.pcm', -'18-10-2023-20-19.ndx', -'18-10-2023-20-19.pcm', -'18-10-2023-20-20.ndx', -'18-10-2023-20-20.pcm', -'18-10-2023-20-21.ndx', -'18-10-2023-20-21.pcm', -'18-10-2023-20-22.ndx', -'18-10-2023-20-22.pcm', -'18-10-2023-20-23.ndx', -'18-10-2023-20-23.pcm', -'18-10-2023-20-24.ndx', -'18-10-2023-20-24.pcm', -'18-10-2023-20-25.ndx', -'18-10-2023-20-25.pcm', -'18-10-2023-20-26.ndx', -'18-10-2023-20-26.pcm', -'18-10-2023-20-27.ndx', -'18-10-2023-20-27.pcm', -'18-10-2023-20-28.ndx', -'18-10-2023-20-28.pcm', -'18-10-2023-20-29.ndx', -'18-10-2023-20-29.pcm', -'18-10-2023-20-30.ndx', -'18-10-2023-20-30.pcm', -'18-10-2023-20-31.ndx', -'18-10-2023-20-31.pcm', -'18-10-2023-20-32.ndx', -'18-10-2023-20-32.pcm', -'18-10-2023-20-33.ndx', -'18-10-2023-20-33.pcm', -'18-10-2023-20-34.ndx', -'18-10-2023-20-34.pcm', -'18-10-2023-20-35.ndx', -'18-10-2023-20-35.pcm', -'18-10-2023-20-36.ndx', -'18-10-2023-20-36.pcm', -'18-10-2023-20-37.ndx', -'18-10-2023-20-37.pcm', -'18-10-2023-20-38.ndx', -'18-10-2023-20-38.pcm', -'18-10-2023-20-39.ndx', -'18-10-2023-20-39.pcm', -'18-10-2023-20-40.ndx', -'18-10-2023-20-40.pcm', -'18-10-2023-20-41.ndx', -'18-10-2023-20-41.pcm', 
-'18-10-2023-20-42.ndx', -'18-10-2023-20-42.pcm', -'18-10-2023-20-43.ndx', -'18-10-2023-20-43.pcm', -'18-10-2023-20-44.ndx', -'18-10-2023-20-44.pcm', -'18-10-2023-20-45.ndx', -'18-10-2023-20-45.pcm', -'18-10-2023-20-46.ndx', -'18-10-2023-20-46.pcm', -'18-10-2023-20-47.ndx', -'18-10-2023-20-47.pcm', -'18-10-2023-20-48.ndx', -'18-10-2023-20-48.pcm', -'18-10-2023-20-49.ndx', -'18-10-2023-20-49.pcm', -'18-10-2023-20-50.ndx', -'18-10-2023-20-50.pcm', -'18-10-2023-20-51.ndx', -'18-10-2023-20-51.pcm', -'18-10-2023-20-52.ndx', -'18-10-2023-20-52.pcm', -'18-10-2023-20-53.ndx', -'18-10-2023-20-53.pcm', -'18-10-2023-20-54.ndx', -'18-10-2023-20-54.pcm', -'18-10-2023-20-55.ndx', -'18-10-2023-20-55.pcm', -'18-10-2023-20-56.ndx', -'18-10-2023-20-56.pcm', -'18-10-2023-20-57.ndx', -'18-10-2023-20-57.pcm', -'18-10-2023-20-58.ndx', -'18-10-2023-20-58.pcm', -'18-10-2023-20-59.ndx', -'18-10-2023-20-59.pcm', -'18-10-2023-21-00.ndx', -'18-10-2023-21-00.pcm', -'18-10-2023-21-01.ndx', -'18-10-2023-21-01.pcm', -'18-10-2023-21-02.ndx', -'18-10-2023-21-02.pcm', -'18-10-2023-21-03.ndx', -'18-10-2023-21-03.pcm', -'18-10-2023-21-04.ndx', -'18-10-2023-21-04.pcm', -'18-10-2023-21-05.ndx', -'18-10-2023-21-05.pcm', -'18-10-2023-21-06.ndx', -'18-10-2023-21-06.pcm', -'18-10-2023-21-07.ndx', -'18-10-2023-21-07.pcm', -'18-10-2023-21-08.ndx', -'18-10-2023-21-08.pcm', -'18-10-2023-21-10.ndx', -'18-10-2023-21-10.pcm', -'18-10-2023-21-11.ndx', -'18-10-2023-21-11.pcm', -'18-10-2023-21-12.ndx', -'18-10-2023-21-12.pcm', -'18-10-2023-21-13.ndx', -'18-10-2023-21-13.pcm', -'18-10-2023-21-14.ndx', -'18-10-2023-21-14.pcm', -'18-10-2023-21-15.ndx', -'18-10-2023-21-15.pcm', -'18-10-2023-21-16.ndx', -'18-10-2023-21-16.pcm', -'18-10-2023-21-17.ndx', -'18-10-2023-21-17.pcm', -'18-10-2023-21-18.ndx', -'18-10-2023-21-18.pcm', -'18-10-2023-21-19.ndx', -'18-10-2023-21-19.pcm', -'18-10-2023-21-20.ndx', -'18-10-2023-21-20.pcm', -'18-10-2023-21-21.ndx', -'18-10-2023-21-21.pcm', -'18-10-2023-21-22.ndx', -'18-10-2023-21-22.pcm', -'18-10-2023-21-23.ndx', -'18-10-2023-21-23.pcm', -'18-10-2023-21-24.ndx', -'18-10-2023-21-24.pcm', -'18-10-2023-21-25.ndx', -'18-10-2023-21-25.pcm', -'18-10-2023-21-26.ndx', -'18-10-2023-21-26.pcm', -'18-10-2023-21-27.ndx', -'18-10-2023-21-27.pcm', -'18-10-2023-21-28.ndx', -'18-10-2023-21-28.pcm', -'18-10-2023-21-30.ndx', -'18-10-2023-21-30.pcm', -'18-10-2023-21-31.ndx', -'18-10-2023-21-31.pcm', -'18-10-2023-21-32.ndx', -'18-10-2023-21-32.pcm', -'18-10-2023-21-33.ndx', -'18-10-2023-21-33.pcm', -'18-10-2023-21-34.ndx', -'18-10-2023-21-34.pcm', -'18-10-2023-21-35.ndx', -'18-10-2023-21-35.pcm', -'18-10-2023-21-36.ndx', -'18-10-2023-21-36.pcm', -'18-10-2023-21-37.ndx', -'18-10-2023-21-37.pcm', -'18-10-2023-21-38.ndx', -'18-10-2023-21-38.pcm', -'18-10-2023-21-39.ndx', -'18-10-2023-21-39.pcm', -'18-10-2023-21-40.ndx', -'18-10-2023-21-40.pcm', -'18-10-2023-21-41.ndx', -'18-10-2023-21-41.pcm', -'18-10-2023-21-42.ndx', -'18-10-2023-21-42.pcm', -'18-10-2023-21-43.ndx', -'18-10-2023-21-43.pcm', -'18-10-2023-21-44.mjp', -'18-10-2023-21-44.ndx', -'18-10-2023-21-44.pcm', -'18-10-2023-00-00.ndx', -'18-10-2023-00-00.pcm', -'18-10-2023-00-01.ndx', -'18-10-2023-00-01.pcm', -'18-10-2023-00-02.ndx', -'18-10-2023-00-02.pcm', -'18-10-2023-00-03.ndx', -'18-10-2023-00-03.pcm', -'18-10-2023-00-04.ndx', -'18-10-2023-00-04.pcm', -'18-10-2023-00-05.ndx', -'18-10-2023-00-05.pcm', -'18-10-2023-00-06.ndx', -'18-10-2023-00-06.pcm', -'18-10-2023-00-07.ndx', -'18-10-2023-00-07.pcm', -'18-10-2023-00-08.ndx', -'18-10-2023-00-08.pcm', -'18-10-2023-00-09.ndx', 
-'18-10-2023-00-09.pcm', -'18-10-2023-00-10.ndx', -'18-10-2023-00-10.pcm', -'18-10-2023-00-11.ndx', -'18-10-2023-00-11.pcm', -'18-10-2023-00-12.ndx', -'18-10-2023-00-12.pcm', -'18-10-2023-00-13.ndx', -'18-10-2023-00-13.pcm', -'18-10-2023-00-14.ndx', -'18-10-2023-00-14.pcm', -'18-10-2023-00-15.ndx', -'18-10-2023-00-15.pcm', -'18-10-2023-00-16.ndx', -'18-10-2023-00-16.pcm', -'18-10-2023-00-17.ndx', -'18-10-2023-00-17.pcm', -'18-10-2023-00-18.ndx', -'18-10-2023-00-18.pcm', -'18-10-2023-00-19.ndx', -'18-10-2023-00-19.pcm', -'18-10-2023-00-20.ndx', -'18-10-2023-00-20.pcm', -'18-10-2023-00-21.ndx', -'18-10-2023-00-21.pcm', -'18-10-2023-00-22.ndx', -'18-10-2023-00-22.pcm', -'18-10-2023-00-23.ndx', -'18-10-2023-00-23.pcm', -'18-10-2023-00-24.ndx', -'18-10-2023-00-24.pcm', -'18-10-2023-00-25.ndx', -'18-10-2023-00-25.pcm', -'18-10-2023-00-26.ndx', -'18-10-2023-00-26.pcm', -'18-10-2023-00-27.ndx', -'18-10-2023-00-27.pcm', -'18-10-2023-00-28.ndx', -'18-10-2023-00-28.pcm', -'18-10-2023-00-29.ndx', -'18-10-2023-00-29.pcm', -'18-10-2023-00-30.ndx', -'18-10-2023-00-30.pcm', -'18-10-2023-00-31.ndx', -'18-10-2023-00-31.pcm', -'18-10-2023-00-32.ndx', -'18-10-2023-00-32.pcm', -'18-10-2023-00-33.ndx', -'18-10-2023-00-33.pcm', -'18-10-2023-00-34.ndx', -'18-10-2023-00-34.pcm', -'18-10-2023-00-35.ndx', -'18-10-2023-00-35.pcm', -'18-10-2023-00-36.ndx', -'18-10-2023-00-36.pcm', -'18-10-2023-00-37.ndx', -'18-10-2023-00-37.pcm', -'18-10-2023-00-38.ndx', -'18-10-2023-00-38.pcm', -'18-10-2023-00-39.ndx', -'18-10-2023-00-39.pcm', -'18-10-2023-00-40.ndx', -'18-10-2023-00-40.pcm', -'18-10-2023-00-41.ndx', -'18-10-2023-00-41.pcm', -'18-10-2023-00-42.ndx', -'18-10-2023-00-42.pcm', -'18-10-2023-00-43.ndx', -'18-10-2023-00-43.pcm', -'18-10-2023-00-44.ndx', -'18-10-2023-00-44.pcm', -'18-10-2023-00-45.ndx', -'18-10-2023-00-45.pcm', -'18-10-2023-00-46.ndx', -'18-10-2023-00-46.pcm', -'18-10-2023-00-47.ndx', -'18-10-2023-00-47.pcm', -'18-10-2023-00-48.ndx', -'18-10-2023-00-48.pcm', -'18-10-2023-00-49.ndx', -'18-10-2023-00-49.pcm', -'18-10-2023-00-50.ndx', -'18-10-2023-00-50.pcm', -'18-10-2023-00-51.ndx', -'18-10-2023-00-51.pcm', -'18-10-2023-00-52.ndx', -'18-10-2023-00-52.pcm', -'18-10-2023-00-53.ndx', -'18-10-2023-00-53.pcm', -'18-10-2023-00-54.ndx', -'18-10-2023-00-54.pcm', -'18-10-2023-00-55.ndx', -'18-10-2023-00-55.pcm', -'18-10-2023-00-56.ndx', -'18-10-2023-00-56.pcm', -'18-10-2023-00-57.ndx', -'18-10-2023-00-57.pcm', -'18-10-2023-00-58.ndx', -'18-10-2023-00-58.pcm', -'18-10-2023-01-00.ndx', -'18-10-2023-01-00.pcm', -'18-10-2023-01-01.ndx', -'18-10-2023-01-01.pcm', -'18-10-2023-01-02.ndx', -'18-10-2023-01-02.pcm', -'18-10-2023-01-03.ndx', -'18-10-2023-01-03.pcm', -'18-10-2023-01-04.ndx', -'18-10-2023-01-04.pcm', -'18-10-2023-01-05.ndx', -'18-10-2023-01-05.pcm', -'18-10-2023-01-06.ndx', -'18-10-2023-01-06.pcm', -'18-10-2023-01-07.ndx', -'18-10-2023-01-07.pcm', -'18-10-2023-01-08.ndx', -'18-10-2023-01-08.pcm', -'18-10-2023-01-09.ndx', -'18-10-2023-01-09.pcm', -'18-10-2023-01-10.ndx', -'18-10-2023-01-10.pcm', -'18-10-2023-01-11.ndx', -'18-10-2023-01-11.pcm', -'18-10-2023-01-12.ndx', -'18-10-2023-01-12.pcm', -'18-10-2023-01-13.ndx', -'18-10-2023-01-13.pcm', -'18-10-2023-01-14.ndx', -'18-10-2023-01-14.pcm', -'18-10-2023-01-15.ndx', -'18-10-2023-01-15.pcm', -'18-10-2023-01-16.ndx', -'18-10-2023-01-16.pcm', -'18-10-2023-01-17.ndx', -'18-10-2023-01-17.pcm', -'18-10-2023-01-18.ndx', -'18-10-2023-01-18.pcm', -'18-10-2023-01-19.mjp', -'18-10-2023-01-19.ndx', -'18-10-2023-01-19.pcm', -'18-10-2023-11-04.ndx', -'18-10-2023-11-04.pcm', 
-'18-10-2023-11-05.avi.done', -'18-10-2023-11-05.gpg', -'18-10-2023-11-05.mp4.done', -'18-10-2023-11-10.avi.done', -'18-10-2023-11-10.gpg', -'18-10-2023-11-10.mp4.done', -'18-10-2023-11-15.avi.done', -'18-10-2023-11-15.gpg', -'18-10-2023-11-15.mp4.done', -'18-10-2023-11-20.avi.done', -'18-10-2023-11-20.gpg', -'18-10-2023-11-20.mp4.done', -'18-10-2023-11-25.avi.done', -'18-10-2023-11-25.gpg', -'18-10-2023-11-25.mp4.done', -'18-10-2023-11-30.avi.done', -'18-10-2023-11-30.gpg', -'18-10-2023-11-30.mp4.done', -'18-10-2023-11-35.avi.done', -'18-10-2023-11-35.gpg', -'18-10-2023-11-35.mp4.done', -'18-10-2023-11-40.mjp', -'18-10-2023-11-40.ndx', -'18-10-2023-11-40.pcm', -'18-10-2023-11-41.mjp', -'18-10-2023-11-41.ndx', -'18-10-2023-11-41.pcm', -'18-10-2023-11-42.mjp', -'18-10-2023-11-42.ndx', -'18-10-2023-11-42.pcm', -'18-10-2023-11-43.mjp', -'18-10-2023-11-43.ndx', -'18-10-2023-11-43.pcm', -'18-10-2023-11-45.avi.done', -'18-10-2023-11-45.gpg', -'18-10-2023-11-45.mp4.done', -'18-10-2023-11-46.ndx', -'18-10-2023-11-46.pcm', -'18-10-2023-11-47.ndx', -'18-10-2023-11-47.pcm', -'18-10-2023-11-50.avi.done', -'18-10-2023-11-50.gpg', -'18-10-2023-11-50.mp4.done', -'18-10-2023-11-55.avi.done', -'18-10-2023-11-55.gpg', -'18-10-2023-11-55.mp4.done', -'18-10-2023-12-00.avi.done', -'18-10-2023-12-00.gpg', -'18-10-2023-12-00.mp4.done', -'18-10-2023-12-05.avi.done', -'18-10-2023-12-05.gpg', -'18-10-2023-12-05.mp4.done', -'18-10-2023-12-10.avi.done', -'18-10-2023-12-10.gpg', -'18-10-2023-12-10.mp4.done', -'18-10-2023-12-15.avi.done', -'18-10-2023-12-15.gpg', -'18-10-2023-12-15.mp4.done', -'18-10-2023-12-20.avi.done', -'18-10-2023-12-20.gpg', -'18-10-2023-12-20.mp4.done', -'18-10-2023-12-25.avi.done', -'18-10-2023-12-25.gpg', -'18-10-2023-12-25.mp4.done', -'18-10-2023-12-30.avi.done', -'18-10-2023-12-30.gpg', -'18-10-2023-12-30.mp4.done', -'18-10-2023-12-35.avi.done', -'18-10-2023-12-35.gpg', -'18-10-2023-12-35.mp4.done', -'18-10-2023-12-40.avi.done', -'18-10-2023-12-40.gpg', -'18-10-2023-12-40.mp4.done', -'18-10-2023-12-45.avi.done', -'18-10-2023-12-45.gpg', -'18-10-2023-12-45.mp4.done', -'18-10-2023-12-50.avi.done', -'18-10-2023-12-50.gpg', -'18-10-2023-12-50.mp4.done', -'18-10-2023-12-55.avi.done', -'18-10-2023-12-55.gpg', -'18-10-2023-12-55.mp4.done', -'18-10-2023-13-00.avi.done', -'18-10-2023-13-00.gpg', -'18-10-2023-13-00.mp4.done', -'18-10-2023-13-05.avi.done', -'18-10-2023-13-05.gpg', -'18-10-2023-13-05.mp4.done', -'18-10-2023-13-10.avi.done', -'18-10-2023-13-10.gpg', -'18-10-2023-13-10.mp4.done', -'18-10-2023-13-15.avi.done', -'18-10-2023-13-15.gpg', -'18-10-2023-13-15.mp4.done', -'18-10-2023-13-20.avi.done', -'18-10-2023-13-20.gpg', -'18-10-2023-13-20.mp4.done', -'18-10-2023-13-25.avi.done', -'18-10-2023-13-25.gpg', -'18-10-2023-13-25.mp4.done', -'18-10-2023-13-30.avi.done', -'18-10-2023-13-30.gpg', -'18-10-2023-13-30.mp4.done', -'18-10-2023-13-35.avi.done', -'18-10-2023-13-35.gpg', -'18-10-2023-13-35.mp4.done', -'18-10-2023-13-40.avi.done', -'18-10-2023-13-40.gpg', -'18-10-2023-13-40.mp4.done', -'18-10-2023-13-45.avi.done', -'18-10-2023-13-45.gpg', -'18-10-2023-13-45.mp4.done', -'18-10-2023-13-50.avi.done', -'18-10-2023-13-50.gpg', -'18-10-2023-13-50.mp4.done', -'18-10-2023-13-55.avi.done', -'18-10-2023-13-55.gpg', -'18-10-2023-13-55.mp4.done', -'18-10-2023-14-00.avi.done', -'18-10-2023-14-00.gpg', -'18-10-2023-14-00.mp4.done', -'18-10-2023-14-05.avi.done', -'18-10-2023-14-05.gpg', -'18-10-2023-14-05.mp4.done', -'18-10-2023-14-10.avi.done', -'18-10-2023-14-10.gpg', -'18-10-2023-14-10.mp4.done', 
-'18-10-2023-14-15.avi.done', -'18-10-2023-14-15.gpg', -'18-10-2023-14-15.mp4.done', -'18-10-2023-14-20.avi.done', -'18-10-2023-14-20.gpg', -'18-10-2023-14-20.mp4.done', -'18-10-2023-14-25.avi.done', -'18-10-2023-14-25.gpg', -'18-10-2023-14-25.mp4.done', -'18-10-2023-14-30.avi.done', -'18-10-2023-14-30.gpg', -'18-10-2023-14-30.mp4.done', -'18-10-2023-14-35.avi.done', -'18-10-2023-14-35.gpg', -'18-10-2023-14-35.mp4.done', -'18-10-2023-14-40.avi.done', -'18-10-2023-14-40.gpg', -'18-10-2023-14-40.mp4.done', -'18-10-2023-14-41.ndx', -'18-10-2023-14-41.pcm', -'18-10-2023-14-42.ndx', -'18-10-2023-14-42.pcm', -'18-10-2023-14-43.ndx', -'18-10-2023-14-43.pcm', -'18-10-2023-14-44.ndx', -'18-10-2023-14-44.pcm', -'18-10-2023-14-45.ndx', -'18-10-2023-14-45.pcm', -'18-10-2023-14-46.ndx', -'18-10-2023-14-46.pcm', -'18-10-2023-14-47.ndx', -'18-10-2023-14-47.pcm', -'18-10-2023-14-48.ndx', -'18-10-2023-14-48.pcm', -'18-10-2023-14-49.ndx', -'18-10-2023-14-49.pcm', -'18-10-2023-14-50.ndx', -'18-10-2023-14-50.pcm', -'18-10-2023-14-51.ndx', -'18-10-2023-14-51.pcm', -'18-10-2023-14-52.ndx', -'18-10-2023-14-52.pcm', -'18-10-2023-14-53.ndx', -'18-10-2023-14-53.pcm', -'18-10-2023-14-54.ndx', -'18-10-2023-14-54.pcm', -'18-10-2023-14-55.ndx', -'18-10-2023-14-55.pcm', -'18-10-2023-14-56.ndx', -'18-10-2023-14-56.pcm', -'18-10-2023-14-57.ndx', -'18-10-2023-14-57.pcm', -'18-10-2023-14-58.ndx', -'18-10-2023-14-58.pcm', -'18-10-2023-14-59.ndx', -'18-10-2023-14-59.pcm', -'18-10-2023-15-00.ndx', -'18-10-2023-15-00.pcm', -'18-10-2023-15-01.ndx', -'18-10-2023-15-01.pcm', -'18-10-2023-15-02.ndx', -'18-10-2023-15-02.pcm', -'18-10-2023-15-03.ndx', -'18-10-2023-15-03.pcm', -'18-10-2023-15-04.ndx', -'18-10-2023-15-04.pcm', -'18-10-2023-15-05.ndx', -'18-10-2023-15-05.pcm', -'18-10-2023-15-06.ndx', -'18-10-2023-15-06.pcm', -'18-10-2023-15-07.ndx', -'18-10-2023-15-07.pcm', -'18-10-2023-15-08.ndx', -'18-10-2023-15-08.pcm', -'18-10-2023-15-09.ndx', -'18-10-2023-15-09.pcm', -'18-10-2023-15-10.ndx', -'18-10-2023-15-10.pcm', -'18-10-2023-15-11.ndx', -'18-10-2023-15-11.pcm', -'18-10-2023-15-12.ndx', -'18-10-2023-15-12.pcm', -'18-10-2023-15-13.ndx', -'18-10-2023-15-13.pcm', -'18-10-2023-15-14.ndx', -'18-10-2023-15-14.pcm', -'18-10-2023-15-15.ndx', -'18-10-2023-15-15.pcm', -'18-10-2023-15-16.ndx', -'18-10-2023-15-16.pcm', -'18-10-2023-15-17.ndx', -'18-10-2023-15-17.pcm', -'18-10-2023-15-18.ndx', -'18-10-2023-15-18.pcm', -'18-10-2023-15-19.ndx', -'18-10-2023-15-19.pcm', -'18-10-2023-15-20.ndx', -'18-10-2023-15-20.pcm', -'18-10-2023-15-21.ndx', -'18-10-2023-15-21.pcm', -'18-10-2023-15-22.ndx', -'18-10-2023-15-22.pcm', -'18-10-2023-15-23.ndx', -'18-10-2023-15-23.pcm', -'18-10-2023-15-24.ndx', -'18-10-2023-15-24.pcm', -'18-10-2023-15-25.ndx', -'18-10-2023-15-25.pcm', -'18-10-2023-15-26.ndx', -'18-10-2023-15-26.pcm', -'18-10-2023-15-27.ndx', -'18-10-2023-15-27.pcm', -'18-10-2023-15-28.ndx', -'18-10-2023-15-28.pcm', -'18-10-2023-15-29.ndx', -'18-10-2023-15-29.pcm', -'18-10-2023-15-30.ndx', -'18-10-2023-15-30.pcm', -'18-10-2023-15-31.ndx', -'18-10-2023-15-31.pcm', -'18-10-2023-15-32.ndx', -'18-10-2023-15-32.pcm', -'18-10-2023-15-33.ndx', -'18-10-2023-15-33.pcm', -'18-10-2023-15-34.ndx', -'18-10-2023-15-34.pcm', -'18-10-2023-15-35.ndx', -'18-10-2023-15-35.pcm', -'18-10-2023-15-36.ndx', -'18-10-2023-15-36.pcm', -'18-10-2023-15-37.ndx', -'18-10-2023-15-37.pcm', -'18-10-2023-15-38.ndx', -'18-10-2023-15-38.pcm', -'18-10-2023-15-39.ndx', -'18-10-2023-15-39.pcm', -'18-10-2023-15-40.ndx', -'18-10-2023-15-40.pcm', -'18-10-2023-15-41.ndx', 
-'18-10-2023-15-41.pcm', -'18-10-2023-15-42.ndx', -'18-10-2023-15-42.pcm', -'18-10-2023-15-43.ndx', -'18-10-2023-15-43.pcm', -'18-10-2023-15-44.ndx', -'18-10-2023-15-44.pcm', -'18-10-2023-15-45.ndx', -'18-10-2023-15-45.pcm', -'18-10-2023-15-46.ndx', -'18-10-2023-15-46.pcm', -'18-10-2023-15-47.ndx', -'18-10-2023-15-47.pcm', -'18-10-2023-15-48.ndx', -'18-10-2023-15-48.pcm', -'18-10-2023-15-49.ndx', -'18-10-2023-15-49.pcm', -'18-10-2023-15-50.ndx', -'18-10-2023-15-50.pcm', -'18-10-2023-15-51.ndx', -'18-10-2023-15-51.pcm', -'18-10-2023-15-52.ndx', -'18-10-2023-15-52.pcm', -'18-10-2023-15-53.ndx', -'18-10-2023-15-53.pcm', -'18-10-2023-15-54.ndx', -'18-10-2023-15-54.pcm', -'18-10-2023-15-55.ndx', -'18-10-2023-15-55.pcm', -'18-10-2023-15-56.ndx', -'18-10-2023-15-56.pcm', -'18-10-2023-15-57.ndx', -'18-10-2023-15-57.pcm', -'18-10-2023-15-58.ndx', -'18-10-2023-15-58.pcm', -'18-10-2023-15-59.ndx', -'18-10-2023-15-59.pcm', -'18-10-2023-16-00.ndx', -'18-10-2023-16-00.pcm', -'18-10-2023-16-01.ndx', -'18-10-2023-16-01.pcm', -'18-10-2023-16-02.ndx', -'18-10-2023-16-02.pcm', -'18-10-2023-16-03.ndx', -'18-10-2023-16-03.pcm', -'18-10-2023-16-04.ndx', -'18-10-2023-16-04.pcm', -'18-10-2023-16-05.ndx', -'18-10-2023-16-05.pcm', -'18-10-2023-16-06.ndx', -'18-10-2023-16-06.pcm', -'18-10-2023-16-07.ndx', -'18-10-2023-16-07.pcm', -'18-10-2023-16-08.ndx', -'18-10-2023-16-08.pcm', -'18-10-2023-16-09.ndx', -'18-10-2023-16-09.pcm', -'18-10-2023-16-10.ndx', -'18-10-2023-16-10.pcm', -'18-10-2023-16-11.ndx', -'18-10-2023-16-11.pcm', -'18-10-2023-16-12.ndx', -'18-10-2023-16-12.pcm', -'18-10-2023-16-13.ndx', -'18-10-2023-16-13.pcm', -'18-10-2023-16-14.ndx', -'18-10-2023-16-14.pcm', -'18-10-2023-16-15.ndx', -'18-10-2023-16-15.pcm', -'18-10-2023-16-16.ndx', -'18-10-2023-16-16.pcm', -'18-10-2023-16-17.ndx', -'18-10-2023-16-17.pcm', -'18-10-2023-16-18.ndx', -'18-10-2023-16-18.pcm', -'18-10-2023-16-19.ndx', -'18-10-2023-16-19.pcm', -'18-10-2023-16-20.ndx', -'18-10-2023-16-20.pcm', -'18-10-2023-16-21.ndx', -'18-10-2023-16-21.pcm', -'18-10-2023-16-22.ndx', -'18-10-2023-16-22.pcm', -'18-10-2023-16-23.ndx', -'18-10-2023-16-23.pcm', -'18-10-2023-16-24.ndx', -'18-10-2023-16-24.pcm', -'18-10-2023-16-25.ndx', -'18-10-2023-16-25.pcm', -'18-10-2023-16-26.ndx', -'18-10-2023-16-26.pcm', -'18-10-2023-16-27.ndx', -'18-10-2023-16-27.pcm', -'18-10-2023-16-28.ndx', -'18-10-2023-16-28.pcm', -'18-10-2023-16-29.ndx', -'18-10-2023-16-29.pcm', -'18-10-2023-16-30.ndx', -'18-10-2023-16-30.pcm', -'18-10-2023-16-31.ndx', -'18-10-2023-16-31.pcm', -'18-10-2023-16-32.ndx', -'18-10-2023-16-32.pcm', -'18-10-2023-16-33.ndx', -'18-10-2023-16-33.pcm', -'18-10-2023-16-34.ndx', -'18-10-2023-16-34.pcm', -'18-10-2023-16-35.ndx', -'18-10-2023-16-35.pcm', -'18-10-2023-16-36.ndx', -'18-10-2023-16-36.pcm', -'18-10-2023-16-37.ndx', -'18-10-2023-16-37.pcm', -'18-10-2023-16-38.ndx', -'18-10-2023-16-38.pcm', -'18-10-2023-16-40.ndx', -'18-10-2023-16-40.pcm', -'18-10-2023-16-41.ndx', -'18-10-2023-16-41.pcm', -'18-10-2023-16-42.ndx', -'18-10-2023-16-42.pcm', -'18-10-2023-16-43.ndx', -'18-10-2023-16-43.pcm', -'18-10-2023-16-44.ndx', -'18-10-2023-16-44.pcm', -'18-10-2023-16-45.ndx', -'18-10-2023-16-45.pcm', -'18-10-2023-16-46.ndx', -'18-10-2023-16-46.pcm', -'18-10-2023-16-47.ndx', -'18-10-2023-16-47.pcm', -'18-10-2023-16-48.ndx', -'18-10-2023-16-48.pcm', -'18-10-2023-16-49.ndx', -'18-10-2023-16-49.pcm', -'18-10-2023-16-50.ndx', -'18-10-2023-16-50.pcm', -'18-10-2023-16-51.ndx', -'18-10-2023-16-51.pcm', -'18-10-2023-16-52.ndx', -'18-10-2023-16-52.pcm', -'18-10-2023-16-53.ndx', 
-'18-10-2023-16-53.pcm', -'18-10-2023-16-54.ndx', -'18-10-2023-16-54.pcm', -'18-10-2023-16-55.ndx', -'18-10-2023-16-55.pcm', -'18-10-2023-16-56.ndx', -'18-10-2023-16-56.pcm', -'18-10-2023-16-57.ndx', -'18-10-2023-16-57.pcm', -'18-10-2023-16-58.ndx', -'18-10-2023-16-58.pcm', -'18-10-2023-16-59.ndx', -'18-10-2023-16-59.pcm', -'18-10-2023-17-00.ndx', -'18-10-2023-17-00.pcm', -'18-10-2023-17-01.ndx', -'18-10-2023-17-01.pcm', -'18-10-2023-17-02.ndx', -'18-10-2023-17-02.pcm', -'18-10-2023-17-03.ndx', -'18-10-2023-17-03.pcm', -'18-10-2023-17-04.ndx', -'18-10-2023-17-04.pcm', -'18-10-2023-17-05.ndx', -'18-10-2023-17-05.pcm', -'18-10-2023-17-06.ndx', -'18-10-2023-17-06.pcm', -'18-10-2023-17-07.ndx', -'18-10-2023-17-07.pcm', -'18-10-2023-17-08.ndx', -'18-10-2023-17-08.pcm', -'18-10-2023-17-09.ndx', -'18-10-2023-17-09.pcm', -'18-10-2023-17-10.ndx', -'18-10-2023-17-10.pcm', -'18-10-2023-17-11.ndx', -'18-10-2023-17-11.pcm', -'18-10-2023-17-12.ndx', -'18-10-2023-17-12.pcm', -'18-10-2023-17-13.ndx', -'18-10-2023-17-13.pcm', -'18-10-2023-17-14.ndx', -'18-10-2023-17-14.pcm', -'18-10-2023-17-15.ndx', -'18-10-2023-17-15.pcm', -'18-10-2023-17-16.ndx', -'18-10-2023-17-16.pcm', -'18-10-2023-17-17.ndx', -'18-10-2023-17-17.pcm', -'18-10-2023-17-18.ndx', -'18-10-2023-17-18.pcm', -'18-10-2023-17-19.ndx', -'18-10-2023-17-19.pcm', -'18-10-2023-17-20.ndx', -'18-10-2023-17-20.pcm', -'18-10-2023-17-21.ndx', -'18-10-2023-17-21.pcm', -'18-10-2023-17-22.ndx', -'18-10-2023-17-22.pcm', -'18-10-2023-17-23.ndx', -'18-10-2023-17-23.pcm', -'18-10-2023-17-24.ndx', -'18-10-2023-17-24.pcm', -'18-10-2023-17-25.ndx', -'18-10-2023-17-25.pcm', -'18-10-2023-17-26.ndx', -'18-10-2023-17-26.pcm', -'18-10-2023-17-27.ndx', -'18-10-2023-17-27.pcm', -'18-10-2023-17-28.ndx', -'18-10-2023-17-28.pcm', -'18-10-2023-17-29.ndx', -'18-10-2023-17-29.pcm', -'18-10-2023-17-30.ndx', -'18-10-2023-17-30.pcm', -'18-10-2023-17-31.ndx', -'18-10-2023-17-31.pcm', -'18-10-2023-17-32.ndx', -'18-10-2023-17-32.pcm', -'18-10-2023-17-33.ndx', -'18-10-2023-17-33.pcm', -'18-10-2023-17-34.ndx', -'18-10-2023-17-34.pcm', -'18-10-2023-17-35.ndx', -'18-10-2023-17-35.pcm', -'18-10-2023-17-36.ndx', -'18-10-2023-17-36.pcm', -'18-10-2023-17-37.ndx', -'18-10-2023-17-37.pcm', -'18-10-2023-17-38.ndx', -'18-10-2023-17-38.pcm', -'18-10-2023-17-39.ndx', -'18-10-2023-17-39.pcm', -'18-10-2023-17-40.ndx', -'18-10-2023-17-40.pcm', -'18-10-2023-17-41.ndx', -'18-10-2023-17-41.pcm', -'18-10-2023-17-42.ndx', -'18-10-2023-17-42.pcm', -'18-10-2023-17-43.ndx', -'18-10-2023-17-43.pcm', -'18-10-2023-17-44.ndx', -'18-10-2023-17-44.pcm', -'18-10-2023-17-45.ndx', -'18-10-2023-17-45.pcm', -'18-10-2023-17-46.ndx', -'18-10-2023-17-46.pcm', -'18-10-2023-17-47.ndx', -'18-10-2023-17-47.pcm', -'18-10-2023-17-48.ndx', -'18-10-2023-17-48.pcm', -'18-10-2023-17-49.ndx', -'18-10-2023-17-49.pcm', -'18-10-2023-17-50.ndx', -'18-10-2023-17-50.pcm', -'18-10-2023-17-51.ndx', -'18-10-2023-17-51.pcm', -'18-10-2023-17-52.ndx', -'18-10-2023-17-52.pcm', -'18-10-2023-17-53.ndx', -'18-10-2023-17-53.pcm', -'18-10-2023-17-54.ndx', -'18-10-2023-17-54.pcm', -'18-10-2023-17-55.ndx', -'18-10-2023-17-55.pcm', -'18-10-2023-17-56.ndx', -'18-10-2023-17-56.pcm', -'18-10-2023-17-57.ndx', -'18-10-2023-17-57.pcm', -'18-10-2023-17-58.mjp', -'18-10-2023-17-58.ndx', -'18-10-2023-17-58.pcm', -'18-10-2023-20-01.ndx', -'18-10-2023-20-01.pcm', -'18-10-2023-20-02.ndx', -'18-10-2023-20-02.pcm', -'18-10-2023-20-03.ndx', -'18-10-2023-20-03.pcm', -'18-10-2023-20-04.ndx', -'18-10-2023-20-04.pcm', -'18-10-2023-20-05.ndx', -'18-10-2023-20-05.pcm', 
-'18-10-2023-20-06.ndx', -'18-10-2023-20-06.pcm', -'18-10-2023-20-07.ndx', -'18-10-2023-20-07.pcm', -'18-10-2023-20-08.ndx', -'18-10-2023-20-08.pcm', -'18-10-2023-20-09.ndx', -'18-10-2023-20-09.pcm', -'18-10-2023-20-10.ndx', -'18-10-2023-20-10.pcm', -'18-10-2023-20-11.ndx', -'18-10-2023-20-11.pcm', -'18-10-2023-20-12.ndx', -'18-10-2023-20-12.pcm', -'18-10-2023-20-13.ndx', -'18-10-2023-20-13.pcm', -'18-10-2023-20-14.ndx', -'18-10-2023-20-14.pcm', -'18-10-2023-20-15.ndx', -'18-10-2023-20-15.pcm', -'18-10-2023-20-16.ndx', -'18-10-2023-20-16.pcm', -'18-10-2023-20-17.ndx', -'18-10-2023-20-17.pcm', -'18-10-2023-20-18.ndx', -'18-10-2023-20-18.pcm', -'18-10-2023-20-19.ndx', -'18-10-2023-20-19.pcm', -'18-10-2023-20-20.ndx', -'18-10-2023-20-20.pcm', -'18-10-2023-20-21.ndx', -'18-10-2023-20-21.pcm', -'18-10-2023-20-22.ndx', -'18-10-2023-20-22.pcm', -'18-10-2023-20-23.ndx', -'18-10-2023-20-23.pcm', -'18-10-2023-20-24.ndx', -'18-10-2023-20-24.pcm', -'18-10-2023-20-25.ndx', -'18-10-2023-20-25.pcm', -'18-10-2023-20-26.ndx', -'18-10-2023-20-26.pcm', -'18-10-2023-20-27.ndx', -'18-10-2023-20-27.pcm', -'18-10-2023-20-28.ndx', -'18-10-2023-20-28.pcm', -'18-10-2023-20-29.ndx', -'18-10-2023-20-29.pcm', -'18-10-2023-20-30.ndx', -'18-10-2023-20-30.pcm', -'18-10-2023-20-31.ndx', -'18-10-2023-20-31.pcm', -'18-10-2023-20-32.ndx', -'18-10-2023-20-32.pcm', -'18-10-2023-20-33.ndx', -'18-10-2023-20-33.pcm', -'18-10-2023-20-34.ndx', -'18-10-2023-20-34.pcm', -'18-10-2023-20-35.ndx', -'18-10-2023-20-35.pcm', -'18-10-2023-20-36.ndx', -'18-10-2023-20-36.pcm', -'18-10-2023-20-37.ndx', -'18-10-2023-20-37.pcm', -'18-10-2023-20-38.ndx', -'18-10-2023-20-38.pcm', -'18-10-2023-20-39.ndx', -'18-10-2023-20-39.pcm', -'18-10-2023-20-40.ndx', -'18-10-2023-20-40.pcm', -'18-10-2023-20-41.ndx', -'18-10-2023-20-41.pcm', -'18-10-2023-20-42.ndx', -'18-10-2023-20-42.pcm', -'18-10-2023-20-43.ndx', -'18-10-2023-20-43.pcm', -'18-10-2023-20-44.ndx', -'18-10-2023-20-44.pcm', -'18-10-2023-20-45.ndx', -'18-10-2023-20-45.pcm', -'18-10-2023-20-46.ndx', -'18-10-2023-20-46.pcm', -'18-10-2023-20-47.ndx', -'18-10-2023-20-47.pcm', -'18-10-2023-20-48.ndx', -'18-10-2023-20-48.pcm', -'18-10-2023-20-49.ndx', -'18-10-2023-20-49.pcm', -'18-10-2023-20-50.ndx', -'18-10-2023-20-50.pcm', -'18-10-2023-20-51.ndx', -'18-10-2023-20-51.pcm', -'18-10-2023-20-52.ndx', -'18-10-2023-20-52.pcm', -'18-10-2023-20-53.ndx', -'18-10-2023-20-53.pcm', -'18-10-2023-20-54.ndx', -'18-10-2023-20-54.pcm', -'18-10-2023-20-55.ndx', -'18-10-2023-20-55.pcm', -'18-10-2023-20-56.ndx', -'18-10-2023-20-56.pcm', -'18-10-2023-20-57.ndx', -'18-10-2023-20-57.pcm', -'18-10-2023-20-58.ndx', -'18-10-2023-20-58.pcm', -'18-10-2023-20-59.ndx', -'18-10-2023-20-59.pcm', -'18-10-2023-21-00.ndx', -'18-10-2023-21-00.pcm', -'18-10-2023-21-01.ndx', -'18-10-2023-21-01.pcm', -'18-10-2023-21-02.ndx', -'18-10-2023-21-02.pcm', -'18-10-2023-21-03.ndx', -'18-10-2023-21-03.pcm', -'18-10-2023-21-04.ndx', -'18-10-2023-21-04.pcm', -'18-10-2023-21-05.ndx', -'18-10-2023-21-05.pcm', -'18-10-2023-21-06.ndx', -'18-10-2023-21-06.pcm', -'18-10-2023-21-07.ndx', -'18-10-2023-21-07.pcm', -'18-10-2023-21-08.ndx', -'18-10-2023-21-08.pcm', -'18-10-2023-21-09.ndx', -'18-10-2023-21-09.pcm', -'18-10-2023-21-10.ndx', -'18-10-2023-21-10.pcm', -'18-10-2023-21-11.ndx', -'18-10-2023-21-11.pcm', -'18-10-2023-21-12.ndx', -'18-10-2023-21-12.pcm', -'18-10-2023-21-13.ndx', -'18-10-2023-21-13.pcm', -'18-10-2023-21-14.ndx', -'18-10-2023-21-14.pcm', -'18-10-2023-21-15.ndx', -'18-10-2023-21-15.pcm', -'18-10-2023-21-16.ndx', -'18-10-2023-21-16.pcm', 
-'18-10-2023-21-17.ndx', -'18-10-2023-21-17.pcm', -'18-10-2023-21-18.ndx', -'18-10-2023-21-18.pcm', -'18-10-2023-21-19.ndx', -'18-10-2023-21-19.pcm', -'18-10-2023-21-20.ndx', -'18-10-2023-21-20.pcm', -'18-10-2023-21-21.ndx', -'18-10-2023-21-21.pcm', -'18-10-2023-21-22.ndx', -'18-10-2023-21-22.pcm', -'18-10-2023-21-23.ndx', -'18-10-2023-21-23.pcm', -'18-10-2023-21-24.ndx', -'18-10-2023-21-24.pcm', -'18-10-2023-21-25.ndx', -'18-10-2023-21-25.pcm', -'18-10-2023-21-26.ndx', -'18-10-2023-21-26.pcm', -'18-10-2023-21-27.ndx', -'18-10-2023-21-27.pcm', -'18-10-2023-21-28.ndx', -'18-10-2023-21-28.pcm', -'18-10-2023-21-29.ndx', -'18-10-2023-21-29.pcm', -'18-10-2023-21-30.ndx', -'18-10-2023-21-30.pcm', -'18-10-2023-21-31.ndx', -'18-10-2023-21-31.pcm', -'18-10-2023-21-32.ndx', -'18-10-2023-21-32.pcm', -'18-10-2023-21-33.ndx', -'18-10-2023-21-33.pcm', -'18-10-2023-21-34.ndx', -'18-10-2023-21-34.pcm', -'18-10-2023-21-35.ndx', -'18-10-2023-21-35.pcm', -'18-10-2023-21-36.ndx', -'18-10-2023-21-36.pcm', -'18-10-2023-21-37.ndx', -'18-10-2023-21-37.pcm', -'18-10-2023-21-38.ndx', -'18-10-2023-21-38.pcm', -'18-10-2023-21-39.ndx', -'18-10-2023-21-39.pcm', -'18-10-2023-21-40.ndx', -'18-10-2023-21-40.pcm', -'18-10-2023-21-41.ndx', -'18-10-2023-21-41.pcm', -'18-10-2023-21-42.ndx', -'18-10-2023-21-42.pcm', -'18-10-2023-21-43.ndx', -'18-10-2023-21-43.pcm', -'18-10-2023-21-44.mjp', -'18-10-2023-21-44.ndx', -'18-10-2023-21-44.pcm', +from datetime import datetime + +file_paths = [ + "18-10-2023-00-00.ndx", + "18-10-2023-00-00.pcm", + "18-10-2023-00-01.ndx", + "18-10-2023-00-01.pcm", + "18-10-2023-00-02.ndx", + "18-10-2023-00-02.pcm", + "18-10-2023-00-03.ndx", + "18-10-2023-00-03.pcm", + "18-10-2023-00-05.ndx", + "18-10-2023-00-05.pcm", + "18-10-2023-00-06.ndx", + "18-10-2023-00-06.pcm", + "18-10-2023-00-07.ndx", + "18-10-2023-00-07.pcm", + "18-10-2023-00-08.ndx", + "18-10-2023-00-08.pcm", + "18-10-2023-00-09.ndx", + "18-10-2023-00-09.pcm", + "18-10-2023-00-10.ndx", + "18-10-2023-00-10.pcm", + "18-10-2023-00-11.ndx", + "18-10-2023-00-11.pcm", + "18-10-2023-00-12.ndx", + "18-10-2023-00-12.pcm", + "18-10-2023-00-13.ndx", + "18-10-2023-00-13.pcm", + "18-10-2023-00-14.ndx", + "18-10-2023-00-14.pcm", + "18-10-2023-00-15.ndx", + "18-10-2023-00-15.pcm", + "18-10-2023-00-16.ndx", + "18-10-2023-00-16.pcm", + "18-10-2023-00-17.ndx", + "18-10-2023-00-17.pcm", + "18-10-2023-00-18.ndx", + "18-10-2023-00-18.pcm", + "18-10-2023-00-19.ndx", + "18-10-2023-00-19.pcm", + "18-10-2023-00-20.ndx", + "18-10-2023-00-20.pcm", + "18-10-2023-00-21.ndx", + "18-10-2023-00-21.pcm", + "18-10-2023-00-22.ndx", + "18-10-2023-00-22.pcm", + "18-10-2023-00-23.ndx", + "18-10-2023-00-23.pcm", + "18-10-2023-00-24.ndx", + "18-10-2023-00-24.pcm", + "18-10-2023-00-25.ndx", + "18-10-2023-00-25.pcm", + "18-10-2023-00-26.ndx", + "18-10-2023-00-26.pcm", + "18-10-2023-00-27.ndx", + "18-10-2023-00-27.pcm", + "18-10-2023-00-28.ndx", + "18-10-2023-00-28.pcm", + "18-10-2023-00-29.ndx", + "18-10-2023-00-29.pcm", + "18-10-2023-00-30.ndx", + "18-10-2023-00-30.pcm", + "18-10-2023-00-31.ndx", + "18-10-2023-00-31.pcm", + "18-10-2023-00-32.ndx", + "18-10-2023-00-32.pcm", + "18-10-2023-00-33.ndx", + "18-10-2023-00-33.pcm", + "18-10-2023-00-34.ndx", + "18-10-2023-00-34.pcm", + "18-10-2023-00-35.ndx", + "18-10-2023-00-35.pcm", + "18-10-2023-00-36.ndx", + "18-10-2023-00-36.pcm", + "18-10-2023-00-37.ndx", + "18-10-2023-00-37.pcm", + "18-10-2023-00-38.ndx", + "18-10-2023-00-38.pcm", + "18-10-2023-00-39.ndx", + "18-10-2023-00-39.pcm", + "18-10-2023-00-40.ndx", + "18-10-2023-00-40.pcm", 
+ "18-10-2023-00-41.ndx", + "18-10-2023-00-41.pcm", + "18-10-2023-00-42.ndx", + "18-10-2023-00-42.pcm", + "18-10-2023-00-43.ndx", + "18-10-2023-00-43.pcm", + "18-10-2023-00-45.ndx", + "18-10-2023-00-45.pcm", + "18-10-2023-00-46.ndx", + "18-10-2023-00-46.pcm", + "18-10-2023-00-47.ndx", + "18-10-2023-00-47.pcm", + "18-10-2023-00-48.ndx", + "18-10-2023-00-48.pcm", + "18-10-2023-00-49.ndx", + "18-10-2023-00-49.pcm", + "18-10-2023-00-50.ndx", + "18-10-2023-00-50.pcm", + "18-10-2023-00-51.ndx", + "18-10-2023-00-51.pcm", + "18-10-2023-00-52.ndx", + "18-10-2023-00-52.pcm", + "18-10-2023-00-53.ndx", + "18-10-2023-00-53.pcm", + "18-10-2023-00-54.ndx", + "18-10-2023-00-54.pcm", + "18-10-2023-00-55.ndx", + "18-10-2023-00-55.pcm", + "18-10-2023-00-56.ndx", + "18-10-2023-00-56.pcm", + "18-10-2023-00-57.ndx", + "18-10-2023-00-57.pcm", + "18-10-2023-00-58.ndx", + "18-10-2023-00-58.pcm", + "18-10-2023-00-59.ndx", + "18-10-2023-00-59.pcm", + "18-10-2023-01-00.ndx", + "18-10-2023-01-00.pcm", + "18-10-2023-01-01.ndx", + "18-10-2023-01-01.pcm", + "18-10-2023-01-02.ndx", + "18-10-2023-01-02.pcm", + "18-10-2023-01-03.ndx", + "18-10-2023-01-03.pcm", + "18-10-2023-01-05.ndx", + "18-10-2023-01-05.pcm", + "18-10-2023-01-06.ndx", + "18-10-2023-01-06.pcm", + "18-10-2023-01-07.ndx", + "18-10-2023-01-07.pcm", + "18-10-2023-01-08.ndx", + "18-10-2023-01-08.pcm", + "18-10-2023-01-09.ndx", + "18-10-2023-01-09.pcm", + "18-10-2023-01-10.ndx", + "18-10-2023-01-10.pcm", + "18-10-2023-01-11.ndx", + "18-10-2023-01-11.pcm", + "18-10-2023-01-12.ndx", + "18-10-2023-01-12.pcm", + "18-10-2023-01-13.ndx", + "18-10-2023-01-13.pcm", + "18-10-2023-01-14.ndx", + "18-10-2023-01-14.pcm", + "18-10-2023-01-15.ndx", + "18-10-2023-01-15.pcm", + "18-10-2023-01-16.ndx", + "18-10-2023-01-16.pcm", + "18-10-2023-01-17.ndx", + "18-10-2023-01-17.pcm", + "18-10-2023-01-18.ndx", + "18-10-2023-01-18.pcm", + "18-10-2023-01-19.mjp", + "18-10-2023-01-19.ndx", + "18-10-2023-01-19.pcm", + "18-10-2023-11-04.ndx", + "18-10-2023-11-04.pcm", + "18-10-2023-11-05.avi.done", + "18-10-2023-11-05.gpg", + "18-10-2023-11-05.mp4.done", + "18-10-2023-11-10.avi.done", + "18-10-2023-11-10.gpg", + "18-10-2023-11-10.mp4.done", + "18-10-2023-11-15.avi.done", + "18-10-2023-11-15.gpg", + "18-10-2023-11-15.mp4.done", + "18-10-2023-11-20.avi.done", + "18-10-2023-11-20.gpg", + "18-10-2023-11-20.mp4.done", + "18-10-2023-11-25.avi.done", + "18-10-2023-11-25.gpg", + "18-10-2023-11-25.mp4.done", + "18-10-2023-11-30.avi.done", + "18-10-2023-11-30.gpg", + "18-10-2023-11-30.mp4.done", + "18-10-2023-11-35.avi.done", + "18-10-2023-11-35.gpg", + "18-10-2023-11-35.mp4.done", + "18-10-2023-11-40.mjp", + "18-10-2023-11-40.ndx", + "18-10-2023-11-40.pcm", + "18-10-2023-11-41.mjp", + "18-10-2023-11-41.ndx", + "18-10-2023-11-41.pcm", + "18-10-2023-11-42.mjp", + "18-10-2023-11-42.ndx", + "18-10-2023-11-42.pcm", + "18-10-2023-11-43.mjp", + "18-10-2023-11-43.ndx", + "18-10-2023-11-43.pcm", + "18-10-2023-11-45.avi.done", + "18-10-2023-11-45.gpg", + "18-10-2023-11-45.mp4.done", + "18-10-2023-11-46.ndx", + "18-10-2023-11-46.pcm", + "18-10-2023-11-47.ndx", + "18-10-2023-11-47.pcm", + "18-10-2023-11-50.avi.done", + "18-10-2023-11-50.gpg", + "18-10-2023-11-50.mp4.done", + "18-10-2023-11-55.avi.done", + "18-10-2023-11-55.gpg", + "18-10-2023-11-55.mp4.done", + "18-10-2023-12-00.avi.done", + "18-10-2023-12-00.gpg", + "18-10-2023-12-00.mp4.done", + "18-10-2023-12-05.avi.done", + "18-10-2023-12-05.gpg", + "18-10-2023-12-05.mp4.done", + "18-10-2023-12-10.avi.done", + "18-10-2023-12-10.gpg", + 
"18-10-2023-12-10.mp4.done", + "18-10-2023-12-15.avi.done", + "18-10-2023-12-15.gpg", + "18-10-2023-12-15.mp4.done", + "18-10-2023-12-20.avi.done", + "18-10-2023-12-20.gpg", + "18-10-2023-12-20.mp4.done", + "18-10-2023-12-25.avi.done", + "18-10-2023-12-25.gpg", + "18-10-2023-12-25.mp4.done", + "18-10-2023-12-30.avi.done", + "18-10-2023-12-30.gpg", + "18-10-2023-12-30.mp4.done", + "18-10-2023-12-35.avi.done", + "18-10-2023-12-35.gpg", + "18-10-2023-12-35.mp4.done", + "18-10-2023-12-40.avi.done", + "18-10-2023-12-40.gpg", + "18-10-2023-12-40.mp4.done", + "18-10-2023-12-45.avi.done", + "18-10-2023-12-45.gpg", + "18-10-2023-12-45.mp4.done", + "18-10-2023-12-50.avi.done", + "18-10-2023-12-50.gpg", + "18-10-2023-12-50.mp4.done", + "18-10-2023-12-55.avi.done", + "18-10-2023-12-55.gpg", + "18-10-2023-12-55.mp4.done", + "18-10-2023-13-00.avi.done", + "18-10-2023-13-00.gpg", + "18-10-2023-13-00.mp4.done", + "18-10-2023-13-05.avi.done", + "18-10-2023-13-05.gpg", + "18-10-2023-13-05.mp4.done", + "18-10-2023-13-10.avi.done", + "18-10-2023-13-10.gpg", + "18-10-2023-13-10.mp4.done", + "18-10-2023-13-15.avi.done", + "18-10-2023-13-15.gpg", + "18-10-2023-13-15.mp4.done", + "18-10-2023-13-20.avi.done", + "18-10-2023-13-20.gpg", + "18-10-2023-13-20.mp4.done", + "18-10-2023-13-25.avi.done", + "18-10-2023-13-25.gpg", + "18-10-2023-13-25.mp4.done", + "18-10-2023-13-30.avi.done", + "18-10-2023-13-30.gpg", + "18-10-2023-13-30.mp4.done", + "18-10-2023-13-35.avi.done", + "18-10-2023-13-35.gpg", + "18-10-2023-13-35.mp4.done", + "18-10-2023-13-40.avi.done", + "18-10-2023-13-40.gpg", + "18-10-2023-13-40.mp4.done", + "18-10-2023-13-45.avi.done", + "18-10-2023-13-45.gpg", + "18-10-2023-13-45.mp4.done", + "18-10-2023-13-50.avi.done", + "18-10-2023-13-50.gpg", + "18-10-2023-13-50.mp4.done", + "18-10-2023-13-55.avi.done", + "18-10-2023-13-55.gpg", + "18-10-2023-13-55.mp4.done", + "18-10-2023-14-00.avi.done", + "18-10-2023-14-00.gpg", + "18-10-2023-14-00.mp4.done", + "18-10-2023-14-05.avi.done", + "18-10-2023-14-05.gpg", + "18-10-2023-14-05.mp4.done", + "18-10-2023-14-10.avi.done", + "18-10-2023-14-10.gpg", + "18-10-2023-14-10.mp4.done", + "18-10-2023-14-15.avi.done", + "18-10-2023-14-15.gpg", + "18-10-2023-14-15.mp4.done", + "18-10-2023-14-20.avi.done", + "18-10-2023-14-20.gpg", + "18-10-2023-14-20.mp4.done", + "18-10-2023-14-25.avi.done", + "18-10-2023-14-25.gpg", + "18-10-2023-14-25.mp4.done", + "18-10-2023-14-30.avi.done", + "18-10-2023-14-30.gpg", + "18-10-2023-14-30.mp4.done", + "18-10-2023-14-35.avi.done", + "18-10-2023-14-35.gpg", + "18-10-2023-14-35.mp4.done", + "18-10-2023-14-40.avi.done", + "18-10-2023-14-40.gpg", + "18-10-2023-14-40.mp4.done", + "18-10-2023-14-41.ndx", + "18-10-2023-14-41.pcm", + "18-10-2023-14-42.ndx", + "18-10-2023-14-42.pcm", + "18-10-2023-14-43.ndx", + "18-10-2023-14-43.pcm", + "18-10-2023-14-44.ndx", + "18-10-2023-14-44.pcm", + "18-10-2023-14-45.ndx", + "18-10-2023-14-45.pcm", + "18-10-2023-14-46.ndx", + "18-10-2023-14-46.pcm", + "18-10-2023-14-47.ndx", + "18-10-2023-14-47.pcm", + "18-10-2023-14-48.ndx", + "18-10-2023-14-48.pcm", + "18-10-2023-14-49.ndx", + "18-10-2023-14-49.pcm", + "18-10-2023-14-50.ndx", + "18-10-2023-14-50.pcm", + "18-10-2023-14-51.ndx", + "18-10-2023-14-51.pcm", + "18-10-2023-14-52.ndx", + "18-10-2023-14-52.pcm", + "18-10-2023-14-53.ndx", + "18-10-2023-14-53.pcm", + "18-10-2023-14-54.ndx", + "18-10-2023-14-54.pcm", + "18-10-2023-14-55.ndx", + "18-10-2023-14-55.pcm", + "18-10-2023-14-56.ndx", + "18-10-2023-14-56.pcm", + "18-10-2023-14-57.ndx", + "18-10-2023-14-57.pcm", + 
"18-10-2023-14-58.ndx", + "18-10-2023-14-58.pcm", + "18-10-2023-14-59.ndx", + "18-10-2023-14-59.pcm", + "18-10-2023-15-00.ndx", + "18-10-2023-15-00.pcm", + "18-10-2023-15-01.ndx", + "18-10-2023-15-01.pcm", + "18-10-2023-15-02.ndx", + "18-10-2023-15-02.pcm", + "18-10-2023-15-03.ndx", + "18-10-2023-15-03.pcm", + "18-10-2023-15-04.ndx", + "18-10-2023-15-04.pcm", + "18-10-2023-15-05.ndx", + "18-10-2023-15-05.pcm", + "18-10-2023-15-06.ndx", + "18-10-2023-15-06.pcm", + "18-10-2023-15-07.ndx", + "18-10-2023-15-07.pcm", + "18-10-2023-15-08.ndx", + "18-10-2023-15-08.pcm", + "18-10-2023-15-09.ndx", + "18-10-2023-15-09.pcm", + "18-10-2023-15-10.ndx", + "18-10-2023-15-10.pcm", + "18-10-2023-15-11.ndx", + "18-10-2023-15-11.pcm", + "18-10-2023-15-12.ndx", + "18-10-2023-15-12.pcm", + "18-10-2023-15-13.ndx", + "18-10-2023-15-13.pcm", + "18-10-2023-15-14.ndx", + "18-10-2023-15-14.pcm", + "18-10-2023-15-15.ndx", + "18-10-2023-15-15.pcm", + "18-10-2023-15-16.ndx", + "18-10-2023-15-16.pcm", + "18-10-2023-15-17.ndx", + "18-10-2023-15-17.pcm", + "18-10-2023-15-18.ndx", + "18-10-2023-15-18.pcm", + "18-10-2023-15-19.ndx", + "18-10-2023-15-19.pcm", + "18-10-2023-15-20.ndx", + "18-10-2023-15-20.pcm", + "18-10-2023-15-21.ndx", + "18-10-2023-15-21.pcm", + "18-10-2023-15-22.ndx", + "18-10-2023-15-22.pcm", + "18-10-2023-15-23.ndx", + "18-10-2023-15-23.pcm", + "18-10-2023-15-24.ndx", + "18-10-2023-15-24.pcm", + "18-10-2023-15-25.ndx", + "18-10-2023-15-25.pcm", + "18-10-2023-15-26.ndx", + "18-10-2023-15-26.pcm", + "18-10-2023-15-27.ndx", + "18-10-2023-15-27.pcm", + "18-10-2023-15-28.ndx", + "18-10-2023-15-28.pcm", + "18-10-2023-15-30.ndx", + "18-10-2023-15-30.pcm", + "18-10-2023-15-31.ndx", + "18-10-2023-15-31.pcm", + "18-10-2023-15-32.ndx", + "18-10-2023-15-32.pcm", + "18-10-2023-15-33.ndx", + "18-10-2023-15-33.pcm", + "18-10-2023-15-35.ndx", + "18-10-2023-15-35.pcm", + "18-10-2023-15-36.ndx", + "18-10-2023-15-36.pcm", + "18-10-2023-15-37.ndx", + "18-10-2023-15-37.pcm", + "18-10-2023-15-38.ndx", + "18-10-2023-15-38.pcm", + "18-10-2023-15-39.ndx", + "18-10-2023-15-39.pcm", + "18-10-2023-15-40.ndx", + "18-10-2023-15-40.pcm", + "18-10-2023-15-41.ndx", + "18-10-2023-15-41.pcm", + "18-10-2023-15-42.ndx", + "18-10-2023-15-42.pcm", + "18-10-2023-15-43.ndx", + "18-10-2023-15-43.pcm", + "18-10-2023-15-44.ndx", + "18-10-2023-15-44.pcm", + "18-10-2023-15-45.ndx", + "18-10-2023-15-45.pcm", + "18-10-2023-15-46.ndx", + "18-10-2023-15-46.pcm", + "18-10-2023-15-47.ndx", + "18-10-2023-15-47.pcm", + "18-10-2023-15-48.ndx", + "18-10-2023-15-48.pcm", + "18-10-2023-15-50.ndx", + "18-10-2023-15-50.pcm", + "18-10-2023-15-51.ndx", + "18-10-2023-15-51.pcm", + "18-10-2023-15-52.ndx", + "18-10-2023-15-52.pcm", + "18-10-2023-15-53.ndx", + "18-10-2023-15-53.pcm", + "18-10-2023-15-54.ndx", + "18-10-2023-15-54.pcm", + "18-10-2023-15-55.ndx", + "18-10-2023-15-55.pcm", + "18-10-2023-15-56.ndx", + "18-10-2023-15-56.pcm", + "18-10-2023-15-57.ndx", + "18-10-2023-15-57.pcm", + "18-10-2023-15-58.ndx", + "18-10-2023-15-58.pcm", + "18-10-2023-15-59.ndx", + "18-10-2023-15-59.pcm", + "18-10-2023-16-00.ndx", + "18-10-2023-16-00.pcm", + "18-10-2023-16-01.ndx", + "18-10-2023-16-01.pcm", + "18-10-2023-16-02.ndx", + "18-10-2023-16-02.pcm", + "18-10-2023-16-03.ndx", + "18-10-2023-16-03.pcm", + "18-10-2023-16-04.ndx", + "18-10-2023-16-04.pcm", + "18-10-2023-16-05.ndx", + "18-10-2023-16-05.pcm", + "18-10-2023-16-06.ndx", + "18-10-2023-16-06.pcm", + "18-10-2023-16-07.ndx", + "18-10-2023-16-07.pcm", + "18-10-2023-16-08.ndx", + "18-10-2023-16-08.pcm", + 
"18-10-2023-16-09.ndx", + "18-10-2023-16-09.pcm", + "18-10-2023-16-10.ndx", + "18-10-2023-16-10.pcm", + "18-10-2023-16-11.ndx", + "18-10-2023-16-11.pcm", + "18-10-2023-16-12.ndx", + "18-10-2023-16-12.pcm", + "18-10-2023-16-13.ndx", + "18-10-2023-16-13.pcm", + "18-10-2023-16-14.ndx", + "18-10-2023-16-14.pcm", + "18-10-2023-16-15.ndx", + "18-10-2023-16-15.pcm", + "18-10-2023-16-16.ndx", + "18-10-2023-16-16.pcm", + "18-10-2023-16-17.ndx", + "18-10-2023-16-17.pcm", + "18-10-2023-16-18.ndx", + "18-10-2023-16-18.pcm", + "18-10-2023-16-19.ndx", + "18-10-2023-16-19.pcm", + "18-10-2023-16-20.ndx", + "18-10-2023-16-20.pcm", + "18-10-2023-16-21.ndx", + "18-10-2023-16-21.pcm", + "18-10-2023-16-22.ndx", + "18-10-2023-16-22.pcm", + "18-10-2023-16-23.ndx", + "18-10-2023-16-23.pcm", + "18-10-2023-16-24.ndx", + "18-10-2023-16-24.pcm", + "18-10-2023-16-25.ndx", + "18-10-2023-16-25.pcm", + "18-10-2023-16-26.ndx", + "18-10-2023-16-26.pcm", + "18-10-2023-16-27.ndx", + "18-10-2023-16-27.pcm", + "18-10-2023-16-28.ndx", + "18-10-2023-16-28.pcm", + "18-10-2023-16-29.ndx", + "18-10-2023-16-29.pcm", + "18-10-2023-16-30.ndx", + "18-10-2023-16-30.pcm", + "18-10-2023-16-31.ndx", + "18-10-2023-16-31.pcm", + "18-10-2023-16-32.ndx", + "18-10-2023-16-32.pcm", + "18-10-2023-16-33.ndx", + "18-10-2023-16-33.pcm", + "18-10-2023-16-34.ndx", + "18-10-2023-16-34.pcm", + "18-10-2023-16-35.ndx", + "18-10-2023-16-35.pcm", + "18-10-2023-16-36.ndx", + "18-10-2023-16-36.pcm", + "18-10-2023-16-37.ndx", + "18-10-2023-16-37.pcm", + "18-10-2023-16-38.ndx", + "18-10-2023-16-38.pcm", + "18-10-2023-16-39.ndx", + "18-10-2023-16-39.pcm", + "18-10-2023-16-40.ndx", + "18-10-2023-16-40.pcm", + "18-10-2023-16-41.ndx", + "18-10-2023-16-41.pcm", + "18-10-2023-16-42.ndx", + "18-10-2023-16-42.pcm", + "18-10-2023-16-43.ndx", + "18-10-2023-16-43.pcm", + "18-10-2023-16-45.ndx", + "18-10-2023-16-45.pcm", + "18-10-2023-16-46.ndx", + "18-10-2023-16-46.pcm", + "18-10-2023-16-47.ndx", + "18-10-2023-16-47.pcm", + "18-10-2023-16-48.ndx", + "18-10-2023-16-48.pcm", + "18-10-2023-16-49.ndx", + "18-10-2023-16-49.pcm", + "18-10-2023-16-50.ndx", + "18-10-2023-16-50.pcm", + "18-10-2023-16-51.ndx", + "18-10-2023-16-51.pcm", + "18-10-2023-16-52.ndx", + "18-10-2023-16-52.pcm", + "18-10-2023-16-53.ndx", + "18-10-2023-16-53.pcm", + "18-10-2023-16-55.ndx", + "18-10-2023-16-55.pcm", + "18-10-2023-16-56.ndx", + "18-10-2023-16-56.pcm", + "18-10-2023-16-57.ndx", + "18-10-2023-16-57.pcm", + "18-10-2023-16-58.ndx", + "18-10-2023-16-58.pcm", + "18-10-2023-16-59.ndx", + "18-10-2023-16-59.pcm", + "18-10-2023-17-00.ndx", + "18-10-2023-17-00.pcm", + "18-10-2023-17-01.ndx", + "18-10-2023-17-01.pcm", + "18-10-2023-17-02.ndx", + "18-10-2023-17-02.pcm", + "18-10-2023-17-03.ndx", + "18-10-2023-17-03.pcm", + "18-10-2023-17-04.ndx", + "18-10-2023-17-04.pcm", + "18-10-2023-17-05.ndx", + "18-10-2023-17-05.pcm", + "18-10-2023-17-06.ndx", + "18-10-2023-17-06.pcm", + "18-10-2023-17-07.ndx", + "18-10-2023-17-07.pcm", + "18-10-2023-17-08.ndx", + "18-10-2023-17-08.pcm", + "18-10-2023-17-09.ndx", + "18-10-2023-17-09.pcm", + "18-10-2023-17-10.ndx", + "18-10-2023-17-10.pcm", + "18-10-2023-17-11.ndx", + "18-10-2023-17-11.pcm", + "18-10-2023-17-12.ndx", + "18-10-2023-17-12.pcm", + "18-10-2023-17-13.ndx", + "18-10-2023-17-13.pcm", + "18-10-2023-17-14.ndx", + "18-10-2023-17-14.pcm", + "18-10-2023-17-15.ndx", + "18-10-2023-17-15.pcm", + "18-10-2023-17-16.ndx", + "18-10-2023-17-16.pcm", + "18-10-2023-17-17.ndx", + "18-10-2023-17-17.pcm", + "18-10-2023-17-18.ndx", + "18-10-2023-17-18.pcm", + 
"18-10-2023-17-19.ndx", + "18-10-2023-17-19.pcm", + "18-10-2023-17-20.ndx", + "18-10-2023-17-20.pcm", + "18-10-2023-17-21.ndx", + "18-10-2023-17-21.pcm", + "18-10-2023-17-22.ndx", + "18-10-2023-17-22.pcm", + "18-10-2023-17-23.ndx", + "18-10-2023-17-23.pcm", + "18-10-2023-17-25.ndx", + "18-10-2023-17-25.pcm", + "18-10-2023-17-26.ndx", + "18-10-2023-17-26.pcm", + "18-10-2023-17-27.ndx", + "18-10-2023-17-27.pcm", + "18-10-2023-17-28.ndx", + "18-10-2023-17-28.pcm", + "18-10-2023-17-29.ndx", + "18-10-2023-17-29.pcm", + "18-10-2023-17-30.ndx", + "18-10-2023-17-30.pcm", + "18-10-2023-17-31.ndx", + "18-10-2023-17-31.pcm", + "18-10-2023-17-32.ndx", + "18-10-2023-17-32.pcm", + "18-10-2023-17-33.ndx", + "18-10-2023-17-33.pcm", + "18-10-2023-17-34.ndx", + "18-10-2023-17-34.pcm", + "18-10-2023-17-35.ndx", + "18-10-2023-17-35.pcm", + "18-10-2023-17-36.ndx", + "18-10-2023-17-36.pcm", + "18-10-2023-17-37.ndx", + "18-10-2023-17-37.pcm", + "18-10-2023-17-38.ndx", + "18-10-2023-17-38.pcm", + "18-10-2023-17-39.ndx", + "18-10-2023-17-39.pcm", + "18-10-2023-17-40.ndx", + "18-10-2023-17-40.pcm", + "18-10-2023-17-41.ndx", + "18-10-2023-17-41.pcm", + "18-10-2023-17-42.ndx", + "18-10-2023-17-42.pcm", + "18-10-2023-17-43.ndx", + "18-10-2023-17-43.pcm", + "18-10-2023-17-44.ndx", + "18-10-2023-17-44.pcm", + "18-10-2023-17-45.ndx", + "18-10-2023-17-45.pcm", + "18-10-2023-17-46.ndx", + "18-10-2023-17-46.pcm", + "18-10-2023-17-47.ndx", + "18-10-2023-17-47.pcm", + "18-10-2023-17-48.ndx", + "18-10-2023-17-48.pcm", + "18-10-2023-17-50.ndx", + "18-10-2023-17-50.pcm", + "18-10-2023-17-51.ndx", + "18-10-2023-17-51.pcm", + "18-10-2023-17-52.ndx", + "18-10-2023-17-52.pcm", + "18-10-2023-17-53.ndx", + "18-10-2023-17-53.pcm", + "18-10-2023-17-54.ndx", + "18-10-2023-17-54.pcm", + "18-10-2023-17-55.ndx", + "18-10-2023-17-55.pcm", + "18-10-2023-17-56.ndx", + "18-10-2023-17-56.pcm", + "18-10-2023-17-57.ndx", + "18-10-2023-17-57.pcm", + "18-10-2023-17-58.mjp", + "18-10-2023-17-58.ndx", + "18-10-2023-17-58.pcm", + "18-10-2023-20-01.ndx", + "18-10-2023-20-01.pcm", + "18-10-2023-20-02.ndx", + "18-10-2023-20-02.pcm", + "18-10-2023-20-03.ndx", + "18-10-2023-20-03.pcm", + "18-10-2023-20-04.ndx", + "18-10-2023-20-04.pcm", + "18-10-2023-20-05.ndx", + "18-10-2023-20-05.pcm", + "18-10-2023-20-06.ndx", + "18-10-2023-20-06.pcm", + "18-10-2023-20-07.ndx", + "18-10-2023-20-07.pcm", + "18-10-2023-20-08.ndx", + "18-10-2023-20-08.pcm", + "18-10-2023-20-09.ndx", + "18-10-2023-20-09.pcm", + "18-10-2023-20-10.ndx", + "18-10-2023-20-10.pcm", + "18-10-2023-20-11.ndx", + "18-10-2023-20-11.pcm", + "18-10-2023-20-12.ndx", + "18-10-2023-20-12.pcm", + "18-10-2023-20-13.ndx", + "18-10-2023-20-13.pcm", + "18-10-2023-20-14.ndx", + "18-10-2023-20-14.pcm", + "18-10-2023-20-15.ndx", + "18-10-2023-20-15.pcm", + "18-10-2023-20-16.ndx", + "18-10-2023-20-16.pcm", + "18-10-2023-20-17.ndx", + "18-10-2023-20-17.pcm", + "18-10-2023-20-18.ndx", + "18-10-2023-20-18.pcm", + "18-10-2023-20-19.ndx", + "18-10-2023-20-19.pcm", + "18-10-2023-20-20.ndx", + "18-10-2023-20-20.pcm", + "18-10-2023-20-21.ndx", + "18-10-2023-20-21.pcm", + "18-10-2023-20-22.ndx", + "18-10-2023-20-22.pcm", + "18-10-2023-20-23.ndx", + "18-10-2023-20-23.pcm", + "18-10-2023-20-24.ndx", + "18-10-2023-20-24.pcm", + "18-10-2023-20-25.ndx", + "18-10-2023-20-25.pcm", + "18-10-2023-20-26.ndx", + "18-10-2023-20-26.pcm", + "18-10-2023-20-27.ndx", + "18-10-2023-20-27.pcm", + "18-10-2023-20-28.ndx", + "18-10-2023-20-28.pcm", + "18-10-2023-20-29.ndx", + "18-10-2023-20-29.pcm", + "18-10-2023-20-30.ndx", + 
"18-10-2023-20-30.pcm", + "18-10-2023-20-31.ndx", + "18-10-2023-20-31.pcm", + "18-10-2023-20-32.ndx", + "18-10-2023-20-32.pcm", + "18-10-2023-20-33.ndx", + "18-10-2023-20-33.pcm", + "18-10-2023-20-34.ndx", + "18-10-2023-20-34.pcm", + "18-10-2023-20-35.ndx", + "18-10-2023-20-35.pcm", + "18-10-2023-20-36.ndx", + "18-10-2023-20-36.pcm", + "18-10-2023-20-37.ndx", + "18-10-2023-20-37.pcm", + "18-10-2023-20-38.ndx", + "18-10-2023-20-38.pcm", + "18-10-2023-20-39.ndx", + "18-10-2023-20-39.pcm", + "18-10-2023-20-40.ndx", + "18-10-2023-20-40.pcm", + "18-10-2023-20-41.ndx", + "18-10-2023-20-41.pcm", + "18-10-2023-20-42.ndx", + "18-10-2023-20-42.pcm", + "18-10-2023-20-43.ndx", + "18-10-2023-20-43.pcm", + "18-10-2023-20-44.ndx", + "18-10-2023-20-44.pcm", + "18-10-2023-20-45.ndx", + "18-10-2023-20-45.pcm", + "18-10-2023-20-46.ndx", + "18-10-2023-20-46.pcm", + "18-10-2023-20-47.ndx", + "18-10-2023-20-47.pcm", + "18-10-2023-20-48.ndx", + "18-10-2023-20-48.pcm", + "18-10-2023-20-49.ndx", + "18-10-2023-20-49.pcm", + "18-10-2023-20-50.ndx", + "18-10-2023-20-50.pcm", + "18-10-2023-20-51.ndx", + "18-10-2023-20-51.pcm", + "18-10-2023-20-52.ndx", + "18-10-2023-20-52.pcm", + "18-10-2023-20-53.ndx", + "18-10-2023-20-53.pcm", + "18-10-2023-20-54.ndx", + "18-10-2023-20-54.pcm", + "18-10-2023-20-55.ndx", + "18-10-2023-20-55.pcm", + "18-10-2023-20-56.ndx", + "18-10-2023-20-56.pcm", + "18-10-2023-20-57.ndx", + "18-10-2023-20-57.pcm", + "18-10-2023-20-58.ndx", + "18-10-2023-20-58.pcm", + "18-10-2023-20-59.ndx", + "18-10-2023-20-59.pcm", + "18-10-2023-21-00.ndx", + "18-10-2023-21-00.pcm", + "18-10-2023-21-01.ndx", + "18-10-2023-21-01.pcm", + "18-10-2023-21-02.ndx", + "18-10-2023-21-02.pcm", + "18-10-2023-21-03.ndx", + "18-10-2023-21-03.pcm", + "18-10-2023-21-04.ndx", + "18-10-2023-21-04.pcm", + "18-10-2023-21-05.ndx", + "18-10-2023-21-05.pcm", + "18-10-2023-21-06.ndx", + "18-10-2023-21-06.pcm", + "18-10-2023-21-07.ndx", + "18-10-2023-21-07.pcm", + "18-10-2023-21-08.ndx", + "18-10-2023-21-08.pcm", + "18-10-2023-21-10.ndx", + "18-10-2023-21-10.pcm", + "18-10-2023-21-11.ndx", + "18-10-2023-21-11.pcm", + "18-10-2023-21-12.ndx", + "18-10-2023-21-12.pcm", + "18-10-2023-21-13.ndx", + "18-10-2023-21-13.pcm", + "18-10-2023-21-14.ndx", + "18-10-2023-21-14.pcm", + "18-10-2023-21-15.ndx", + "18-10-2023-21-15.pcm", + "18-10-2023-21-16.ndx", + "18-10-2023-21-16.pcm", + "18-10-2023-21-17.ndx", + "18-10-2023-21-17.pcm", + "18-10-2023-21-18.ndx", + "18-10-2023-21-18.pcm", + "18-10-2023-21-19.ndx", + "18-10-2023-21-19.pcm", + "18-10-2023-21-20.ndx", + "18-10-2023-21-20.pcm", + "18-10-2023-21-21.ndx", + "18-10-2023-21-21.pcm", + "18-10-2023-21-22.ndx", + "18-10-2023-21-22.pcm", + "18-10-2023-21-23.ndx", + "18-10-2023-21-23.pcm", + "18-10-2023-21-24.ndx", + "18-10-2023-21-24.pcm", + "18-10-2023-21-25.ndx", + "18-10-2023-21-25.pcm", + "18-10-2023-21-26.ndx", + "18-10-2023-21-26.pcm", + "18-10-2023-21-27.ndx", + "18-10-2023-21-27.pcm", + "18-10-2023-21-28.ndx", + "18-10-2023-21-28.pcm", + "18-10-2023-21-30.ndx", + "18-10-2023-21-30.pcm", + "18-10-2023-21-31.ndx", + "18-10-2023-21-31.pcm", + "18-10-2023-21-32.ndx", + "18-10-2023-21-32.pcm", + "18-10-2023-21-33.ndx", + "18-10-2023-21-33.pcm", + "18-10-2023-21-34.ndx", + "18-10-2023-21-34.pcm", + "18-10-2023-21-35.ndx", + "18-10-2023-21-35.pcm", + "18-10-2023-21-36.ndx", + "18-10-2023-21-36.pcm", + "18-10-2023-21-37.ndx", + "18-10-2023-21-37.pcm", + "18-10-2023-21-38.ndx", + "18-10-2023-21-38.pcm", + "18-10-2023-21-39.ndx", + "18-10-2023-21-39.pcm", + "18-10-2023-21-40.ndx", + 
"18-10-2023-21-40.pcm", + "18-10-2023-21-41.ndx", + "18-10-2023-21-41.pcm", + "18-10-2023-21-42.ndx", + "18-10-2023-21-42.pcm", + "18-10-2023-21-43.ndx", + "18-10-2023-21-43.pcm", + "18-10-2023-21-44.mjp", + "18-10-2023-21-44.ndx", + "18-10-2023-21-44.pcm", + "18-10-2023-00-00.ndx", + "18-10-2023-00-00.pcm", + "18-10-2023-00-01.ndx", + "18-10-2023-00-01.pcm", + "18-10-2023-00-02.ndx", + "18-10-2023-00-02.pcm", + "18-10-2023-00-03.ndx", + "18-10-2023-00-03.pcm", + "18-10-2023-00-04.ndx", + "18-10-2023-00-04.pcm", + "18-10-2023-00-05.ndx", + "18-10-2023-00-05.pcm", + "18-10-2023-00-06.ndx", + "18-10-2023-00-06.pcm", + "18-10-2023-00-07.ndx", + "18-10-2023-00-07.pcm", + "18-10-2023-00-08.ndx", + "18-10-2023-00-08.pcm", + "18-10-2023-00-09.ndx", + "18-10-2023-00-09.pcm", + "18-10-2023-00-10.ndx", + "18-10-2023-00-10.pcm", + "18-10-2023-00-11.ndx", + "18-10-2023-00-11.pcm", + "18-10-2023-00-12.ndx", + "18-10-2023-00-12.pcm", + "18-10-2023-00-13.ndx", + "18-10-2023-00-13.pcm", + "18-10-2023-00-14.ndx", + "18-10-2023-00-14.pcm", + "18-10-2023-00-15.ndx", + "18-10-2023-00-15.pcm", + "18-10-2023-00-16.ndx", + "18-10-2023-00-16.pcm", + "18-10-2023-00-17.ndx", + "18-10-2023-00-17.pcm", + "18-10-2023-00-18.ndx", + "18-10-2023-00-18.pcm", + "18-10-2023-00-19.ndx", + "18-10-2023-00-19.pcm", + "18-10-2023-00-20.ndx", + "18-10-2023-00-20.pcm", + "18-10-2023-00-21.ndx", + "18-10-2023-00-21.pcm", + "18-10-2023-00-22.ndx", + "18-10-2023-00-22.pcm", + "18-10-2023-00-23.ndx", + "18-10-2023-00-23.pcm", + "18-10-2023-00-24.ndx", + "18-10-2023-00-24.pcm", + "18-10-2023-00-25.ndx", + "18-10-2023-00-25.pcm", + "18-10-2023-00-26.ndx", + "18-10-2023-00-26.pcm", + "18-10-2023-00-27.ndx", + "18-10-2023-00-27.pcm", + "18-10-2023-00-28.ndx", + "18-10-2023-00-28.pcm", + "18-10-2023-00-29.ndx", + "18-10-2023-00-29.pcm", + "18-10-2023-00-30.ndx", + "18-10-2023-00-30.pcm", + "18-10-2023-00-31.ndx", + "18-10-2023-00-31.pcm", + "18-10-2023-00-32.ndx", + "18-10-2023-00-32.pcm", + "18-10-2023-00-33.ndx", + "18-10-2023-00-33.pcm", + "18-10-2023-00-34.ndx", + "18-10-2023-00-34.pcm", + "18-10-2023-00-35.ndx", + "18-10-2023-00-35.pcm", + "18-10-2023-00-36.ndx", + "18-10-2023-00-36.pcm", + "18-10-2023-00-37.ndx", + "18-10-2023-00-37.pcm", + "18-10-2023-00-38.ndx", + "18-10-2023-00-38.pcm", + "18-10-2023-00-39.ndx", + "18-10-2023-00-39.pcm", + "18-10-2023-00-40.ndx", + "18-10-2023-00-40.pcm", + "18-10-2023-00-41.ndx", + "18-10-2023-00-41.pcm", + "18-10-2023-00-42.ndx", + "18-10-2023-00-42.pcm", + "18-10-2023-00-43.ndx", + "18-10-2023-00-43.pcm", + "18-10-2023-00-44.ndx", + "18-10-2023-00-44.pcm", + "18-10-2023-00-45.ndx", + "18-10-2023-00-45.pcm", + "18-10-2023-00-46.ndx", + "18-10-2023-00-46.pcm", + "18-10-2023-00-47.ndx", + "18-10-2023-00-47.pcm", + "18-10-2023-00-48.ndx", + "18-10-2023-00-48.pcm", + "18-10-2023-00-49.ndx", + "18-10-2023-00-49.pcm", + "18-10-2023-00-50.ndx", + "18-10-2023-00-50.pcm", + "18-10-2023-00-51.ndx", + "18-10-2023-00-51.pcm", + "18-10-2023-00-52.ndx", + "18-10-2023-00-52.pcm", + "18-10-2023-00-53.ndx", + "18-10-2023-00-53.pcm", + "18-10-2023-00-54.ndx", + "18-10-2023-00-54.pcm", + "18-10-2023-00-55.ndx", + "18-10-2023-00-55.pcm", + "18-10-2023-00-56.ndx", + "18-10-2023-00-56.pcm", + "18-10-2023-00-57.ndx", + "18-10-2023-00-57.pcm", + "18-10-2023-00-58.ndx", + "18-10-2023-00-58.pcm", + "18-10-2023-01-00.ndx", + "18-10-2023-01-00.pcm", + "18-10-2023-01-01.ndx", + "18-10-2023-01-01.pcm", + "18-10-2023-01-02.ndx", + "18-10-2023-01-02.pcm", + "18-10-2023-01-03.ndx", + "18-10-2023-01-03.pcm", + 
"18-10-2023-01-04.ndx", + "18-10-2023-01-04.pcm", + "18-10-2023-01-05.ndx", + "18-10-2023-01-05.pcm", + "18-10-2023-01-06.ndx", + "18-10-2023-01-06.pcm", + "18-10-2023-01-07.ndx", + "18-10-2023-01-07.pcm", + "18-10-2023-01-08.ndx", + "18-10-2023-01-08.pcm", + "18-10-2023-01-09.ndx", + "18-10-2023-01-09.pcm", + "18-10-2023-01-10.ndx", + "18-10-2023-01-10.pcm", + "18-10-2023-01-11.ndx", + "18-10-2023-01-11.pcm", + "18-10-2023-01-12.ndx", + "18-10-2023-01-12.pcm", + "18-10-2023-01-13.ndx", + "18-10-2023-01-13.pcm", + "18-10-2023-01-14.ndx", + "18-10-2023-01-14.pcm", + "18-10-2023-01-15.ndx", + "18-10-2023-01-15.pcm", + "18-10-2023-01-16.ndx", + "18-10-2023-01-16.pcm", + "18-10-2023-01-17.ndx", + "18-10-2023-01-17.pcm", + "18-10-2023-01-18.ndx", + "18-10-2023-01-18.pcm", + "18-10-2023-01-19.mjp", + "18-10-2023-01-19.ndx", + "18-10-2023-01-19.pcm", + "18-10-2023-11-04.ndx", + "18-10-2023-11-04.pcm", + "18-10-2023-11-05.avi.done", + "18-10-2023-11-05.gpg", + "18-10-2023-11-05.mp4.done", + "18-10-2023-11-10.avi.done", + "18-10-2023-11-10.gpg", + "18-10-2023-11-10.mp4.done", + "18-10-2023-11-15.avi.done", + "18-10-2023-11-15.gpg", + "18-10-2023-11-15.mp4.done", + "18-10-2023-11-20.avi.done", + "18-10-2023-11-20.gpg", + "18-10-2023-11-20.mp4.done", + "18-10-2023-11-25.avi.done", + "18-10-2023-11-25.gpg", + "18-10-2023-11-25.mp4.done", + "18-10-2023-11-30.avi.done", + "18-10-2023-11-30.gpg", + "18-10-2023-11-30.mp4.done", + "18-10-2023-11-35.avi.done", + "18-10-2023-11-35.gpg", + "18-10-2023-11-35.mp4.done", + "18-10-2023-11-40.mjp", + "18-10-2023-11-40.ndx", + "18-10-2023-11-40.pcm", + "18-10-2023-11-41.mjp", + "18-10-2023-11-41.ndx", + "18-10-2023-11-41.pcm", + "18-10-2023-11-42.mjp", + "18-10-2023-11-42.ndx", + "18-10-2023-11-42.pcm", + "18-10-2023-11-43.mjp", + "18-10-2023-11-43.ndx", + "18-10-2023-11-43.pcm", + "18-10-2023-11-45.avi.done", + "18-10-2023-11-45.gpg", + "18-10-2023-11-45.mp4.done", + "18-10-2023-11-46.ndx", + "18-10-2023-11-46.pcm", + "18-10-2023-11-47.ndx", + "18-10-2023-11-47.pcm", + "18-10-2023-11-50.avi.done", + "18-10-2023-11-50.gpg", + "18-10-2023-11-50.mp4.done", + "18-10-2023-11-55.avi.done", + "18-10-2023-11-55.gpg", + "18-10-2023-11-55.mp4.done", + "18-10-2023-12-00.avi.done", + "18-10-2023-12-00.gpg", + "18-10-2023-12-00.mp4.done", + "18-10-2023-12-05.avi.done", + "18-10-2023-12-05.gpg", + "18-10-2023-12-05.mp4.done", + "18-10-2023-12-10.avi.done", + "18-10-2023-12-10.gpg", + "18-10-2023-12-10.mp4.done", + "18-10-2023-12-15.avi.done", + "18-10-2023-12-15.gpg", + "18-10-2023-12-15.mp4.done", + "18-10-2023-12-20.avi.done", + "18-10-2023-12-20.gpg", + "18-10-2023-12-20.mp4.done", + "18-10-2023-12-25.avi.done", + "18-10-2023-12-25.gpg", + "18-10-2023-12-25.mp4.done", + "18-10-2023-12-30.avi.done", + "18-10-2023-12-30.gpg", + "18-10-2023-12-30.mp4.done", + "18-10-2023-12-35.avi.done", + "18-10-2023-12-35.gpg", + "18-10-2023-12-35.mp4.done", + "18-10-2023-12-40.avi.done", + "18-10-2023-12-40.gpg", + "18-10-2023-12-40.mp4.done", + "18-10-2023-12-45.avi.done", + "18-10-2023-12-45.gpg", + "18-10-2023-12-45.mp4.done", + "18-10-2023-12-50.avi.done", + "18-10-2023-12-50.gpg", + "18-10-2023-12-50.mp4.done", + "18-10-2023-12-55.avi.done", + "18-10-2023-12-55.gpg", + "18-10-2023-12-55.mp4.done", + "18-10-2023-13-00.avi.done", + "18-10-2023-13-00.gpg", + "18-10-2023-13-00.mp4.done", + "18-10-2023-13-05.avi.done", + "18-10-2023-13-05.gpg", + "18-10-2023-13-05.mp4.done", + "18-10-2023-13-10.avi.done", + "18-10-2023-13-10.gpg", + "18-10-2023-13-10.mp4.done", + "18-10-2023-13-15.avi.done", 
+ "18-10-2023-13-15.gpg", + "18-10-2023-13-15.mp4.done", + "18-10-2023-13-20.avi.done", + "18-10-2023-13-20.gpg", + "18-10-2023-13-20.mp4.done", + "18-10-2023-13-25.avi.done", + "18-10-2023-13-25.gpg", + "18-10-2023-13-25.mp4.done", + "18-10-2023-13-30.avi.done", + "18-10-2023-13-30.gpg", + "18-10-2023-13-30.mp4.done", + "18-10-2023-13-35.avi.done", + "18-10-2023-13-35.gpg", + "18-10-2023-13-35.mp4.done", + "18-10-2023-13-40.avi.done", + "18-10-2023-13-40.gpg", + "18-10-2023-13-40.mp4.done", + "18-10-2023-13-45.avi.done", + "18-10-2023-13-45.gpg", + "18-10-2023-13-45.mp4.done", + "18-10-2023-13-50.avi.done", + "18-10-2023-13-50.gpg", + "18-10-2023-13-50.mp4.done", + "18-10-2023-13-55.avi.done", + "18-10-2023-13-55.gpg", + "18-10-2023-13-55.mp4.done", + "18-10-2023-14-00.avi.done", + "18-10-2023-14-00.gpg", + "18-10-2023-14-00.mp4.done", + "18-10-2023-14-05.avi.done", + "18-10-2023-14-05.gpg", + "18-10-2023-14-05.mp4.done", + "18-10-2023-14-10.avi.done", + "18-10-2023-14-10.gpg", + "18-10-2023-14-10.mp4.done", + "18-10-2023-14-15.avi.done", + "18-10-2023-14-15.gpg", + "18-10-2023-14-15.mp4.done", + "18-10-2023-14-20.avi.done", + "18-10-2023-14-20.gpg", + "18-10-2023-14-20.mp4.done", + "18-10-2023-14-25.avi.done", + "18-10-2023-14-25.gpg", + "18-10-2023-14-25.mp4.done", + "18-10-2023-14-30.avi.done", + "18-10-2023-14-30.gpg", + "18-10-2023-14-30.mp4.done", + "18-10-2023-14-35.avi.done", + "18-10-2023-14-35.gpg", + "18-10-2023-14-35.mp4.done", + "18-10-2023-14-40.avi.done", + "18-10-2023-14-40.gpg", + "18-10-2023-14-40.mp4.done", + "18-10-2023-14-41.ndx", + "18-10-2023-14-41.pcm", + "18-10-2023-14-42.ndx", + "18-10-2023-14-42.pcm", + "18-10-2023-14-43.ndx", + "18-10-2023-14-43.pcm", + "18-10-2023-14-44.ndx", + "18-10-2023-14-44.pcm", + "18-10-2023-14-45.ndx", + "18-10-2023-14-45.pcm", + "18-10-2023-14-46.ndx", + "18-10-2023-14-46.pcm", + "18-10-2023-14-47.ndx", + "18-10-2023-14-47.pcm", + "18-10-2023-14-48.ndx", + "18-10-2023-14-48.pcm", + "18-10-2023-14-49.ndx", + "18-10-2023-14-49.pcm", + "18-10-2023-14-50.ndx", + "18-10-2023-14-50.pcm", + "18-10-2023-14-51.ndx", + "18-10-2023-14-51.pcm", + "18-10-2023-14-52.ndx", + "18-10-2023-14-52.pcm", + "18-10-2023-14-53.ndx", + "18-10-2023-14-53.pcm", + "18-10-2023-14-54.ndx", + "18-10-2023-14-54.pcm", + "18-10-2023-14-55.ndx", + "18-10-2023-14-55.pcm", + "18-10-2023-14-56.ndx", + "18-10-2023-14-56.pcm", + "18-10-2023-14-57.ndx", + "18-10-2023-14-57.pcm", + "18-10-2023-14-58.ndx", + "18-10-2023-14-58.pcm", + "18-10-2023-14-59.ndx", + "18-10-2023-14-59.pcm", + "18-10-2023-15-00.ndx", + "18-10-2023-15-00.pcm", + "18-10-2023-15-01.ndx", + "18-10-2023-15-01.pcm", + "18-10-2023-15-02.ndx", + "18-10-2023-15-02.pcm", + "18-10-2023-15-03.ndx", + "18-10-2023-15-03.pcm", + "18-10-2023-15-04.ndx", + "18-10-2023-15-04.pcm", + "18-10-2023-15-05.ndx", + "18-10-2023-15-05.pcm", + "18-10-2023-15-06.ndx", + "18-10-2023-15-06.pcm", + "18-10-2023-15-07.ndx", + "18-10-2023-15-07.pcm", + "18-10-2023-15-08.ndx", + "18-10-2023-15-08.pcm", + "18-10-2023-15-09.ndx", + "18-10-2023-15-09.pcm", + "18-10-2023-15-10.ndx", + "18-10-2023-15-10.pcm", + "18-10-2023-15-11.ndx", + "18-10-2023-15-11.pcm", + "18-10-2023-15-12.ndx", + "18-10-2023-15-12.pcm", + "18-10-2023-15-13.ndx", + "18-10-2023-15-13.pcm", + "18-10-2023-15-14.ndx", + "18-10-2023-15-14.pcm", + "18-10-2023-15-15.ndx", + "18-10-2023-15-15.pcm", + "18-10-2023-15-16.ndx", + "18-10-2023-15-16.pcm", + "18-10-2023-15-17.ndx", + "18-10-2023-15-17.pcm", + "18-10-2023-15-18.ndx", + "18-10-2023-15-18.pcm", + "18-10-2023-15-19.ndx", 
+ "18-10-2023-15-19.pcm", + "18-10-2023-15-20.ndx", + "18-10-2023-15-20.pcm", + "18-10-2023-15-21.ndx", + "18-10-2023-15-21.pcm", + "18-10-2023-15-22.ndx", + "18-10-2023-15-22.pcm", + "18-10-2023-15-23.ndx", + "18-10-2023-15-23.pcm", + "18-10-2023-15-24.ndx", + "18-10-2023-15-24.pcm", + "18-10-2023-15-25.ndx", + "18-10-2023-15-25.pcm", + "18-10-2023-15-26.ndx", + "18-10-2023-15-26.pcm", + "18-10-2023-15-27.ndx", + "18-10-2023-15-27.pcm", + "18-10-2023-15-28.ndx", + "18-10-2023-15-28.pcm", + "18-10-2023-15-29.ndx", + "18-10-2023-15-29.pcm", + "18-10-2023-15-30.ndx", + "18-10-2023-15-30.pcm", + "18-10-2023-15-31.ndx", + "18-10-2023-15-31.pcm", + "18-10-2023-15-32.ndx", + "18-10-2023-15-32.pcm", + "18-10-2023-15-33.ndx", + "18-10-2023-15-33.pcm", + "18-10-2023-15-34.ndx", + "18-10-2023-15-34.pcm", + "18-10-2023-15-35.ndx", + "18-10-2023-15-35.pcm", + "18-10-2023-15-36.ndx", + "18-10-2023-15-36.pcm", + "18-10-2023-15-37.ndx", + "18-10-2023-15-37.pcm", + "18-10-2023-15-38.ndx", + "18-10-2023-15-38.pcm", + "18-10-2023-15-39.ndx", + "18-10-2023-15-39.pcm", + "18-10-2023-15-40.ndx", + "18-10-2023-15-40.pcm", + "18-10-2023-15-41.ndx", + "18-10-2023-15-41.pcm", + "18-10-2023-15-42.ndx", + "18-10-2023-15-42.pcm", + "18-10-2023-15-43.ndx", + "18-10-2023-15-43.pcm", + "18-10-2023-15-44.ndx", + "18-10-2023-15-44.pcm", + "18-10-2023-15-45.ndx", + "18-10-2023-15-45.pcm", + "18-10-2023-15-46.ndx", + "18-10-2023-15-46.pcm", + "18-10-2023-15-47.ndx", + "18-10-2023-15-47.pcm", + "18-10-2023-15-48.ndx", + "18-10-2023-15-48.pcm", + "18-10-2023-15-49.ndx", + "18-10-2023-15-49.pcm", + "18-10-2023-15-50.ndx", + "18-10-2023-15-50.pcm", + "18-10-2023-15-51.ndx", + "18-10-2023-15-51.pcm", + "18-10-2023-15-52.ndx", + "18-10-2023-15-52.pcm", + "18-10-2023-15-53.ndx", + "18-10-2023-15-53.pcm", + "18-10-2023-15-54.ndx", + "18-10-2023-15-54.pcm", + "18-10-2023-15-55.ndx", + "18-10-2023-15-55.pcm", + "18-10-2023-15-56.ndx", + "18-10-2023-15-56.pcm", + "18-10-2023-15-57.ndx", + "18-10-2023-15-57.pcm", + "18-10-2023-15-58.ndx", + "18-10-2023-15-58.pcm", + "18-10-2023-15-59.ndx", + "18-10-2023-15-59.pcm", + "18-10-2023-16-00.ndx", + "18-10-2023-16-00.pcm", + "18-10-2023-16-01.ndx", + "18-10-2023-16-01.pcm", + "18-10-2023-16-02.ndx", + "18-10-2023-16-02.pcm", + "18-10-2023-16-03.ndx", + "18-10-2023-16-03.pcm", + "18-10-2023-16-04.ndx", + "18-10-2023-16-04.pcm", + "18-10-2023-16-05.ndx", + "18-10-2023-16-05.pcm", + "18-10-2023-16-06.ndx", + "18-10-2023-16-06.pcm", + "18-10-2023-16-07.ndx", + "18-10-2023-16-07.pcm", + "18-10-2023-16-08.ndx", + "18-10-2023-16-08.pcm", + "18-10-2023-16-09.ndx", + "18-10-2023-16-09.pcm", + "18-10-2023-16-10.ndx", + "18-10-2023-16-10.pcm", + "18-10-2023-16-11.ndx", + "18-10-2023-16-11.pcm", + "18-10-2023-16-12.ndx", + "18-10-2023-16-12.pcm", + "18-10-2023-16-13.ndx", + "18-10-2023-16-13.pcm", + "18-10-2023-16-14.ndx", + "18-10-2023-16-14.pcm", + "18-10-2023-16-15.ndx", + "18-10-2023-16-15.pcm", + "18-10-2023-16-16.ndx", + "18-10-2023-16-16.pcm", + "18-10-2023-16-17.ndx", + "18-10-2023-16-17.pcm", + "18-10-2023-16-18.ndx", + "18-10-2023-16-18.pcm", + "18-10-2023-16-19.ndx", + "18-10-2023-16-19.pcm", + "18-10-2023-16-20.ndx", + "18-10-2023-16-20.pcm", + "18-10-2023-16-21.ndx", + "18-10-2023-16-21.pcm", + "18-10-2023-16-22.ndx", + "18-10-2023-16-22.pcm", + "18-10-2023-16-23.ndx", + "18-10-2023-16-23.pcm", + "18-10-2023-16-24.ndx", + "18-10-2023-16-24.pcm", + "18-10-2023-16-25.ndx", + "18-10-2023-16-25.pcm", + "18-10-2023-16-26.ndx", + "18-10-2023-16-26.pcm", + "18-10-2023-16-27.ndx", + 
"18-10-2023-16-27.pcm", + "18-10-2023-16-28.ndx", + "18-10-2023-16-28.pcm", + "18-10-2023-16-29.ndx", + "18-10-2023-16-29.pcm", + "18-10-2023-16-30.ndx", + "18-10-2023-16-30.pcm", + "18-10-2023-16-31.ndx", + "18-10-2023-16-31.pcm", + "18-10-2023-16-32.ndx", + "18-10-2023-16-32.pcm", + "18-10-2023-16-33.ndx", + "18-10-2023-16-33.pcm", + "18-10-2023-16-34.ndx", + "18-10-2023-16-34.pcm", + "18-10-2023-16-35.ndx", + "18-10-2023-16-35.pcm", + "18-10-2023-16-36.ndx", + "18-10-2023-16-36.pcm", + "18-10-2023-16-37.ndx", + "18-10-2023-16-37.pcm", + "18-10-2023-16-38.ndx", + "18-10-2023-16-38.pcm", + "18-10-2023-16-40.ndx", + "18-10-2023-16-40.pcm", + "18-10-2023-16-41.ndx", + "18-10-2023-16-41.pcm", + "18-10-2023-16-42.ndx", + "18-10-2023-16-42.pcm", + "18-10-2023-16-43.ndx", + "18-10-2023-16-43.pcm", + "18-10-2023-16-44.ndx", + "18-10-2023-16-44.pcm", + "18-10-2023-16-45.ndx", + "18-10-2023-16-45.pcm", + "18-10-2023-16-46.ndx", + "18-10-2023-16-46.pcm", + "18-10-2023-16-47.ndx", + "18-10-2023-16-47.pcm", + "18-10-2023-16-48.ndx", + "18-10-2023-16-48.pcm", + "18-10-2023-16-49.ndx", + "18-10-2023-16-49.pcm", + "18-10-2023-16-50.ndx", + "18-10-2023-16-50.pcm", + "18-10-2023-16-51.ndx", + "18-10-2023-16-51.pcm", + "18-10-2023-16-52.ndx", + "18-10-2023-16-52.pcm", + "18-10-2023-16-53.ndx", + "18-10-2023-16-53.pcm", + "18-10-2023-16-54.ndx", + "18-10-2023-16-54.pcm", + "18-10-2023-16-55.ndx", + "18-10-2023-16-55.pcm", + "18-10-2023-16-56.ndx", + "18-10-2023-16-56.pcm", + "18-10-2023-16-57.ndx", + "18-10-2023-16-57.pcm", + "18-10-2023-16-58.ndx", + "18-10-2023-16-58.pcm", + "18-10-2023-16-59.ndx", + "18-10-2023-16-59.pcm", + "18-10-2023-17-00.ndx", + "18-10-2023-17-00.pcm", + "18-10-2023-17-01.ndx", + "18-10-2023-17-01.pcm", + "18-10-2023-17-02.ndx", + "18-10-2023-17-02.pcm", + "18-10-2023-17-03.ndx", + "18-10-2023-17-03.pcm", + "18-10-2023-17-04.ndx", + "18-10-2023-17-04.pcm", + "18-10-2023-17-05.ndx", + "18-10-2023-17-05.pcm", + "18-10-2023-17-06.ndx", + "18-10-2023-17-06.pcm", + "18-10-2023-17-07.ndx", + "18-10-2023-17-07.pcm", + "18-10-2023-17-08.ndx", + "18-10-2023-17-08.pcm", + "18-10-2023-17-09.ndx", + "18-10-2023-17-09.pcm", + "18-10-2023-17-10.ndx", + "18-10-2023-17-10.pcm", + "18-10-2023-17-11.ndx", + "18-10-2023-17-11.pcm", + "18-10-2023-17-12.ndx", + "18-10-2023-17-12.pcm", + "18-10-2023-17-13.ndx", + "18-10-2023-17-13.pcm", + "18-10-2023-17-14.ndx", + "18-10-2023-17-14.pcm", + "18-10-2023-17-15.ndx", + "18-10-2023-17-15.pcm", + "18-10-2023-17-16.ndx", + "18-10-2023-17-16.pcm", + "18-10-2023-17-17.ndx", + "18-10-2023-17-17.pcm", + "18-10-2023-17-18.ndx", + "18-10-2023-17-18.pcm", + "18-10-2023-17-19.ndx", + "18-10-2023-17-19.pcm", + "18-10-2023-17-20.ndx", + "18-10-2023-17-20.pcm", + "18-10-2023-17-21.ndx", + "18-10-2023-17-21.pcm", + "18-10-2023-17-22.ndx", + "18-10-2023-17-22.pcm", + "18-10-2023-17-23.ndx", + "18-10-2023-17-23.pcm", + "18-10-2023-17-24.ndx", + "18-10-2023-17-24.pcm", + "18-10-2023-17-25.ndx", + "18-10-2023-17-25.pcm", + "18-10-2023-17-26.ndx", + "18-10-2023-17-26.pcm", + "18-10-2023-17-27.ndx", + "18-10-2023-17-27.pcm", + "18-10-2023-17-28.ndx", + "18-10-2023-17-28.pcm", + "18-10-2023-17-29.ndx", + "18-10-2023-17-29.pcm", + "18-10-2023-17-30.ndx", + "18-10-2023-17-30.pcm", + "18-10-2023-17-31.ndx", + "18-10-2023-17-31.pcm", + "18-10-2023-17-32.ndx", + "18-10-2023-17-32.pcm", + "18-10-2023-17-33.ndx", + "18-10-2023-17-33.pcm", + "18-10-2023-17-34.ndx", + "18-10-2023-17-34.pcm", + "18-10-2023-17-35.ndx", + "18-10-2023-17-35.pcm", + "18-10-2023-17-36.ndx", + 
"18-10-2023-17-36.pcm", + "18-10-2023-17-37.ndx", + "18-10-2023-17-37.pcm", + "18-10-2023-17-38.ndx", + "18-10-2023-17-38.pcm", + "18-10-2023-17-39.ndx", + "18-10-2023-17-39.pcm", + "18-10-2023-17-40.ndx", + "18-10-2023-17-40.pcm", + "18-10-2023-17-41.ndx", + "18-10-2023-17-41.pcm", + "18-10-2023-17-42.ndx", + "18-10-2023-17-42.pcm", + "18-10-2023-17-43.ndx", + "18-10-2023-17-43.pcm", + "18-10-2023-17-44.ndx", + "18-10-2023-17-44.pcm", + "18-10-2023-17-45.ndx", + "18-10-2023-17-45.pcm", + "18-10-2023-17-46.ndx", + "18-10-2023-17-46.pcm", + "18-10-2023-17-47.ndx", + "18-10-2023-17-47.pcm", + "18-10-2023-17-48.ndx", + "18-10-2023-17-48.pcm", + "18-10-2023-17-49.ndx", + "18-10-2023-17-49.pcm", + "18-10-2023-17-50.ndx", + "18-10-2023-17-50.pcm", + "18-10-2023-17-51.ndx", + "18-10-2023-17-51.pcm", + "18-10-2023-17-52.ndx", + "18-10-2023-17-52.pcm", + "18-10-2023-17-53.ndx", + "18-10-2023-17-53.pcm", + "18-10-2023-17-54.ndx", + "18-10-2023-17-54.pcm", + "18-10-2023-17-55.ndx", + "18-10-2023-17-55.pcm", + "18-10-2023-17-56.ndx", + "18-10-2023-17-56.pcm", + "18-10-2023-17-57.ndx", + "18-10-2023-17-57.pcm", + "18-10-2023-17-58.mjp", + "18-10-2023-17-58.ndx", + "18-10-2023-17-58.pcm", + "18-10-2023-20-01.ndx", + "18-10-2023-20-01.pcm", + "18-10-2023-20-02.ndx", + "18-10-2023-20-02.pcm", + "18-10-2023-20-03.ndx", + "18-10-2023-20-03.pcm", + "18-10-2023-20-04.ndx", + "18-10-2023-20-04.pcm", + "18-10-2023-20-05.ndx", + "18-10-2023-20-05.pcm", + "18-10-2023-20-06.ndx", + "18-10-2023-20-06.pcm", + "18-10-2023-20-07.ndx", + "18-10-2023-20-07.pcm", + "18-10-2023-20-08.ndx", + "18-10-2023-20-08.pcm", + "18-10-2023-20-09.ndx", + "18-10-2023-20-09.pcm", + "18-10-2023-20-10.ndx", + "18-10-2023-20-10.pcm", + "18-10-2023-20-11.ndx", + "18-10-2023-20-11.pcm", + "18-10-2023-20-12.ndx", + "18-10-2023-20-12.pcm", + "18-10-2023-20-13.ndx", + "18-10-2023-20-13.pcm", + "18-10-2023-20-14.ndx", + "18-10-2023-20-14.pcm", + "18-10-2023-20-15.ndx", + "18-10-2023-20-15.pcm", + "18-10-2023-20-16.ndx", + "18-10-2023-20-16.pcm", + "18-10-2023-20-17.ndx", + "18-10-2023-20-17.pcm", + "18-10-2023-20-18.ndx", + "18-10-2023-20-18.pcm", + "18-10-2023-20-19.ndx", + "18-10-2023-20-19.pcm", + "18-10-2023-20-20.ndx", + "18-10-2023-20-20.pcm", + "18-10-2023-20-21.ndx", + "18-10-2023-20-21.pcm", + "18-10-2023-20-22.ndx", + "18-10-2023-20-22.pcm", + "18-10-2023-20-23.ndx", + "18-10-2023-20-23.pcm", + "18-10-2023-20-24.ndx", + "18-10-2023-20-24.pcm", + "18-10-2023-20-25.ndx", + "18-10-2023-20-25.pcm", + "18-10-2023-20-26.ndx", + "18-10-2023-20-26.pcm", + "18-10-2023-20-27.ndx", + "18-10-2023-20-27.pcm", + "18-10-2023-20-28.ndx", + "18-10-2023-20-28.pcm", + "18-10-2023-20-29.ndx", + "18-10-2023-20-29.pcm", + "18-10-2023-20-30.ndx", + "18-10-2023-20-30.pcm", + "18-10-2023-20-31.ndx", + "18-10-2023-20-31.pcm", + "18-10-2023-20-32.ndx", + "18-10-2023-20-32.pcm", + "18-10-2023-20-33.ndx", + "18-10-2023-20-33.pcm", + "18-10-2023-20-34.ndx", + "18-10-2023-20-34.pcm", + "18-10-2023-20-35.ndx", + "18-10-2023-20-35.pcm", + "18-10-2023-20-36.ndx", + "18-10-2023-20-36.pcm", + "18-10-2023-20-37.ndx", + "18-10-2023-20-37.pcm", + "18-10-2023-20-38.ndx", + "18-10-2023-20-38.pcm", + "18-10-2023-20-39.ndx", + "18-10-2023-20-39.pcm", + "18-10-2023-20-40.ndx", + "18-10-2023-20-40.pcm", + "18-10-2023-20-41.ndx", + "18-10-2023-20-41.pcm", + "18-10-2023-20-42.ndx", + "18-10-2023-20-42.pcm", + "18-10-2023-20-43.ndx", + "18-10-2023-20-43.pcm", + "18-10-2023-20-44.ndx", + "18-10-2023-20-44.pcm", + "18-10-2023-20-45.ndx", + "18-10-2023-20-45.pcm", + 
"18-10-2023-20-46.ndx", + "18-10-2023-20-46.pcm", + "18-10-2023-20-47.ndx", + "18-10-2023-20-47.pcm", + "18-10-2023-20-48.ndx", + "18-10-2023-20-48.pcm", + "18-10-2023-20-49.ndx", + "18-10-2023-20-49.pcm", + "18-10-2023-20-50.ndx", + "18-10-2023-20-50.pcm", + "18-10-2023-20-51.ndx", + "18-10-2023-20-51.pcm", + "18-10-2023-20-52.ndx", + "18-10-2023-20-52.pcm", + "18-10-2023-20-53.ndx", + "18-10-2023-20-53.pcm", + "18-10-2023-20-54.ndx", + "18-10-2023-20-54.pcm", + "18-10-2023-20-55.ndx", + "18-10-2023-20-55.pcm", + "18-10-2023-20-56.ndx", + "18-10-2023-20-56.pcm", + "18-10-2023-20-57.ndx", + "18-10-2023-20-57.pcm", + "18-10-2023-20-58.ndx", + "18-10-2023-20-58.pcm", + "18-10-2023-20-59.ndx", + "18-10-2023-20-59.pcm", + "18-10-2023-21-00.ndx", + "18-10-2023-21-00.pcm", + "18-10-2023-21-01.ndx", + "18-10-2023-21-01.pcm", + "18-10-2023-21-02.ndx", + "18-10-2023-21-02.pcm", + "18-10-2023-21-03.ndx", + "18-10-2023-21-03.pcm", + "18-10-2023-21-04.ndx", + "18-10-2023-21-04.pcm", + "18-10-2023-21-05.ndx", + "18-10-2023-21-05.pcm", + "18-10-2023-21-06.ndx", + "18-10-2023-21-06.pcm", + "18-10-2023-21-07.ndx", + "18-10-2023-21-07.pcm", + "18-10-2023-21-08.ndx", + "18-10-2023-21-08.pcm", + "18-10-2023-21-09.ndx", + "18-10-2023-21-09.pcm", + "18-10-2023-21-10.ndx", + "18-10-2023-21-10.pcm", + "18-10-2023-21-11.ndx", + "18-10-2023-21-11.pcm", + "18-10-2023-21-12.ndx", + "18-10-2023-21-12.pcm", + "18-10-2023-21-13.ndx", + "18-10-2023-21-13.pcm", + "18-10-2023-21-14.ndx", + "18-10-2023-21-14.pcm", + "18-10-2023-21-15.ndx", + "18-10-2023-21-15.pcm", + "18-10-2023-21-16.ndx", + "18-10-2023-21-16.pcm", + "18-10-2023-21-17.ndx", + "18-10-2023-21-17.pcm", + "18-10-2023-21-18.ndx", + "18-10-2023-21-18.pcm", + "18-10-2023-21-19.ndx", + "18-10-2023-21-19.pcm", + "18-10-2023-21-20.ndx", + "18-10-2023-21-20.pcm", + "18-10-2023-21-21.ndx", + "18-10-2023-21-21.pcm", + "18-10-2023-21-22.ndx", + "18-10-2023-21-22.pcm", + "18-10-2023-21-23.ndx", + "18-10-2023-21-23.pcm", + "18-10-2023-21-24.ndx", + "18-10-2023-21-24.pcm", + "18-10-2023-21-25.ndx", + "18-10-2023-21-25.pcm", + "18-10-2023-21-26.ndx", + "18-10-2023-21-26.pcm", + "18-10-2023-21-27.ndx", + "18-10-2023-21-27.pcm", + "18-10-2023-21-28.ndx", + "18-10-2023-21-28.pcm", + "18-10-2023-21-29.ndx", + "18-10-2023-21-29.pcm", + "18-10-2023-21-30.ndx", + "18-10-2023-21-30.pcm", + "18-10-2023-21-31.ndx", + "18-10-2023-21-31.pcm", + "18-10-2023-21-32.ndx", + "18-10-2023-21-32.pcm", + "18-10-2023-21-33.ndx", + "18-10-2023-21-33.pcm", + "18-10-2023-21-34.ndx", + "18-10-2023-21-34.pcm", + "18-10-2023-21-35.ndx", + "18-10-2023-21-35.pcm", + "18-10-2023-21-36.ndx", + "18-10-2023-21-36.pcm", + "18-10-2023-21-37.ndx", + "18-10-2023-21-37.pcm", + "18-10-2023-21-38.ndx", + "18-10-2023-21-38.pcm", + "18-10-2023-21-39.ndx", + "18-10-2023-21-39.pcm", + "18-10-2023-21-40.ndx", + "18-10-2023-21-40.pcm", + "18-10-2023-21-41.ndx", + "18-10-2023-21-41.pcm", + "18-10-2023-21-42.ndx", + "18-10-2023-21-42.pcm", + "18-10-2023-21-43.ndx", + "18-10-2023-21-43.pcm", + "18-10-2023-21-44.mjp", + "18-10-2023-21-44.ndx", + "18-10-2023-21-44.pcm", ] -from datetime import datetime, timezone -for s in a: - i = s.split('.')[0] - dt = datetime.strptime(i, '%d-%m-%Y-%H-%M') - # print(dt) -a.sort(key=lambda x: datetime.strptime(x.split('.')[0], '%d-%m-%Y-%H-%M')) +for s in file_paths: + i = s.split(".")[0] + dt = datetime.strptime(i, "%d-%m-%Y-%H-%M") -for i in a: - print(i) +file_paths.sort(key=lambda x: datetime.strptime(x.split(".")[0], "%d-%m-%Y-%H-%M")) +for i in file_paths: + print(i) diff --git 
a/uv.lock b/uv.lock new file mode 100644 index 0000000..aca0bd3 --- /dev/null +++ b/uv.lock @@ -0,0 +1,1325 @@ +version = 1 +requires-python = ">=3.12" + +[[package]] +name = "alembic" +version = "1.13.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mako" }, + { name = "sqlalchemy" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/94/a2/840c3b84382dce8624bc2f0ee67567fc74c32478d0c5a5aea981518c91c3/alembic-1.13.3.tar.gz", hash = "sha256:203503117415561e203aa14541740643a611f641517f0209fcae63e9fa09f1a2", size = 1921223 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/12/58f4f11385fddafef5d6f7bfaaf2f42899c8da6b4f95c04b7c3b744851a8/alembic-1.13.3-py3-none-any.whl", hash = "sha256:908e905976d15235fae59c9ac42c4c5b75cfcefe3d27c0fbf7ae15a37715d80e", size = 233217 }, +] + +[[package]] +name = "appnope" +version = "0.1.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/35/5d/752690df9ef5b76e169e68d6a129fa6d08a7100ca7f754c89495db3c6019/appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee", size = 4170 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/81/29/5ecc3a15d5a33e31b26c11426c45c501e439cb865d0bff96315d86443b78/appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c", size = 4321 }, +] + +[[package]] +name = "asttokens" +version = "2.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/45/1d/f03bcb60c4a3212e15f99a56085d93093a497718adf828d050b9d675da81/asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0", size = 62284 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/45/86/4736ac618d82a20d87d2f92ae19441ebc7ac9e7a581d7e58bbe79233b24a/asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24", size = 27764 }, +] + +[[package]] +name = "blinker" +version = "1.8.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1e/57/a6a1721eff09598fb01f3c7cda070c1b6a0f12d63c83236edf79a440abcc/blinker-1.8.2.tar.gz", hash = "sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83", size = 23161 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bb/2a/10164ed1f31196a2f7f3799368a821765c62851ead0e630ab52b8e14b4d0/blinker-1.8.2-py3-none-any.whl", hash = "sha256:1779309f71bf239144b9399d06ae925637cf6634cf6bd131104184531bf67c01", size = 9456 }, +] + +[[package]] +name = "boto3" +version = "1.35.46" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore" }, + { name = "jmespath" }, + { name = "s3transfer" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c6/ca/37365724ca403689722cb4eb2771f41102d9aa7a191ddaf530ecb8412ff4/boto3-1.35.46.tar.gz", hash = "sha256:c9bab807b372d5b076d6aeb1d6513131fa0b74e32d8895128f8568b6521296ea", size = 110992 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/36/e4/8e8b3c706e6af031142d9a80379a13688c73d1953f458bc34fe185d34649/boto3-1.35.46-py3-none-any.whl", hash = "sha256:a839ce09a844d92e0039f95851e88da9df80c89ebb4c7818b3e78247fd97a8a7", size = 139158 }, +] + +[[package]] +name = "botocore" +version = "1.35.46" +source = { registry = 
"https://pypi.org/simple" } +dependencies = [ + { name = "jmespath" }, + { name = "python-dateutil" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/6a/79967e8ad76bb2095fd1033492e4c42bc7e04b89e88dfa9a811c936757aa/botocore-1.35.46.tar.gz", hash = "sha256:8c0ff5fdd611a28f5752189d171c69690dbc484fa06d74376890bb0543ec3dc1", size = 12846505 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/30/03/6935c80460ec7b9ede45cb0cda79610fb6834f26d9cfc74939a46d677b22/botocore-1.35.46-py3-none-any.whl", hash = "sha256:8bbc9a55cae65a8db7f2e33ff087f4dbfc13fce868e8e3c5273ce9af367a555a", size = 12633414 }, +] + +[[package]] +name = "certifi" +version = "2024.8.30" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/ee/9b19140fe824b367c04c5e1b369942dd754c4c5462d5674002f75c4dedc1/certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9", size = 168507 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/90/3c9ff0512038035f59d279fddeb79f5f1eccd8859f06d6163c58798b9487/certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8", size = 167321 }, +] + +[[package]] +name = "cffi" +version = "1.17.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycparser" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/84/e94227139ee5fb4d600a7a4927f322e1d4aea6fdc50bd3fca8493caba23f/cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4", size = 183178 }, + { url = "https://files.pythonhosted.org/packages/da/ee/fb72c2b48656111c4ef27f0f91da355e130a923473bf5ee75c5643d00cca/cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c", size = 178840 }, + { url = "https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803 }, + { url = "https://files.pythonhosted.org/packages/1a/df/f8d151540d8c200eb1c6fba8cd0dfd40904f1b0682ea705c36e6c2e97ab3/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", size = 478850 }, + { url = "https://files.pythonhosted.org/packages/28/c0/b31116332a547fd2677ae5b78a2ef662dfc8023d67f41b2a83f7c2aa78b1/cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", size = 485729 }, + { url = "https://files.pythonhosted.org/packages/91/2b/9a1ddfa5c7f13cab007a2c9cc295b70fbbda7cb10a286aa6810338e60ea1/cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99", size = 471256 }, + { url = 
"https://files.pythonhosted.org/packages/b2/d5/da47df7004cb17e4955df6a43d14b3b4ae77737dff8bf7f8f333196717bf/cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93", size = 479424 }, + { url = "https://files.pythonhosted.org/packages/0b/ac/2a28bcf513e93a219c8a4e8e125534f4f6db03e3179ba1c45e949b76212c/cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3", size = 484568 }, + { url = "https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736 }, + { url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448 }, + { url = "https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976 }, + { url = "https://files.pythonhosted.org/packages/8d/f8/dd6c246b148639254dad4d6803eb6a54e8c85c6e11ec9df2cffa87571dbe/cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e", size = 182989 }, + { url = "https://files.pythonhosted.org/packages/8b/f1/672d303ddf17c24fc83afd712316fda78dc6fce1cd53011b839483e1ecc8/cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2", size = 178802 }, + { url = "https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792 }, + { url = "https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893 }, + { url = "https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810 }, + { url = "https://files.pythonhosted.org/packages/c7/8a/1d0e4a9c26e54746dc08c2c6c037889124d4f59dffd853a659fa545f1b40/cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4", size = 471200 }, + { url = "https://files.pythonhosted.org/packages/26/9f/1aab65a6c0db35f43c4d1b4f580e8df53914310afc10ae0397d29d697af4/cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd", size = 479447 }, + { url = "https://files.pythonhosted.org/packages/5f/e4/fb8b3dd8dc0e98edf1135ff067ae070bb32ef9d509d6cb0f538cd6f7483f/cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = 
"sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed", size = 484358 }, + { url = "https://files.pythonhosted.org/packages/f1/47/d7145bf2dc04684935d57d67dff9d6d795b2ba2796806bb109864be3a151/cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9", size = 488469 }, + { url = "https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475 }, + { url = "https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009 }, +] + +[[package]] +name = "cfgv" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/11/74/539e56497d9bd1d484fd863dd69cbbfa653cd2aa27abfe35653494d85e94/cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560", size = 7114 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c5/55/51844dd50c4fc7a33b653bfaba4c2456f06955289ca770a5dbd5fd267374/cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9", size = 7249 }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/4f/e1808dc01273379acc506d18f1504eb2d299bd4131743b9fc54d7be4df1e/charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e", size = 106620 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d3/0b/4b7a70987abf9b8196845806198975b6aab4ce016632f817ad758a5aa056/charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6", size = 194445 }, + { url = "https://files.pythonhosted.org/packages/50/89/354cc56cf4dd2449715bc9a0f54f3aef3dc700d2d62d1fa5bbea53b13426/charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf", size = 125275 }, + { url = "https://files.pythonhosted.org/packages/fa/44/b730e2a2580110ced837ac083d8ad222343c96bb6b66e9e4e706e4d0b6df/charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db", size = 119020 }, + { url = "https://files.pythonhosted.org/packages/9d/e4/9263b8240ed9472a2ae7ddc3e516e71ef46617fe40eaa51221ccd4ad9a27/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1", size = 139128 }, + { url = "https://files.pythonhosted.org/packages/6b/e3/9f73e779315a54334240353eaea75854a9a690f3f580e4bd85d977cb2204/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03", size = 149277 }, + { url = "https://files.pythonhosted.org/packages/1a/cf/f1f50c2f295312edb8a548d3fa56a5c923b146cd3f24114d5adb7e7be558/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284", size = 142174 }, + { url = "https://files.pythonhosted.org/packages/16/92/92a76dc2ff3a12e69ba94e7e05168d37d0345fa08c87e1fe24d0c2a42223/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15", size = 143838 }, + { url = "https://files.pythonhosted.org/packages/a4/01/2117ff2b1dfc61695daf2babe4a874bca328489afa85952440b59819e9d7/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8", size = 146149 }, + { url = "https://files.pythonhosted.org/packages/f6/9b/93a332b8d25b347f6839ca0a61b7f0287b0930216994e8bf67a75d050255/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2", size = 140043 }, + { url = "https://files.pythonhosted.org/packages/ab/f6/7ac4a01adcdecbc7a7587767c776d53d369b8b971382b91211489535acf0/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719", size = 148229 }, + { url = "https://files.pythonhosted.org/packages/9d/be/5708ad18161dee7dc6a0f7e6cf3a88ea6279c3e8484844c0590e50e803ef/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631", size = 151556 }, + { url = "https://files.pythonhosted.org/packages/5a/bb/3d8bc22bacb9eb89785e83e6723f9888265f3a0de3b9ce724d66bd49884e/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b", size = 149772 }, + { url = "https://files.pythonhosted.org/packages/f7/fa/d3fc622de05a86f30beea5fc4e9ac46aead4731e73fd9055496732bcc0a4/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565", size = 144800 }, + { url = "https://files.pythonhosted.org/packages/9a/65/bdb9bc496d7d190d725e96816e20e2ae3a6fa42a5cac99c3c3d6ff884118/charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7", size = 94836 }, + { url = "https://files.pythonhosted.org/packages/3e/67/7b72b69d25b89c0b3cea583ee372c43aa24df15f0e0f8d3982c57804984b/charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9", size = 102187 }, + { url = "https://files.pythonhosted.org/packages/f3/89/68a4c86f1a0002810a27f12e9a7b22feb198c59b2f05231349fbce5c06f4/charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114", size = 194617 }, + { url = "https://files.pythonhosted.org/packages/4f/cd/8947fe425e2ab0aa57aceb7807af13a0e4162cd21eee42ef5b053447edf5/charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed", size = 125310 }, + { url = "https://files.pythonhosted.org/packages/5b/f0/b5263e8668a4ee9becc2b451ed909e9c27058337fda5b8c49588183c267a/charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250", size = 119126 }, + { url = 
"https://files.pythonhosted.org/packages/ff/6e/e445afe4f7fda27a533f3234b627b3e515a1b9429bc981c9a5e2aa5d97b6/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920", size = 139342 }, + { url = "https://files.pythonhosted.org/packages/a1/b2/4af9993b532d93270538ad4926c8e37dc29f2111c36f9c629840c57cd9b3/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64", size = 149383 }, + { url = "https://files.pythonhosted.org/packages/fb/6f/4e78c3b97686b871db9be6f31d64e9264e889f8c9d7ab33c771f847f79b7/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23", size = 142214 }, + { url = "https://files.pythonhosted.org/packages/2b/c9/1c8fe3ce05d30c87eff498592c89015b19fade13df42850aafae09e94f35/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc", size = 144104 }, + { url = "https://files.pythonhosted.org/packages/ee/68/efad5dcb306bf37db7db338338e7bb8ebd8cf38ee5bbd5ceaaaa46f257e6/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d", size = 146255 }, + { url = "https://files.pythonhosted.org/packages/0c/75/1ed813c3ffd200b1f3e71121c95da3f79e6d2a96120163443b3ad1057505/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88", size = 140251 }, + { url = "https://files.pythonhosted.org/packages/7d/0d/6f32255c1979653b448d3c709583557a4d24ff97ac4f3a5be156b2e6a210/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90", size = 148474 }, + { url = "https://files.pythonhosted.org/packages/ac/a0/c1b5298de4670d997101fef95b97ac440e8c8d8b4efa5a4d1ef44af82f0d/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b", size = 151849 }, + { url = "https://files.pythonhosted.org/packages/04/4f/b3961ba0c664989ba63e30595a3ed0875d6790ff26671e2aae2fdc28a399/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d", size = 149781 }, + { url = "https://files.pythonhosted.org/packages/d8/90/6af4cd042066a4adad58ae25648a12c09c879efa4849c705719ba1b23d8c/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482", size = 144970 }, + { url = "https://files.pythonhosted.org/packages/cc/67/e5e7e0cbfefc4ca79025238b43cdf8a2037854195b37d6417f3d0895c4c2/charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67", size = 94973 }, + { url = "https://files.pythonhosted.org/packages/65/97/fc9bbc54ee13d33dc54a7fcf17b26368b18505500fc01e228c27b5222d80/charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b", size = 102308 }, + { url = 
"https://files.pythonhosted.org/packages/bf/9b/08c0432272d77b04803958a4598a51e2a4b51c06640af8b8f0f908c18bf2/charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079", size = 49446 }, +] + +[[package]] +name = "click" +version = "8.1.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "platform_system == 'Windows'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/96/d3/f04c7bfcf5c1862a2a5b845c6b2b360488cf47af55dfa79c98f6a6bf98b5/click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de", size = 336121 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/2e/d53fa4befbf2cfa713304affc7ca780ce4fc1fd8710527771b58311a3229/click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28", size = 97941 }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, +] + +[[package]] +name = "comm" +version = "0.2.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "traitlets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e9/a8/fb783cb0abe2b5fded9f55e5703015cdf1c9c85b3669087c538dd15a6a86/comm-0.2.2.tar.gz", hash = "sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e", size = 6210 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e6/75/49e5bfe642f71f272236b5b2d2691cf915a7283cc0ceda56357b61daa538/comm-0.2.2-py3-none-any.whl", hash = "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3", size = 7180 }, +] + +[[package]] +name = "coverage" +version = "7.6.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/52/12/3669b6382792783e92046730ad3327f53b2726f0603f4c311c4da4824222/coverage-7.6.4.tar.gz", hash = "sha256:29fc0f17b1d3fea332f8001d4558f8214af7f1d87a345f3a133c901d60347c73", size = 798716 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7d/e7/9291de916d084f41adddfd4b82246e68d61d6a75747f075f7e64628998d2/coverage-7.6.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:12394842a3a8affa3ba62b0d4ab7e9e210c5e366fbac3e8b2a68636fb19892c2", size = 207013 }, + { url = "https://files.pythonhosted.org/packages/27/03/932c2c5717a7fa80cd43c6a07d3177076d97b79f12f40f882f9916db0063/coverage-7.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2b6b4c83d8e8ea79f27ab80778c19bc037759aea298da4b56621f4474ffeb117", size = 207251 }, + { url = "https://files.pythonhosted.org/packages/d5/3f/0af47dcb9327f65a45455fbca846fe96eb57c153af46c4754a3ba678938a/coverage-7.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d5b8007f81b88696d06f7df0cb9af0d3b835fe0c8dbf489bad70b45f0e45613", size = 240268 }, + { url = 
"https://files.pythonhosted.org/packages/8a/3c/37a9d81bbd4b23bc7d46ca820e16174c613579c66342faa390a271d2e18b/coverage-7.6.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b57b768feb866f44eeed9f46975f3d6406380275c5ddfe22f531a2bf187eda27", size = 237298 }, + { url = "https://files.pythonhosted.org/packages/c0/70/6b0627e5bd68204ee580126ed3513140b2298995c1233bd67404b4e44d0e/coverage-7.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5915fcdec0e54ee229926868e9b08586376cae1f5faa9bbaf8faf3561b393d52", size = 239367 }, + { url = "https://files.pythonhosted.org/packages/3c/eb/634d7dfab24ac3b790bebaf9da0f4a5352cbc125ce6a9d5c6cf4c6cae3c7/coverage-7.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0b58c672d14f16ed92a48db984612f5ce3836ae7d72cdd161001cc54512571f2", size = 238853 }, + { url = "https://files.pythonhosted.org/packages/d9/0d/8e3ed00f1266ef7472a4e33458f42e39492e01a64281084fb3043553d3f1/coverage-7.6.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2fdef0d83a2d08d69b1f2210a93c416d54e14d9eb398f6ab2f0a209433db19e1", size = 237160 }, + { url = "https://files.pythonhosted.org/packages/ce/9c/4337f468ef0ab7a2e0887a9c9da0e58e2eada6fc6cbee637a4acd5dfd8a9/coverage-7.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8cf717ee42012be8c0cb205dbbf18ffa9003c4cbf4ad078db47b95e10748eec5", size = 238824 }, + { url = "https://files.pythonhosted.org/packages/5e/09/3e94912b8dd37251377bb02727a33a67ee96b84bbbe092f132b401ca5dd9/coverage-7.6.4-cp312-cp312-win32.whl", hash = "sha256:7bb92c539a624cf86296dd0c68cd5cc286c9eef2d0c3b8b192b604ce9de20a17", size = 209639 }, + { url = "https://files.pythonhosted.org/packages/01/69/d4f3a4101171f32bc5b3caec8ff94c2c60f700107a6aaef7244b2c166793/coverage-7.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:1032e178b76a4e2b5b32e19d0fd0abbce4b58e77a1ca695820d10e491fa32b08", size = 210428 }, + { url = "https://files.pythonhosted.org/packages/c2/4d/2dede4f7cb5a70fb0bb40a57627fddf1dbdc6b9c1db81f7c4dcdcb19e2f4/coverage-7.6.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:023bf8ee3ec6d35af9c1c6ccc1d18fa69afa1cb29eaac57cb064dbb262a517f9", size = 207039 }, + { url = "https://files.pythonhosted.org/packages/3f/f9/d86368ae8c79e28f1fb458ebc76ae9ff3e8bd8069adc24e8f2fed03c58b7/coverage-7.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b0ac3d42cb51c4b12df9c5f0dd2f13a4f24f01943627120ec4d293c9181219ba", size = 207298 }, + { url = "https://files.pythonhosted.org/packages/64/c5/b4cc3c3f64622c58fbfd4d8b9a7a8ce9d355f172f91fcabbba1f026852f6/coverage-7.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8fe4984b431f8621ca53d9380901f62bfb54ff759a1348cd140490ada7b693c", size = 239813 }, + { url = "https://files.pythonhosted.org/packages/8a/86/14c42e60b70a79b26099e4d289ccdfefbc68624d096f4481163085aa614c/coverage-7.6.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5fbd612f8a091954a0c8dd4c0b571b973487277d26476f8480bfa4b2a65b5d06", size = 236959 }, + { url = "https://files.pythonhosted.org/packages/7f/f8/4436a643631a2fbab4b44d54f515028f6099bfb1cd95b13cfbf701e7f2f2/coverage-7.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dacbc52de979f2823a819571f2e3a350a7e36b8cb7484cdb1e289bceaf35305f", size = 238950 }, + { url = 
"https://files.pythonhosted.org/packages/49/50/1571810ddd01f99a0a8be464a4ac8b147f322cd1e8e296a1528984fc560b/coverage-7.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:dab4d16dfef34b185032580e2f2f89253d302facba093d5fa9dbe04f569c4f4b", size = 238610 }, + { url = "https://files.pythonhosted.org/packages/f3/8c/6312d241fe7cbd1f0cade34a62fea6f333d1a261255d76b9a87074d8703c/coverage-7.6.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:862264b12ebb65ad8d863d51f17758b1684560b66ab02770d4f0baf2ff75da21", size = 236697 }, + { url = "https://files.pythonhosted.org/packages/ce/5f/fef33dfd05d87ee9030f614c857deb6df6556b8f6a1c51bbbb41e24ee5ac/coverage-7.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5beb1ee382ad32afe424097de57134175fea3faf847b9af002cc7895be4e2a5a", size = 238541 }, + { url = "https://files.pythonhosted.org/packages/a9/64/6a984b6e92e1ea1353b7ffa08e27f707a5e29b044622445859200f541e8c/coverage-7.6.4-cp313-cp313-win32.whl", hash = "sha256:bf20494da9653f6410213424f5f8ad0ed885e01f7e8e59811f572bdb20b8972e", size = 209707 }, + { url = "https://files.pythonhosted.org/packages/5c/60/ce5a9e942e9543783b3db5d942e0578b391c25cdd5e7f342d854ea83d6b7/coverage-7.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:182e6cd5c040cec0a1c8d415a87b67ed01193ed9ad458ee427741c7d8513d963", size = 210439 }, + { url = "https://files.pythonhosted.org/packages/78/53/6719677e92c308207e7f10561a1b16ab8b5c00e9328efc9af7cfd6fb703e/coverage-7.6.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a181e99301a0ae128493a24cfe5cfb5b488c4e0bf2f8702091473d033494d04f", size = 207784 }, + { url = "https://files.pythonhosted.org/packages/fa/dd/7054928930671fcb39ae6a83bb71d9ab5f0afb733172543ced4b09a115ca/coverage-7.6.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:df57bdbeffe694e7842092c5e2e0bc80fff7f43379d465f932ef36f027179806", size = 208058 }, + { url = "https://files.pythonhosted.org/packages/b5/7d/fd656ddc2b38301927b9eb3aae3fe827e7aa82e691923ed43721fd9423c9/coverage-7.6.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bcd1069e710600e8e4cf27f65c90c7843fa8edfb4520fb0ccb88894cad08b11", size = 250772 }, + { url = "https://files.pythonhosted.org/packages/90/d0/eb9a3cc2100b83064bb086f18aedde3afffd7de6ead28f69736c00b7f302/coverage-7.6.4-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99b41d18e6b2a48ba949418db48159d7a2e81c5cc290fc934b7d2380515bd0e3", size = 246490 }, + { url = "https://files.pythonhosted.org/packages/45/44/3f64f38f6faab8a0cfd2c6bc6eb4c6daead246b97cf5f8fc23bf3788f841/coverage-7.6.4-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6b1e54712ba3474f34b7ef7a41e65bd9037ad47916ccb1cc78769bae324c01a", size = 248848 }, + { url = "https://files.pythonhosted.org/packages/5d/11/4c465a5f98656821e499f4b4619929bd5a34639c466021740ecdca42aa30/coverage-7.6.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:53d202fd109416ce011578f321460795abfe10bb901b883cafd9b3ef851bacfc", size = 248340 }, + { url = "https://files.pythonhosted.org/packages/f1/96/ebecda2d016cce9da812f404f720ca5df83c6b29f65dc80d2000d0078741/coverage-7.6.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:c48167910a8f644671de9f2083a23630fbf7a1cb70ce939440cd3328e0919f70", size = 246229 }, + { url = "https://files.pythonhosted.org/packages/16/d9/3d820c00066ae55d69e6d0eae11d6149a5ca7546de469ba9d597f01bf2d7/coverage-7.6.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = 
"sha256:cc8ff50b50ce532de2fa7a7daae9dd12f0a699bfcd47f20945364e5c31799fef", size = 247510 }, + { url = "https://files.pythonhosted.org/packages/8f/c3/4fa1eb412bb288ff6bfcc163c11700ff06e02c5fad8513817186e460ed43/coverage-7.6.4-cp313-cp313t-win32.whl", hash = "sha256:b8d3a03d9bfcaf5b0141d07a88456bb6a4c3ce55c080712fec8418ef3610230e", size = 210353 }, + { url = "https://files.pythonhosted.org/packages/7e/77/03fc2979d1538884d921c2013075917fc927f41cd8526909852fe4494112/coverage-7.6.4-cp313-cp313t-win_amd64.whl", hash = "sha256:f3ddf056d3ebcf6ce47bdaf56142af51bb7fad09e4af310241e9db7a3a8022e1", size = 211502 }, +] + +[[package]] +name = "debugpy" +version = "1.8.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6d/00/5a8b5dc8f52617c5e41845e26290ebea1ba06377cc08155b6d245c27b386/debugpy-1.8.7.zip", hash = "sha256:18b8f731ed3e2e1df8e9cdaa23fb1fc9c24e570cd0081625308ec51c82efe42e", size = 4957835 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/59/4b/9f52ca1a799601a10cd2673503658bd8c8ecc4a7a43302ee29cf062474ec/debugpy-1.8.7-cp312-cp312-macosx_14_0_universal2.whl", hash = "sha256:4d27d842311353ede0ad572600c62e4bcd74f458ee01ab0dd3a1a4457e7e3706", size = 2529803 }, + { url = "https://files.pythonhosted.org/packages/80/79/8bba39190d2ea17840925d287f1c6c3a7c60b58f5090444e9ecf176c540f/debugpy-1.8.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:703c1fd62ae0356e194f3e7b7a92acd931f71fe81c4b3be2c17a7b8a4b546ec2", size = 4170911 }, + { url = "https://files.pythonhosted.org/packages/3b/19/5b3d312936db8eb281310fa27903459328ed722d845d594ba5feaeb2f0b3/debugpy-1.8.7-cp312-cp312-win32.whl", hash = "sha256:2f729228430ef191c1e4df72a75ac94e9bf77413ce5f3f900018712c9da0aaca", size = 5195476 }, + { url = "https://files.pythonhosted.org/packages/9f/49/ad20b29f8c921fd5124530d3d39b8f2077efd51b71339a2eff02bba693e9/debugpy-1.8.7-cp312-cp312-win_amd64.whl", hash = "sha256:45c30aaefb3e1975e8a0258f5bbd26cd40cde9bfe71e9e5a7ac82e79bad64e39", size = 5235031 }, + { url = "https://files.pythonhosted.org/packages/41/95/29b247518d0a6afdb5249f5d05743c9c5bfaf4bd13a85b81cb5e1dc65837/debugpy-1.8.7-cp313-cp313-macosx_14_0_universal2.whl", hash = "sha256:d050a1ec7e925f514f0f6594a1e522580317da31fbda1af71d1530d6ea1f2b40", size = 2517557 }, + { url = "https://files.pythonhosted.org/packages/4d/93/026e2000a0740e2f54b198f8dc317accf3a70b6524b2b15fa8e6eca74414/debugpy-1.8.7-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f4349a28e3228a42958f8ddaa6333d6f8282d5edaea456070e48609c5983b7", size = 4162703 }, + { url = "https://files.pythonhosted.org/packages/c3/92/a48e653b19a171434290ecdc5935b7a292a65488139c5271d6d0eceeb0f1/debugpy-1.8.7-cp313-cp313-win32.whl", hash = "sha256:11ad72eb9ddb436afb8337891a986302e14944f0f755fd94e90d0d71e9100bba", size = 5195220 }, + { url = "https://files.pythonhosted.org/packages/4e/b3/dc3c5527edafcd1a6d0f8c4ecc6c5c9bc431f77340cf4193328e98f0ac38/debugpy-1.8.7-cp313-cp313-win_amd64.whl", hash = "sha256:2efb84d6789352d7950b03d7f866e6d180284bc02c7e12cb37b489b7083d81aa", size = 5235333 }, + { url = "https://files.pythonhosted.org/packages/51/b1/a0866521c71a6ae3d3ca320e74835163a4671b1367ba360a55a0a51e5a91/debugpy-1.8.7-py2.py3-none-any.whl", hash = "sha256:57b00de1c8d2c84a61b90880f7e5b6deaf4c312ecbde3a0e8912f2a56c4ac9ae", size = 5210683 }, +] + +[[package]] +name = "decorator" +version = "5.1.1" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/66/0c/8d907af351aa16b42caae42f9d6aa37b900c67308052d10fdce809f8d952/decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330", size = 35016 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d5/50/83c593b07763e1161326b3b8c6686f0f4b0f24d5526546bee538c89837d6/decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186", size = 9073 }, +] + +[[package]] +name = "detect-secrets" +version = "1.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyyaml" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/69/67/382a863fff94eae5a0cf05542179169a1c49a4c8784a9480621e2066ca7d/detect_secrets-1.5.0.tar.gz", hash = "sha256:6bb46dcc553c10df51475641bb30fd69d25645cc12339e46c824c1e0c388898a", size = 97351 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4e/5e/4f5fe4b89fde1dc3ed0eb51bd4ce4c0bca406246673d370ea2ad0c58d747/detect_secrets-1.5.0-py3-none-any.whl", hash = "sha256:e24e7b9b5a35048c313e983f76c4bd09dad89f045ff059e354f9943bf45aa060", size = 120341 }, +] + +[[package]] +name = "distlib" +version = "0.3.9" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0d/dd/1bec4c5ddb504ca60fc29472f3d27e8d4da1257a854e1d96742f15c1d02d/distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403", size = 613923 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/91/a1/cf2472db20f7ce4a6be1253a81cfdf85ad9c7885ffbed7047fb72c24cf87/distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87", size = 468973 }, +] + +[[package]] +name = "docstr-coverage" +version = "2.3.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "pyyaml" }, + { name = "tqdm" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/75/86/d3f02e5baf426eac0f039849272204649137449f050d3fe0eb104c6f399c/docstr-coverage-2.3.2.tar.gz", hash = "sha256:e99a28c502ed21ae8a310cb9e14e8de2d7cff44d365b46fa6dca6de05bf156a0", size = 26750 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/db/f7/7923d915a80aa2e04939260f7a000e1a353b25c7ffcf7771cd777559c27a/docstr_coverage-2.3.2-py3-none-any.whl", hash = "sha256:37a885d6560ad87e289b23bf0e54527885bacbf6b17cf55278d514dd0fef8ff5", size = 25816 }, +] + +[[package]] +name = "executing" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8c/e3/7d45f492c2c4a0e8e0fad57d081a7c8a0286cdd86372b070cca1ec0caa1e/executing-2.1.0.tar.gz", hash = "sha256:8ea27ddd260da8150fa5a708269c4a10e76161e2496ec3e587da9e3c0fe4b9ab", size = 977485 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b5/fd/afcd0496feca3276f509df3dbd5dae726fcc756f1a08d9e25abe1733f962/executing-2.1.0-py2.py3-none-any.whl", hash = "sha256:8d63781349375b5ebccc3142f4b30350c0cd9c79f921cde38be2be4637e98eaf", size = 25805 }, +] + +[[package]] +name = "filelock" +version = "3.16.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9d/db/3ef5bb276dae18d6ec2124224403d1d67bccdbefc17af4cc8f553e341ab1/filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435", size = 
18037 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b9/f8/feced7779d755758a52d1f6635d990b8d98dc0a29fa568bbe0625f18fdf3/filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0", size = 16163 }, +] + +[[package]] +name = "flask" +version = "3.0.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "blinker" }, + { name = "click" }, + { name = "itsdangerous" }, + { name = "jinja2" }, + { name = "werkzeug" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/41/e1/d104c83026f8d35dfd2c261df7d64738341067526406b40190bc063e829a/flask-3.0.3.tar.gz", hash = "sha256:ceb27b0af3823ea2737928a4d99d125a06175b8512c445cbd9a9ce200ef76842", size = 676315 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/61/80/ffe1da13ad9300f87c93af113edd0638c75138c42a0994becfacac078c06/flask-3.0.3-py3-none-any.whl", hash = "sha256:34e815dfaa43340d1d15a5c3a02b8476004037eb4840b34910c6e21679d288f3", size = 101735 }, +] + +[[package]] +name = "flask-admin" +version = "1.6.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "flask" }, + { name = "wtforms" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/be/4d/7cad383a93e3e1dd9378f1fcf05ddc532c6d921fb30c19ce8f8583630f24/Flask-Admin-1.6.1.tar.gz", hash = "sha256:24cae2af832b6a611a01d7dc35f42d266c1d6c75a426b869d8cb241b78233369", size = 6651224 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/61/b3/656c78dfef163517dbbc9fd106f0604e37b436ad51f9d9450b60e9407e35/Flask_Admin-1.6.1-py3-none-any.whl", hash = "sha256:fd8190f1ec3355913a22739c46ed3623f1d82b8112cde324c60a6fc9b21c9406", size = 7498141 }, +] + +[[package]] +name = "flask-sqlalchemy" +version = "3.1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "flask" }, + { name = "sqlalchemy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/91/53/b0a9fcc1b1297f51e68b69ed3b7c3c40d8c45be1391d77ae198712914392/flask_sqlalchemy-3.1.1.tar.gz", hash = "sha256:e4b68bb881802dda1a7d878b2fc84c06d1ee57fb40b874d3dc97dabfa36b8312", size = 81899 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1d/6a/89963a5c6ecf166e8be29e0d1bf6806051ee8fe6c82e232842e3aeac9204/flask_sqlalchemy-3.1.1-py3-none-any.whl", hash = "sha256:4ba4be7f419dc72f4efd8802d69974803c37259dd42f3913b0dcf75c9447e0a0", size = 25125 }, +] + +[[package]] +name = "geographiclib" +version = "2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/96/cd/90271fd195d79a9c2af0ca21632b297a6cc3e852e0413a2e4519e67be213/geographiclib-2.0.tar.gz", hash = "sha256:f7f41c85dc3e1c2d3d935ec86660dc3b2c848c83e17f9a9e51ba9d5146a15859", size = 36720 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9f/5a/a26132406f1f40cf51ea349a5f11b0a46cec02a2031ff82e391c2537247a/geographiclib-2.0-py3-none-any.whl", hash = "sha256:6b7225248e45ff7edcee32becc4e0a1504c606ac5ee163a5656d482e0cd38734", size = 40324 }, +] + +[[package]] +name = "greenlet" +version = "3.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2f/ff/df5fede753cc10f6a5be0931204ea30c35fa2f2ea7a35b25bdaf4fe40e46/greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467", size = 186022 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/7d/ec/bad1ac26764d26aa1353216fcbfa4670050f66d445448aafa227f8b16e80/greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d", size = 274260 }, + { url = "https://files.pythonhosted.org/packages/66/d4/c8c04958870f482459ab5956c2942c4ec35cac7fe245527f1039837c17a9/greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79", size = 649064 }, + { url = "https://files.pythonhosted.org/packages/51/41/467b12a8c7c1303d20abcca145db2be4e6cd50a951fa30af48b6ec607581/greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa", size = 663420 }, + { url = "https://files.pythonhosted.org/packages/27/8f/2a93cd9b1e7107d5c7b3b7816eeadcac2ebcaf6d6513df9abaf0334777f6/greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441", size = 658035 }, + { url = "https://files.pythonhosted.org/packages/57/5c/7c6f50cb12be092e1dccb2599be5a942c3416dbcfb76efcf54b3f8be4d8d/greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36", size = 660105 }, + { url = "https://files.pythonhosted.org/packages/f1/66/033e58a50fd9ec9df00a8671c74f1f3a320564c6415a4ed82a1c651654ba/greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9", size = 613077 }, + { url = "https://files.pythonhosted.org/packages/19/c5/36384a06f748044d06bdd8776e231fadf92fc896bd12cb1c9f5a1bda9578/greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0", size = 1135975 }, + { url = "https://files.pythonhosted.org/packages/38/f9/c0a0eb61bdf808d23266ecf1d63309f0e1471f284300ce6dac0ae1231881/greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942", size = 1163955 }, + { url = "https://files.pythonhosted.org/packages/43/21/a5d9df1d21514883333fc86584c07c2b49ba7c602e670b174bd73cfc9c7f/greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01", size = 299655 }, + { url = "https://files.pythonhosted.org/packages/f3/57/0db4940cd7bb461365ca8d6fd53e68254c9dbbcc2b452e69d0d41f10a85e/greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1", size = 272990 }, + { url = "https://files.pythonhosted.org/packages/1c/ec/423d113c9f74e5e402e175b157203e9102feeb7088cee844d735b28ef963/greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff", size = 649175 }, + { url = "https://files.pythonhosted.org/packages/a9/46/ddbd2db9ff209186b7b7c621d1432e2f21714adc988703dbdd0e65155c77/greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a", size = 663425 }, + { url = 
"https://files.pythonhosted.org/packages/bc/f9/9c82d6b2b04aa37e38e74f0c429aece5eeb02bab6e3b98e7db89b23d94c6/greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e", size = 657736 }, + { url = "https://files.pythonhosted.org/packages/d9/42/b87bc2a81e3a62c3de2b0d550bf91a86939442b7ff85abb94eec3fc0e6aa/greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4", size = 660347 }, + { url = "https://files.pythonhosted.org/packages/37/fa/71599c3fd06336cdc3eac52e6871cfebab4d9d70674a9a9e7a482c318e99/greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e", size = 615583 }, + { url = "https://files.pythonhosted.org/packages/4e/96/e9ef85de031703ee7a4483489b40cf307f93c1824a02e903106f2ea315fe/greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1", size = 1133039 }, + { url = "https://files.pythonhosted.org/packages/87/76/b2b6362accd69f2d1889db61a18c94bc743e961e3cab344c2effaa4b4a25/greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c", size = 1160716 }, + { url = "https://files.pythonhosted.org/packages/1f/1b/54336d876186920e185066d8c3024ad55f21d7cc3683c856127ddb7b13ce/greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761", size = 299490 }, + { url = "https://files.pythonhosted.org/packages/5f/17/bea55bf36990e1638a2af5ba10c1640273ef20f627962cf97107f1e5d637/greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011", size = 643731 }, + { url = "https://files.pythonhosted.org/packages/78/d2/aa3d2157f9ab742a08e0fd8f77d4699f37c22adfbfeb0c610a186b5f75e0/greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13", size = 649304 }, + { url = "https://files.pythonhosted.org/packages/f1/8e/d0aeffe69e53ccff5a28fa86f07ad1d2d2d6537a9506229431a2a02e2f15/greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475", size = 646537 }, + { url = "https://files.pythonhosted.org/packages/05/79/e15408220bbb989469c8871062c97c6c9136770657ba779711b90870d867/greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b", size = 642506 }, + { url = "https://files.pythonhosted.org/packages/18/87/470e01a940307796f1d25f8167b551a968540fbe0551c0ebb853cb527dd6/greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822", size = 602753 }, + { url = "https://files.pythonhosted.org/packages/e2/72/576815ba674eddc3c25028238f74d7b8068902b3968cbe456771b166455e/greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01", size = 1122731 }, + { url = 
"https://files.pythonhosted.org/packages/ac/38/08cc303ddddc4b3d7c628c3039a61a3aae36c241ed01393d00c2fd663473/greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6", size = 1142112 }, +] + +[[package]] +name = "identify" +version = "2.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/29/bb/25024dbcc93516c492b75919e76f389bac754a3e4248682fba32b250c880/identify-2.6.1.tar.gz", hash = "sha256:91478c5fb7c3aac5ff7bf9b4344f803843dc586832d5f110d672b19aa1984c98", size = 99097 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7d/0c/4ef72754c050979fdcc06c744715ae70ea37e734816bb6514f79df77a42f/identify-2.6.1-py2.py3-none-any.whl", hash = "sha256:53863bcac7caf8d2ed85bd20312ea5dcfc22226800f6d6881f232d861db5a8f0", size = 98972 }, +] + +[[package]] +name = "idna" +version = "3.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 }, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", size = 4646 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", size = 5892 }, +] + +[[package]] +name = "ipykernel" +version = "6.29.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "appnope", marker = "platform_system == 'Darwin'" }, + { name = "comm" }, + { name = "debugpy" }, + { name = "ipython" }, + { name = "jupyter-client" }, + { name = "jupyter-core" }, + { name = "matplotlib-inline" }, + { name = "nest-asyncio" }, + { name = "packaging" }, + { name = "psutil" }, + { name = "pyzmq" }, + { name = "tornado" }, + { name = "traitlets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e9/5c/67594cb0c7055dc50814b21731c22a601101ea3b1b50a9a1b090e11f5d0f/ipykernel-6.29.5.tar.gz", hash = "sha256:f093a22c4a40f8828f8e330a9c297cb93dcab13bd9678ded6de8e5cf81c56215", size = 163367 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/5c/368ae6c01c7628438358e6d337c19b05425727fbb221d2a3c4303c372f42/ipykernel-6.29.5-py3-none-any.whl", hash = "sha256:afdb66ba5aa354b09b91379bac28ae4afebbb30e8b39510c9690afb7a10421b5", size = 117173 }, +] + +[[package]] +name = "ipython" +version = "8.28.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "decorator" }, + { name = "jedi" }, + { name = "matplotlib-inline" }, + { name = "pexpect", marker = "sys_platform != 'emscripten' and sys_platform != 'win32'" }, + { name = "prompt-toolkit" }, + { name = "pygments" }, + { name = 
"stack-data" }, + { name = "traitlets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f7/21/48db7d9dd622b9692575004c7c98f85f5629428f58596c59606d36c51b58/ipython-8.28.0.tar.gz", hash = "sha256:0d0d15ca1e01faeb868ef56bc7ee5a0de5bd66885735682e8a322ae289a13d1a", size = 5495762 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/3a/5d8680279ada9571de8469220069d27024ee47624af534e537c9ff49a450/ipython-8.28.0-py3-none-any.whl", hash = "sha256:530ef1e7bb693724d3cdc37287c80b07ad9b25986c007a53aa1857272dac3f35", size = 819456 }, +] + +[[package]] +name = "itsdangerous" +version = "2.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9c/cb/8ac0172223afbccb63986cc25049b154ecfb5e85932587206f42317be31d/itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173", size = 54410 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/96/92447566d16df59b2a776c0fb82dbc4d9e07cd95062562af01e408583fc4/itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef", size = 16234 }, +] + +[[package]] +name = "jedi" +version = "0.19.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "parso" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d6/99/99b493cec4bf43176b678de30f81ed003fd6a647a301b9c927280c600f0a/jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd", size = 1227821 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/9f/bc63f0f0737ad7a60800bfd472a4836661adae21f9c2535f3957b1e54ceb/jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0", size = 1569361 }, +] + +[[package]] +name = "jinja2" +version = "3.1.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ed/55/39036716d19cab0747a5020fc7e907f362fbf48c984b14e62127f7e68e5d/jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369", size = 240245 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/80/3a54838c3fb461f6fec263ebf3a3a41771bd05190238de3486aae8540c36/jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d", size = 133271 }, +] + +[[package]] +name = "jmespath" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/00/2a/e867e8531cf3e36b41201936b7fa7ba7b5702dbef42922193f05c8976cd6/jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe", size = 25843 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/b4/b9b800c45527aadd64d5b442f9b932b00648617eb5d63d2c7a6587b7cafc/jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", size = 20256 }, +] + +[[package]] +name = "jupyter-client" +version = "8.6.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jupyter-core" }, + { name = "python-dateutil" }, + { name = "pyzmq" }, + { name = "tornado" }, + { name = "traitlets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/71/22/bf9f12fdaeae18019a468b68952a60fe6dbab5d67cd2a103cac7659b41ca/jupyter_client-8.6.3.tar.gz", hash = 
"sha256:35b3a0947c4a6e9d589eb97d7d4cd5e90f910ee73101611f01283732bd6d9419", size = 342019 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/11/85/b0394e0b6fcccd2c1eeefc230978a6f8cb0c5df1e4cd3e7625735a0d7d1e/jupyter_client-8.6.3-py3-none-any.whl", hash = "sha256:e8a19cc986cc45905ac3362915f410f3af85424b4c0905e94fa5f2cb08e8f23f", size = 106105 }, +] + +[[package]] +name = "jupyter-core" +version = "5.7.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "platformdirs" }, + { name = "pywin32", marker = "platform_python_implementation != 'PyPy' and sys_platform == 'win32'" }, + { name = "traitlets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/00/11/b56381fa6c3f4cc5d2cf54a7dbf98ad9aa0b339ef7a601d6053538b079a7/jupyter_core-5.7.2.tar.gz", hash = "sha256:aa5f8d32bbf6b431ac830496da7392035d6f61b4f54872f15c4bd2a9c3f536d9", size = 87629 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c9/fb/108ecd1fe961941959ad0ee4e12ee7b8b1477247f30b1fdfd83ceaf017f0/jupyter_core-5.7.2-py3-none-any.whl", hash = "sha256:4f7315d2f6b4bcf2e3e7cb6e46772eba760ae459cd1f59d29eb57b0a01bd7409", size = 28965 }, +] + +[[package]] +name = "mako" +version = "1.3.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fa/0b/29bc5a230948bf209d3ed3165006d257e547c02c3c2a96f6286320dfe8dc/mako-1.3.6.tar.gz", hash = "sha256:9ec3a1583713479fae654f83ed9fa8c9a4c16b7bb0daba0e6bbebff50c0d983d", size = 390206 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/48/22/bc14c6f02e6dccaafb3eba95764c8f096714260c2aa5f76f654fd16a23dd/Mako-1.3.6-py3-none-any.whl", hash = "sha256:a91198468092a2f1a0de86ca92690fb0cfc43ca90ee17e15d93662b4c04b241a", size = 78557 }, +] + +[[package]] +name = "markupsafe" +version = "3.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274 }, + { url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348 }, + { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149 }, + { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118 }, + { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash 
= "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993 }, + { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178 }, + { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319 }, + { url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352 }, + { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097 }, + { url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601 }, + { url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274 }, + { url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352 }, + { url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122 }, + { url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085 }, + { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978 }, + { url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208 }, + { url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357 }, + { url = 
"https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344 }, + { url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101 }, + { url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603 }, + { url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510 }, + { url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486 }, + { url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480 }, + { url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914 }, + { url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796 }, + { url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473 }, + { url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114 }, + { url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098 }, + { url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208 }, + { url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = 
"sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739 }, +] + +[[package]] +name = "matplotlib-inline" +version = "0.1.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "traitlets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/99/5b/a36a337438a14116b16480db471ad061c36c3694df7c2084a0da7ba538b7/matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90", size = 8159 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8f/8e/9ad090d3553c280a8060fbf6e24dc1c0c29704ee7d1c372f0c174aa59285/matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca", size = 9899 }, +] + +[[package]] +name = "nest-asyncio" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/83/f8/51569ac65d696c8ecbee95938f89d4abf00f47d58d48f6fbabfe8f0baefe/nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe", size = 7418 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/c4/c2971a3ba4c6103a3d10c4b0f24f461ddc027f0f09763220cf35ca1401b3/nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c", size = 5195 }, +] + +[[package]] +name = "nmeasim" +version = "1.1.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "geographiclib" }, + { name = "pyserial" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/65/3d/f041bfc5ac6a934fe83544706130927f5a894841dbfbc3f4c29d9d081b0b/nmeasim-1.1.1.0.tar.gz", hash = "sha256:ff6f74076d47032bbc6d97ad82d607412d2cf383271eb3aab50a6664bc98ea6d", size = 192132 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/a0/a68605e5f468a7e55e64806774912e20569ed35193862fb07507f741d09d/nmeasim-1.1.1.0-py3-none-any.whl", hash = "sha256:4a2e0b400ae750be27cd3e826d88e06d9f9e9bd341ac9ac11ca62af1a450d6e0", size = 188983 }, +] + +[[package]] +name = "nodeenv" +version = "1.9.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314 }, +] + +[[package]] +name = "numpy" +version = "2.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4b/d1/8a730ea07f4a37d94f9172f4ce1d81064b7a64766b460378be278952de75/numpy-2.1.2.tar.gz", hash = "sha256:13532a088217fa624c99b843eeb54640de23b3414b14aa66d023805eb731066c", size = 18878063 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/7d/554a6838f37f3ada5a55f25173c619d556ae98092a6e01afb6e710501d70/numpy-2.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7bf0a4f9f15b32b5ba53147369e94296f5fffb783db5aacc1be15b4bf72f43b", size = 20848077 }, + { url = "https://files.pythonhosted.org/packages/b0/29/cb48a402ea879e645b16218718f3f7d9588a77d674a9dcf22e4c43487636/numpy-2.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:b1d0fcae4f0949f215d4632be684a539859b295e2d0cb14f78ec231915d644db", size = 13493242 }, + { url = "https://files.pythonhosted.org/packages/56/44/f899b0581766c230da42f751b7b8896d096640b19b312164c267e48d36cb/numpy-2.1.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:f751ed0a2f250541e19dfca9f1eafa31a392c71c832b6bb9e113b10d050cb0f1", size = 5089219 }, + { url = "https://files.pythonhosted.org/packages/79/8f/b987070d45161a7a4504afc67ed38544ed2c0ed5576263599a0402204a9c/numpy-2.1.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:bd33f82e95ba7ad632bc57837ee99dba3d7e006536200c4e9124089e1bf42426", size = 6620167 }, + { url = "https://files.pythonhosted.org/packages/c4/a7/af3329fda3c3ec31d9b650e42bbcd3422fc62a765cbb1405fde4177a0996/numpy-2.1.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b8cde4f11f0a975d1fd59373b32e2f5a562ade7cde4f85b7137f3de8fbb29a0", size = 13604905 }, + { url = "https://files.pythonhosted.org/packages/9b/b4/e3c7e6fab0f77fff6194afa173d1f2342073d91b1d3b4b30b17c3fb4407a/numpy-2.1.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d95f286b8244b3649b477ac066c6906fbb2905f8ac19b170e2175d3d799f4df", size = 16041825 }, + { url = "https://files.pythonhosted.org/packages/e9/50/6828e66a78aa03147c111f84d55f33ce2dde547cb578d6744a3b06a0124b/numpy-2.1.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ab4754d432e3ac42d33a269c8567413bdb541689b02d93788af4131018cbf366", size = 16409541 }, + { url = "https://files.pythonhosted.org/packages/bf/72/66af7916d9c3c6dbfbc8acdd4930c65461e1953374a2bc43d00f948f004a/numpy-2.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e585c8ae871fd38ac50598f4763d73ec5497b0de9a0ab4ef5b69f01c6a046142", size = 14081134 }, + { url = "https://files.pythonhosted.org/packages/dc/5a/59a67d84f33fe00ae74f0b5b69dd4f93a586a4aba7f7e19b54b2133db038/numpy-2.1.2-cp312-cp312-win32.whl", hash = "sha256:9c6c754df29ce6a89ed23afb25550d1c2d5fdb9901d9c67a16e0b16eaf7e2550", size = 6237784 }, + { url = "https://files.pythonhosted.org/packages/4c/79/73735a6a5dad6059c085f240a4e74c9270feccd2bc66e4d31b5ca01d329c/numpy-2.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:456e3b11cb79ac9946c822a56346ec80275eaf2950314b249b512896c0d2505e", size = 12568254 }, + { url = "https://files.pythonhosted.org/packages/16/72/716fa1dbe92395a9a623d5049203ff8ddb0cfce65b9df9117c3696ccc011/numpy-2.1.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a84498e0d0a1174f2b3ed769b67b656aa5460c92c9554039e11f20a05650f00d", size = 20834690 }, + { url = "https://files.pythonhosted.org/packages/1e/fb/3e85a39511586053b5c6a59a643879e376fae22230ebfef9cfabb0e032e2/numpy-2.1.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4d6ec0d4222e8ffdab1744da2560f07856421b367928026fb540e1945f2eeeaf", size = 13507474 }, + { url = "https://files.pythonhosted.org/packages/35/eb/5677556d9ba13436dab51e129f98d4829d95cd1b6bd0e199c14485a4bdb9/numpy-2.1.2-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:259ec80d54999cc34cd1eb8ded513cb053c3bf4829152a2e00de2371bd406f5e", size = 5074742 }, + { url = "https://files.pythonhosted.org/packages/3e/c5/6c5ef5ba41b65a7e51bed50dbf3e1483eb578055633dd013e811a28e96a1/numpy-2.1.2-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:675c741d4739af2dc20cd6c6a5c4b7355c728167845e3c6b0e824e4e5d36a6c3", size = 6606787 }, + { url = "https://files.pythonhosted.org/packages/08/ac/f2f29dd4fd325b379c7dc932a0ebab22f0e031dbe80b2f6019b291a3a544/numpy-2.1.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:05b2d4e667895cc55e3ff2b56077e4c8a5604361fc21a042845ea3ad67465aa8", size = 13601333 }, + { url = "https://files.pythonhosted.org/packages/44/26/63f5f4e5089654dfb858f4892215ed968cd1a68e6f4a83f9961f84f855cb/numpy-2.1.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43cca367bf94a14aca50b89e9bc2061683116cfe864e56740e083392f533ce7a", size = 16038090 }, + { url = "https://files.pythonhosted.org/packages/1d/21/015e0594de9c3a8d5edd24943d2bd23f102ec71aec026083f822f86497e2/numpy-2.1.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:76322dcdb16fccf2ac56f99048af32259dcc488d9b7e25b51e5eca5147a3fb98", size = 16410865 }, + { url = "https://files.pythonhosted.org/packages/df/01/c1bcf9e6025d79077fbf3f3ee503b50aa7bfabfcd8f4b54f5829f4c00f3f/numpy-2.1.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:32e16a03138cabe0cb28e1007ee82264296ac0983714094380b408097a418cfe", size = 14078077 }, + { url = "https://files.pythonhosted.org/packages/ba/06/db9d127d63bd11591770ba9f3d960f8041e0f895184b9351d4b1b5b56983/numpy-2.1.2-cp313-cp313-win32.whl", hash = "sha256:242b39d00e4944431a3cd2db2f5377e15b5785920421993770cddb89992c3f3a", size = 6234904 }, + { url = "https://files.pythonhosted.org/packages/a9/96/9f61f8f95b6e0ea0aa08633b704c75d1882bdcb331bdf8bfd63263b25b00/numpy-2.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:f2ded8d9b6f68cc26f8425eda5d3877b47343e68ca23d0d0846f4d312ecaa445", size = 12561910 }, + { url = "https://files.pythonhosted.org/packages/36/b8/033f627821784a48e8f75c218033471eebbaacdd933f8979c79637a1b44b/numpy-2.1.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2ffef621c14ebb0188a8633348504a35c13680d6da93ab5cb86f4e54b7e922b5", size = 20857719 }, + { url = "https://files.pythonhosted.org/packages/96/46/af5726fde5b74ed83f2f17a73386d399319b7ed4d51279fb23b721d0816d/numpy-2.1.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ad369ed238b1959dfbade9018a740fb9392c5ac4f9b5173f420bd4f37ba1f7a0", size = 13518826 }, + { url = "https://files.pythonhosted.org/packages/db/6e/8ce677edf36da1c4dae80afe5529f47690697eb55b4864673af260ccea7b/numpy-2.1.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:d82075752f40c0ddf57e6e02673a17f6cb0f8eb3f587f63ca1eaab5594da5b17", size = 5115036 }, + { url = "https://files.pythonhosted.org/packages/6a/ba/3cce44fb1b8438042c11847048812a776f75ee0e7070179c22e4cfbf420c/numpy-2.1.2-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:1600068c262af1ca9580a527d43dc9d959b0b1d8e56f8a05d830eea39b7c8af6", size = 6628641 }, + { url = "https://files.pythonhosted.org/packages/59/c8/e722998720ccbd35ffbcf1d1b8ed0aa2304af88d3f1c38e06ebf983599b3/numpy-2.1.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a26ae94658d3ba3781d5e103ac07a876b3e9b29db53f68ed7df432fd033358a8", size = 13574803 }, + { url = "https://files.pythonhosted.org/packages/7c/8e/fc1fdd83a55476765329ac2913321c4aed5b082a7915095628c4ca30ea72/numpy-2.1.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13311c2db4c5f7609b462bc0f43d3c465424d25c626d95040f073e30f7570e35", size = 16021174 }, + { url = "https://files.pythonhosted.org/packages/2a/b6/a790742aa88067adb4bd6c89a946778c1417d4deaeafce3ca928f26d4c52/numpy-2.1.2-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:2abbf905a0b568706391ec6fa15161fad0fb5d8b68d73c461b3c1bab6064dd62", size = 16400117 }, + { url = "https://files.pythonhosted.org/packages/48/6f/129e3c17e3befe7fefdeaa6890f4c4df3f3cf0831aa053802c3862da67aa/numpy-2.1.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:ef444c57d664d35cac4e18c298c47d7b504c66b17c2ea91312e979fcfbdfb08a", size = 14066202 }, +] + +[[package]] +name = "packaging" +version = "24.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/51/65/50db4dda066951078f0a96cf12f4b9ada6e4b811516bf0262c0f4f7064d4/packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002", size = 148788 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/08/aa/cc0199a5f0ad350994d660967a8efb233fe0416e4639146c089643407ce6/packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124", size = 53985 }, +] + +[[package]] +name = "pandas" +version = "2.2.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, + { name = "python-dateutil" }, + { name = "pytz" }, + { name = "tzdata" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9c/d6/9f8431bacc2e19dca897724cd097b1bb224a6ad5433784a44b587c7c13af/pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667", size = 4399213 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/17/a3/fb2734118db0af37ea7433f57f722c0a56687e14b14690edff0cdb4b7e58/pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9", size = 12529893 }, + { url = "https://files.pythonhosted.org/packages/e1/0c/ad295fd74bfac85358fd579e271cded3ac969de81f62dd0142c426b9da91/pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4", size = 11363475 }, + { url = "https://files.pythonhosted.org/packages/c6/2a/4bba3f03f7d07207481fed47f5b35f556c7441acddc368ec43d6643c5777/pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3", size = 15188645 }, + { url = "https://files.pythonhosted.org/packages/38/f8/d8fddee9ed0d0c0f4a2132c1dfcf0e3e53265055da8df952a53e7eaf178c/pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319", size = 12739445 }, + { url = "https://files.pythonhosted.org/packages/20/e8/45a05d9c39d2cea61ab175dbe6a2de1d05b679e8de2011da4ee190d7e748/pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8", size = 16359235 }, + { url = "https://files.pythonhosted.org/packages/1d/99/617d07a6a5e429ff90c90da64d428516605a1ec7d7bea494235e1c3882de/pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a", size = 14056756 }, + { url = "https://files.pythonhosted.org/packages/29/d4/1244ab8edf173a10fd601f7e13b9566c1b525c4f365d6bee918e68381889/pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13", size = 11504248 }, + { url = "https://files.pythonhosted.org/packages/64/22/3b8f4e0ed70644e85cfdcd57454686b9057c6c38d2f74fe4b8bc2527214a/pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015", size = 12477643 }, + { url = 
"https://files.pythonhosted.org/packages/e4/93/b3f5d1838500e22c8d793625da672f3eec046b1a99257666c94446969282/pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28", size = 11281573 }, + { url = "https://files.pythonhosted.org/packages/f5/94/6c79b07f0e5aab1dcfa35a75f4817f5c4f677931d4234afcd75f0e6a66ca/pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0", size = 15196085 }, + { url = "https://files.pythonhosted.org/packages/e8/31/aa8da88ca0eadbabd0a639788a6da13bb2ff6edbbb9f29aa786450a30a91/pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24", size = 12711809 }, + { url = "https://files.pythonhosted.org/packages/ee/7c/c6dbdb0cb2a4344cacfb8de1c5808ca885b2e4dcfde8008266608f9372af/pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659", size = 16356316 }, + { url = "https://files.pythonhosted.org/packages/57/b7/8b757e7d92023b832869fa8881a992696a0bfe2e26f72c9ae9f255988d42/pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb", size = 14022055 }, + { url = "https://files.pythonhosted.org/packages/3b/bc/4b18e2b8c002572c5a441a64826252ce5da2aa738855747247a971988043/pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d", size = 11481175 }, + { url = "https://files.pythonhosted.org/packages/76/a3/a5d88146815e972d40d19247b2c162e88213ef51c7c25993942c39dbf41d/pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468", size = 12615650 }, + { url = "https://files.pythonhosted.org/packages/9c/8c/f0fd18f6140ddafc0c24122c8a964e48294acc579d47def376fef12bcb4a/pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18", size = 11290177 }, + { url = "https://files.pythonhosted.org/packages/ed/f9/e995754eab9c0f14c6777401f7eece0943840b7a9fc932221c19d1abee9f/pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2", size = 14651526 }, + { url = "https://files.pythonhosted.org/packages/25/b0/98d6ae2e1abac4f35230aa756005e8654649d305df9a28b16b9ae4353bff/pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4", size = 11871013 }, + { url = "https://files.pythonhosted.org/packages/cc/57/0f72a10f9db6a4628744c8e8f0df4e6e21de01212c7c981d31e50ffc8328/pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d", size = 15711620 }, + { url = "https://files.pythonhosted.org/packages/ab/5f/b38085618b950b79d2d9164a711c52b10aefc0ae6833b96f626b7021b2ed/pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a", size = 13098436 }, +] + +[[package]] +name = "parso" +version = "0.8.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/66/94/68e2e17afaa9169cf6412ab0f28623903be73d1b32e208d9e8e541bb086d/parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d", size = 400609 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c6/ac/dac4a63f978e4dcb3c6d3a78c4d8e0192a113d288502a1216950c41b1027/parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18", size = 103650 }, +] + +[[package]] +name = "pexpect" +version = "4.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "ptyprocess" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/42/92/cc564bf6381ff43ce1f4d06852fc19a2f11d180f23dc32d9588bee2f149d/pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f", size = 166450 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9e/c3/059298687310d527a58bb01f3b1965787ee3b40dce76752eda8b44e9a2c5/pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523", size = 63772 }, +] + +[[package]] +name = "platformdirs" +version = "4.3.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/13/fc/128cc9cb8f03208bdbf93d3aa862e16d376844a14f9a0ce5cf4507372de4/platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907", size = 21302 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/a6/bc1012356d8ece4d66dd75c4b9fc6c1f6650ddd5991e421177d9f8f671be/platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb", size = 18439 }, +] + +[[package]] +name = "pluggy" +version = "1.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/96/2d/02d4312c973c6050a18b314a5ad0b3210edb65a906f868e31c111dede4a6/pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", size = 67955 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556 }, +] + +[[package]] +name = "polars" +version = "1.10.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/eb/dc/0bf6d62da6cf6fa5712e573334766484109c411a5e07bf779bdd3f2f07d2/polars-1.10.0.tar.gz", hash = "sha256:855b0fffbe4fbb1c89b4f9b4b6cc724b337f946a9ba50829eb22b8a36483b3c3", size = 4059116 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c6/4c/f4f626732ae966795c34b30345d7fbee4097fc9d41fb09bb3ba2beaffbdf/polars-1.10.0-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:6334aee8897a56291ab3f31b436eff3a284abef39bd333258f6071c77e45b72f", size = 32024599 }, + { url = "https://files.pythonhosted.org/packages/29/bc/5edaae9e0c6c826225dc428f1b714a01e127a8373e493aa7d85d2b182521/polars-1.10.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:1ce12aed5440a531e449d7deb9e9b80851e919502853d6d0a884124ea0a0377e", size = 28220138 }, + { url = "https://files.pythonhosted.org/packages/28/da/79b54d20c59303864add504a01f927d23415579bdf0a054942bd00da69e3/polars-1.10.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:540051fe456f779b1510c173c3a614135193cf1b94812e11663f65c80dc4626d", size = 33205544 }, + { url = "https://files.pythonhosted.org/packages/fb/bf/03358a080fbd51262f54381c011183a281947e0d4e3c1049baad8204dedc/polars-1.10.0-cp39-abi3-manylinux_2_24_aarch64.whl", hash = "sha256:d00a5978137c8471c47ea162acfaf6769a21b72ab5515891f5f7aa038c3c5574", size = 29810880 }, + { url = "https://files.pythonhosted.org/packages/ed/e2/134a4c381f63e8498314f15d5f8db32bdd9ee40806aba34c3e270915a629/polars-1.10.0-cp39-abi3-win_amd64.whl", hash = "sha256:182e03bd3486490c980a59cbae0be53c0688f6f6f6a2bccc28e07cc1b7f8a4b5", size = 32868791 }, +] + +[[package]] +name = "pre-commit" +version = "4.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cfgv" }, + { name = "identify" }, + { name = "nodeenv" }, + { name = "pyyaml" }, + { name = "virtualenv" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2e/c8/e22c292035f1bac8b9f5237a2622305bc0304e776080b246f3df57c4ff9f/pre_commit-4.0.1.tar.gz", hash = "sha256:80905ac375958c0444c65e9cebebd948b3cdb518f335a091a670a89d652139d2", size = 191678 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/16/8f/496e10d51edd6671ebe0432e33ff800aa86775d2d147ce7d43389324a525/pre_commit-4.0.1-py2.py3-none-any.whl", hash = "sha256:efde913840816312445dc98787724647c65473daefe420785f885e8ed9a06878", size = 218713 }, +] + +[[package]] +name = "prompt-toolkit" +version = "3.0.48" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "wcwidth" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2d/4f/feb5e137aff82f7c7f3248267b97451da3644f6cdc218edfe549fb354127/prompt_toolkit-3.0.48.tar.gz", hash = "sha256:d6623ab0477a80df74e646bdbc93621143f5caf104206aa29294d53de1a03d90", size = 424684 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a9/6a/fd08d94654f7e67c52ca30523a178b3f8ccc4237fce4be90d39c938a831a/prompt_toolkit-3.0.48-py3-none-any.whl", hash = "sha256:f49a827f90062e411f1ce1f854f2aedb3c23353244f8108b89283587397ac10e", size = 386595 }, +] + +[[package]] +name = "psutil" +version = "6.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/26/10/2a30b13c61e7cf937f4adf90710776b7918ed0a9c434e2c38224732af310/psutil-6.1.0.tar.gz", hash = "sha256:353815f59a7f64cdaca1c0307ee13558a0512f6db064e92fe833784f08539c7a", size = 508565 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/01/9e/8be43078a171381953cfee33c07c0d628594b5dbfc5157847b85022c2c1b/psutil-6.1.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6e2dcd475ce8b80522e51d923d10c7871e45f20918e027ab682f94f1c6351688", size = 247762 }, + { url = "https://files.pythonhosted.org/packages/1d/cb/313e80644ea407f04f6602a9e23096540d9dc1878755f3952ea8d3d104be/psutil-6.1.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:0895b8414afafc526712c498bd9de2b063deaac4021a3b3c34566283464aff8e", size = 248777 }, + { url = "https://files.pythonhosted.org/packages/65/8e/bcbe2025c587b5d703369b6a75b65d41d1367553da6e3f788aff91eaf5bd/psutil-6.1.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9dcbfce5d89f1d1f2546a2090f4fcf87c7f669d1d90aacb7d7582addece9fb38", size = 284259 }, + { url = "https://files.pythonhosted.org/packages/58/4d/8245e6f76a93c98aab285a43ea71ff1b171bcd90c9d238bf81f7021fb233/psutil-6.1.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:498c6979f9c6637ebc3a73b3f87f9eb1ec24e1ce53a7c5173b8508981614a90b", size = 287255 }, + { url = "https://files.pythonhosted.org/packages/27/c2/d034856ac47e3b3cdfa9720d0e113902e615f4190d5d1bdb8df4b2015fb2/psutil-6.1.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d905186d647b16755a800e7263d43df08b790d709d575105d419f8b6ef65423a", size = 288804 }, + { url = "https://files.pythonhosted.org/packages/ea/55/5389ed243c878725feffc0d6a3bc5ef6764312b6fc7c081faaa2cfa7ef37/psutil-6.1.0-cp37-abi3-win32.whl", hash = "sha256:1ad45a1f5d0b608253b11508f80940985d1d0c8f6111b5cb637533a0e6ddc13e", size = 250386 }, + { url = "https://files.pythonhosted.org/packages/11/91/87fa6f060e649b1e1a7b19a4f5869709fbf750b7c8c262ee776ec32f3028/psutil-6.1.0-cp37-abi3-win_amd64.whl", hash = "sha256:a8fb3752b491d246034fa4d279ff076501588ce8cbcdbb62c32fd7a377d996be", size = 254228 }, +] + +[[package]] +name = "psycopg2-binary" +version = "2.9.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/cb/0e/bdc8274dc0585090b4e3432267d7be4dfbfd8971c0fa59167c711105a6bf/psycopg2-binary-2.9.10.tar.gz", hash = "sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2", size = 385764 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/49/7d/465cc9795cf76f6d329efdafca74693714556ea3891813701ac1fee87545/psycopg2_binary-2.9.10-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:880845dfe1f85d9d5f7c412efea7a08946a46894537e4e5d091732eb1d34d9a0", size = 3044771 }, + { url = "https://files.pythonhosted.org/packages/8b/31/6d225b7b641a1a2148e3ed65e1aa74fc86ba3fee850545e27be9e1de893d/psycopg2_binary-2.9.10-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9440fa522a79356aaa482aa4ba500b65f28e5d0e63b801abf6aa152a29bd842a", size = 3275336 }, + { url = "https://files.pythonhosted.org/packages/30/b7/a68c2b4bff1cbb1728e3ec864b2d92327c77ad52edcd27922535a8366f68/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3923c1d9870c49a2d44f795df0c889a22380d36ef92440ff618ec315757e539", size = 2851637 }, + { url = "https://files.pythonhosted.org/packages/0b/b1/cfedc0e0e6f9ad61f8657fd173b2f831ce261c02a08c0b09c652b127d813/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b2c956c028ea5de47ff3a8d6b3cc3330ab45cf0b7c3da35a2d6ff8420896526", size = 3082097 }, + { url = "https://files.pythonhosted.org/packages/18/ed/0a8e4153c9b769f59c02fb5e7914f20f0b2483a19dae7bf2db54b743d0d0/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f758ed67cab30b9a8d2833609513ce4d3bd027641673d4ebc9c067e4d208eec1", size = 3264776 }, + { url = "https://files.pythonhosted.org/packages/10/db/d09da68c6a0cdab41566b74e0a6068a425f077169bed0946559b7348ebe9/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cd9b4f2cfab88ed4a9106192de509464b75a906462fb846b936eabe45c2063e", size = 3020968 }, + { url = "https://files.pythonhosted.org/packages/94/28/4d6f8c255f0dfffb410db2b3f9ac5218d959a66c715c34cac31081e19b95/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dc08420625b5a20b53551c50deae6e231e6371194fa0651dbe0fb206452ae1f", size = 2872334 }, + { url = "https://files.pythonhosted.org/packages/05/f7/20d7bf796593c4fea95e12119d6cc384ff1f6141a24fbb7df5a668d29d29/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_i686.whl", hash = 
"sha256:d7cd730dfa7c36dbe8724426bf5612798734bff2d3c3857f36f2733f5bfc7c00", size = 2822722 }, + { url = "https://files.pythonhosted.org/packages/4d/e4/0c407ae919ef626dbdb32835a03b6737013c3cc7240169843965cada2bdf/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:155e69561d54d02b3c3209545fb08938e27889ff5a10c19de8d23eb5a41be8a5", size = 2920132 }, + { url = "https://files.pythonhosted.org/packages/2d/70/aa69c9f69cf09a01da224909ff6ce8b68faeef476f00f7ec377e8f03be70/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3cc28a6fd5a4a26224007712e79b81dbaee2ffb90ff406256158ec4d7b52b47", size = 2959312 }, + { url = "https://files.pythonhosted.org/packages/d3/bd/213e59854fafe87ba47814bf413ace0dcee33a89c8c8c814faca6bc7cf3c/psycopg2_binary-2.9.10-cp312-cp312-win32.whl", hash = "sha256:ec8a77f521a17506a24a5f626cb2aee7850f9b69a0afe704586f63a464f3cd64", size = 1025191 }, + { url = "https://files.pythonhosted.org/packages/92/29/06261ea000e2dc1e22907dbbc483a1093665509ea586b29b8986a0e56733/psycopg2_binary-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:18c5ee682b9c6dd3696dad6e54cc7ff3a1a9020df6a5c0f861ef8bfd338c3ca0", size = 1164031 }, + { url = "https://files.pythonhosted.org/packages/3e/30/d41d3ba765609c0763505d565c4d12d8f3c79793f0d0f044ff5a28bf395b/psycopg2_binary-2.9.10-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:26540d4a9a4e2b096f1ff9cce51253d0504dca5a85872c7f7be23be5a53eb18d", size = 3044699 }, + { url = "https://files.pythonhosted.org/packages/35/44/257ddadec7ef04536ba71af6bc6a75ec05c5343004a7ec93006bee66c0bc/psycopg2_binary-2.9.10-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e217ce4d37667df0bc1c397fdcd8de5e81018ef305aed9415c3b093faaeb10fb", size = 3275245 }, + { url = "https://files.pythonhosted.org/packages/1b/11/48ea1cd11de67f9efd7262085588790a95d9dfcd9b8a687d46caf7305c1a/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:245159e7ab20a71d989da00f280ca57da7641fa2cdcf71749c193cea540a74f7", size = 2851631 }, + { url = "https://files.pythonhosted.org/packages/62/e0/62ce5ee650e6c86719d621a761fe4bc846ab9eff8c1f12b1ed5741bf1c9b/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c4ded1a24b20021ebe677b7b08ad10bf09aac197d6943bfe6fec70ac4e4690d", size = 3082140 }, + { url = "https://files.pythonhosted.org/packages/27/ce/63f946c098611f7be234c0dd7cb1ad68b0b5744d34f68062bb3c5aa510c8/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3abb691ff9e57d4a93355f60d4f4c1dd2d68326c968e7db17ea96df3c023ef73", size = 3264762 }, + { url = "https://files.pythonhosted.org/packages/43/25/c603cd81402e69edf7daa59b1602bd41eb9859e2824b8c0855d748366ac9/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8608c078134f0b3cbd9f89b34bd60a943b23fd33cc5f065e8d5f840061bd0673", size = 3020967 }, + { url = "https://files.pythonhosted.org/packages/5f/d6/8708d8c6fca531057fa170cdde8df870e8b6a9b136e82b361c65e42b841e/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:230eeae2d71594103cd5b93fd29d1ace6420d0b86f4778739cb1a5a32f607d1f", size = 2872326 }, + { url = "https://files.pythonhosted.org/packages/ce/ac/5b1ea50fc08a9df82de7e1771537557f07c2632231bbab652c7e22597908/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bb89f0a835bcfc1d42ccd5f41f04870c1b936d8507c6df12b7737febc40f0909", size = 2822712 }, + { url = 
"https://files.pythonhosted.org/packages/c4/fc/504d4503b2abc4570fac3ca56eb8fed5e437bf9c9ef13f36b6621db8ef00/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f0c2d907a1e102526dd2986df638343388b94c33860ff3bbe1384130828714b1", size = 2920155 }, + { url = "https://files.pythonhosted.org/packages/b2/d1/323581e9273ad2c0dbd1902f3fb50c441da86e894b6e25a73c3fda32c57e/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8157bed2f51db683f31306aa497311b560f2265998122abe1dce6428bd86567", size = 2959356 }, +] + +[[package]] +name = "ptyprocess" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/20/e5/16ff212c1e452235a90aeb09066144d0c5a6a8c0834397e03f5224495c4e/ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220", size = 70762 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/22/a6/858897256d0deac81a172289110f31629fc4cee19b6f01283303e18c8db3/ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35", size = 13993 }, +] + +[[package]] +name = "pure-eval" +version = "0.2.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/cd/05/0a34433a064256a578f1783a10da6df098ceaa4a57bbeaa96a6c0352786b/pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42", size = 19752 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8e/37/efad0257dc6e593a18957422533ff0f87ede7c9c6ea010a2177d738fb82f/pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0", size = 11842 }, +] + +[[package]] +name = "pycparser" +version = "2.22" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552 }, +] + +[[package]] +name = "pygments" +version = "2.18.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8e/62/8336eff65bcbc8e4cb5d05b55faf041285951b6e80f33e2bff2024788f31/pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199", size = 4891905 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f7/3f/01c8b82017c199075f8f788d0d906b9ffbbc5a47dc9918a945e13d5a2bda/pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a", size = 1205513 }, +] + +[[package]] +name = "pynmeagps" +version = "1.0.43" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/15/1cf68a448cd37e6a8b256b6fa343b6becd22411326a6f0a5193cd31ad5b8/pynmeagps-1.0.43.tar.gz", hash = "sha256:70f61ba43c75d2fc6f81f0acc7bb52be3c1c4a1461cc50565118e4448e4af364", size = 62972 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/17/3b/bc39d7838666a9525fa5dafef54f3cc8c1251bd9fd95f071dc415a2d6453/pynmeagps-1.0.43-py3-none-any.whl", hash = "sha256:4977b62c1420cf74c9900cef21769f097a591bf188df66f47b87f93595cf510d", size = 44520 }, +] + +[[package]] +name = "pyserial" +version = "3.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1e/7d/ae3f0a63f41e4d2f6cb66a5b57197850f919f59e558159a4dd3a818f5082/pyserial-3.5.tar.gz", hash = "sha256:3c77e014170dfffbd816e6ffc205e9842efb10be9f58ec16d3e8675b4925cddb", size = 159125 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/bc/587a445451b253b285629263eb51c2d8e9bcea4fc97826266d186f96f558/pyserial-3.5-py2.py3-none-any.whl", hash = "sha256:c4451db6ba391ca6ca299fb3ec7bae67a5c55dde170964c7a14ceefec02f2cf0", size = 90585 }, +] + +[[package]] +name = "pytest" +version = "8.3.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8b/6c/62bbd536103af674e227c41a8f3dcd022d591f6eed5facb5a0f31ee33bbc/pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181", size = 1442487 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6b/77/7440a06a8ead44c7757a64362dd22df5760f9b12dc5f11b6188cd2fc27a0/pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2", size = 342341 }, +] + +[[package]] +name = "pytest-cov" +version = "5.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "coverage" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/74/67/00efc8d11b630c56f15f4ad9c7f9223f1e5ec275aaae3fa9118c6a223ad2/pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857", size = 63042 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/3a/af5b4fa5961d9a1e6237b530eb87dd04aea6eb83da09d2a4073d81b54ccf/pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652", size = 21990 }, +] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892 }, +] + +[[package]] +name = "pytz" +version = "2024.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3a/31/3c70bf7603cc2dca0f19bdc53b4537a797747a58875b552c8c413d963a3f/pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a", size = 319692 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/11/c3/005fcca25ce078d2cc29fd559379817424e94885510568bc1bc53d7d5846/pytz-2024.2-py2.py3-none-any.whl", hash = 
"sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725", size = 508002 }, +] + +[[package]] +name = "pywin32" +version = "308" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/7c/d00d6bdd96de4344e06c4afbf218bc86b54436a94c01c71a8701f613aa56/pywin32-308-cp312-cp312-win32.whl", hash = "sha256:587f3e19696f4bf96fde9d8a57cec74a57021ad5f204c9e627e15c33ff568897", size = 5939729 }, + { url = "https://files.pythonhosted.org/packages/21/27/0c8811fbc3ca188f93b5354e7c286eb91f80a53afa4e11007ef661afa746/pywin32-308-cp312-cp312-win_amd64.whl", hash = "sha256:00b3e11ef09ede56c6a43c71f2d31857cf7c54b0ab6e78ac659497abd2834f47", size = 6543015 }, + { url = "https://files.pythonhosted.org/packages/9d/0f/d40f8373608caed2255781a3ad9a51d03a594a1248cd632d6a298daca693/pywin32-308-cp312-cp312-win_arm64.whl", hash = "sha256:9b4de86c8d909aed15b7011182c8cab38c8850de36e6afb1f0db22b8959e3091", size = 7976033 }, + { url = "https://files.pythonhosted.org/packages/a9/a4/aa562d8935e3df5e49c161b427a3a2efad2ed4e9cf81c3de636f1fdddfd0/pywin32-308-cp313-cp313-win32.whl", hash = "sha256:1c44539a37a5b7b21d02ab34e6a4d314e0788f1690d65b48e9b0b89f31abbbed", size = 5938579 }, + { url = "https://files.pythonhosted.org/packages/c7/50/b0efb8bb66210da67a53ab95fd7a98826a97ee21f1d22949863e6d588b22/pywin32-308-cp313-cp313-win_amd64.whl", hash = "sha256:fd380990e792eaf6827fcb7e187b2b4b1cede0585e3d0c9e84201ec27b9905e4", size = 6542056 }, + { url = "https://files.pythonhosted.org/packages/26/df/2b63e3e4f2df0224f8aaf6d131f54fe4e8c96400eb9df563e2aae2e1a1f9/pywin32-308-cp313-cp313-win_arm64.whl", hash = "sha256:ef313c46d4c18dfb82a2431e3051ac8f112ccee1a34f29c263c583c568db63cd", size = 7974986 }, +] + +[[package]] +name = "pyyaml" +version = "6.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873 }, + { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302 }, + { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154 }, + { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223 }, + { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542 }, + { url = 
"https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164 }, + { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611 }, + { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591 }, + { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338 }, + { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309 }, + { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679 }, + { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428 }, + { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361 }, + { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523 }, + { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660 }, + { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597 }, + { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527 }, + { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446 }, +] + +[[package]] +name = "pyzmq" +version = "26.2.0" 
+source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "implementation_name == 'pypy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fd/05/bed626b9f7bb2322cdbbf7b4bd8f54b1b617b0d2ab2d3547d6e39428a48e/pyzmq-26.2.0.tar.gz", hash = "sha256:070672c258581c8e4f640b5159297580a9974b026043bd4ab0470be9ed324f1f", size = 271975 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/28/2f/78a766c8913ad62b28581777ac4ede50c6d9f249d39c2963e279524a1bbe/pyzmq-26.2.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:ded0fc7d90fe93ae0b18059930086c51e640cdd3baebdc783a695c77f123dcd9", size = 1343105 }, + { url = "https://files.pythonhosted.org/packages/b7/9c/4b1e2d3d4065be715e007fe063ec7885978fad285f87eae1436e6c3201f4/pyzmq-26.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:17bf5a931c7f6618023cdacc7081f3f266aecb68ca692adac015c383a134ca52", size = 1008365 }, + { url = "https://files.pythonhosted.org/packages/4f/ef/5a23ec689ff36d7625b38d121ef15abfc3631a9aecb417baf7a4245e4124/pyzmq-26.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55cf66647e49d4621a7e20c8d13511ef1fe1efbbccf670811864452487007e08", size = 665923 }, + { url = "https://files.pythonhosted.org/packages/ae/61/d436461a47437d63c6302c90724cf0981883ec57ceb6073873f32172d676/pyzmq-26.2.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4661c88db4a9e0f958c8abc2b97472e23061f0bc737f6f6179d7a27024e1faa5", size = 903400 }, + { url = "https://files.pythonhosted.org/packages/47/42/fc6d35ecefe1739a819afaf6f8e686f7f02a4dd241c78972d316f403474c/pyzmq-26.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea7f69de383cb47522c9c208aec6dd17697db7875a4674c4af3f8cfdac0bdeae", size = 860034 }, + { url = "https://files.pythonhosted.org/packages/07/3b/44ea6266a6761e9eefaa37d98fabefa112328808ac41aa87b4bbb668af30/pyzmq-26.2.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:7f98f6dfa8b8ccaf39163ce872bddacca38f6a67289116c8937a02e30bbe9711", size = 860579 }, + { url = "https://files.pythonhosted.org/packages/38/6f/4df2014ab553a6052b0e551b37da55166991510f9e1002c89cab7ce3b3f2/pyzmq-26.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e3e0210287329272539eea617830a6a28161fbbd8a3271bf4150ae3e58c5d0e6", size = 1196246 }, + { url = "https://files.pythonhosted.org/packages/38/9d/ee240fc0c9fe9817f0c9127a43238a3e28048795483c403cc10720ddef22/pyzmq-26.2.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6b274e0762c33c7471f1a7471d1a2085b1a35eba5cdc48d2ae319f28b6fc4de3", size = 1507441 }, + { url = "https://files.pythonhosted.org/packages/85/4f/01711edaa58d535eac4a26c294c617c9a01f09857c0ce191fd574d06f359/pyzmq-26.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:29c6a4635eef69d68a00321e12a7d2559fe2dfccfa8efae3ffb8e91cd0b36a8b", size = 1406498 }, + { url = "https://files.pythonhosted.org/packages/07/18/907134c85c7152f679ed744e73e645b365f3ad571f38bdb62e36f347699a/pyzmq-26.2.0-cp312-cp312-win32.whl", hash = "sha256:989d842dc06dc59feea09e58c74ca3e1678c812a4a8a2a419046d711031f69c7", size = 575533 }, + { url = "https://files.pythonhosted.org/packages/ce/2c/a6f4a20202a4d3c582ad93f95ee78d79bbdc26803495aec2912b17dbbb6c/pyzmq-26.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:2a50625acdc7801bc6f74698c5c583a491c61d73c6b7ea4dee3901bb99adb27a", size = 637768 }, + { url = "https://files.pythonhosted.org/packages/5f/0e/eb16ff731632d30554bf5af4dbba3ffcd04518219d82028aea4ae1b02ca5/pyzmq-26.2.0-cp312-cp312-win_arm64.whl", 
hash = "sha256:4d29ab8592b6ad12ebbf92ac2ed2bedcfd1cec192d8e559e2e099f648570e19b", size = 540675 }, + { url = "https://files.pythonhosted.org/packages/04/a7/0f7e2f6c126fe6e62dbae0bc93b1bd3f1099cf7fea47a5468defebe3f39d/pyzmq-26.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9dd8cd1aeb00775f527ec60022004d030ddc51d783d056e3e23e74e623e33726", size = 1006564 }, + { url = "https://files.pythonhosted.org/packages/31/b6/a187165c852c5d49f826a690857684333a6a4a065af0a6015572d2284f6a/pyzmq-26.2.0-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:28c812d9757fe8acecc910c9ac9dafd2ce968c00f9e619db09e9f8f54c3a68a3", size = 1340447 }, + { url = "https://files.pythonhosted.org/packages/68/ba/f4280c58ff71f321602a6e24fd19879b7e79793fb8ab14027027c0fb58ef/pyzmq-26.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d80b1dd99c1942f74ed608ddb38b181b87476c6a966a88a950c7dee118fdf50", size = 665485 }, + { url = "https://files.pythonhosted.org/packages/77/b5/c987a5c53c7d8704216f29fc3d810b32f156bcea488a940e330e1bcbb88d/pyzmq-26.2.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c997098cc65e3208eca09303630e84d42718620e83b733d0fd69543a9cab9cb", size = 903484 }, + { url = "https://files.pythonhosted.org/packages/29/c9/07da157d2db18c72a7eccef8e684cefc155b712a88e3d479d930aa9eceba/pyzmq-26.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ad1bc8d1b7a18497dda9600b12dc193c577beb391beae5cd2349184db40f187", size = 859981 }, + { url = "https://files.pythonhosted.org/packages/43/09/e12501bd0b8394b7d02c41efd35c537a1988da67fc9c745cae9c6c776d31/pyzmq-26.2.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:bea2acdd8ea4275e1278350ced63da0b166421928276c7c8e3f9729d7402a57b", size = 860334 }, + { url = "https://files.pythonhosted.org/packages/eb/ff/f5ec1d455f8f7385cc0a8b2acd8c807d7fade875c14c44b85c1bddabae21/pyzmq-26.2.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:23f4aad749d13698f3f7b64aad34f5fc02d6f20f05999eebc96b89b01262fb18", size = 1196179 }, + { url = "https://files.pythonhosted.org/packages/ec/8a/bb2ac43295b1950fe436a81fc5b298be0b96ac76fb029b514d3ed58f7b27/pyzmq-26.2.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:a4f96f0d88accc3dbe4a9025f785ba830f968e21e3e2c6321ccdfc9aef755115", size = 1507668 }, + { url = "https://files.pythonhosted.org/packages/a9/49/dbc284ebcfd2dca23f6349227ff1616a7ee2c4a35fe0a5d6c3deff2b4fed/pyzmq-26.2.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ced65e5a985398827cc9276b93ef6dfabe0273c23de8c7931339d7e141c2818e", size = 1406539 }, + { url = "https://files.pythonhosted.org/packages/00/68/093cdce3fe31e30a341d8e52a1ad86392e13c57970d722c1f62a1d1a54b6/pyzmq-26.2.0-cp313-cp313-win32.whl", hash = "sha256:31507f7b47cc1ead1f6e86927f8ebb196a0bab043f6345ce070f412a59bf87b5", size = 575567 }, + { url = "https://files.pythonhosted.org/packages/92/ae/6cc4657148143412b5819b05e362ae7dd09fb9fe76e2a539dcff3d0386bc/pyzmq-26.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:70fc7fcf0410d16ebdda9b26cbd8bf8d803d220a7f3522e060a69a9c87bf7bad", size = 637551 }, + { url = "https://files.pythonhosted.org/packages/6c/67/fbff102e201688f97c8092e4c3445d1c1068c2f27bbd45a578df97ed5f94/pyzmq-26.2.0-cp313-cp313-win_arm64.whl", hash = "sha256:c3789bd5768ab5618ebf09cef6ec2b35fed88709b104351748a63045f0ff9797", size = 540378 }, + { url = "https://files.pythonhosted.org/packages/3f/fe/2d998380b6e0122c6c4bdf9b6caf490831e5f5e2d08a203b5adff060c226/pyzmq-26.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = 
"sha256:034da5fc55d9f8da09015d368f519478a52675e558c989bfcb5cf6d4e16a7d2a", size = 1007378 }, + { url = "https://files.pythonhosted.org/packages/4a/f4/30d6e7157f12b3a0390bde94d6a8567cdb88846ed068a6e17238a4ccf600/pyzmq-26.2.0-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:c92d73464b886931308ccc45b2744e5968cbaade0b1d6aeb40d8ab537765f5bc", size = 1329532 }, + { url = "https://files.pythonhosted.org/packages/82/86/3fe917870e15ee1c3ad48229a2a64458e36036e64b4afa9659045d82bfa8/pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:794a4562dcb374f7dbbfb3f51d28fb40123b5a2abadee7b4091f93054909add5", size = 653242 }, + { url = "https://files.pythonhosted.org/packages/50/2d/242e7e6ef6c8c19e6cb52d095834508cd581ffb925699fd3c640cdc758f1/pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aee22939bb6075e7afededabad1a56a905da0b3c4e3e0c45e75810ebe3a52672", size = 888404 }, + { url = "https://files.pythonhosted.org/packages/ac/11/7270566e1f31e4ea73c81ec821a4b1688fd551009a3d2bab11ec66cb1e8f/pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ae90ff9dad33a1cfe947d2c40cb9cb5e600d759ac4f0fd22616ce6540f72797", size = 845858 }, + { url = "https://files.pythonhosted.org/packages/91/d5/72b38fbc69867795c8711bdd735312f9fef1e3d9204e2f63ab57085434b9/pyzmq-26.2.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:43a47408ac52647dfabbc66a25b05b6a61700b5165807e3fbd40063fcaf46386", size = 847375 }, + { url = "https://files.pythonhosted.org/packages/dd/9a/10ed3c7f72b4c24e719c59359fbadd1a27556a28b36cdf1cd9e4fb7845d5/pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:25bf2374a2a8433633c65ccb9553350d5e17e60c8eb4de4d92cc6bd60f01d306", size = 1183489 }, + { url = "https://files.pythonhosted.org/packages/72/2d/8660892543fabf1fe41861efa222455811adac9f3c0818d6c3170a1153e3/pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_i686.whl", hash = "sha256:007137c9ac9ad5ea21e6ad97d3489af654381324d5d3ba614c323f60dab8fae6", size = 1492932 }, + { url = "https://files.pythonhosted.org/packages/7b/d6/32fd69744afb53995619bc5effa2a405ae0d343cd3e747d0fbc43fe894ee/pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:470d4a4f6d48fb34e92d768b4e8a5cc3780db0d69107abf1cd7ff734b9766eb0", size = 1392485 }, +] + +[[package]] +name = "requests" +version = "2.32.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928 }, +] + +[[package]] +name = "ruff" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2c/c7/f3367d1da5d568192968c5c9e7f3d51fb317b9ac04828493b23d8fce8ce6/ruff-0.7.0.tar.gz", hash = "sha256:47a86360cf62d9cd53ebfb0b5eb0e882193fc191c6d717e8bef4462bc3b9ea2b", size = 3146645 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/48/59/a0275a0913f3539498d116046dd679cd657fe3b7caf5afe1733319414932/ruff-0.7.0-py3-none-linux_armv6l.whl", hash = "sha256:0cdf20c2b6ff98e37df47b2b0bd3a34aaa155f59a11182c1303cce79be715628", size = 10434007 }, + { url = "https://files.pythonhosted.org/packages/cd/94/da0ba5f956d04c90dd899209904210600009dcda039ce840d83eb4298c7d/ruff-0.7.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:496494d350c7fdeb36ca4ef1c9f21d80d182423718782222c29b3e72b3512737", size = 10048066 }, + { url = "https://files.pythonhosted.org/packages/57/1d/e5cc149ecc46e4f203403a79ccd170fad52d316f98b87d0f63b1945567db/ruff-0.7.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:214b88498684e20b6b2b8852c01d50f0651f3cc6118dfa113b4def9f14faaf06", size = 9711389 }, + { url = "https://files.pythonhosted.org/packages/05/67/fb7ea2c869c539725a16c5bc294e9aa34f8b1b6fe702f1d173a5da517c2b/ruff-0.7.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:630fce3fefe9844e91ea5bbf7ceadab4f9981f42b704fae011bb8efcaf5d84be", size = 10755174 }, + { url = "https://files.pythonhosted.org/packages/5f/f0/13703bc50536a0613ea3dce991116e5f0917a1f05528c6ab738b33c08d3f/ruff-0.7.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:211d877674e9373d4bb0f1c80f97a0201c61bcd1e9d045b6e9726adc42c156aa", size = 10196040 }, + { url = "https://files.pythonhosted.org/packages/99/c1/77b04ab20324ab03d333522ee55fb0f1c38e3ca0d326b4905f82ce6b6c70/ruff-0.7.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:194d6c46c98c73949a106425ed40a576f52291c12bc21399eb8f13a0f7073495", size = 11033684 }, + { url = "https://files.pythonhosted.org/packages/f2/97/f463334dc4efeea3551cd109163df15561c18a1c3ec13d51643740fd36ba/ruff-0.7.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:82c2579b82b9973a110fab281860403b397c08c403de92de19568f32f7178598", size = 11803700 }, + { url = "https://files.pythonhosted.org/packages/b4/f8/a31d40c4bb92933d376a53e7c5d0245d9b27841357e4820e96d38f54b480/ruff-0.7.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9af971fe85dcd5eaed8f585ddbc6bdbe8c217fb8fcf510ea6bca5bdfff56040e", size = 11347848 }, + { url = "https://files.pythonhosted.org/packages/83/62/0c133b35ddaf91c65c30a56718b80bdef36bfffc35684d29e3a4878e0ea3/ruff-0.7.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b641c7f16939b7d24b7bfc0be4102c56562a18281f84f635604e8a6989948914", size = 12480632 }, + { url = "https://files.pythonhosted.org/packages/46/96/464058dd1d980014fb5aa0a1254e78799efb3096fc7a4823cd66a1621276/ruff-0.7.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d71672336e46b34e0c90a790afeac8a31954fd42872c1f6adaea1dff76fd44f9", size = 10941919 }, + { url = "https://files.pythonhosted.org/packages/a0/f7/bda37ec77986a435dde44e1f59374aebf4282a5fa9cf17735315b847141f/ruff-0.7.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ab7d98c7eed355166f367597e513a6c82408df4181a937628dbec79abb2a1fe4", size = 10745519 }, + { url = "https://files.pythonhosted.org/packages/c2/33/5f77fc317027c057b61a848020a47442a1cbf12e592df0e41e21f4d0f3bd/ruff-0.7.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:1eb54986f770f49edb14f71d33312d79e00e629a57387382200b1ef12d6a4ef9", size = 10284872 }, + { url = "https://files.pythonhosted.org/packages/ff/50/98aec292bc9537f640b8d031c55f3414bf15b6ed13b3e943fed75ac927b9/ruff-0.7.0-py3-none-musllinux_1_2_i686.whl", hash = 
"sha256:dc452ba6f2bb9cf8726a84aa877061a2462afe9ae0ea1d411c53d226661c601d", size = 10600334 }, + { url = "https://files.pythonhosted.org/packages/f2/85/12607ae3201423a179b8cfadc7cb1e57d02cd0135e45bd0445acb4cef327/ruff-0.7.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:4b406c2dce5be9bad59f2de26139a86017a517e6bcd2688da515481c05a2cb11", size = 11017333 }, + { url = "https://files.pythonhosted.org/packages/d4/7f/3b85a56879e705d5f46ec14daf8a439fca05c3081720fe3dc3209100922d/ruff-0.7.0-py3-none-win32.whl", hash = "sha256:f6c968509f767776f524a8430426539587d5ec5c662f6addb6aa25bc2e8195ec", size = 8570962 }, + { url = "https://files.pythonhosted.org/packages/39/9f/c5ee2b40d377354dabcc23cff47eb299de4b4d06d345068f8f8cc1eadac8/ruff-0.7.0-py3-none-win_amd64.whl", hash = "sha256:ff4aabfbaaba880e85d394603b9e75d32b0693152e16fa659a3064a85df7fce2", size = 9365544 }, + { url = "https://files.pythonhosted.org/packages/89/8b/ee1509f60148cecba644aa718f6633216784302458340311898aaf0b1bed/ruff-0.7.0-py3-none-win_arm64.whl", hash = "sha256:10842f69c245e78d6adec7e1db0a7d9ddc2fff0621d730e61657b64fa36f207e", size = 8695763 }, +] + +[[package]] +name = "s3transfer" +version = "0.10.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a0/a8/e0a98fd7bd874914f0608ef7c90ffde17e116aefad765021de0f012690a2/s3transfer-0.10.3.tar.gz", hash = "sha256:4f50ed74ab84d474ce614475e0b8d5047ff080810aac5d01ea25231cfc944b0c", size = 144591 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/c0/b0fba8259b61c938c9733da9346b9f93e00881a9db22aafdd72f6ae0ec05/s3transfer-0.10.3-py3-none-any.whl", hash = "sha256:263ed587a5803c6c708d3ce44dc4dfedaab4c1a32e8329bab818933d79ddcf5d", size = 82625 }, +] + +[[package]] +name = "schedule" +version = "1.2.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0c/91/b525790063015759f34447d4cf9d2ccb52cdee0f1dd6ff8764e863bcb74c/schedule-1.2.2.tar.gz", hash = "sha256:15fe9c75fe5fd9b9627f3f19cc0ef1420508f9f9a46f45cd0769ef75ede5f0b7", size = 26452 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/a7/84c96b61fd13205f2cafbe263cdb2745965974bdf3e0078f121dfeca5f02/schedule-1.2.2-py3-none-any.whl", hash = "sha256:5bef4a2a0183abf44046ae0d164cadcac21b1db011bdd8102e4a0c1e91e06a7d", size = 12220 }, +] + +[[package]] +name = "six" +version = "1.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/71/39/171f1c67cd00715f190ba0b100d606d440a28c93c7714febeca8b79af85e/six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", size = 34041 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d9/5a/e7c31adbe875f2abbb91bd84cf2dc52d792b5a01506781dbcf25c91daf11/six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254", size = 11053 }, +] + +[[package]] +name = "sqlalchemy" +version = "2.0.36" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "greenlet", marker = "(python_full_version < '3.13' and platform_machine == 'AMD64') or (python_full_version < '3.13' and platform_machine == 'WIN32') or (python_full_version < '3.13' and platform_machine == 'aarch64') or (python_full_version < '3.13' and platform_machine == 'amd64') or (python_full_version < '3.13' and platform_machine == 'ppc64le') or (python_full_version < '3.13' and 
platform_machine == 'win32') or (python_full_version < '3.13' and platform_machine == 'x86_64')" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/50/65/9cbc9c4c3287bed2499e05033e207473504dc4df999ce49385fb1f8b058a/sqlalchemy-2.0.36.tar.gz", hash = "sha256:7f2767680b6d2398aea7082e45a774b2b0767b5c8d8ffb9c8b683088ea9b29c5", size = 9574485 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b8/bf/005dc47f0e57556e14512d5542f3f183b94fde46e15ff1588ec58ca89555/SQLAlchemy-2.0.36-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7b64e6ec3f02c35647be6b4851008b26cff592a95ecb13b6788a54ef80bbdd4", size = 2092378 }, + { url = "https://files.pythonhosted.org/packages/94/65/f109d5720779a08e6e324ec89a744f5f92c48bd8005edc814bf72fbb24e5/SQLAlchemy-2.0.36-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:46331b00096a6db1fdc052d55b101dbbfc99155a548e20a0e4a8e5e4d1362855", size = 2082778 }, + { url = "https://files.pythonhosted.org/packages/60/f6/d9aa8c49c44f9b8c9b9dada1f12fa78df3d4c42aa2de437164b83ee1123c/SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdf3386a801ea5aba17c6410dd1dc8d39cf454ca2565541b5ac42a84e1e28f53", size = 3232191 }, + { url = "https://files.pythonhosted.org/packages/8a/ab/81d4514527c068670cb1d7ab62a81a185df53a7c379bd2a5636e83d09ede/SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9dfa18ff2a67b09b372d5db8743c27966abf0e5344c555d86cc7199f7ad83a", size = 3243044 }, + { url = "https://files.pythonhosted.org/packages/35/b4/f87c014ecf5167dc669199cafdb20a7358ff4b1d49ce3622cc48571f811c/SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:90812a8933df713fdf748b355527e3af257a11e415b613dd794512461eb8a686", size = 3178511 }, + { url = "https://files.pythonhosted.org/packages/ea/09/badfc9293bc3ccba6ede05e5f2b44a760aa47d84da1fc5a326e963e3d4d9/SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1bc330d9d29c7f06f003ab10e1eaced295e87940405afe1b110f2eb93a233588", size = 3205147 }, + { url = "https://files.pythonhosted.org/packages/c8/60/70e681de02a13c4b27979b7b78da3058c49bacc9858c89ba672e030f03f2/SQLAlchemy-2.0.36-cp312-cp312-win32.whl", hash = "sha256:79d2e78abc26d871875b419e1fd3c0bca31a1cb0043277d0d850014599626c2e", size = 2062709 }, + { url = "https://files.pythonhosted.org/packages/b7/ed/f6cd9395e41bfe47dd253d74d2dfc3cab34980d4e20c8878cb1117306085/SQLAlchemy-2.0.36-cp312-cp312-win_amd64.whl", hash = "sha256:b544ad1935a8541d177cb402948b94e871067656b3a0b9e91dbec136b06a2ff5", size = 2088433 }, + { url = "https://files.pythonhosted.org/packages/78/5c/236398ae3678b3237726819b484f15f5c038a9549da01703a771f05a00d6/SQLAlchemy-2.0.36-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b5cc79df7f4bc3d11e4b542596c03826063092611e481fcf1c9dfee3c94355ef", size = 2087651 }, + { url = "https://files.pythonhosted.org/packages/a8/14/55c47420c0d23fb67a35af8be4719199b81c59f3084c28d131a7767b0b0b/SQLAlchemy-2.0.36-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3c01117dd36800f2ecaa238c65365b7b16497adc1522bf84906e5710ee9ba0e8", size = 2078132 }, + { url = "https://files.pythonhosted.org/packages/3d/97/1e843b36abff8c4a7aa2e37f9bea364f90d021754c2de94d792c2d91405b/SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bc633f4ee4b4c46e7adcb3a9b5ec083bf1d9a97c1d3854b92749d935de40b9b", size = 3164559 }, + { url = 
"https://files.pythonhosted.org/packages/7b/c5/07f18a897b997f6d6b234fab2bf31dccf66d5d16a79fe329aefc95cd7461/SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e46ed38affdfc95d2c958de328d037d87801cfcbea6d421000859e9789e61c2", size = 3177897 }, + { url = "https://files.pythonhosted.org/packages/b3/cd/e16f3cbefd82b5c40b33732da634ec67a5f33b587744c7ab41699789d492/SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b2985c0b06e989c043f1dc09d4fe89e1616aadd35392aea2844f0458a989eacf", size = 3111289 }, + { url = "https://files.pythonhosted.org/packages/15/85/5b8a3b0bc29c9928aa62b5c91fcc8335f57c1de0a6343873b5f372e3672b/SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a121d62ebe7d26fec9155f83f8be5189ef1405f5973ea4874a26fab9f1e262c", size = 3139491 }, + { url = "https://files.pythonhosted.org/packages/a1/95/81babb6089938680dfe2cd3f88cd3fd39cccd1543b7cb603b21ad881bff1/SQLAlchemy-2.0.36-cp313-cp313-win32.whl", hash = "sha256:0572f4bd6f94752167adfd7c1bed84f4b240ee6203a95e05d1e208d488d0d436", size = 2060439 }, + { url = "https://files.pythonhosted.org/packages/c1/ce/5f7428df55660d6879d0522adc73a3364970b5ef33ec17fa125c5dbcac1d/SQLAlchemy-2.0.36-cp313-cp313-win_amd64.whl", hash = "sha256:8c78ac40bde930c60e0f78b3cd184c580f89456dd87fc08f9e3ee3ce8765ce88", size = 2084574 }, + { url = "https://files.pythonhosted.org/packages/b8/49/21633706dd6feb14cd3f7935fc00b60870ea057686035e1a99ae6d9d9d53/SQLAlchemy-2.0.36-py3-none-any.whl", hash = "sha256:fddbe92b4760c6f5d48162aef14824add991aeda8ddadb3c31d56eb15ca69f8e", size = 1883787 }, +] + +[[package]] +name = "stack-data" +version = "0.6.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "asttokens" }, + { name = "executing" }, + { name = "pure-eval" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/28/e3/55dcc2cfbc3ca9c29519eb6884dd1415ecb53b0e934862d3559ddcb7e20b/stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9", size = 44707 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f1/7b/ce1eafaf1a76852e2ec9b22edecf1daa58175c090266e9f6c64afcd81d91/stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695", size = 24521 }, +] + +[[package]] +name = "tnc-edge-service" +version = "0.1.0" +source = { virtual = "." 
} +dependencies = [ + { name = "alembic" }, + { name = "boto3" }, + { name = "click" }, + { name = "flask" }, + { name = "flask-admin" }, + { name = "flask-sqlalchemy" }, + { name = "geographiclib" }, + { name = "nmeasim" }, + { name = "psycopg2-binary" }, + { name = "pynmeagps" }, + { name = "requests" }, + { name = "schedule" }, + { name = "sqlalchemy" }, + { name = "wheel" }, +] + +[package.dev-dependencies] +dev = [ + { name = "detect-secrets" }, + { name = "docstr-coverage" }, + { name = "ipykernel" }, + { name = "pandas" }, + { name = "polars" }, + { name = "pre-commit" }, + { name = "pytest" }, + { name = "pytest-cov" }, + { name = "ruff" }, +] + +[package.metadata] +requires-dist = [ + { name = "alembic", specifier = ">=1.13.3" }, + { name = "boto3", specifier = ">=1.35.46" }, + { name = "click", specifier = ">=8.1.7" }, + { name = "flask", specifier = ">=3.0.3" }, + { name = "flask-admin", specifier = ">=1.6.1" }, + { name = "flask-sqlalchemy", specifier = ">=3.1.1" }, + { name = "geographiclib", specifier = ">=2.0" }, + { name = "nmeasim", specifier = ">=1.1.1.0" }, + { name = "psycopg2-binary", specifier = ">=2.9.10" }, + { name = "pynmeagps", specifier = ">=1.0.43" }, + { name = "requests", specifier = ">=2.32.3" }, + { name = "schedule", specifier = ">=1.2.2" }, + { name = "sqlalchemy", specifier = ">=2.0.36" }, + { name = "wheel", specifier = ">=0.44.0" }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "detect-secrets", specifier = ">=1.5.0" }, + { name = "docstr-coverage", specifier = ">=2.3.2" }, + { name = "ipykernel", specifier = ">=6.29.5" }, + { name = "pandas", specifier = ">=2.2.3" }, + { name = "polars", specifier = ">=1.10.0" }, + { name = "pre-commit", specifier = ">=4.0.1" }, + { name = "pytest", specifier = ">=8.3.3" }, + { name = "pytest-cov", specifier = ">=5.0.0" }, + { name = "ruff", specifier = ">=0.7.0" }, +] + +[[package]] +name = "tornado" +version = "6.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/66/398ac7167f1c7835406888a386f6d0d26ee5dbf197d8a571300be57662d3/tornado-6.4.1.tar.gz", hash = "sha256:92d3ab53183d8c50f8204a51e6f91d18a15d5ef261e84d452800d4ff6fc504e9", size = 500623 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/d9/c33be3c1a7564f7d42d87a8d186371a75fd142097076767a5c27da941fef/tornado-6.4.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8", size = 435924 }, + { url = "https://files.pythonhosted.org/packages/2e/0f/721e113a2fac2f1d7d124b3279a1da4c77622e104084f56119875019ffab/tornado-6.4.1-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6d5ce3437e18a2b66fbadb183c1d3364fb03f2be71299e7d10dbeeb69f4b2a14", size = 433883 }, + { url = "https://files.pythonhosted.org/packages/13/cf/786b8f1e6fe1c7c675e79657448178ad65e41c1c9765ef82e7f6f765c4c5/tornado-6.4.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e20b9113cd7293f164dc46fffb13535266e713cdb87bd2d15ddb336e96cfc4", size = 437224 }, + { url = "https://files.pythonhosted.org/packages/e4/8e/a6ce4b8d5935558828b0f30f3afcb2d980566718837b3365d98e34f6067e/tornado-6.4.1-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ae50a504a740365267b2a8d1a90c9fbc86b780a39170feca9bcc1787ff80842", size = 436597 }, + { url = 
"https://files.pythonhosted.org/packages/22/d4/54f9d12668b58336bd30defe0307e6c61589a3e687b05c366f804b7faaf0/tornado-6.4.1-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:613bf4ddf5c7a95509218b149b555621497a6cc0d46ac341b30bd9ec19eac7f3", size = 436797 }, + { url = "https://files.pythonhosted.org/packages/cf/3f/2c792e7afa7dd8b24fad7a2ed3c2f24a5ec5110c7b43a64cb6095cc106b8/tornado-6.4.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25486eb223babe3eed4b8aecbac33b37e3dd6d776bc730ca14e1bf93888b979f", size = 437516 }, + { url = "https://files.pythonhosted.org/packages/71/63/c8fc62745e669ac9009044b889fc531b6f88ac0f5f183cac79eaa950bb23/tornado-6.4.1-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:454db8a7ecfcf2ff6042dde58404164d969b6f5d58b926da15e6b23817950fc4", size = 436958 }, + { url = "https://files.pythonhosted.org/packages/94/d4/f8ac1f5bd22c15fad3b527e025ce219bd526acdbd903f52053df2baecc8b/tornado-6.4.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a02a08cc7a9314b006f653ce40483b9b3c12cda222d6a46d4ac63bb6c9057698", size = 436882 }, + { url = "https://files.pythonhosted.org/packages/4b/3e/a8124c21cc0bbf144d7903d2a0cadab15cadaf683fa39a0f92bc567f0d4d/tornado-6.4.1-cp38-abi3-win32.whl", hash = "sha256:d9a566c40b89757c9aa8e6f032bcdb8ca8795d7c1a9762910c722b1635c9de4d", size = 438092 }, + { url = "https://files.pythonhosted.org/packages/d9/2f/3f2f05e84a7aff787a96d5fb06821323feb370fe0baed4db6ea7b1088f32/tornado-6.4.1-cp38-abi3-win_amd64.whl", hash = "sha256:b24b8982ed444378d7f21d563f4180a2de31ced9d8d84443907a0a64da2072e7", size = 438532 }, +] + +[[package]] +name = "tqdm" +version = "4.66.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "platform_system == 'Windows'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/58/83/6ba9844a41128c62e810fddddd72473201f3eacde02046066142a2d96cc5/tqdm-4.66.5.tar.gz", hash = "sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad", size = 169504 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/48/5d/acf5905c36149bbaec41ccf7f2b68814647347b72075ac0b1fe3022fdc73/tqdm-4.66.5-py3-none-any.whl", hash = "sha256:90279a3770753eafc9194a0364852159802111925aa30eb3f9d85b0e805ac7cd", size = 78351 }, +] + +[[package]] +name = "traitlets" +version = "5.14.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/eb/79/72064e6a701c2183016abbbfedaba506d81e30e232a68c9f0d6f6fcd1574/traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7", size = 161621 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/c0/8f5d070730d7836adc9c9b6408dec68c6ced86b304a9b26a14df072a6e8c/traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f", size = 85359 }, +] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/df/db/f35a00659bc03fec321ba8bce9420de607a1d37f8342eee1863174c69557/typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8", size = 85321 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = 
"sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438 }, +] + +[[package]] +name = "tzdata" +version = "2024.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e1/34/943888654477a574a86a98e9896bae89c7aa15078ec29f490fef2f1e5384/tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc", size = 193282 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a6/ab/7e5f53c3b9d14972843a647d8d7a853969a58aecc7559cb3267302c94774/tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd", size = 346586 }, +] + +[[package]] +name = "urllib3" +version = "2.2.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ed/63/22ba4ebfe7430b76388e7cd448d5478814d3032121827c12a2cc287e2260/urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9", size = 300677 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ce/d9/5f4c13cecde62396b0d3fe530a50ccea91e7dfc1ccf0e09c228841bb5ba8/urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac", size = 126338 }, +] + +[[package]] +name = "virtualenv" +version = "20.27.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "distlib" }, + { name = "filelock" }, + { name = "platformdirs" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/10/7f/192dd6ab6d91ebea7adf6c030eaf549b1ec0badda9f67a77b633602f66ac/virtualenv-20.27.0.tar.gz", hash = "sha256:2ca56a68ed615b8fe4326d11a0dca5dfbe8fd68510fb6c6349163bed3c15f2b2", size = 6483858 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/15/828ec11907aee2349a9342fa71fba4ba7f3af938162a382dd7da339dea16/virtualenv-20.27.0-py3-none-any.whl", hash = "sha256:44a72c29cceb0ee08f300b314848c86e57bf8d1f13107a5e671fb9274138d655", size = 3110969 }, +] + +[[package]] +name = "wcwidth" +version = "0.2.13" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6c/63/53559446a878410fc5a5974feb13d31d78d752eb18aeba59c7fef1af7598/wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5", size = 101301 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", size = 34166 }, +] + +[[package]] +name = "werkzeug" +version = "3.0.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0f/e2/6dbcaab07560909ff8f654d3a2e5a60552d937c909455211b1b36d7101dc/werkzeug-3.0.4.tar.gz", hash = "sha256:34f2371506b250df4d4f84bfe7b0921e4762525762bbd936614909fe25cd7306", size = 803966 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4b/84/997bbf7c2bf2dc3f09565c6d0b4959fefe5355c18c4096cfd26d83e0785b/werkzeug-3.0.4-py3-none-any.whl", hash = "sha256:02c9eb92b7d6c06f31a782811505d2157837cea66aaede3e217c7c27c039476c", size = 227554 }, +] + +[[package]] +name = "wheel" +version = "0.44.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/b7/a0/95e9e962c5fd9da11c1e28aa4c0d8210ab277b1ada951d2aee336b505813/wheel-0.44.0.tar.gz", hash = "sha256:a29c3f2817e95ab89aa4660681ad547c0e9547f20e75b0562fe7723c9a2a9d49", size = 100733 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1b/d1/9babe2ccaecff775992753d8686970b1e2755d21c8a63be73aba7a4e7d77/wheel-0.44.0-py3-none-any.whl", hash = "sha256:2376a90c98cc337d18623527a97c31797bd02bad0033d41547043a1cbfbe448f", size = 67059 }, +] + +[[package]] +name = "wtforms" +version = "3.2.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/01/e4/633d080897e769ed5712dcfad626e55dbd6cf45db0ff4d9884315c6a82da/wtforms-3.2.1.tar.gz", hash = "sha256:df3e6b70f3192e92623128123ec8dca3067df9cfadd43d59681e210cfb8d4682", size = 137801 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/08/c9/2088fb5645cd289c99ebe0d4cdcc723922a1d8e1beaefb0f6f76dff9b21c/wtforms-3.2.1-py3-none-any.whl", hash = "sha256:583bad77ba1dd7286463f21e11aa3043ca4869d03575921d1a1698d0715e0fd4", size = 152454 }, +] diff --git a/vector/__init__.py b/vector/__init__.py index f8cafd9..35b2cb1 100644 --- a/vector/__init__.py +++ b/vector/__init__.py @@ -1,17 +1,21 @@ - - -from .gps import GpsVector - +from .catchcountA import CatchCountA +from .elogtimegaps import ElogTimeGapsVector +from .equipment_outage_agg import EquipmentOutageAggVector from .fish_ai import FishAiEventsComeInFourHourBurstsVector - +from .gps import GpsVector from .internet import InternetVector - -from .equipment_outage_agg import EquipmentOutageAggVector - +from .tegrastats import TegrastatsVector from .thalos_mount import ThalosMountVector from .thalos_vids_exist import ThalosVideosExistVector -from .elogtimegaps import ElogTimeGapsVector - -from .catchcountA import CatchCountA -from .tegrastats import TegrastatsVector +__all__ = [ + "CatchCountA", + "ElogTimeGapsVector", + "EquipmentOutageAggVector", + "FishAiEventsComeInFourHourBurstsVector", + "GpsVector", + "InternetVector", + "TegrastatsVector", + "ThalosMountVector", + "ThalosVideosExistVector", +] diff --git a/vector/catchcountA.py b/vector/catchcountA.py index 3d29717..f077317 100644 --- a/vector/catchcountA.py +++ b/vector/catchcountA.py @@ -1,74 +1,82 @@ -from datetime import datetime, timezone, timedelta -from model import Base, OndeckData, VideoFile, DeckhandEventView, Track -from model import RiskVector, Test -from pathlib import Path -from sqlalchemy.orm import Session -from statistics import mean, fmean - import json import math -import numpy as np import os -import pandas as pa -import re +from datetime import UTC, datetime, timedelta +from pathlib import Path +from statistics import fmean + +import numpy as np +import pandas as pd import sqlalchemy as sa -import subprocess +from sqlalchemy.orm import Session + +from model import Base, DeckhandEventView, OndeckData, RiskVector, Test, Track, VideoFile -class CatchCountA(): +class CatchCountA: # tests = relationship("Test") def __init__(self, session: Session, rv) -> None: self.session: Session = session self.config(rv) - + def config(self, rv): self.rv = rv config = json.loads(rv.configblob) - self.window_minutes = config['window_minutes'] - self.ai_table = config['ai_table'] + self.window_minutes = config["window_minutes"] + self.ai_table = config["ai_table"] self.confidence_filter = None - if 'confidence_filter' in config.keys(): - self.confidence_filter = config['confidence_filter'] 
+ if "confidence_filter" in config.keys(): + self.confidence_filter = config["confidence_filter"] self.ok_p_coeff = 0.2 - if 'ok_p_coeff' in config.keys(): - self.ok_p_coeff = config['ok_p_coeff'] - + if "ok_p_coeff" in config.keys(): + self.ok_p_coeff = config["ok_p_coeff"] + print(self.rv) print(self.ai_table) - - def execute(self, expected_timedelta): - datetime_to = datetime.now(tz=timezone.utc) - datetime_from = datetime_to - timedelta(minutes = self.window_minutes) + datetime_to = datetime.now(tz=UTC) + datetime_from = datetime_to - timedelta(minutes=self.window_minutes) - - result = Test(name=f"catch count A test from {datetime_from:%Y-%m-%d %H:%M} to {datetime_to:%Y-%m-%d %H:%M}", vector=self.rv) + result = Test( + name=f"catch count A test from {datetime_from:%Y-%m-%d %H:%M} to {datetime_to:%Y-%m-%d %H:%M}", + vector=self.rv, + ) self.session.add(result) self.session.commit() # print(result) - - datas = [] - recent_elogs: list[DeckhandEventView] = self.session.query(DeckhandEventView).where( - DeckhandEventView.datetime > datetime_to - 2*timedelta(minutes = self.window_minutes)).all() + datas = [] - if self.ai_table == 'ondeckdata': - ondeck_datas = self.session.query(OndeckData) \ - .join(OndeckData.video_file) \ - .options(sa.orm.joinedload(OndeckData.video_file)) \ + recent_elogs: list[DeckhandEventView] = ( + self.session.query(DeckhandEventView) + .where( + DeckhandEventView.datetime + > datetime_to - 2 * timedelta(minutes=self.window_minutes) + ) + .all() + ) + + if self.ai_table == "ondeckdata": + ondeck_datas = ( + self.session.query(OndeckData) + .join(OndeckData.video_file) + .options(sa.orm.joinedload(OndeckData.video_file)) .where( VideoFile.start_datetime < datetime_to, - VideoFile.start_datetime >= datetime_from) \ - .order_by(OndeckData.datetime).all() - + VideoFile.start_datetime >= datetime_from, + ) + .order_by(OndeckData.datetime) + .all() + ) + ondeck_datas: list[OndeckData] = list(ondeck_datas) expected_videos = self.window_minutes / 5.0 - errored = len(list(filter(lambda x: x.status != 'done', ondeck_datas))) + errored = len(list(filter(lambda x: x.status != "done", ondeck_datas))) # print(f"ondeck errored: {errored}") @@ -80,10 +88,10 @@ def execute(self, expected_timedelta): s = file.stat() if s.st_size <= 0: print("empty file", file) - + except Exception as e: print("exception", e) - + fish_counts = [] is_fishings = [] dts = [] @@ -93,32 +101,39 @@ def execute(self, expected_timedelta): continue fish_counts.append(row.overallcatches) dts.append(row.video_file.start_datetime) - is_fishing = any(map(lambda elog: \ - elog.systemstarthauldatetime < row.video_file.start_datetime and \ - row.video_file.start_datetime < elog.systemendhauldatetime, recent_elogs)) + is_fishing = any( + map( + lambda elog: elog.systemstarthauldatetime < row.video_file.start_datetime + and row.video_file.start_datetime < elog.systemendhauldatetime, + recent_elogs, + ) + ) is_fishings.append(int(is_fishing)) - a = pa.DataFrame({ - 'datetime': dts, - "fish_counts": fish_counts, - "is_fishings": is_fishings, - }) - + a = pd.DataFrame( + { + "datetime": dts, + "fish_counts": fish_counts, + "is_fishings": is_fishings, + } + ) + # from matplotlib import pyplot # pyplot.axis() # pyplot.plot(a['datetime'],a['fish_counts']) # pyplot.plot(a['datetime'],a['is_fishings']) # pyplot.show() - + if not np.any(np.diff(is_fishings)): # this means there is no overlap with the elogs, so the is_fishings data is a flat line # running a p_coeff when one input is a flat line is meaningless # so this test 
can't continue - result.detail = "elog reports a flat is_fishing variable over time. p_coeff can't work" + result.detail = ( + "elog reports a flat is_fishing variable over time. p_coeff can't work" + ) self.session.commit() return - if not np.any(np.diff(fish_counts)): result.detail = "ondeck reports a flat fish count over time. p_coeff can't work" self.session.commit() @@ -128,20 +143,24 @@ def execute(self, expected_timedelta): print(p_coeffs) p_coeff = p_coeffs[0][1] - - + result.score = math.sqrt(self.ok_p_coeff - p_coeff) if p_coeff <= self.ok_p_coeff else 0 - + self.session.commit() - elif self.ai_table == 'tracks': - tracks_rows = self.session.execute(sa.text('select t.*, v.start_datetime from tracks t \ + elif self.ai_table == "tracks": + tracks_rows = self.session.execute( + sa.text( + "select t.*, v.start_datetime from tracks t \ join video_files v on t.video_uri = v.decrypted_path \ where v.start_datetime > :datetime_from \ and v.start_datetime <= :datetime_to \ - order by t.datetime asc;'), { - 'datetime_from': datetime_from, - 'datetime_to': datetime_to, - }) + order by t.datetime asc;" + ), + { + "datetime_from": datetime_from, + "datetime_to": datetime_to, + }, + ) tracks: list[Track] = list(tracks_rows) expected_videos = self.window_minutes / 5.0 @@ -164,24 +183,25 @@ def execute(self, expected_timedelta): fish_counts[row.start_datetime] = 0 fish_counts[row.start_datetime] += 1 - fishCountS =pa.Series(fish_counts) + fishCountS = pd.Series(fish_counts) fishCountS.sort_index(inplace=True) is_fishings = {} for start_datetime in fish_counts.keys(): - is_fishing = any(map(lambda elog: \ - elog.systemstarthauldatetime < start_datetime and \ - start_datetime < elog.systemendhauldatetime, recent_elogs)) + is_fishing = any( + map( + lambda elog: elog.systemstarthauldatetime < start_datetime + and start_datetime < elog.systemendhauldatetime, + recent_elogs, + ) + ) is_fishings[start_datetime] = int(is_fishing) - isFishingS = pa.Series(is_fishings) + isFishingS = pd.Series(is_fishings) isFishingS.sort_index(inplace=True) - a = pa.DataFrame({ - "fish_counts": fishCountS, - "is_fishings": isFishingS - }) + a = pd.DataFrame({"fish_counts": fishCountS, "is_fishings": isFishingS}) # print(a) # from matplotlib import pyplot @@ -189,16 +209,17 @@ def execute(self, expected_timedelta): # pyplot.plot(isFishingS) # pyplot.plot(fishCountS) # pyplot.show() - + if not np.any(np.diff(isFishingS.values)): # this means there is no overlap with the elogs, so the is_fishings data is a flat line # running a p_coeff when one input is a flat line is meaningless # so this test can't continue - result.detail = "elog reports a flat is_fishing variable over time. p_coeff can't work" + result.detail = ( + "elog reports a flat is_fishing variable over time. p_coeff can't work" + ) self.session.commit() return - if not np.any(np.diff(fishCountS.values)): result.detail = "ondeck reports a flat fish count over time. 
p_coeff can't work" self.session.commit() @@ -208,54 +229,49 @@ def execute(self, expected_timedelta): print("p_coeffs:", p_coeffs) p_coeff = p_coeffs[0][1] - - + result.score = math.sqrt(self.ok_p_coeff - p_coeff) if p_coeff <= self.ok_p_coeff else 0 - + self.session.commit() return - - # test by running directly with `python3 -m vector.fname` -if __name__ == '__main__': - +if __name__ == "__main__": from flask.config import Config as FlaskConfig - flaskconfig = FlaskConfig(root_path='') - flaskconfig.from_object('config.defaults') - if 'ENVIRONMENT' in os.environ: - flaskconfig.from_envvar('ENVIRONMENT') + flaskconfig = FlaskConfig(root_path="") + + flaskconfig.from_object("config.defaults") + if "ENVIRONMENT" in os.environ: + flaskconfig.from_envvar("ENVIRONMENT") import click @click.command() - @click.option('--dbname', default=flaskconfig.get('DBNAME')) - @click.option('--dbuser', default=flaskconfig.get('DBUSER')) + @click.option("--dbname", default=flaskconfig.get("DBNAME")) + @click.option("--dbuser", default=flaskconfig.get("DBUSER")) def main(dbname, dbuser): - import sqlalchemy as sa from sqlalchemy.orm import sessionmaker as SessionMaker - sa_engine = sa.create_engine("postgresql+psycopg2://%s@/%s"%(dbuser, dbname), echo=True) + sa_engine = sa.create_engine(f"postgresql+psycopg2://{dbuser}@/{dbname}", echo=True) sessionmaker = SessionMaker(sa_engine) Base.metadata.create_all(sa_engine) session = sessionmaker() # results = list(session.query(RiskVector).filter(RiskVector.name == ThalosVideosExistVector.__name__)) - session.execute('delete from tests where vector_id = -1;') - session.execute('delete from vectors where id = -1;') + session.execute("delete from tests where vector_id = -1;") + session.execute("delete from vectors where id = -1;") rv = RiskVector() rv.id = -1 rv.name = CatchCountA.__name__ - rv.schedule_string = 'every 1 minutes' + rv.schedule_string = "every 1 minutes" rv.configblob = '{"window_minutes": 60000, "ai_table":"ondeckdata"}' rv.tests = [] tmv = CatchCountA(session, rv=rv) tmv.execute(timedelta(minutes=5)) - - main() \ No newline at end of file + main() diff --git a/vector/elogtimegaps.py b/vector/elogtimegaps.py index ccd29bf..ca3a4e0 100644 --- a/vector/elogtimegaps.py +++ b/vector/elogtimegaps.py @@ -1,99 +1,94 @@ - -from model import Base, RiskVector, Test +import json +import os +from datetime import UTC, datetime, timedelta import sqlalchemy as sa from sqlalchemy.orm.session import Session -# from sqlalchemy.orm import session -from model.internetdata import InternetData - -import json -import subprocess - -import re -import codecs -import os +from model import Base, RiskVector, Test -from datetime import datetime, timedelta, timezone +# from sqlalchemy.orm import session -class ElogTimeGapsVector(): +class ElogTimeGapsVector: # tests = relationship("Test") def __init__(self, session: Session, rv) -> None: self.session: Session = session self.config(rv) - + def config(self, rv): self.rv = rv config = json.loads(rv.configblob) print(self.rv) - - def execute(self, expected_timedelta: timedelta): - datetime_to = datetime.now(tz=timezone.utc) + datetime_to = datetime.now(tz=UTC) datetime_from = datetime_to - expected_timedelta - last_departure_res = self.session.execute(sa.text("""select max(datetime) last_departure from port_departures;""")) + last_departure_res = self.session.execute( + sa.text("""select max(datetime) last_departure from port_departures;""") + ) last_departure: datetime = last_departure_res.first()[0] - - result = Test(name="elog time 
gap vector at %s"%(datetime_to.strftime('%Y-%m-%d %H:%M:%SZ')), vector=self.rv) + + result = Test( + name="elog time gap vector at %s" % (datetime_to.strftime("%Y-%m-%d %H:%M:%SZ")), + vector=self.rv, + ) self.session.add(result) self.session.commit() # print(result) - - res = self.session.execute(sa.text(""" - select coalesce(max(score), 0) - from elog_time_gap_score - where ts_prev >= :recent_departure ;"""), { - "recent_departure": last_departure - }) + + res = self.session.execute( + sa.text(""" + select coalesce(max(score), 0) + from elog_time_gap_score + where ts_prev >= :recent_departure ;"""), + {"recent_departure": last_departure}, + ) result.score = res.first()[0] - + self.session.commit() return result # test by running directly with `python3 -m vector.fname` -if __name__ == '__main__': - +if __name__ == "__main__": from flask.config import Config as FlaskConfig - flaskconfig = FlaskConfig(root_path='') - flaskconfig.from_object('config.defaults') - if 'ENVIRONMENT' in os.environ: - flaskconfig.from_envvar('ENVIRONMENT') + flaskconfig = FlaskConfig(root_path="") + + flaskconfig.from_object("config.defaults") + if "ENVIRONMENT" in os.environ: + flaskconfig.from_envvar("ENVIRONMENT") import click @click.command() - @click.option('--dbname', default=flaskconfig.get('DBNAME')) - @click.option('--dbuser', default=flaskconfig.get('DBUSER')) + @click.option("--dbname", default=flaskconfig.get("DBNAME")) + @click.option("--dbuser", default=flaskconfig.get("DBUSER")) def main(dbname, dbuser): - import sqlalchemy as sa from sqlalchemy.orm import sessionmaker as SessionMaker - sa_engine = sa.create_engine("postgresql+psycopg2://%s@/%s"%(dbuser, dbname), echo=True) + sa_engine = sa.create_engine("postgresql+psycopg2://%s@/%s" % (dbuser, dbname), echo=True) sessionmaker = SessionMaker(sa_engine) Base.metadata.create_all(sa_engine) session = sessionmaker() # results = list(session.query(RiskVector).filter(RiskVector.name == ThalosVideosExistVector.__name__)) - session.execute(sa.text('delete from tests where vector_id = -1;')) - session.execute(sa.text('delete from vectors where id = -1;')) + session.execute(sa.text("delete from tests where vector_id = -1;")) + session.execute(sa.text("delete from vectors where id = -1;")) rv = RiskVector() rv.id = -1 rv.name = ElogTimeGapsVector.__name__ - rv.schedule_string = 'every 1 minutes' - rv.configblob = '{}' + rv.schedule_string = "every 1 minutes" + rv.configblob = "{}" rv.tests = [] tmv = ElogTimeGapsVector(session, rv=rv) tmv.execute(timedelta(minutes=5)) - main() diff --git a/vector/equipment_outage_agg.py b/vector/equipment_outage_agg.py index b5e62c8..7f90aaf 100644 --- a/vector/equipment_outage_agg.py +++ b/vector/equipment_outage_agg.py @@ -1,70 +1,68 @@ - -from model import RiskVector, Test - -from sqlalchemy.orm.session import Session -from sqlalchemy.orm.query import Query -from model import Test -from vector import InternetVector - import json +from datetime import UTC, datetime, timedelta -from datetime import datetime, timedelta, timezone +from sqlalchemy.orm.query import Query +from sqlalchemy.orm.session import Session +from model import RiskVector, Test -class EquipmentOutageAggVector(): +class EquipmentOutageAggVector: rv: RiskVector sessin: Session def __init__(self, session: Session, rv) -> None: self.session = session self.config(rv) - + def config(self, rv): self.rv = rv config = json.loads(rv.configblob) - self.observed_riskvector_ids = config['observed_riskvector_ids'] + self.observed_riskvector_ids = 
config["observed_riskvector_ids"] print(self.rv) - def execute(self, expected_timedelta: timedelta): - - datetime_to = datetime.now(tz=timezone.utc) + datetime_to = datetime.now(tz=UTC) datetime_from = datetime_to - expected_timedelta - result = Test(name="equipment outage aggregator from %s to %s"%(datetime_from, datetime_to), vector=self.rv) + result = Test( + name="equipment outage aggregator from %s to %s" % (datetime_from, datetime_to), + vector=self.rv, + ) self.session.add(result) self.session.commit() # print(result) - - q: Query[Test] = self.session.query(Test).filter( - Test.datetime >= datetime_from, - Test.datetime <= datetime_to, - Test.vector_id.in_(self.observed_riskvector_ids), - ).order_by(Test.datetime) - - tests: list[Test] = list(q.all()) + q: Query[Test] = ( + self.session.query(Test) + .filter( + Test.datetime >= datetime_from, + Test.datetime <= datetime_to, + Test.vector_id.in_(self.observed_riskvector_ids), + ) + .order_by(Test.datetime) + ) - groups : dict[int, list[Test]] = {} - group_scores : dict[int, int] = {} + tests: list[Test] = list(q.all()) + + groups: dict[int, list[Test]] = {} + group_scores: dict[int, int] = {} for test in tests: - if test.vector_id not in groups.keys(): + if test.vector_id not in groups: groups[test.vector_id] = [] groups[test.vector_id].append(test) - - for vector_id in groups.keys(): + for vector_id in groups: group: list[Test] = groups[vector_id] expweighted = 0.0 outage = 0.0 - # find all sequential outages. + # find all sequential outages. # Determine a score based on how many and how long the outages are. # longer sequences have a much higher weight by cubing its length # divide by a constant scaling factor, then equalize to 0 0.0: outage += 1 else: @@ -72,22 +70,23 @@ def execute(self, expected_timedelta: timedelta): # this is the end of a sequence, cube its length expweighted += outage * outage * outage / 200.0 outage = 0 - + if outage > 0: expweighted += outage * outage * outage / 200.0 - - print("expweighted: %s outage: %d "%(expweighted,outage)) - - group_scores[vector_id] = 1.0 - 1.0/(expweighted+1.0) - - result.detail = "vector_id=score: " + ", ".join([ "{}={}".format(k, i) for (k,i) in group_scores.items()]) + + print("expweighted: %s outage: %d " % (expweighted, outage)) + + group_scores[vector_id] = 1.0 - 1.0 / (expweighted + 1.0) + + result.detail = "vector_id=score: " + ", ".join( + [f"{k}={i}" for (k, i) in group_scores.items()] + ) result.score = max(group_scores.values()) self.session.commit() return result -if __name__ == '__main__': +if __name__ == "__main__": pass - diff --git a/vector/fish_ai.py b/vector/fish_ai.py index fc8ce26..0d4075a 100644 --- a/vector/fish_ai.py +++ b/vector/fish_ai.py @@ -1,46 +1,45 @@ - -from functools import reduce -from model import RiskVector, Test - -from sqlalchemy.orm import session -from model.aifishdata import AifishData - -from model.test import T import json -from datetime import datetime, timezone, timedelta +import os +from datetime import UTC, datetime, timedelta + from dateutil.parser import isoparse +from sqlalchemy.orm import session -import os -import math +from model import RiskVector, Test +from model.aifishdata import AifishData -class FishAiEventsComeInFourHourBurstsVector(): +class FishAiEventsComeInFourHourBurstsVector: rv: RiskVector - # tests = relationship("Test") def __init__(self, s: session, rv) -> None: self.session = s self.config(rv) - + def config(self, rv): self.rv = rv confblob = json.loads(rv.configblob) - self.target_category_id = 
confblob['target_category_id'] - self.video_fps = confblob['video_fps'] - self.event_grouping_timedelta = timedelta(seconds=confblob['event_grouping_timedelta_seconds']) - self.expected_gap_between_groups = timedelta(seconds=confblob['expected_gap_between_groups_seconds']) + self.target_category_id = confblob["target_category_id"] + self.video_fps = confblob["video_fps"] + self.event_grouping_timedelta = timedelta( + seconds=confblob["event_grouping_timedelta_seconds"] + ) + self.expected_gap_between_groups = timedelta( + seconds=confblob["expected_gap_between_groups_seconds"] + ) print(self.rv) print(confblob) - - def execute(self, expected_timedelta): - - datetime_to = datetime.now(tz=timezone.utc) + datetime_to = datetime.now(tz=UTC) datetime_from = datetime_to - expected_timedelta - - fishAiDatas = self.session.query(AifishData).filter(AifishData.datetime > datetime_from).filter(AifishData.datetime < datetime_to) + + fishAiDatas = ( + self.session.query(AifishData) + .filter(AifishData.datetime > datetime_from) + .filter(AifishData.datetime < datetime_to) + ) scores_per_file = [] @@ -49,76 +48,86 @@ def execute(self, expected_timedelta): cocofilestat = os.stat(cocofilename) with open(cocofilename) as f: raw = json.load(f) - starttime = isoparse(raw['info']['date_created']) + starttime = isoparse(raw["info"]["date_created"]) endtime = fishAiData.datetime - annos = raw['annotations'] + annos = raw["annotations"] tracks_set = {} for anno in annos: - if not anno['category_id'] == self.target_category_id: + if anno["category_id"] != self.target_category_id: continue - track_id = anno['attributes']['track_id'] + track_id = anno["attributes"]["track_id"] if track_id not in tracks_set: tracks_set[track_id] = { - 'track_id': track_id, + "track_id": track_id, "mintime": datetime(9999, 12, 31, 23, 59, 59), - "maxtime": datetime.fromtimestamp(0) + "maxtime": datetime.fromtimestamp(0), } track = tracks_set[track_id] - frameTime = frameToTime(starttime, self.video_fps, anno['image_id']) - if frameTime < track['mintime']: - track['mintime'] = frameTime - if frameTime > track['maxtime']: - track['maxtime'] = frameTime + frameTime = frameToTime(starttime, self.video_fps, anno["image_id"]) + track["mintime"] = min(frameTime, track["mintime"]) + track["maxtime"] = max(frameTime, track["maxtime"]) tracksByStartTime = list(tracks_set.values()) - tracksByStartTime.sort(key=lambda x: x['mintime']) - + tracksByStartTime.sort(key=lambda x: x["mintime"]) + # greedy left-to-right grouping algorithm: - groups=[] + groups = [] for t in tracksByStartTime: if len(groups) == 0: groups.append(dict(t)) continue - if groups[-1]['maxtime'] + self.event_grouping_timedelta >= t['mintime']: - latertime = t['maxtime'] if t['maxtime'] > groups[-1]['maxtime'] else groups[-1]['maxtime'] - groups[-1]['maxtime'] = latertime + if groups[-1]["maxtime"] + self.event_grouping_timedelta >= t["mintime"]: + latertime = max(groups[-1]["maxtime"], t["maxtime"]) + groups[-1]["maxtime"] = latertime else: groups.append(dict(t)) - + score = 0 if len(groups) > 1: - mingap=None - for i in range(len(groups) - 1 ): - curr_gap = groups[i+1]['mintime'] - groups[i]['maxtime'] + mingap = None + for i in range(len(groups) - 1): + curr_gap = groups[i + 1]["mintime"] - groups[i]["maxtime"] mingap = curr_gap if mingap == None or curr_gap < mingap else mingap - + if mingap < self.expected_gap_between_groups: - score = 1.0 / ( ( 10.0 / self.expected_gap_between_groups.seconds ) * ( mingap.seconds - self.expected_gap_between_groups.seconds ) - 1.0) + 1.0 + 
score = ( + 1.0 + / ( + (10.0 / self.expected_gap_between_groups.seconds) + * (mingap.seconds - self.expected_gap_between_groups.seconds) + - 1.0 + ) + + 1.0 + ) scores_per_file.append(score) - if len(scores_per_file) > 0: - t = Test(name="Higher score from a short gap between ai detection events. Test bounds from %s to %s"%(datetime_from,datetime_to), vector=self.rv, score=sum(scores_per_file)) + t = Test( + name="Higher score from a short gap between ai detection events. Test bounds from %s to %s" + % (datetime_from, datetime_to), + vector=self.rv, + score=sum(scores_per_file), + ) self.session.add(t) - + self.session.commit() -def frameToTime(starttime, video_fps, frameno ): - return starttime + timedelta(seconds=float(frameno)/video_fps) +def frameToTime(starttime, video_fps, frameno): + return starttime + timedelta(seconds=float(frameno) / video_fps) # test by running directly with `python3 -m vector.fname` -if __name__ == '__main__': +if __name__ == "__main__": """ Test """ - - + + import sqlite3 + from sqlalchemy import create_engine from sqlalchemy.orm import sessionmaker from model import Base as ModelBase - import sqlite3 engine = create_engine("sqlite:///db.db", echo=True) SessionMaker = sessionmaker(engine) @@ -128,9 +137,10 @@ def frameToTime(starttime, video_fps, frameno ): with sqlite3.connect("db.db") as conn: with SessionMaker() as s: print("start of cron") - q = s.query(RiskVector).filter(RiskVector.name == FishAiEventsComeInFourHourBurstsVector.__name__) + q = s.query(RiskVector).filter( + RiskVector.name == FishAiEventsComeInFourHourBurstsVector.__name__ + ) for rv in q.all(): f = FishAiEventsComeInFourHourBurstsVector(s, rv) f.execute((datetime.now() - timedelta(weeks=500), datetime.now())) - diff --git a/vector/gps.py b/vector/gps.py index 111ce5c..cafe8d1 100644 --- a/vector/gps.py +++ b/vector/gps.py @@ -1,16 +1,14 @@ +import json +from datetime import UTC, datetime + from pynmeagps import NMEAReader +from sqlalchemy.orm import session from model import RiskVector, Test - -from sqlalchemy.orm import session from model.gpsdata import GpsData -from model.test import T -import json -from datetime import datetime, timezone - -class GpsVector(): +class GpsVector: rv: RiskVector boundarysegments = [] @@ -19,25 +17,27 @@ class GpsVector(): def __init__(self, session: session, rv) -> None: self.session = session self.config(rv) - + def config(self, rv): self.rv = rv config = json.loads(rv.configblob) - self.boundarysegments = boundingSegmentsFromVertices(config['boundary_vertices']) + self.boundarysegments = boundingSegmentsFromVertices(config["boundary_vertices"]) print(self.rv) print(self.boundarysegments) - def execute(self, expected_timedelta, gpsDataSelect): - datetime_to = datetime.now(tz=timezone.utc) + datetime_to = datetime.now(tz=UTC) datetime_from = datetime_to - expected_timedelta - - last = self.session.query(Test)\ - .where(Test.vector_id == self.rv.id, Test.datetime_to < datetime_to)\ - .order_by(Test.datetime_to.desc())\ - .limit(1).all() - - if len(list(last)) : + + last = ( + self.session.query(Test) + .where(Test.vector_id == self.rv.id, Test.datetime_to < datetime_to) + .order_by(Test.datetime_to.desc()) + .limit(1) + .all() + ) + + if len(list(last)): last_datetime = last[0].datetime_to if datetime_to - last_datetime < expected_timedelta * 2: datetime_from = last_datetime @@ -47,30 +47,37 @@ def execute(self, expected_timedelta, gpsDataSelect): else: print("no previous run found, using expected_timedelta") - result = Test(name="gps test from %s to 
%s"%(datetime_from,datetime_to), - vector=self.rv, datetime_from=datetime_from, datetime_to=datetime_to) + result = Test( + name="gps test from %s to %s" % (datetime_from, datetime_to), + vector=self.rv, + datetime_from=datetime_from, + datetime_to=datetime_to, + ) self.session.add(result) self.session.commit() # print(result) - - gpsDatas = self.session.query(GpsData).filter(GpsData.datetime > datetime_from).filter(GpsData.datetime < datetime_to) + gpsDatas = ( + self.session.query(GpsData) + .filter(GpsData.datetime > datetime_from) + .filter(GpsData.datetime < datetime_to) + ) gpsPointsOutOfBounds = 0 for gpsData in gpsDatas: - if hasattr(gpsData, 'sentence'): - nmea = NMEAReader.parse(gpsData.sentence) + if hasattr(gpsData, "sentence"): + nmea = NMEAReader.parse(gpsData.sentence) point = (nmea.lat, nmea.lon) - elif hasattr(gpsData, 'lat'): + elif hasattr(gpsData, "lat"): point = (gpsData.lat, gpsData.lon) else: continue if not pointInBoundingBox(point, self.boundarysegments): gpsPointsOutOfBounds += 1 - - result.score = -1.0/(gpsPointsOutOfBounds+1.0) + 1.0 - + + result.score = -1.0 / (gpsPointsOutOfBounds + 1.0) + 1.0 + self.session.commit() return result @@ -80,7 +87,7 @@ def boundingSegmentsFromVertices(vertices): ret = [] first_bs = None last_bs = None - for (x, y) in vertices: + for x, y in vertices: if first_bs is None: first_bs = (x, y) else: @@ -90,9 +97,9 @@ def boundingSegmentsFromVertices(vertices): ret.append((last_bs, first_bs)) return ret + def pointInBoundingBox(point, boundarysegments): - """ - https://en.wikipedia.org/wiki/Point_in_polygon#Ray_casting_algorithm + """https://en.wikipedia.org/wiki/Point_in_polygon#Ray_casting_algorithm 1. assume that (361,361) is outside of the gps fence 1. make segment from (361,361) to gps coord 1. check intersection for all boundary segments @@ -100,24 +107,24 @@ def pointInBoundingBox(point, boundarysegments): """ cnt = 0 for seg in boundarysegments: - if intersects( ((361.0,361.0), point), seg): + if intersects(((361.0, 361.0), point), seg): cnt += 1 - + return cnt % 2 == 1 def intersects(seg1, seg2): # slope = rise/run # slope = (y2-y1)/(x2-x1) - slope1 = (seg1[1][1]-seg1[0][1])/(seg1[1][0]-seg1[0][0]) - slope2 = (seg2[1][1]-seg2[0][1])/(seg2[1][0]-seg2[0][0]) + slope1 = (seg1[1][1] - seg1[0][1]) / (seg1[1][0] - seg1[0][0]) + slope2 = (seg2[1][1] - seg2[0][1]) / (seg2[1][0] - seg2[0][0]) # print("slope1", slope1, "slope2", slope2) - if abs( slope1 - slope2 ) < 0.0001: + if abs(slope1 - slope2) < 0.0001: # these lines are nearly parallel. parallel lines don't intersect. # also, this algorithm does 1/(s1-s2) later, so let's not divide by 0 return False - + # run mx+b math to find the intersecting x coordinate # m1 * (x-x1) + y1 = m2*(x-x2) + y2 # slope1 * (isectx - seg1[0][0]) + seg1[0][1] == slope2 * (isectx - seg2[0][0]) + seg2[0][1] @@ -127,64 +134,69 @@ def intersects(seg1, seg2): # slope1 * isectx - slope2 * isectx == slope1 * seg1[0][0] - seg1[0][1] - slope2 * seg2[0][0] + seg2[0][1] # / (slope1 - slope2) on both sides - isectx = (slope1 * seg1[0][0] - seg1[0][1] - slope2 * seg2[0][0] + seg2[0][1]) / (slope1 - slope2) + isectx = (slope1 * seg1[0][0] - seg1[0][1] - slope2 * seg2[0][0] + seg2[0][1]) / ( + slope1 - slope2 + ) # print("isectx", isectx) - - + # I don't actually care what the y coordinate is. I only care if the intersection is on both lines. 
# I can find that just by comparing x boundaries - between1 = (seg1[0][0] >= isectx and isectx >= seg1[1][0]) or \ - (seg1[0][0] <= isectx and isectx <= seg1[1][0]) - - between2 = (seg2[0][0] >= isectx and isectx >= seg2[1][0]) or \ - (seg2[0][0] <= isectx and isectx <= seg2[1][0]) + between1 = (seg1[0][0] >= isectx and isectx >= seg1[1][0]) or ( + seg1[0][0] <= isectx and isectx <= seg1[1][0] + ) + + between2 = (seg2[0][0] >= isectx and isectx >= seg2[1][0]) or ( + seg2[0][0] <= isectx and isectx <= seg2[1][0] + ) return between1 and between2 - + + # test by running directly with `python3 -m vector.fname` -if __name__ == '__main__': +if __name__ == "__main__": a = [ - NMEAReader.parse('$GPGGA,210230,3855.4487,N,09446.0071,W,1,07,1.1,370.5,M,-29.5,M,,*7A'), - NMEAReader.parse('$GPGSV,2,1,08,02,74,042,45,04,18,190,36,07,67,279,42,12,29,323,36*77'), - NMEAReader.parse('$GPGSV,2,2,08,15,30,050,47,19,09,158,,26,12,281,40,27,38,173,41*7B'), - NMEAReader.parse('$GPRMC,210230,A,3855.4487,N,09446.0071,W,0.0,076.2,130495,003.8,E*69'), + NMEAReader.parse("$GPGGA,210230,3855.4487,N,09446.0071,W,1,07,1.1,370.5,M,-29.5,M,,*7A"), + NMEAReader.parse("$GPGSV,2,1,08,02,74,042,45,04,18,190,36,07,67,279,42,12,29,323,36*77"), + NMEAReader.parse("$GPGSV,2,2,08,15,30,050,47,19,09,158,,26,12,281,40,27,38,173,41*7B"), + NMEAReader.parse("$GPRMC,210230,A,3855.4487,N,09446.0071,W,0.0,076.2,130495,003.8,E*69"), ] for i in a: print(i) - print(intersects(((0,0), (10, 20)), ((0,0), (20, 10)))) - print(intersects(((0,1), (10, 20)), ((1,0), (20, 10)))) - print(intersects(((1,0), (10, 20)), ((0,1), (20, 10)))) - print(intersects(((0,0), (10, 20)), ((0,0), (-20, 10)))) - print(intersects(((0,0), (10, 20)), ((-20, 10), (0,0)))) - print(intersects(((0,1), (10, 20)), ((0,0), (-20, 10)))) - print(intersects(((0,1), (10, 20)), ((-20, 10), (0,0)))) - - bv = [[36.9756611, -122.0273566], - [36.9758839, -122.0255113], - [36.9736554, -122.0240521], - [36.9694039, -122.0231509], - [36.9686324, -122.0227218], - [36.9683924, -122.0248246], - [36.9690267, -122.0263481], - [36.9734497, -122.0270348]] + print(intersects(((0, 0), (10, 20)), ((0, 0), (20, 10)))) + print(intersects(((0, 1), (10, 20)), ((1, 0), (20, 10)))) + print(intersects(((1, 0), (10, 20)), ((0, 1), (20, 10)))) + print(intersects(((0, 0), (10, 20)), ((0, 0), (-20, 10)))) + print(intersects(((0, 0), (10, 20)), ((-20, 10), (0, 0)))) + print(intersects(((0, 1), (10, 20)), ((0, 0), (-20, 10)))) + print(intersects(((0, 1), (10, 20)), ((-20, 10), (0, 0)))) + + bv = [ + [36.9756611, -122.0273566], + [36.9758839, -122.0255113], + [36.9736554, -122.0240521], + [36.9694039, -122.0231509], + [36.9686324, -122.0227218], + [36.9683924, -122.0248246], + [36.9690267, -122.0263481], + [36.9734497, -122.0270348], + ] bs = boundingSegmentsFromVertices(bv) print("segments", bs) # see visualization_of_gps_test.jpg to look at what this test is doing - print("in box", pointInBoundingBox((36.970,-122.022), bs)) - print("in box", pointInBoundingBox((36.972,-122.022), bs)) - print("in box", pointInBoundingBox((36.975,-122.022), bs)) - print("in box", pointInBoundingBox((36.976,-122.022), bs)) - print("in box", pointInBoundingBox((36.970,-122.024), bs)) - print("in box", pointInBoundingBox((36.972,-122.024), bs)) - print("in box", pointInBoundingBox((36.975,-122.024), bs)) - print("in box", pointInBoundingBox((36.976,-122.024), bs)) - print("in box", pointInBoundingBox((36.970,-122.026), bs)) - print("in box", pointInBoundingBox((36.972,-122.026), bs)) - print("in box", 
pointInBoundingBox((36.975,-122.026), bs))
-    print("in box", pointInBoundingBox((36.976,-122.026), bs))
-    print("in box", pointInBoundingBox((36.970,-122.028), bs))
-    print("in box", pointInBoundingBox((36.972,-122.028), bs))
-    print("in box", pointInBoundingBox((36.975,-122.028), bs))
-    print("in box", pointInBoundingBox((36.976,-122.028), bs))
-
+    print("in box", pointInBoundingBox((36.970, -122.022), bs))
+    print("in box", pointInBoundingBox((36.972, -122.022), bs))
+    print("in box", pointInBoundingBox((36.975, -122.022), bs))
+    print("in box", pointInBoundingBox((36.976, -122.022), bs))
+    print("in box", pointInBoundingBox((36.970, -122.024), bs))
+    print("in box", pointInBoundingBox((36.972, -122.024), bs))
+    print("in box", pointInBoundingBox((36.975, -122.024), bs))
+    print("in box", pointInBoundingBox((36.976, -122.024), bs))
+    print("in box", pointInBoundingBox((36.970, -122.026), bs))
+    print("in box", pointInBoundingBox((36.972, -122.026), bs))
+    print("in box", pointInBoundingBox((36.975, -122.026), bs))
+    print("in box", pointInBoundingBox((36.976, -122.026), bs))
+    print("in box", pointInBoundingBox((36.970, -122.028), bs))
+    print("in box", pointInBoundingBox((36.972, -122.028), bs))
+    print("in box", pointInBoundingBox((36.975, -122.028), bs))
+    print("in box", pointInBoundingBox((36.976, -122.028), bs))
diff --git a/vector/internet.py b/vector/internet.py
index 20b7d61..6cc16a7 100644
--- a/vector/internet.py
+++ b/vector/internet.py
@@ -1,49 +1,50 @@
-
-from model import RiskVector, Test
-
-from sqlalchemy.orm import session
-from model.internetdata import InternetData
-
 import json
+import re
 import subprocess
+from datetime import UTC, datetime, timedelta
 
-import re
-import codecs
+from sqlalchemy.orm import session
+
+from model import Test
+from model.internetdata import InternetData
 
-from datetime import datetime, timedelta, timezone
 
-class InternetVector():
+class InternetVector:
     target_ips = []
 
     # tests = relationship("Test")
 
     def __init__(self, session: session, rv) -> None:
         self.session = session
         self.config(rv)
-    
+
     def config(self, rv):
         self.rv = rv
         config = json.loads(rv.configblob)
-        self.target_ips: list[str] = config['target_ips']
-        self.run_traceroute: bool = config['run_traceroute']
+        self.target_ips: list[str] = config["target_ips"]
+        self.run_traceroute: bool = config["run_traceroute"]
         print(self.rv)
         print(self.target_ips)
-    
-    def execute(self, expected_timedelta: timedelta):
-
-        datetime_to = datetime.now(tz=timezone.utc)
+
+    def execute(self, expected_timedelta: timedelta):
+        datetime_to = datetime.now(tz=UTC)
         datetime_from = datetime_to - expected_timedelta
 
-        last = self.session.query(Test)\
-            .where(Test.vector_id == self.rv.id, Test.datetime_to < datetime_to)\
-            .order_by(Test.datetime_to.desc())\
-            .limit(1).all()
-
-        result = Test(name="internet test at %s"%(datetime_to.strftime('%Y-%m-%d %H:%M:%SZ')), vector=self.rv)
+        last = (
+            self.session.query(Test)
+            .where(Test.vector_id == self.rv.id, Test.datetime_to < datetime_to)
+            .order_by(Test.datetime_to.desc())
+            .limit(1)
+            .all()
+        )
+
+        result = Test(
+            name="internet test at %s" % (datetime_to.strftime("%Y-%m-%d %H:%M:%SZ")),
+            vector=self.rv,
+        )
         self.session.add(result)
         self.session.commit()
         # print(result)
-    
+
         datas = []
 
         for ip in self.target_ips:
@@ -52,53 +53,55 @@ def execute(self, expected_timedelta: timedelta):
                 datas.append(data)
                 self.session.add(data)
                 self.session.commit()
-    
+
             data = ping(ip)
             datas.append(data)
             self.session.add(data)
             self.session.commit()
-
-        f = filter(lambda data: data.returncode != 0 or data.packetloss and data.packetloss > 33.4 , datas)
-        result.score = -1.0/(len(list(f))+1.0) + 1.0
-
+
+        f = filter(
+            lambda data: data.returncode != 0 or data.packetloss and data.packetloss > 33.4, datas
+        )
+        result.score = -1.0 / (len(list(f)) + 1.0) + 1.0
+
         self.session.commit()
 
         return result
 
+
 def traceroute(ip):
-    cmd = "traceroute -m 12 %s | grep -E '^\s*[0-9][0-9]*\s*' | grep -v '\* \* \*' | awk '{{print $2 \" \" $3}}' "%(ip)
-    p = subprocess.run(cmd, shell=True, capture_output=True, text=True)
+    cmd = (
+        "traceroute -m 12 %s | grep -E '^\\s*[0-9][0-9]*\\s*' | grep -v '\\* \\* \\*' | awk '{{print $2 \" \" $3}}' "
+        % (ip)
+    )
+    p = subprocess.run(cmd, shell=True, capture_output=True, text=True, check=False)
     lines = json.dumps(p.stdout.strip().split("\n"))
-    
-    
+
     return InternetData(returncode=p.returncode, traceroute=lines)
-    
+

 def ping(ip):
-    cmd = "ping -c 3 -W 5 -q %s "%(ip)
-    p = subprocess.run(cmd, shell=True, capture_output=True, text=True)
-    
+    cmd = "ping -c 3 -W 5 -q %s " % (ip)
+    p = subprocess.run(cmd, shell=True, capture_output=True, text=True, check=False)
+
     data = InternetData(returncode=p.returncode)
 
     for line in p.stdout.strip().split("\n"):
         # print(line)
-        m = re.search('([\d\.]+)\% packet loss', line)
+        m = re.search(r"([\d\.]+)\% packet loss", line)
         if m:
             # print("loss", m[1])
             data.packetloss = float(m[1])
             continue
-        m = re.search('min/avg/.*= [\d\.]+/([\d\.]+)/', line)
+        m = re.search(r"min/avg/.*= [\d\.]+/([\d\.]+)/", line)
         if m:
             # print("ping", m[1])
             data.ping = float(m[1])
             continue
 
     return data
 
-
-# test by running directly with `python3 -m vector.fname`
-if __name__ == '__main__':
-
-
-    print(traceroute('1.1.1.1'))
-    print(ping('1.1.1.1'))
+
+# test by running directly with `python3 -m vector.fname`
+if __name__ == "__main__":
+    print(traceroute("1.1.1.1"))
+    print(ping("1.1.1.1"))
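The score computed in `InternetVector.execute` reduces to `1 - 1/(n_bad + 1)`, where a target counts as "bad" when its ping exits non-zero or reports packet loss above 33.4% (in the lambda, `and` binds tighter than `or`). A minimal sketch of that rule, with hypothetical `(returncode, packetloss)` pairs standing in for the `InternetData` rows:

```python
# Sketch only; names are illustrative, not part of the diff above.
def internet_score(results: list[tuple[int, float | None]]) -> float:
    bad = [rc for rc, loss in results if rc != 0 or (loss is not None and loss > 33.4)]
    # 0 bad targets -> 0.0, 1 -> 0.5, 2 -> ~0.667, approaching 1.0 as failures pile up
    return 1.0 - 1.0 / (len(bad) + 1.0)

assert internet_score([(0, 0.0), (0, 1.2)]) == 0.0
assert internet_score([(1, None), (0, 50.0)]) == 1.0 - 1.0 / 3.0
```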
diff --git a/vector/tegrastats.py b/vector/tegrastats.py
index 95f9d09..3a4cf5d 100644
--- a/vector/tegrastats.py
+++ b/vector/tegrastats.py
@@ -1,83 +1,81 @@
-
-from model import RiskVector, Test
+import json
+from datetime import UTC, datetime, timedelta
 
 from sqlalchemy.orm import session
-from model.internetdata import InternetData
-
-import json
-import subprocess
-import re
-import codecs
 
+from model import Test
 
-from datetime import datetime, timedelta, timezone
 
-class TegrastatsVector():
+class TegrastatsVector:
     namedpipe = ""
 
     # tests = relationship("Test")
 
     def __init__(self, session: session, rv) -> None:
         self.session = session
         self.config(rv)
-    
+
     def config(self, rv):
         self.rv = rv
         config = json.loads(rv.configblob)
-        self.namedpipe: list[str] = config['namedpipe']
+        self.namedpipe: list[str] = config["namedpipe"]
         print(self.namedpipe)
-    
-    def execute(self, expected_timedelta: timedelta):
-
-        datetime_to = datetime.now(tz=timezone.utc)
+
+    def execute(self, expected_timedelta: timedelta):
+        datetime_to = datetime.now(tz=UTC)
         datetime_from = datetime_to - expected_timedelta
 
-        last = self.session.query(Test)\
-            .where(Test.vector_id == self.rv.id, Test.datetime_to < datetime_to)\
-            .order_by(Test.datetime_to.desc())\
-            .limit(1).all()
-
-        result = Test(name="tegrastats test at %s"%(datetime_to.strftime('%Y-%m-%d %H:%M:%SZ')), vector=self.rv)
+        last = (
+            self.session.query(Test)
+            .where(Test.vector_id == self.rv.id, Test.datetime_to < datetime_to)
+            .order_by(Test.datetime_to.desc())
+            .limit(1)
+            .all()
+        )
+
+        result = Test(
+            name="tegrastats test at %s" % (datetime_to.strftime("%Y-%m-%d %H:%M:%SZ")),
+            vector=self.rv,
+        )
         self.session.add(result)
         self.session.commit()
         # print(result)
-    
+
         datas = []
 
         for statsline in tegrastats(self.namedpipe):
             print(statsline)
-    
+
         result.score = 0.0
-    
+
         self.session.commit()
 
         return result
 
+
 def tegrastats(namedpipe):
     with open(namedpipe) as f:
         for l in f.readlines():
-            yield(l)
-    
+            yield (l)
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     import os
+
     from flask.config import Config as FlaskConfig
 
-    flaskconfig = FlaskConfig(root_path='')
-    flaskconfig.from_object('config.defaults')
-    if 'ENVIRONMENT' in os.environ:
-        flaskconfig.from_envvar('ENVIRONMENT')
+    flaskconfig = FlaskConfig(root_path="")
+
+    flaskconfig.from_object("config.defaults")
+    if "ENVIRONMENT" in os.environ:
+        flaskconfig.from_envvar("ENVIRONMENT")
 
     import click
 
     @click.command()
-    @click.option('--dbname', default=flaskconfig.get('DBNAME'))
-    @click.option('--dbuser', default=flaskconfig.get('DBUSER'))
-    @click.option('--namedpipe')
+    @click.option("--dbname", default=flaskconfig.get("DBNAME"))
+    @click.option("--dbuser", default=flaskconfig.get("DBUSER"))
+    @click.option("--namedpipe")
     def main(dbname, dbuser, namedpipe):
-
         for i in tegrastats(namedpipe):
             print(i)
 
     main()
-
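`tegrastats()` reads the whole named pipe with `readlines()`, which blocks until the writing process closes its end; for a long-running tegrastats feed, iterating the file object lazily would yield lines as they arrive. A sketch of that alternative (hypothetical name, not part of this change):

```python
def tail_pipe(namedpipe: str):
    # Iterating the file object yields each line as the writer flushes it,
    # instead of waiting for EOF the way f.readlines() does.
    with open(namedpipe) as f:
        for line in f:
            yield line.rstrip("\n")
```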
diff --git a/vector/thalos_mount.py b/vector/thalos_mount.py
index b7de8aa..49c9ba4 100644
--- a/vector/thalos_mount.py
+++ b/vector/thalos_mount.py
@@ -1,96 +1,84 @@
-
-from model import Base, RiskVector, Test
-
-from sqlalchemy.orm.session import Session
-
-from datetime import datetime, timedelta, timezone
-
+import os
+from datetime import UTC, datetime, timedelta
 from pathlib import Path
 
-import os
-import json
+from sqlalchemy.orm.session import Session
 
+from model import Base, RiskVector, Test
 
-class ThalosMountVector():
+
+class ThalosMountVector:
     rv: RiskVector
     session: Session
-    schedule_string: str = 'every 10 minutes'
+    schedule_string: str = "every 10 minutes"
 
     def __init__(self, session: Session, rv) -> None:
         self.session = session
         self.config(rv)
-    
-    
+
     def config(self, rv):
         self.rv = rv
         # config = json.loads(rv.configblob)
         print(self.rv)
 
     def execute(self, expected_timedelta: timedelta):
-
-        now = datetime.now();
+        now = datetime.now()
         datetime_from = now - expected_timedelta
 
-        nowstr = now.astimezone(timezone.utc).strftime('%Y-%m-%d %H:%M:%SZ')
-        result = Test(name="thalos mounted network dir, run at %s "%(nowstr), vector=self.rv)
+        nowstr = now.astimezone(UTC).strftime("%Y-%m-%d %H:%M:%SZ")
+        result = Test(name="thalos mounted network dir, run at %s " % (nowstr), vector=self.rv)
 
-
         self.session.add(result)
         self.session.commit()
 
         result.score = 1.0
         try:
-            thalosdir = Path('/thalos/')
+            thalosdir = Path("/thalos/")
             for boatpath in thalosdir.iterdir():
                 for camdirs in (boatpath / "videos").iterdir():
-                    datedirs = [ datedir.name for datedir in camdirs.iterdir() ]
+                    datedirs = [datedir.name for datedir in camdirs.iterdir()]
                     if len(datedirs) > 0:
                         result.score -= 0.125
-                        if now.astimezone(timezone.utc).strftime('%d-%m-%Y') in datedirs:
+                        if now.astimezone(UTC).strftime("%d-%m-%Y") in datedirs:
                             result.score -= 0.125
             if result.score < 1.0:
                 result.score -= 0.5
         except Exception as e:
             print("error", type(e), e)
             result.detail = str(e)
-        finally: 
+        finally:
             self.session.commit()
 
         return result
-    
-
-
 
 # test by running directly with `python3 -m vector.fname`
-if __name__ == '__main__':
-
+if __name__ == "__main__":
     from flask.config import Config as FlaskConfig
 
-    flaskconfig = FlaskConfig(root_path='')
-    flaskconfig.from_object('config.defaults')
-    if 'ENVIRONMENT' in os.environ:
-        flaskconfig.from_envvar('ENVIRONMENT')
+    flaskconfig = FlaskConfig(root_path="")
+
+    flaskconfig.from_object("config.defaults")
+    if "ENVIRONMENT" in os.environ:
+        flaskconfig.from_envvar("ENVIRONMENT")
 
     import click
 
     @click.command()
-    @click.option('--dbname', default=flaskconfig.get('DBNAME'))
-    @click.option('--dbuser', default=flaskconfig.get('DBUSER'))
+    @click.option("--dbname", default=flaskconfig.get("DBNAME"))
+    @click.option("--dbuser", default=flaskconfig.get("DBUSER"))
     def main(dbname, dbuser):
-
         import sqlalchemy as sa
         from sqlalchemy.orm import sessionmaker as SessionMaker
 
-        sa_engine = sa.create_engine("postgresql+psycopg2://%s@/%s"%(dbuser, dbname), echo=True)
+        sa_engine = sa.create_engine(f"postgresql+psycopg2://{dbuser}@/{dbname}", echo=True)
         sessionmaker = SessionMaker(sa_engine)
 
         Base.metadata.create_all(sa_engine)
 
         session = sessionmaker()
 
-        results = list(session.query(RiskVector).filter(RiskVector.name == ThalosMountVector.__name__))
+        results = list(
+            session.query(RiskVector).filter(RiskVector.name == ThalosMountVector.__name__)
+        )
 
         tmv = ThalosMountVector(session, rv=results[0])
 
         tmv.execute(None, None)
-
     main()
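`ThalosMountVector` scores in the opposite direction of most vectors here: it starts at 1.0 and subtracts as evidence of a healthy mount accumulates, so lower means healthier. (Note that the `__main__` harness calls `tmv.execute(None, None)` even though `execute` takes a single `expected_timedelta`; that pre-existing mismatch is untouched by this formatting pass.) A worked sketch of the arithmetic, with hypothetical counters:

```python
# Illustrative only; counters abstract over the per-directory loops above.
def mount_score(cam_dirs_with_dates: int, cams_with_today: int) -> float:
    score = 1.0
    score -= 0.125 * cam_dirs_with_dates  # each camera dir containing date dirs
    score -= 0.125 * cams_with_today      # each camera dir containing today's date
    if score < 1.0:                       # any evidence at all earns a further credit
        score -= 0.5
    return score

# Two cameras, both populated and current: 1.0 - 4 * 0.125 - 0.5 == 0.0
assert mount_score(2, 2) == 0.0
```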
diff --git a/vector/thalos_vids_exist.py b/vector/thalos_vids_exist.py
index 66a2949..84e400c 100644
--- a/vector/thalos_vids_exist.py
+++ b/vector/thalos_vids_exist.py
@@ -1,86 +1,99 @@
-
-from model import Base, RiskVector, Test
-
-from sqlalchemy.orm.session import Session
-
-from datetime import datetime, timedelta, timezone
+import os
 import time
-
+from datetime import UTC, datetime, timedelta
 from pathlib import Path
 
-import os
-import json
+from flask.config import Config as FlaskConfig
+from sqlalchemy.orm.session import Session
 
-MAGIC_20_MINUTES_IN_SECONDS = 20*60.0
+from model import Base, RiskVector, Test
 
-from flask.config import Config as FlaskConfig
-flaskconfig = FlaskConfig(root_path='')
+MAGIC_20_MINUTES_IN_SECONDS = 20 * 60.0
 
+flaskconfig = FlaskConfig(root_path="")
+
+flaskconfig.from_object("config.defaults")
+if "ENVIRONMENT" in os.environ:
+    flaskconfig.from_envvar("ENVIRONMENT")
 
-flaskconfig.from_object('config.defaults')
-if 'ENVIRONMENT' in os.environ:
-    flaskconfig.from_envvar('ENVIRONMENT')
+thalosviddir = flaskconfig.get("THALOS_VIDEO_DIR")
 
-thalosviddir = flaskconfig.get('THALOS_VIDEO_DIR')
 
-class ThalosVideosExistVector():
+class ThalosVideosExistVector:
     rv: RiskVector
     session: Session
-    schedule_string: str = 'every 5 minutes'
+    schedule_string: str = "every 5 minutes"
 
     def __init__(self, session: Session, rv) -> None:
         self.session = session
         self.config(rv)
-    
-    
+
     def config(self, rv):
         self.rv = rv
         # config = json.loads(rv.configblob)
         print(self.rv)
 
     def execute(self, expected_timedelta: timedelta):
-
         if time.monotonic() < MAGIC_20_MINUTES_IN_SECONDS:
             # to recent from system boot time. don't run.
             return
 
-        now = datetime.now().astimezone(timezone.utc);
+        now = datetime.now().astimezone(UTC)
        datetime_from = now - expected_timedelta
 
-        nowfloor5min = now.replace(minute=(now.minute//5)*5, second=0, microsecond=0)
+        nowfloor5min = now.replace(minute=(now.minute // 5) * 5, second=0, microsecond=0)
         nowfloorminus10min = nowfloor5min - timedelta(minutes=5)
         nowfloorminus15min = nowfloor5min - timedelta(minutes=10)
-
-        nowstr = nowfloorminus15min.strftime('%d-%m-%Y-%H-%M')
-        result = Test(name="thalos video files check, looking for %s "%(nowstr), vector=self.rv)
+        nowstr = nowfloorminus15min.strftime("%d-%m-%Y-%H-%M")
+        result = Test(name="thalos video files check, looking for %s " % (nowstr), vector=self.rv)
 
-
         self.session.add(result)
         self.session.commit()
 
         result.score = 1.0
         errors = []
-        for cam in ['cam1', 'cam2']:
+        for cam in ["cam1", "cam2"]:
             try:
-                mp4vid = Path(thalosviddir + '/' + cam + '/' + nowfloorminus15min.strftime('%d-%m-%Y') + '/' + nowfloorminus15min.strftime('%H') + '/' + nowstr + ".mp4.done")
+                mp4vid = Path(
+                    thalosviddir
+                    + "/"
+                    + cam
+                    + "/"
+                    + nowfloorminus15min.strftime("%d-%m-%Y")
+                    + "/"
+                    + nowfloorminus15min.strftime("%H")
+                    + "/"
+                    + nowstr
+                    + ".mp4.done"
+                )
                 st = mp4vid.stat()
 
                 # score based on size? I guess? larger than 1MiB is like 65% confident that the file is ok
                 if st.st_size > 0:
-                    result.score -= 0.25 * ( 1.0 - (1.0 / (1.0 + st.st_size / 500000.0 )) )
+                    result.score -= 0.25 * (1.0 - (1.0 / (1.0 + st.st_size / 500000.0)))
             except Exception as e:
                 print("error", type(e), e)
                 errors.append(str(e))
             try:
-                avivid = Path(thalosviddir + '/' + cam + '/' + nowfloorminus15min.strftime('%d-%m-%Y') + '/' + nowfloorminus15min.strftime('%H') + '/' + nowstr + ".avi.done")
+                avivid = Path(
+                    thalosviddir
+                    + "/"
+                    + cam
+                    + "/"
+                    + nowfloorminus15min.strftime("%d-%m-%Y")
+                    + "/"
+                    + nowfloorminus15min.strftime("%H")
+                    + "/"
+                    + nowstr
+                    + ".avi.done"
+                )
                 st = avivid.stat()
-    
+
                 # score based on size? I guess? larger than 1MiB is like 65% confident that the file is ok
                 if st.st_size > 0:
-                    result.score -= 0.25 * ( 1.0 - (1.0 / (1.0 + st.st_size / 500000.0 )) )
+                    result.score -= 0.25 * (1.0 - (1.0 / (1.0 + st.st_size / 500000.0)))
             except Exception as e:
                 print("error", type(e), e)
                 errors.append(str(e))
 
-        if len(errors)> 0:
+        if len(errors) > 0:
             result.detail = "\n".join(errors)
 
         self.session.commit()
@@ -88,26 +101,25 @@ def execute(self, expected_timedelta: timedelta):
 
 
 # test by running directly with `python3 -m vector.fname`
-if __name__ == '__main__':
-
+if __name__ == "__main__":
     from flask.config import Config as FlaskConfig
 
-    flaskconfig = FlaskConfig(root_path='')
-    flaskconfig.from_object('config.defaults')
-    if 'ENVIRONMENT' in os.environ:
-        flaskconfig.from_envvar('ENVIRONMENT')
+    flaskconfig = FlaskConfig(root_path="")
+
+    flaskconfig.from_object("config.defaults")
+    if "ENVIRONMENT" in os.environ:
+        flaskconfig.from_envvar("ENVIRONMENT")
 
     import click
 
     @click.command()
-    @click.option('--dbname', default=flaskconfig.get('DBNAME'))
-    @click.option('--dbuser', default=flaskconfig.get('DBUSER'))
+    @click.option("--dbname", default=flaskconfig.get("DBNAME"))
+    @click.option("--dbuser", default=flaskconfig.get("DBUSER"))
     def main(dbname, dbuser):
-
         import sqlalchemy as sa
         from sqlalchemy.orm import sessionmaker as SessionMaker
 
-        sa_engine = sa.create_engine("postgresql+psycopg2://%s@/%s"%(dbuser, dbname), echo=True)
+        sa_engine = sa.create_engine(f"postgresql+psycopg2://{dbuser}@/{dbname}", echo=True)
         sessionmaker = SessionMaker(sa_engine)
 
         Base.metadata.create_all(sa_engine)
@@ -116,12 +128,11 @@ def main(dbname, dbuser):
         rv = RiskVector()
         rv.id = -1
         rv.name = ThalosVideosExistVector.__name__
-        rv.schedule_string = 'every 1 minutes'
-        rv.configblob = '{}'
+        rv.schedule_string = "every 1 minutes"
+        rv.configblob = "{}"
         rv.tests = []
 
         tmv = ThalosVideosExistVector(session, rv=rv)
 
         tmv.execute(timedelta(minutes=5))
-
     main()
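The per-file deduction in `ThalosVideosExistVector` saturates with file size: each of the four expected files (`.mp4.done` and `.avi.done` for `cam1` and `cam2`) can remove at most 0.25 from the starting score of 1.0. Pulled out as a sketch:

```python
def size_deduction(st_size: int) -> float:
    # 0.25 * (1 - 1 / (1 + size/500000)): 0 for an empty file, approaching 0.25 as size grows.
    return 0.25 * (1.0 - 1.0 / (1.0 + st_size / 500000.0))

# A 1 MiB file gives 0.25 * (1 - 1/3.097) ~= 0.169, i.e. roughly the
# "65% confident" figure the inline comment mentions.
print(round(size_deduction(1_048_576), 3))  # 0.169
```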
diff --git a/vector_schedule.py b/vector_schedule.py
index 6510a6e..39423e1 100644
--- a/vector_schedule.py
+++ b/vector_schedule.py
@@ -1,77 +1,70 @@
-import json
-import io
-
-from flask import Flask
-from flask_admin import Admin
-
-from sqlalchemy import create_engine
-from sqlalchemy.orm import sessionmaker, Session
 import os
-
-from model import Base as ModelBase, RiskVector, RiskVectorModelView, Test, TestModelView
-from vector import GpsVector, FishAiEventsComeInFourHourBurstsVector, InternetVector, EquipmentOutageAggVector, ThalosMountVector, ThalosVideosExistVector, ElogTimeGapsVector,CatchCountA
-
-import sqlite3
-from datetime import datetime, timedelta, timezone
-
-import click
-
-import schedule
 import re
 import time
+from datetime import timedelta
 
-
+import boto3
+import click
+import schedule
 from flask.config import Config as FlaskConfig
-flaskconfig = FlaskConfig(root_path='')
+from sqlalchemy import create_engine
+from sqlalchemy.orm import sessionmaker
 
-flaskconfig.from_object('config.defaults')
-if 'ENVIRONMENT' in os.environ:
-    flaskconfig.from_envvar('ENVIRONMENT')
+from model import Base as ModelBase
+from model import RiskVector
+from vector import (
+    CatchCountA,
+    ElogTimeGapsVector,
+    EquipmentOutageAggVector,
+    FishAiEventsComeInFourHourBurstsVector,
+    GpsVector,
+    InternetVector,
+    ThalosMountVector,
+    ThalosVideosExistVector,
+)
 
+flaskconfig = FlaskConfig(root_path="")
 
-import boto3
+flaskconfig.from_object("config.defaults")
+if "ENVIRONMENT" in os.environ:
+    flaskconfig.from_envvar("ENVIRONMENT")
 
-s3 = boto3.resource('s3')
-bucket = s3.Bucket('51-gema-dev-dp-raw')
+s3 = boto3.resource("s3")
+bucket = s3.Bucket("51-gema-dev-dp-raw")
 
 
 def parse_and_schedule(vector: RiskVector, execute_func, *args):
-
     if not vector.schedule_string:
         return
 
-    if m := re.match('every (\\d+) minutes', vector.schedule_string ):
-
+    if m := re.match("every (\\d+) minutes", vector.schedule_string):
         d = timedelta(minutes=int(m.group(1)))
         schedule.every(int(m.group(1))).minutes.do(execute_func, d, *args)
 
-    elif m := re.match('every (\\d+) hours', vector.schedule_string ):
-
+    elif m := re.match("every (\\d+) hours", vector.schedule_string):
         d = timedelta(hours=int(m.group(1)))
         schedule.every(int(m.group(1))).hours.do(execute_func, d, *args)
 
     else:
-        click.echo("VECTOR NOT SCHEDULED: {}".format(vector.name))
-    
+        click.echo(f"VECTOR NOT SCHEDULED: {vector.name}")
 
 
 @click.command()
-@click.option('--dbname', default=flaskconfig.get('DBNAME'))
-@click.option('--dbuser', default=flaskconfig.get('DBUSER'))
+@click.option("--dbname", default=flaskconfig.get("DBNAME"))
+@click.option("--dbuser", default=flaskconfig.get("DBUSER"))
 def main(dbname, dbuser):
     # engine = create_engine("sqlite:///db.db", echo=True)
-    # print(os.environ, dbuser, dbname)
-    engine = create_engine("postgresql+psycopg2://%s@/%s"%(dbuser, dbname), echo=True)
+    engine = create_engine(f"postgresql+psycopg2://{dbuser}@/{dbname}", echo=True)
 
     SessionMaker = sessionmaker(engine)
     ModelBase.metadata.create_all(engine)
 
     with SessionMaker() as session:
         print("start of cron")
-    
+
         q = session.query(RiskVector)
-    
+
         all_vectors = []
 
         gps_vectors = []
@@ -86,7 +79,6 @@ def main(dbname, dbuser):
         for v in q.all():
             print("start of vector", v)
             all_vectors.append(v)
-
 
             if v.name == GpsVector.__name__:
                 g = GpsVector(session, v)
@@ -100,7 +92,6 @@ def main(dbname, dbuser):
                 # res = f.execute(daterange)
                 # print("end of vector", res)
 
-
             if v.name == InternetVector.__name__:
                 f = InternetVector(session, v)
                 inet_vectors.append(f)
@@ -121,14 +112,14 @@ def main(dbname, dbuser):
                 parse_and_schedule(v, tmv.execute)
                 # res = eov.execute(daterange)
                 # print("end of vector", res)
-    
+
             if v.name == ThalosVideosExistVector.__name__:
                 tve = ThalosVideosExistVector(session, v)
                 tve_vectors.append(tve)
                 parse_and_schedule(v, tve.execute)
                 # res = eov.execute(daterange)
                 # print("end of vector", res)
-    
+
             if v.name == ElogTimeGapsVector.__name__:
                 eltg = ElogTimeGapsVector(session, v)
                 eltg_vectors.append(eltg)
@@ -139,11 +130,9 @@ def main(dbname, dbuser):
                 cca_vectors.append(cca)
                 parse_and_schedule(v, cca.execute)
 
-
         for v in all_vectors:
             pass
 
-
         while 1:
             n = schedule.idle_seconds()
             if n is None:
@@ -151,9 +140,10 @@ def main(dbname, dbuser):
                 break
             elif n > 0:
                 # sleep exactly the right amount of time
-                click.echo("sleeping for: {}".format(n))
+                click.echo(f"sleeping for: {n}")
                 time.sleep(n)
             schedule.run_pending()
 
-if __name__ == '__main__':
-    main()
\ No newline at end of file
+
+if __name__ == "__main__":
+    main()
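`parse_and_schedule` understands exactly two schedule-string shapes, `every N minutes` and `every N hours`; anything else is echoed as unscheduled. A minimal sketch of just the parsing step (hypothetical helper name):

```python
import re
from datetime import timedelta

def parse_schedule(schedule_string: str) -> timedelta | None:
    if m := re.match(r"every (\d+) minutes", schedule_string):
        return timedelta(minutes=int(m.group(1)))
    if m := re.match(r"every (\d+) hours", schedule_string):
        return timedelta(hours=int(m.group(1)))
    return None  # the caller echoes "VECTOR NOT SCHEDULED: <name>"

assert parse_schedule("every 10 minutes") == timedelta(minutes=10)
assert parse_schedule("daily") is None
```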
diff --git a/video_fetch.py b/video_fetch.py
index eb8dbc5..03e5fa2 100644
--- a/video_fetch.py
+++ b/video_fetch.py
@@ -1,53 +1,60 @@
-
-from datetime import datetime,timezone,timedelta
-import click
-import codecs
 import os
-from pathlib import Path
-import psycopg2
-from psycopg2.pool import SimpleConnectionPool
 import re
-import schedule
 import subprocess
 import time
+from datetime import UTC, datetime, timedelta
+from pathlib import Path
 
-
+import click
+import psycopg2
+import schedule
 from flask.config import Config as FlaskConfig
-flaskconfig = FlaskConfig(root_path='')
+from psycopg2.pool import SimpleConnectionPool
+
+flaskconfig = FlaskConfig(root_path="")
+
+flaskconfig.from_object("config.defaults")
+if "ENVIRONMENT" in os.environ:
+    flaskconfig.from_envvar("ENVIRONMENT")
 
-flaskconfig.from_object('config.defaults')
-if 'ENVIRONMENT' in os.environ:
-    flaskconfig.from_envvar('ENVIRONMENT')
 
 def depth_first_video_files(cameradir: Path):
     try:
         date_dirs = [x for x in cameradir.iterdir() if x.is_dir()]
-        date_dirs.sort(key=lambda x: datetime.strptime(x.name, '%d-%m-%Y'), reverse=True)
+        date_dirs.sort(key=lambda x: datetime.strptime(x.name, "%d-%m-%Y"), reverse=True)
         for date_dir in date_dirs:
             hour_dirs = [x for x in date_dir.iterdir() if x.is_dir()]
             hour_dirs.sort(key=lambda x: int(x.name), reverse=True)
             for hour_dir in hour_dirs:
-                vid_files = [x for x in hour_dir.iterdir() if x.is_file() and re.match('.*-(\d+)\.', x.name)]
-                vid_files.sort(key=lambda x: re.match('.*-(\d+)\.', x.name)[1], reverse=True)
+                vid_files = [
+                    x for x in hour_dir.iterdir() if x.is_file() and re.match(r".*-(\d+)\.", x.name)
+                ]
+                vid_files.sort(key=lambda x: re.match(r".*-(\d+)\.", x.name)[1], reverse=True)
                 for v in vid_files:
                     if v.name.endswith(".avi.done") or v.name.endswith(".avi"):
                         yield v
     except GeneratorExit:
         return
 
+
 def is_gpg(f: Path, passphrase_file: str):
-    cmd = "cat %s | gpg --pinentry-mode loopback --passphrase-fd 0 \
-        --list-packets %s "%(
-            passphrase_file,
-            str(f.absolute())
-        )
-    p = subprocess.run(cmd, shell=True, capture_output=True, text=True)
+    cmd = (
+        "cat %s | gpg --pinentry-mode loopback --passphrase-fd 0 \
+        --list-packets %s "
+        % (passphrase_file, str(f.absolute()))
+    )
+    p = subprocess.run(cmd, shell=True, capture_output=True, text=True, check=False)
     return p.returncode == 0
 
-def video_fetch(cpool: SimpleConnectionPool, thalos_dir: Path, output_dir: Path, passphrase_file: str, thalos_video_suffix: str):
-    
+
+def video_fetch(
+    cpool: SimpleConnectionPool,
+    thalos_dir: Path,
+    output_dir: Path,
+    passphrase_file: str,
+    thalos_video_suffix: str,
+):
     for cameradir in filter(lambda x: x.is_dir(), thalos_dir.iterdir()):
-
         new_vids: list[Path] = []
         discovered_matching_last_modified = 0
@@ -57,39 +64,51 @@ def video_fetch(cpool: SimpleConnectionPool, thalos_dir: Path, output_dir: Path,
         try:
             with conn.cursor() as cur:
                 for vid_file in depth_first_video_files(cameradir):
-
                     vid_file_absolute_str = str(vid_file.absolute())
-                    vid_file_done_alt_str = vid_file_absolute_str[0:-len('.done')] if vid_file_absolute_str.endswith('.done') else vid_file_absolute_str+".done"
+                    vid_file_done_alt_str = (
+                        vid_file_absolute_str[0 : -len(".done")]
+                        if vid_file_absolute_str.endswith(".done")
+                        else vid_file_absolute_str + ".done"
+                    )
 
                     start_datetime: datetime = datetime.strptime(
-                        vid_file.name[0:len('20-07-2023-22-20')],
-                        '%d-%m-%Y-%H-%M')
-                    start_datetime = start_datetime.replace(tzinfo=timezone.utc)
+                        vid_file.name[0 : len("20-07-2023-22-20")], "%d-%m-%Y-%H-%M"
+                    )
+                    start_datetime = start_datetime.replace(tzinfo=UTC)
                     if last_start_datetime is None:
                         last_start_datetime = start_datetime
 
-                    if start_datetime + timedelta(days=2) < last_start_datetime :
+                    if start_datetime + timedelta(days=2) < last_start_datetime:
                         # ok, we're too far back in time now. No reason to keep going back
                         # I'm done searching.
                         break
 
                     s = vid_file.stat()
-                    last_modified: datetime = datetime.fromtimestamp(s.st_mtime, tz=timezone.utc)
-                    cur.execute("select original_path, last_modified from video_files where original_path in (%s, %s);",
-                                (vid_file_absolute_str,vid_file_done_alt_str,))
+                    last_modified: datetime = datetime.fromtimestamp(s.st_mtime, tz=UTC)
+                    cur.execute(
+                        "select original_path, last_modified from video_files where original_path in (%s, %s);",
+                        (
+                            vid_file_absolute_str,
+                            vid_file_done_alt_str,
+                        ),
+                    )
                     rows = list(cur)
                     if len(rows) == 0:
                         # we have never seen this file before!
                         new_vids.append(vid_file)
-                        cur.execute("insert into video_files \
+                        cur.execute(
+                            "insert into video_files \
                             (original_path, last_modified, start_datetime, cam_name) \
-                            values (%s, %s, %s, %s);", (
-                                vid_file_absolute_str, last_modified, start_datetime, cameradir.name))
+                            values (%s, %s, %s, %s);",
+                            (vid_file_absolute_str, last_modified, start_datetime, cameradir.name),
+                        )
                         conn.commit()
                     elif rows[0][1] != last_modified:
                         # found it, update the lastmodified
-                        cur.execute("update video_files set last_modified = %s where original_path in (%s, %s);",
-                                    (last_modified, vid_file_absolute_str, vid_file_done_alt_str))
+                        cur.execute(
+                            "update video_files set last_modified = %s where original_path in (%s, %s);",
+                            (last_modified, vid_file_absolute_str, vid_file_done_alt_str),
+                        )
                         conn.commit()
                     elif discovered_matching_last_modified > 3:
                         # I found files 4 where the lastmodified matches
@@ -98,32 +117,41 @@ def video_fetch(cpool: SimpleConnectionPool, thalos_dir: Path, output_dir: Path,
                     else:
                         # I can only be here if a row was found and the last_modified matches
                         discovered_matching_last_modified += 1
-    
+
         finally:
             cpool.putconn(conn)
 
         if len(new_vids) == 0:
             # there are 0 videos for this camera. Skip this camera
             continue
 
-        click.echo("working on {} new videos".format(len(new_vids)))
+        click.echo(f"working on {len(new_vids)} new videos")
         new_vids.reverse()
 
         for new_vid in new_vids:
-
             new_vid_absolute_str = str(new_vid.absolute())
-            new_vid_done_alt_str = new_vid_absolute_str[0:-len('.done')] if new_vid_absolute_str.endswith('.done') else new_vid_absolute_str+".done"
+            new_vid_done_alt_str = (
+                new_vid_absolute_str[0 : -len(".done")]
+                if new_vid_absolute_str.endswith(".done")
+                else new_vid_absolute_str + ".done"
+            )
             s = new_vid.stat()
-            last_modified = datetime.fromtimestamp(s.st_mtime, tz=timezone.utc)
+            last_modified = datetime.fromtimestamp(s.st_mtime, tz=UTC)
 
             conn: psycopg2.connection = cpool.getconn()
             try:
                 with conn.cursor() as cur:
-                    cur.execute("select original_path, last_modified, start_datetime, \
+                    cur.execute(
+                        "select original_path, last_modified, start_datetime, \
                         decrypted_path, decrypted_datetime, stdout, stderr \
-                        from video_files where original_path in ( %s, %s );", (new_vid_absolute_str, new_vid_done_alt_str,))
-                    #schema: (original_path, last_modified, start_datetime, decrypted_path, decrypted_datetime, stdout, stderr)
+                        from video_files where original_path in ( %s, %s );",
+                        (
+                            new_vid_absolute_str,
+                            new_vid_done_alt_str,
+                        ),
+                    )
+                    # schema: (original_path, last_modified, start_datetime, decrypted_path, decrypted_datetime, stdout, stderr)
                     rows = list(cur)
                     if len(rows) == 1 and rows[0][3] is not None:
                         # this script has already decrypted this video
@@ -132,9 +160,14 @@ def video_fetch(cpool: SimpleConnectionPool, thalos_dir: Path, output_dir: Path,
 
                     # compute the output filename
                     start_time: datetime = rows[0][2]
-                    start_time = start_time.astimezone(timezone.utc)
+                    start_time = start_time.astimezone(UTC)
                     # print(start_time)
-                    str_start_time = start_time.isoformat().replace('-', '').replace(':', '').replace('+0000', 'Z')
+                    str_start_time = (
+                        start_time.isoformat()
+                        .replace("-", "")
+                        .replace(":", "")
+                        .replace("+0000", "Z")
+                    )
                     output_filename = str_start_time + "_" + cameradir.name + ".avi"
                     # if output_filename.endswith('.done'):
                     #     output_filename = output_filename[0:-5]
@@ -143,64 +176,74 @@ def video_fetch(cpool: SimpleConnectionPool, thalos_dir: Path, output_dir: Path,
                     # gpg decrypt the video
                     cmd = None
                     if is_gpg(new_vid, passphrase_file):
-                        cmd = "cat %s | gpg --batch --yes \
+                        cmd = (
+                            "cat %s | gpg --batch --yes \
                             --pinentry-mode loopback --passphrase-fd 0 \
-                            --decrypt --output %s %s "%(
-                                passphrase_file,
-                                str(output_file.absolute()),
-                                new_vid_absolute_str
-                            )
+                            --decrypt --output %s %s "
+                            % (passphrase_file, str(output_file.absolute()), new_vid_absolute_str)
+                        )
                     else:
-                        cmd = "cp %s %s"%(
-                            new_vid_absolute_str,
-                            str(output_file.absolute())
-                        )
-                    p = subprocess.run(cmd, shell=True, capture_output=True, text=True)
+                        cmd = "cp %s %s" % (new_vid_absolute_str, str(output_file.absolute()))
+                    p = subprocess.run(cmd, shell=True, capture_output=True, text=True, check=False)
                     if p.returncode == 0:
-                        cur.execute("update video_files set decrypted_path = %s, \
+                        cur.execute(
+                            "update video_files set decrypted_path = %s, \
                             decrypted_datetime = %s, stdout = %s, stderr = %s \
-                            where original_path in ( %s, %s );", (
-                                str(output_file.absolute()), datetime.now(tz=timezone.utc),
-                                p.stdout, p.stderr,
-                                new_vid_absolute_str, new_vid_done_alt_str)
+                            where original_path in ( %s, %s );",
+                            (
+                                str(output_file.absolute()),
+                                datetime.now(tz=UTC),
+                                p.stdout,
+                                p.stderr,
+                                new_vid_absolute_str,
+                                new_vid_done_alt_str,
+                            ),
                         )
                         conn.commit()
                     else:
-                        cur.execute("update video_files set decrypted_path = %s, \
+                        cur.execute(
+                            "update video_files set decrypted_path = %s, \
                             decrypted_datetime = %s, stdout = %s, stderr = %s \
-                            where original_path in ( %s, %s );", (
-                                None, datetime.now(tz=timezone.utc),
-                                p.stdout, p.stderr,
-                                new_vid_absolute_str, new_vid_done_alt_str)
+                            where original_path in ( %s, %s );",
+                            (
+                                None,
+                                datetime.now(tz=UTC),
+                                p.stdout,
+                                p.stderr,
+                                new_vid_absolute_str,
+                                new_vid_done_alt_str,
+                            ),
                         )
                         conn.commit()
             finally:
                 cpool.putconn(conn)
 
-@click.command()
-@click.option('--dbname', default=flaskconfig.get('DBNAME'))
-@click.option('--dbuser', default=flaskconfig.get('DBUSER'))
-@click.option('--thalos_video_dir', default=flaskconfig.get('THALOS_VIDEO_DIR'))
-@click.option('--output_dir', default=flaskconfig.get('VIDEO_OUTPUT_DIR'))
-@click.option('--passphrase_file', default=flaskconfig.get('VIDEO_PASSPHRASE_FILE'))
-@click.option('--thalos_video_suffix', default=flaskconfig.get('THALOS_VIDEO_SUFFIX'))
-@click.option('--print_latest', is_flag=True)
-def main(dbname, dbuser, thalos_video_dir, output_dir, passphrase_file, thalos_video_suffix, print_latest):
 
+@click.command()
+@click.option("--dbname", default=flaskconfig.get("DBNAME"))
+@click.option("--dbuser", default=flaskconfig.get("DBUSER"))
+@click.option("--thalos_video_dir", default=flaskconfig.get("THALOS_VIDEO_DIR"))
+@click.option("--output_dir", default=flaskconfig.get("VIDEO_OUTPUT_DIR"))
+@click.option("--passphrase_file", default=flaskconfig.get("VIDEO_PASSPHRASE_FILE"))
+@click.option("--thalos_video_suffix", default=flaskconfig.get("THALOS_VIDEO_SUFFIX"))
+@click.option("--print_latest", is_flag=True)
+def main(
+    dbname, dbuser, thalos_video_dir, output_dir, passphrase_file, thalos_video_suffix, print_latest
+):
     thalos_dir = Path(thalos_video_dir)
     output_dir = Path(output_dir)
 
     if print_latest:
         for cameradir in filter(lambda x: x.is_dir(), thalos_dir.iterdir()):
-            i=0
+            i = 0
             for vid_file in depth_first_video_files(cameradir):
                 if i > 1:
                     break
                 s = vid_file.stat()
-                last_modified = datetime.fromtimestamp(s.st_mtime, tz=timezone.utc)
-                click.echo("{} ({})".format(str(vid_file.absolute()), str(last_modified)))
-                i+=1
+                last_modified = datetime.fromtimestamp(s.st_mtime, tz=UTC)
+                click.echo(f"{str(vid_file.absolute())} ({str(last_modified)})")
+                i += 1
         return
 
     cpool = SimpleConnectionPool(1, 1, database=dbname, user=dbuser)
@@ -209,10 +252,13 @@ def runonce(cpool, thalos_dir, output_dir, passphrase_file, thalos_video_suffix)
         video_fetch(cpool, thalos_dir, output_dir, passphrase_file, thalos_video_suffix)
         return schedule.CancelJob
 
-    schedule.every(1).seconds.do(runonce, cpool, thalos_dir, output_dir, passphrase_file, thalos_video_suffix )
-
-    schedule.every(5).minutes.do(video_fetch, cpool, thalos_dir, output_dir, passphrase_file, thalos_video_suffix )
+    schedule.every(1).seconds.do(
+        runonce, cpool, thalos_dir, output_dir, passphrase_file, thalos_video_suffix
+    )
 
+    schedule.every(5).minutes.do(
+        video_fetch, cpool, thalos_dir, output_dir, passphrase_file, thalos_video_suffix
+    )
 
     while 1:
         n = schedule.idle_seconds()
@@ -222,9 +268,10 @@ def runonce(cpool, thalos_dir, output_dir, passphrase_file, thalos_video_suffix)
             break
         elif n > 0:
             # sleep exactly the right amount of time
-            click.echo("sleeping for: {}".format(n))
+            click.echo(f"sleeping for: {n}")
             time.sleep(n)
         schedule.run_pending()
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
    main()
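Throughout `video_fetch`, every path is looked up under both spellings, with and without the `.done` suffix, so a video and its completion marker resolve to the same `video_files` row. The pairing rule, isolated as a sketch with a hypothetical helper name:

```python
def done_alt(path: str) -> str:
    # Map "x.avi" <-> "x.avi.done" so both spellings hit the same DB row.
    return path[: -len(".done")] if path.endswith(".done") else path + ".done"

assert done_alt("/thalos/cam1/a.avi") == "/thalos/cam1/a.avi.done"
assert done_alt("/thalos/cam1/a.avi.done") == "/thalos/cam1/a.avi"
```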