From d59ad6c7c65280c1ce636b6cbe7dd9cc607abaec Mon Sep 17 00:00:00 2001
From: Guzman Lopez
Date: Tue, 5 Nov 2024 11:55:32 +0000
Subject: [PATCH] add ruff formatting and minor improvements

---
 gps_fetch.py | 4 +-
 migrations/versions/04eaff9bcc55_.py | 5 +-
 .../17911f3ffb3b_new_vector_rows_1.py | 20 +--
 .../47ff3fca73a4_vector_schedulestring.py | 9 +-
 .../495235ece5f0_ondeckdata_unique.py | 11 +-
 .../58dd42108a22_new_vid_file_table.py | 24 ++--
 .../versions/5e4898954923_ondeckdata_table.py | 22 +--
 .../versions/5fdb864a1bbb_refactor_aifish.py | 65 +++++----
 .../643148911953_deckhand_json_views.py | 10 +-
 .../versions/677a2f2884e1_s3uploadstable.py | 14 +-
 .../versions/81b92a299311_gps_data_types.py | 27 ++--
 .../versions/8304966281aa_reencode_files.py | 34 +++--
 .../versions/97b633de0899_video_cam_name.py | 9 +-
 .../b2f76c38a4a0_deckhand_gaps_score.py | 10 +-
 .../b78dce0f5492_ondeck_json_columns.py | 33 +++--
 ...08d4e11cc7_ondeckdata_more_data_columns.py | 20 ++-
 .../bbe04841c70d_port_departures_view.py | 20 +--
 .../d974c1aea745_elog_gaps_score_update.py | 6 +-
 .../versions/e718ddd7c0bd_add_track_table.py | 36 +++--
 .../ecb326942445_starttime_on_videos.py | 22 +--
 .../f48359cf7456_ondeckdata_status.py | 10 +-
 migrations/versions/f835aa8c569a_second.py | 131 ++++++++++--------
 .../f9dbf07180af_test_from_to_columns.py | 13 +-
 ...dfd9e708602_add_elog_timegap_vector_row.py | 22 +--
 reencode.py | 4 +-
 run_aifish.py | 3 +-
 run_ondeck.py | 4 +-
 vector/fish_ai.py | 4 +-
 28 files changed, 337 insertions(+), 255 deletions(-)

diff --git a/gps_fetch.py b/gps_fetch.py
index 0823bde..bb08a40 100644
--- a/gps_fetch.py
+++ b/gps_fetch.py
@@ -62,7 +62,9 @@ def gps_fetch(cpool: SimpleConnectionPool, thalos_dir: Path):
                 new_file: Path = dt_index[new_dt.astimezone(UTC)]
                 with new_file.open() as data:
                     line = data.readline()
-                    m = re.match(r"([+-]?(\d+(\.\d*)?|\.\d+)).*,.*?([+-]?(\d+(\.\d*)?|\.\d+))", line)
+                    m = re.match(
+                        r"([+-]?(\d+(\.\d*)?|\.\d+)).*,.*?([+-]?(\d+(\.\d*)?|\.\d+))", line
+                    )
                     if m:
                         lat = m[1]
                         lon = m[4]
diff --git a/migrations/versions/04eaff9bcc55_.py b/migrations/versions/04eaff9bcc55_.py
index 4ddeca2..c631e88 100644
--- a/migrations/versions/04eaff9bcc55_.py
+++ b/migrations/versions/04eaff9bcc55_.py
@@ -1,14 +1,13 @@
 """first
 
 Revision ID: 04eaff9bcc55
-Revises: 
+Revises:
 Create Date: 2023-04-10 12:56:26.377798
 
 """
 
-
 # revision identifiers, used by Alembic.
-revision = '04eaff9bcc55'
+revision = "04eaff9bcc55"
 down_revision = None
 branch_labels = None
 depends_on = None
diff --git a/migrations/versions/17911f3ffb3b_new_vector_rows_1.py b/migrations/versions/17911f3ffb3b_new_vector_rows_1.py
index 310d269..257e39d 100644
--- a/migrations/versions/17911f3ffb3b_new_vector_rows_1.py
+++ b/migrations/versions/17911f3ffb3b_new_vector_rows_1.py
@@ -5,12 +5,13 @@
 Create Date: 2023-06-02 14:22:38.910122
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 
 # revision identifiers, used by Alembic.
-revision = '17911f3ffb3b'
-down_revision = 'f835aa8c569a'
+revision = "17911f3ffb3b"
+down_revision = "f835aa8c569a"
 branch_labels = None
 depends_on = None
 
@@ -23,16 +24,15 @@ def upgrade() -> None:
             found_id = row[0]
 
     if found_id is None:
-        op.get_bind().execute('insert into vectors (name, configblob) values (\'InternetVector\', \'{"target_ips":["8.8.8.8","1.1.1.1","208.67.222.222","9.9.9.9"],"run_traceroute":false}\');')
-
-
+        op.get_bind().execute(
+            'insert into vectors (name, configblob) values (\'InternetVector\', \'{"target_ips":["8.8.8.8","1.1.1.1","208.67.222.222","9.9.9.9"],"run_traceroute":false}\');'
+        )
 
 
 def downgrade() -> None:
+    op.get_bind().execute(
+        "delete from tests where vector_id = (select id from vectors where name = 'InternetVector');"
+    )
 
-    op.get_bind().execute("delete from tests where vector_id = (select id from vectors where name = 'InternetVector');")
-
-    t = sa.table('vectors')
+    t = sa.table("vectors")
     op.get_bind().execute("delete from vectors where name = 'InternetVector';")
-
-
diff --git a/migrations/versions/47ff3fca73a4_vector_schedulestring.py b/migrations/versions/47ff3fca73a4_vector_schedulestring.py
index 403fcf3..cf6679f 100644
--- a/migrations/versions/47ff3fca73a4_vector_schedulestring.py
+++ b/migrations/versions/47ff3fca73a4_vector_schedulestring.py
@@ -5,23 +5,24 @@
 Create Date: 2023-06-06 13:07:09.704169
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 
 # revision identifiers, used by Alembic.
-revision = '47ff3fca73a4'
-down_revision = '5e4898954923'
+revision = "47ff3fca73a4"
+down_revision = "5e4898954923"
 branch_labels = None
 depends_on = None
 
 
 def upgrade() -> None:
     # ### commands auto generated by Alembic - please adjust! ###
-    op.add_column('vectors', sa.Column('schedule_string', sa.String(), nullable=True))
+    op.add_column("vectors", sa.Column("schedule_string", sa.String(), nullable=True))
     # ### end Alembic commands ###
 
 
 def downgrade() -> None:
     # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_column('vectors', 'schedule_string')
+    op.drop_column("vectors", "schedule_string")
     # ### end Alembic commands ###
diff --git a/migrations/versions/495235ece5f0_ondeckdata_unique.py b/migrations/versions/495235ece5f0_ondeckdata_unique.py
index 41e3d2b..2cb9fe4 100644
--- a/migrations/versions/495235ece5f0_ondeckdata_unique.py
+++ b/migrations/versions/495235ece5f0_ondeckdata_unique.py
@@ -5,17 +5,17 @@
 Create Date: 2023-10-10 15:43:07.752816
 
 """
+
 from alembic import op
 
 # revision identifiers, used by Alembic.
-revision = '495235ece5f0'
-down_revision = 'f48359cf7456'
+revision = "495235ece5f0"
+down_revision = "f48359cf7456"
 branch_labels = None
 depends_on = None
 
 
 def upgrade() -> None:
-
     op.execute("""
 with duped as (
     select distinct on (video_uri) video_uri, id
@@ -26,9 +26,8 @@ def upgrade() -> None:
 where video_uri in (select video_uri from duped)
 and id not in (select id from duped)
 ;""")
-    op.create_unique_constraint(None, 'ondeckdata', ['video_uri'])
-
+    op.create_unique_constraint(None, "ondeckdata", ["video_uri"])
 
 
 def downgrade() -> None:
-    op.drop_constraint(None, 'ondeckdata', type_='unique')
+    op.drop_constraint(None, "ondeckdata", type_="unique")
diff --git a/migrations/versions/58dd42108a22_new_vid_file_table.py b/migrations/versions/58dd42108a22_new_vid_file_table.py
index 86f9d52..9c68cdb 100644
--- a/migrations/versions/58dd42108a22_new_vid_file_table.py
+++ b/migrations/versions/58dd42108a22_new_vid_file_table.py
@@ -5,28 +5,30 @@
 Create Date: 2023-06-16 18:15:23.314916
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 
 # revision identifiers, used by Alembic.
-revision = '58dd42108a22'
-down_revision = 'f9dbf07180af'
+revision = "58dd42108a22"
+down_revision = "f9dbf07180af"
 branch_labels = None
 depends_on = None
 
 
 def upgrade() -> None:
-    op.create_table('video_files',
-    sa.Column('original_path', sa.String(), nullable=False),
-    sa.Column('last_modified', sa.DateTime(timezone=True), nullable=False),
-    sa.Column('decrypted_path', sa.String(), nullable=True),
-    sa.Column('decrypted_datetime', sa.DateTime(timezone=True), nullable=True),
-    sa.Column('stdout', sa.String(), nullable=True),
-    sa.Column('stderr', sa.String(), nullable=True),
-    sa.PrimaryKeyConstraint('original_path')
+    op.create_table(
+        "video_files",
+        sa.Column("original_path", sa.String(), nullable=False),
+        sa.Column("last_modified", sa.DateTime(timezone=True), nullable=False),
+        sa.Column("decrypted_path", sa.String(), nullable=True),
+        sa.Column("decrypted_datetime", sa.DateTime(timezone=True), nullable=True),
+        sa.Column("stdout", sa.String(), nullable=True),
+        sa.Column("stderr", sa.String(), nullable=True),
+        sa.PrimaryKeyConstraint("original_path"),
     )
     pass
 
 
 def downgrade() -> None:
-    op.drop_table('video_files')
+    op.drop_table("video_files")
diff --git a/migrations/versions/5e4898954923_ondeckdata_table.py b/migrations/versions/5e4898954923_ondeckdata_table.py
index ef949b6..978cebb 100644
--- a/migrations/versions/5e4898954923_ondeckdata_table.py
+++ b/migrations/versions/5e4898954923_ondeckdata_table.py
@@ -5,29 +5,33 @@
 Create Date: 2023-06-05 14:09:26.594081
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 
 # revision identifiers, used by Alembic.
-revision = '5e4898954923'
-down_revision = '17911f3ffb3b'
+revision = "5e4898954923"
+down_revision = "17911f3ffb3b"
 branch_labels = None
 depends_on = None
 
 
 def upgrade() -> None:
     # ### commands auto generated by Alembic - please adjust! ###
-    op.create_table('ondeckdata',
-    sa.Column('id', sa.Integer(), nullable=False),
-    sa.Column('video_uri', sa.String(), nullable=True),
-    sa.Column('cocoannotations_uri', sa.String(), nullable=True),
-    sa.Column('datetime', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True),
-    sa.PrimaryKeyConstraint('id')
+    op.create_table(
+        "ondeckdata",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("video_uri", sa.String(), nullable=True),
+        sa.Column("cocoannotations_uri", sa.String(), nullable=True),
+        sa.Column(
+            "datetime", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=True
+        ),
+        sa.PrimaryKeyConstraint("id"),
     )
     # ### end Alembic commands ###
 
 
 def downgrade() -> None:
     # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_table('ondeckdata')
+    op.drop_table("ondeckdata")
     # ### end Alembic commands ###
diff --git a/migrations/versions/5fdb864a1bbb_refactor_aifish.py b/migrations/versions/5fdb864a1bbb_refactor_aifish.py
index 43f0507..7d36048 100644
--- a/migrations/versions/5fdb864a1bbb_refactor_aifish.py
+++ b/migrations/versions/5fdb864a1bbb_refactor_aifish.py
@@ -5,43 +5,58 @@
 Create Date: 2023-12-12 12:43:34.309532
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 from sqlalchemy.dialects import postgresql
 
 # revision identifiers, used by Alembic.
-revision = '5fdb864a1bbb'
-down_revision = 'e718ddd7c0bd'
+revision = "5fdb864a1bbb"
+down_revision = "e718ddd7c0bd"
 branch_labels = None
 depends_on = None
 
 
 def upgrade() -> None:
-
-    op.create_table('aifishdata',
-    sa.Column('id', sa.Integer(), nullable=False),
-    sa.Column('video_uri', sa.String(), nullable=True),
-    sa.Column('processing_uri', sa.String(), nullable=True),
-    sa.Column('output_uri', sa.String(), nullable=True),
-    sa.Column('datetime', sa.DateTime(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True),
-    sa.Column('count', sa.Integer(), nullable=True),
-    sa.Column('runtimems', sa.REAL(), nullable=True),
-    sa.Column('detection_confidence', sa.REAL(), nullable=True),
-    sa.Column('status', sa.String(), nullable=True),
-    sa.ForeignKeyConstraint(['video_uri'], ['video_files.decrypted_path'], ),
-    sa.PrimaryKeyConstraint('id'),
-    sa.UniqueConstraint('video_uri')
+    op.create_table(
+        "aifishdata",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("video_uri", sa.String(), nullable=True),
+        sa.Column("processing_uri", sa.String(), nullable=True),
+        sa.Column("output_uri", sa.String(), nullable=True),
+        sa.Column(
+            "datetime",
+            sa.DateTime(timezone=True),
+            server_default=sa.text("CURRENT_TIMESTAMP"),
+            nullable=True,
+        ),
+        sa.Column("count", sa.Integer(), nullable=True),
+        sa.Column("runtimems", sa.REAL(), nullable=True),
+        sa.Column("detection_confidence", sa.REAL(), nullable=True),
+        sa.Column("status", sa.String(), nullable=True),
+        sa.ForeignKeyConstraint(
+            ["video_uri"],
+            ["video_files.decrypted_path"],
+        ),
+        sa.PrimaryKeyConstraint("id"),
+        sa.UniqueConstraint("video_uri"),
     )
-    op.drop_table('fishaidata')
-
+    op.drop_table("fishaidata")
 
 
 def downgrade() -> None:
-    op.create_table('fishaidata',
-    sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
-    sa.Column('video_uri', sa.VARCHAR(), autoincrement=False, nullable=True),
-    sa.Column('cocoannotations_uri', sa.VARCHAR(), autoincrement=False, nullable=True),
-    sa.Column('datetime', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
-    sa.PrimaryKeyConstraint('id', name='fishaidata_pkey')
+    op.create_table(
+        "fishaidata",
+        sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False),
+        sa.Column("video_uri", sa.VARCHAR(), autoincrement=False, nullable=True),
+        sa.Column("cocoannotations_uri", sa.VARCHAR(), autoincrement=False, nullable=True),
+        sa.Column(
+            "datetime",
+            postgresql.TIMESTAMP(timezone=True),
+            server_default=sa.text("CURRENT_TIMESTAMP"),
+            autoincrement=False,
+            nullable=True,
+        ),
+        sa.PrimaryKeyConstraint("id", name="fishaidata_pkey"),
     )
-    op.drop_table('aifishdata')
+    op.drop_table("aifishdata")
diff --git a/migrations/versions/643148911953_deckhand_json_views.py b/migrations/versions/643148911953_deckhand_json_views.py
index 2ee0eb2..7df0cfb 100644
--- a/migrations/versions/643148911953_deckhand_json_views.py
+++ b/migrations/versions/643148911953_deckhand_json_views.py
@@ -5,11 +5,12 @@
 Create Date: 2023-08-16 11:38:18.120705
 
 """
+
 from alembic import op
 
 # revision identifiers, used by Alembic.
-revision = '643148911953'
-down_revision = '677a2f2884e1'
+revision = "643148911953"
+down_revision = "677a2f2884e1"
 branch_labels = None
 depends_on = None
 
@@ -50,8 +51,7 @@ def upgrade() -> None:
 """)
-
 
 
 def downgrade() -> None:
-    op.get_bind().execute('DROP VIEW deckhandevents_mostrecentlonglineevent_jsonextracted;')
-    op.get_bind().execute('DROP VIEW deckhandevents_mostrecenteventid_nophoto;')
+    op.get_bind().execute("DROP VIEW deckhandevents_mostrecentlonglineevent_jsonextracted;")
+    op.get_bind().execute("DROP VIEW deckhandevents_mostrecenteventid_nophoto;")
     pass
diff --git a/migrations/versions/677a2f2884e1_s3uploadstable.py b/migrations/versions/677a2f2884e1_s3uploadstable.py
index d79a40b..02fab78 100644
--- a/migrations/versions/677a2f2884e1_s3uploadstable.py
+++ b/migrations/versions/677a2f2884e1_s3uploadstable.py
@@ -5,12 +5,13 @@
 Create Date: 2023-08-02 17:08:34.590190
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 
 # revision identifiers, used by Alembic.
-revision = '677a2f2884e1'
-down_revision = '97b633de0899'
+revision = "677a2f2884e1"
+down_revision = "97b633de0899"
 branch_labels = None
 depends_on = None
 
@@ -18,14 +19,15 @@
 def upgrade() -> None:
     # ### commands auto generated by Alembic - please adjust! ###
-    op.create_table('s3uploads',
-    sa.Column('datetime', sa.DateTime(timezone=True), nullable=False),
-    sa.Column('tablename', sa.String(), nullable=False),
+    op.create_table(
+        "s3uploads",
+        sa.Column("datetime", sa.DateTime(timezone=True), nullable=False),
+        sa.Column("tablename", sa.String(), nullable=False),
     )
     # ### end Alembic commands ###
 
 
 def downgrade() -> None:
     # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_table('s3uploads')
+    op.drop_table("s3uploads")
     # ### end Alembic commands ###
 
diff --git a/migrations/versions/81b92a299311_gps_data_types.py b/migrations/versions/81b92a299311_gps_data_types.py
index 9999205..33f4c31 100644
--- a/migrations/versions/81b92a299311_gps_data_types.py
+++ b/migrations/versions/81b92a299311_gps_data_types.py
@@ -5,12 +5,13 @@
 Create Date: 2023-07-26 16:51:17.527649
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 
 # revision identifiers, used by Alembic.
-revision = '81b92a299311'
-down_revision = 'ecb326942445'
+revision = "81b92a299311"
+down_revision = "ecb326942445"
 branch_labels = None
 depends_on = None
 
@@ -18,20 +19,22 @@
 def upgrade() -> None:
     # ### commands auto generated by Alembic - please adjust! ###
     op.execute("delete from gpsdata;")
-    op.add_column('gpsdata', sa.Column('gps_datetime', sa.DateTime(timezone=True), nullable=False))
-    op.add_column('gpsdata', sa.Column('lat', sa.Float(), nullable=False))
-    op.add_column('gpsdata', sa.Column('lon', sa.Float(), nullable=False))
-    op.drop_column('gpsdata', 'sentence')
-    op.drop_column('gpsdata', 'id')
+    op.add_column("gpsdata", sa.Column("gps_datetime", sa.DateTime(timezone=True), nullable=False))
+    op.add_column("gpsdata", sa.Column("lat", sa.Float(), nullable=False))
+    op.add_column("gpsdata", sa.Column("lon", sa.Float(), nullable=False))
+    op.drop_column("gpsdata", "sentence")
+    op.drop_column("gpsdata", "id")
     # ### end Alembic commands ###
 
 
 def downgrade() -> None:
     # ### commands auto generated by Alembic - please adjust! ###
     op.execute("delete from gpsdata;")
-    op.add_column('gpsdata', sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False))
-    op.add_column('gpsdata', sa.Column('sentence', sa.VARCHAR(), autoincrement=False, nullable=True))
-    op.drop_column('gpsdata', 'lon')
-    op.drop_column('gpsdata', 'lat')
-    op.drop_column('gpsdata', 'gps_datetime')
+    op.add_column("gpsdata", sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False))
+    op.add_column(
+        "gpsdata", sa.Column("sentence", sa.VARCHAR(), autoincrement=False, nullable=True)
+    )
+    op.drop_column("gpsdata", "lon")
+    op.drop_column("gpsdata", "lat")
+    op.drop_column("gpsdata", "gps_datetime")
     # ### end Alembic commands ###
diff --git a/migrations/versions/8304966281aa_reencode_files.py b/migrations/versions/8304966281aa_reencode_files.py
index b3f9a17..f979ade 100644
--- a/migrations/versions/8304966281aa_reencode_files.py
+++ b/migrations/versions/8304966281aa_reencode_files.py
@@ -5,25 +5,39 @@
 Create Date: 2023-09-20 15:15:56.043600
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 
 # revision identifiers, used by Alembic.
-revision = '8304966281aa'
-down_revision = 'd974c1aea745'
+revision = "8304966281aa"
+down_revision = "d974c1aea745"
 branch_labels = None
 depends_on = None
 
 
 def upgrade() -> None:
-    op.add_column('video_files', sa.Column('reencoded_path', sa.VARCHAR(), autoincrement=False, nullable=True))
-    op.add_column('video_files', sa.Column('reencoded_datetime', sa.DateTime(timezone=True), autoincrement=False, nullable=True))
-    op.add_column('video_files', sa.Column('reencoded_stdout', sa.VARCHAR(), autoincrement=False, nullable=True))
-    op.add_column('video_files', sa.Column('reencoded_stderr', sa.VARCHAR(), autoincrement=False, nullable=True))
+    op.add_column(
+        "video_files", sa.Column("reencoded_path", sa.VARCHAR(), autoincrement=False, nullable=True)
+    )
+    op.add_column(
+        "video_files",
+        sa.Column(
+            "reencoded_datetime", sa.DateTime(timezone=True), autoincrement=False, nullable=True
+        ),
+    )
+    op.add_column(
+        "video_files",
+        sa.Column("reencoded_stdout", sa.VARCHAR(), autoincrement=False, nullable=True),
+    )
+    op.add_column(
+        "video_files",
+        sa.Column("reencoded_stderr", sa.VARCHAR(), autoincrement=False, nullable=True),
+    )
 
 
 def downgrade() -> None:
-    op.drop_column('video_files', 'reencoded_stderr')
-    op.drop_column('video_files', 'reencoded_stdout')
-    op.drop_column('video_files', 'reencoded_datetime')
-    op.drop_column('video_files', 'reencoded_path')
+    op.drop_column("video_files", "reencoded_stderr")
+    op.drop_column("video_files", "reencoded_stdout")
+    op.drop_column("video_files", "reencoded_datetime")
+    op.drop_column("video_files", "reencoded_path")
diff --git a/migrations/versions/97b633de0899_video_cam_name.py b/migrations/versions/97b633de0899_video_cam_name.py
index deee67f..c2b5072 100644
--- a/migrations/versions/97b633de0899_video_cam_name.py
+++ b/migrations/versions/97b633de0899_video_cam_name.py
@@ -5,23 +5,24 @@
 Create Date: 2023-07-27 15:50:13.450935
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 
 # revision identifiers, used by Alembic.
-revision = '97b633de0899'
-down_revision = '81b92a299311'
+revision = "97b633de0899"
+down_revision = "81b92a299311"
 branch_labels = None
 depends_on = None
 
 
 def upgrade() -> None:
     # ### commands auto generated by Alembic - please adjust! ###
-    op.add_column('video_files', sa.Column('cam_name', sa.VARCHAR(), nullable=True))
+    op.add_column("video_files", sa.Column("cam_name", sa.VARCHAR(), nullable=True))
    # ### end Alembic commands ###
 
 
 def downgrade() -> None:
     # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_column('video_files', 'cam_name')
+    op.drop_column("video_files", "cam_name")
     # ### end Alembic commands ###
diff --git a/migrations/versions/b2f76c38a4a0_deckhand_gaps_score.py b/migrations/versions/b2f76c38a4a0_deckhand_gaps_score.py
index 698624c..0a9b2cb 100644
--- a/migrations/versions/b2f76c38a4a0_deckhand_gaps_score.py
+++ b/migrations/versions/b2f76c38a4a0_deckhand_gaps_score.py
@@ -5,11 +5,12 @@
 Create Date: 2023-09-19 12:48:47.152161
 
 """
+
 from alembic import op
 
 # revision identifiers, used by Alembic.
-revision = 'b2f76c38a4a0'
-down_revision = 'bbe04841c70d'
+revision = "b2f76c38a4a0"
+down_revision = "bbe04841c70d"
 branch_labels = None
 depends_on = None
 
@@ -30,7 +31,6 @@ def upgrade() -> None:
     end;
     $$;""")
 
-
     op.get_bind().execute("""CREATE OR REPLACE VIEW elog_time_gap_score as (
     with paired_seq_deckhandevents as (
     with A as (
@@ -66,5 +66,5 @@ def upgrade() -> None:
 
 
 def downgrade() -> None:
-    op.get_bind().execute('drop view elog_time_gap_score')
-    op.get_bind().execute('drop function elog_time_gap_sigmoid')
+    op.get_bind().execute("drop view elog_time_gap_score")
+    op.get_bind().execute("drop function elog_time_gap_sigmoid")
diff --git a/migrations/versions/b78dce0f5492_ondeck_json_columns.py b/migrations/versions/b78dce0f5492_ondeck_json_columns.py
index 3a19a80..1ce2427 100644
--- a/migrations/versions/b78dce0f5492_ondeck_json_columns.py
+++ b/migrations/versions/b78dce0f5492_ondeck_json_columns.py
@@ -5,34 +5,33 @@
 Create Date: 2023-08-16 14:16:31.080353
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 
 # revision identifiers, used by Alembic.
-revision = 'b78dce0f5492'
-down_revision = '643148911953'
+revision = "b78dce0f5492"
+down_revision = "643148911953"
 branch_labels = None
 depends_on = None
 
 
 def upgrade() -> None:
+    op.add_column("ondeckdata", sa.Column("overallcount", sa.Integer(), nullable=True))
+    op.add_column("ondeckdata", sa.Column("overallruntimems", sa.REAL(), nullable=True))
+    op.add_column("ondeckdata", sa.Column("tracked_confidence", sa.REAL(), nullable=True))
 
-    op.add_column('ondeckdata', sa.Column('overallcount', sa.Integer(), nullable=True))
-    op.add_column('ondeckdata', sa.Column('overallruntimems', sa.REAL(), nullable=True))
-    op.add_column('ondeckdata', sa.Column('tracked_confidence', sa.REAL(), nullable=True))
-
-
-    op.create_unique_constraint('uq_video_files_decrypted_path', 'video_files', ['decrypted_path'])
-
-    op.get_bind().execute('delete from ondeckdata where id in (select ondeckdata.id from ondeckdata left join video_files on video_uri = decrypted_path where decrypted_path is null);')
-    op.create_foreign_key(None, 'ondeckdata', 'video_files', ['video_uri'], ['decrypted_path'])
+    op.create_unique_constraint("uq_video_files_decrypted_path", "video_files", ["decrypted_path"])
+    op.get_bind().execute(
+        "delete from ondeckdata where id in (select ondeckdata.id from ondeckdata left join video_files on video_uri = decrypted_path where decrypted_path is null);"
+    )
+    op.create_foreign_key(None, "ondeckdata", "video_files", ["video_uri"], ["decrypted_path"])
 
 
 def downgrade() -> None:
-    op.drop_constraint(None, 'ondeckdata', type_='foreignkey')
-    op.drop_constraint('uq_video_files_decrypted_path', 'video_files', type_='unique')
-    op.drop_column('ondeckdata', 'tracked_confidence')
-    op.drop_column('ondeckdata', 'overallruntimems')
-    op.drop_column('ondeckdata', 'overallcount')
-
+    op.drop_constraint(None, "ondeckdata", type_="foreignkey")
+    op.drop_constraint("uq_video_files_decrypted_path", "video_files", type_="unique")
+    op.drop_column("ondeckdata", "tracked_confidence")
+    op.drop_column("ondeckdata", "overallruntimems")
+    op.drop_column("ondeckdata", "overallcount")
diff --git a/migrations/versions/ba08d4e11cc7_ondeckdata_more_data_columns.py b/migrations/versions/ba08d4e11cc7_ondeckdata_more_data_columns.py
index c0a6764..e54d48a 100644
--- a/migrations/versions/ba08d4e11cc7_ondeckdata_more_data_columns.py
+++ b/migrations/versions/ba08d4e11cc7_ondeckdata_more_data_columns.py
@@ -5,26 +5,24 @@
 Create Date: 2023-10-11 17:33:42.633350
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 
 # revision identifiers, used by Alembic.
-revision = 'ba08d4e11cc7'
-down_revision = '495235ece5f0'
+revision = "ba08d4e11cc7"
+down_revision = "495235ece5f0"
 branch_labels = None
 depends_on = None
 
 
 def upgrade() -> None:
-    op.add_column('ondeckdata', sa.Column('overallcatches', sa.Integer(), nullable=True))
-    op.add_column('ondeckdata', sa.Column('overalldiscards', sa.Integer(), nullable=True))
-    op.add_column('ondeckdata', sa.Column('detection_confidence', sa.REAL(), nullable=True))
-
+    op.add_column("ondeckdata", sa.Column("overallcatches", sa.Integer(), nullable=True))
+    op.add_column("ondeckdata", sa.Column("overalldiscards", sa.Integer(), nullable=True))
+    op.add_column("ondeckdata", sa.Column("detection_confidence", sa.REAL(), nullable=True))
 
 
 def downgrade() -> None:
-    op.drop_column('ondeckdata', 'detection_confidence')
-    op.drop_column('ondeckdata', 'overalldiscards')
-    op.drop_column('ondeckdata', 'overallcatches')
-
-
+    op.drop_column("ondeckdata", "detection_confidence")
+    op.drop_column("ondeckdata", "overalldiscards")
+    op.drop_column("ondeckdata", "overallcatches")
diff --git a/migrations/versions/bbe04841c70d_port_departures_view.py b/migrations/versions/bbe04841c70d_port_departures_view.py
index 5155585..b7b5271 100644
--- a/migrations/versions/bbe04841c70d_port_departures_view.py
+++ b/migrations/versions/bbe04841c70d_port_departures_view.py
@@ -5,20 +5,22 @@
 Create Date: 2023-09-19 11:59:42.945969
 
 """
+
 from alembic import op
 
 # revision identifiers, used by Alembic.
-revision = 'bbe04841c70d'
-down_revision = 'b78dce0f5492'
+revision = "bbe04841c70d"
+down_revision = "b78dce0f5492"
 branch_labels = None
 depends_on = None
 
 
 def upgrade() -> None:
-
-    op.get_bind().execute('create table if not exists port_location (port_location point);')
-    op.get_bind().execute('truncate port_location;')
-    op.get_bind().execute('insert into port_location (port_location) values (point(9.4241879,-84.1833372));')
+    op.get_bind().execute("create table if not exists port_location (port_location point);")
+    op.get_bind().execute("truncate port_location;")
+    op.get_bind().execute(
+        "insert into port_location (port_location) values (point(9.4241879,-84.1833372));"
+    )
 
     op.get_bind().execute("""CREATE OR REPLACE VIEW port_departures as (
         with A as (
@@ -40,6 +42,6 @@ def upgrade() -> None:
 
 
 def downgrade() -> None:
-    op.get_bind().execute('drop view port_arrivals;')
-    op.get_bind().execute('drop view port_departures;')
-    op.get_bind().execute('drop table port_location;')
+    op.get_bind().execute("drop view port_arrivals;")
+    op.get_bind().execute("drop view port_departures;")
+    op.get_bind().execute("drop table port_location;")
diff --git a/migrations/versions/d974c1aea745_elog_gaps_score_update.py b/migrations/versions/d974c1aea745_elog_gaps_score_update.py
index 117d2f1..7985000 100644
--- a/migrations/versions/d974c1aea745_elog_gaps_score_update.py
+++ b/migrations/versions/d974c1aea745_elog_gaps_score_update.py
@@ -5,16 +5,16 @@
 Create Date: 2023-09-19 13:16:37.865465
 
 """
+
 from alembic import op
 
 # revision identifiers, used by Alembic.
-revision = 'd974c1aea745'
-down_revision = 'b2f76c38a4a0'
+revision = "d974c1aea745"
+down_revision = "b2f76c38a4a0"
 branch_labels = None
 depends_on = None
 
 
-
 def upgrade() -> None:
     op.get_bind().execute("""CREATE OR REPLACE VIEW elog_time_gap_score as (
     with paired_seq_deckhandevents as (
diff --git a/migrations/versions/e718ddd7c0bd_add_track_table.py b/migrations/versions/e718ddd7c0bd_add_track_table.py
index fabc99f..62cb39e 100644
--- a/migrations/versions/e718ddd7c0bd_add_track_table.py
+++ b/migrations/versions/e718ddd7c0bd_add_track_table.py
@@ -5,32 +5,38 @@
 Create Date: 2023-12-05 16:55:46.938879
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 from sqlalchemy.dialects import postgresql
 
 # revision identifiers, used by Alembic.
-revision = 'e718ddd7c0bd'
-down_revision = 'fdfd9e708602'
+revision = "e718ddd7c0bd"
+down_revision = "fdfd9e708602"
 branch_labels = None
 depends_on = None
 
 
 def upgrade() -> None:
-
-    op.create_table('tracks',
-    sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
-    sa.Column('video_uri', sa.VARCHAR(), autoincrement=False, nullable=True),
-    sa.Column('cocoannotations_uri', sa.VARCHAR(), autoincrement=False, nullable=True),
-    sa.Column('track_id', sa.INTEGER(), autoincrement=False, nullable=True),
-    sa.Column('first_framenum', sa.INTEGER(), autoincrement=False, nullable=True),
-    sa.Column('last_framenum', sa.INTEGER(), autoincrement=False, nullable=True),
-    sa.Column('confidences', postgresql.ARRAY(sa.REAL()), autoincrement=False, nullable=True),
-    sa.Column('datetime', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
-    sa.PrimaryKeyConstraint('id', name='tracks_pkey')
+    op.create_table(
+        "tracks",
+        sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False),
+        sa.Column("video_uri", sa.VARCHAR(), autoincrement=False, nullable=True),
+        sa.Column("cocoannotations_uri", sa.VARCHAR(), autoincrement=False, nullable=True),
+        sa.Column("track_id", sa.INTEGER(), autoincrement=False, nullable=True),
+        sa.Column("first_framenum", sa.INTEGER(), autoincrement=False, nullable=True),
+        sa.Column("last_framenum", sa.INTEGER(), autoincrement=False, nullable=True),
+        sa.Column("confidences", postgresql.ARRAY(sa.REAL()), autoincrement=False, nullable=True),
+        sa.Column(
+            "datetime",
+            postgresql.TIMESTAMP(timezone=True),
+            server_default=sa.text("CURRENT_TIMESTAMP"),
+            autoincrement=False,
+            nullable=True,
+        ),
+        sa.PrimaryKeyConstraint("id", name="tracks_pkey"),
     )
 
 
 def downgrade() -> None:
-    op.drop_table('tracks')
-
+    op.drop_table("tracks")
diff --git a/migrations/versions/ecb326942445_starttime_on_videos.py b/migrations/versions/ecb326942445_starttime_on_videos.py
index da14f9a..8b0162e 100644
--- a/migrations/versions/ecb326942445_starttime_on_videos.py
+++ b/migrations/versions/ecb326942445_starttime_on_videos.py
@@ -11,8 +11,8 @@
 from sqlalchemy.dialects import postgresql
 
 # revision identifiers, used by Alembic.
-revision = 'ecb326942445'
-down_revision = '1d1b10e054af'
+revision = "ecb326942445"
+down_revision = "1d1b10e054af"
 branch_labels = None
 depends_on = None
 
@@ -20,8 +20,9 @@
 def upgrade() -> None:
     # ### commands auto generated by Alembic - please adjust! ###
     op.add_column(
-        'video_files',
-        sa.Column('start_datetime', sa.DateTime(timezone=True), autoincrement=False, nullable=True))
+        "video_files",
+        sa.Column("start_datetime", sa.DateTime(timezone=True), autoincrement=False, nullable=True),
+    )
     op.execute("""
 update video_files
 set start_datetime = to_timestamp(
@@ -32,14 +33,17 @@ def upgrade() -> None:
         'DD-MM-YYYY-HH24-MI TZH TZM'
     )
     """)
-    op.alter_column('video_files', 'start_datetime',
-            existing_type=postgresql.TIMESTAMP(timezone=True),
-            nullable=False,
-            autoincrement=False)
+    op.alter_column(
+        "video_files",
+        "start_datetime",
+        existing_type=postgresql.TIMESTAMP(timezone=True),
+        nullable=False,
+        autoincrement=False,
+    )
     # ### end Alembic commands ###
 
 
 def downgrade() -> None:
     # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_column('video_files', 'start_datetime')
+    op.drop_column("video_files", "start_datetime")
     # ### end Alembic commands ###
diff --git a/migrations/versions/f48359cf7456_ondeckdata_status.py b/migrations/versions/f48359cf7456_ondeckdata_status.py
index a86fd2b..9197882 100644
--- a/migrations/versions/f48359cf7456_ondeckdata_status.py
+++ b/migrations/versions/f48359cf7456_ondeckdata_status.py
@@ -5,22 +5,22 @@
 Create Date: 2023-10-09 17:35:01.581320
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 
 # revision identifiers, used by Alembic.
-revision = 'f48359cf7456'
-down_revision = '8304966281aa'
+revision = "f48359cf7456"
+down_revision = "8304966281aa"
 branch_labels = None
 depends_on = None
 
 
 def upgrade() -> None:
-
-    op.add_column('ondeckdata', sa.Column('status', sa.String(), nullable=True))
+    op.add_column("ondeckdata", sa.Column("status", sa.String(), nullable=True))
     op.execute("update ondeckdata set status = 'done';")
 
 
 def downgrade() -> None:
-    op.drop_column('ondeckdata', 'status')
+    op.drop_column("ondeckdata", "status")
 
diff --git a/migrations/versions/f835aa8c569a_second.py b/migrations/versions/f835aa8c569a_second.py
index 8192bf5..bb4e3bf 100644
--- a/migrations/versions/f835aa8c569a_second.py
+++ b/migrations/versions/f835aa8c569a_second.py
@@ -5,80 +5,103 @@
 Create Date: 2023-05-16 11:44:58.986312
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 
 # revision identifiers, used by Alembic.
-revision = 'f835aa8c569a'
-down_revision = '04eaff9bcc55'
+revision = "f835aa8c569a"
+down_revision = "04eaff9bcc55"
 branch_labels = None
 depends_on = None
 
 
 def upgrade() -> None:
     # ### commands auto generated by Alembic - please adjust! ###
-    op.create_table('boatschedules',
-    sa.Column('id', sa.Integer(), nullable=False),
-    sa.Column('sentence', sa.String(), nullable=True),
-    sa.Column('datetime', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True),
-    sa.PrimaryKeyConstraint('id')
+    op.create_table(
+        "boatschedules",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("sentence", sa.String(), nullable=True),
+        sa.Column(
+            "datetime", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=True
+        ),
+        sa.PrimaryKeyConstraint("id"),
     )
-    op.create_table('deckhandevents',
-    sa.Column('id', sa.Integer(), nullable=False),
-    sa.Column('jsonblob', sa.String(), nullable=True),
-    sa.Column('datetime', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True),
-    sa.PrimaryKeyConstraint('id')
+    op.create_table(
+        "deckhandevents",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("jsonblob", sa.String(), nullable=True),
+        sa.Column(
+            "datetime", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=True
+        ),
+        sa.PrimaryKeyConstraint("id"),
     )
-    op.create_table('fishaidata',
-    sa.Column('id', sa.Integer(), nullable=False),
-    sa.Column('video_uri', sa.String(), nullable=True),
-    sa.Column('cocoannotations_uri', sa.String(), nullable=True),
-    sa.Column('datetime', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True),
-    sa.PrimaryKeyConstraint('id')
+    op.create_table(
+        "fishaidata",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("video_uri", sa.String(), nullable=True),
+        sa.Column("cocoannotations_uri", sa.String(), nullable=True),
+        sa.Column(
+            "datetime", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=True
+        ),
+        sa.PrimaryKeyConstraint("id"),
    )
-    op.create_table('gpsdata',
-    sa.Column('id', sa.Integer(), nullable=False),
-    sa.Column('sentence', sa.String(), nullable=True),
-    sa.Column('datetime', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True),
-    sa.PrimaryKeyConstraint('id')
+    op.create_table(
+        "gpsdata",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("sentence", sa.String(), nullable=True),
+        sa.Column(
+            "datetime", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=True
+        ),
+        sa.PrimaryKeyConstraint("id"),
     )
-    op.create_table('internetdata',
-    sa.Column('id', sa.Integer(), nullable=False),
-    sa.Column('traceroute', sa.String(), nullable=True),
-    sa.Column('ping', sa.Float(), nullable=True),
-    sa.Column('packetloss', sa.Float(), nullable=True),
-    sa.Column('returncode', sa.Integer(), nullable=True),
-    sa.Column('datetime', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True),
-    sa.PrimaryKeyConstraint('id')
+    op.create_table(
+        "internetdata",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("traceroute", sa.String(), nullable=True),
+        sa.Column("ping", sa.Float(), nullable=True),
+        sa.Column("packetloss", sa.Float(), nullable=True),
+        sa.Column("returncode", sa.Integer(), nullable=True),
+        sa.Column(
+            "datetime", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=True
+        ),
+        sa.PrimaryKeyConstraint("id"),
     )
-    op.create_table('vectors',
-    sa.Column('id', sa.Integer(), nullable=False),
-    sa.Column('name', sa.String(), nullable=True),
-    sa.Column('configblob', sa.String(), nullable=True),
-    sa.PrimaryKeyConstraint('id')
+    op.create_table(
+        "vectors",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("name", sa.String(), nullable=True),
+        sa.Column("configblob", sa.String(), nullable=True),
+        sa.PrimaryKeyConstraint("id"),
     )
-    op.create_table('tests',
-    sa.Column('id', sa.Integer(), nullable=False),
-    sa.Column('name', sa.String(), nullable=True),
-    sa.Column('type', sa.Enum('one', 'two', 'three', name='t'), nullable=True),
-    sa.Column('vector_id', sa.Integer(), nullable=True),
-    sa.Column('score', sa.Float(), nullable=True),
-    sa.Column('detail', sa.String(), nullable=True),
-    sa.Column('datetime', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True),
-    sa.ForeignKeyConstraint(['vector_id'], ['vectors.id'], ),
-    sa.PrimaryKeyConstraint('id')
+    op.create_table(
+        "tests",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("name", sa.String(), nullable=True),
+        sa.Column("type", sa.Enum("one", "two", "three", name="t"), nullable=True),
+        sa.Column("vector_id", sa.Integer(), nullable=True),
+        sa.Column("score", sa.Float(), nullable=True),
+        sa.Column("detail", sa.String(), nullable=True),
+        sa.Column(
+            "datetime", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=True
+        ),
+        sa.ForeignKeyConstraint(
+            ["vector_id"],
+            ["vectors.id"],
+        ),
+        sa.PrimaryKeyConstraint("id"),
     )
     # ### end Alembic commands ###
 
 
 def downgrade() -> None:
     # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_table('tests')
-    op.drop_table('vectors')
-    op.drop_table('internetdata')
-    op.drop_table('gpsdata')
-    op.drop_table('fishaidata')
-    op.drop_table('deckhandevents')
-    op.drop_table('boatschedules')
-    op.execute('drop type t;')
+    op.drop_table("tests")
+    op.drop_table("vectors")
+    op.drop_table("internetdata")
+    op.drop_table("gpsdata")
+    op.drop_table("fishaidata")
+    op.drop_table("deckhandevents")
+    op.drop_table("boatschedules")
+    op.execute("drop type t;")
     # ### end Alembic commands ###
diff --git a/migrations/versions/f9dbf07180af_test_from_to_columns.py b/migrations/versions/f9dbf07180af_test_from_to_columns.py
index 81ee1a7..f59c83b 100644
--- a/migrations/versions/f9dbf07180af_test_from_to_columns.py
+++ b/migrations/versions/f9dbf07180af_test_from_to_columns.py
@@ -5,25 +5,26 @@
 Create Date: 2023-06-06 13:12:18.789652
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 
 # revision identifiers, used by Alembic.
-revision = 'f9dbf07180af'
-down_revision = '47ff3fca73a4'
+revision = "f9dbf07180af"
+down_revision = "47ff3fca73a4"
 branch_labels = None
 depends_on = None
 
 
 def upgrade() -> None:
     # ### commands auto generated by Alembic - please adjust! ###
-    op.add_column('tests', sa.Column('datetime_from', sa.DateTime(timezone=True), nullable=True))
-    op.add_column('tests', sa.Column('datetime_to', sa.DateTime(timezone=True), nullable=True))
+    op.add_column("tests", sa.Column("datetime_from", sa.DateTime(timezone=True), nullable=True))
+    op.add_column("tests", sa.Column("datetime_to", sa.DateTime(timezone=True), nullable=True))
     # ### end Alembic commands ###
 
 
 def downgrade() -> None:
     # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_column('tests', 'datetime_to')
-    op.drop_column('tests', 'datetime_from')
+    op.drop_column("tests", "datetime_to")
+    op.drop_column("tests", "datetime_from")
     # ### end Alembic commands ###
diff --git a/migrations/versions/fdfd9e708602_add_elog_timegap_vector_row.py b/migrations/versions/fdfd9e708602_add_elog_timegap_vector_row.py
index ea1c527..55b41ed 100644
--- a/migrations/versions/fdfd9e708602_add_elog_timegap_vector_row.py
+++ b/migrations/versions/fdfd9e708602_add_elog_timegap_vector_row.py
@@ -5,12 +5,13 @@
 Create Date: 2023-11-07 16:50:44.303059
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 
 # revision identifiers, used by Alembic.
-revision = 'fdfd9e708602'
-down_revision = 'ba08d4e11cc7'
+revision = "fdfd9e708602"
+down_revision = "ba08d4e11cc7"
 branch_labels = None
 depends_on = None
 
@@ -18,19 +19,22 @@
 def upgrade() -> None:
     # stmt = sa.select(sa.table('vectors')).where(name="ElogTimeGapsVector")
     found_id = None
-    for row in op.get_bind().execute("select id, name from vectors where name = 'ElogTimeGapsVector';"):
+    for row in op.get_bind().execute(
+        "select id, name from vectors where name = 'ElogTimeGapsVector';"
+    ):
         if row:
             found_id = row[0]
 
     if found_id is None:
-        op.get_bind().execute('insert into vectors (name, configblob, schedule_string) values (\'ElogTimeGapsVector\', \'{}\', \'every 4 hours\');')
-
-
+        op.get_bind().execute(
+            "insert into vectors (name, configblob, schedule_string) values ('ElogTimeGapsVector', '{}', 'every 4 hours');"
+        )
 
 
 def downgrade() -> None:
-    op.get_bind().execute("delete from tests where vector_id = (select id from vectors where name = 'ElogTimeGapsVector');")
+    op.get_bind().execute(
+        "delete from tests where vector_id = (select id from vectors where name = 'ElogTimeGapsVector');"
+    )
 
-    t = sa.table('vectors')
+    t = sa.table("vectors")
     op.get_bind().execute("delete from vectors where name = 'ElogTimeGapsVector';")
-
diff --git a/reencode.py b/reencode.py
index 4dc016a..ee79c36 100644
--- a/reencode.py
+++ b/reencode.py
@@ -89,7 +89,9 @@ def run_reencode(output_dir: Path, sessionmaker: SessionMaker):
 
         update_reencoded_path = None
 
-        p: CompletedProcess[str] = subprocess.run(cmd, shell=True, capture_output=True, text=True, check=False)
+        p: CompletedProcess[str] = subprocess.run(
+            cmd, shell=True, capture_output=True, text=True, check=False
+        )
         if (
             p.returncode == 0
             and p.stderr.find("No such file") < 0
diff --git a/run_aifish.py b/run_aifish.py
index e5ad2e7..e827496 100644
--- a/run_aifish.py
+++ b/run_aifish.py
@@ -394,7 +394,8 @@ def lost_inprogress(sessionmaker: SessionMaker, aifish_processing_dir: Path):
         'journalctl -o short-iso -u aifish_model.service | grep systemd | grep Started | tail -n 1 | sed "s/edge.*//"',
         shell=True,
         text=True,
-        capture_output=True, check=False,
+        capture_output=True,
+        check=False,
     )
     last_start_time_dt = parser.parse(last_start_time_s.stdout)
 
diff --git a/run_ondeck.py b/run_ondeck.py
index 5cef6e2..23abc62 100644
--- a/run_ondeck.py
+++ b/run_ondeck.py
@@ -107,7 +107,9 @@ def run_ondeck(output_dir: Path, engine: Path, sessionmaker: SessionMaker, thalo
         )
         if engine:
             cmd += " --model %s" % (str(engine.absolute()),)
-        p: CompletedProcess[str] = subprocess.run(cmd, shell=True, capture_output=True, text=True, check=False)
+        p: CompletedProcess[str] = subprocess.run(
+            cmd, shell=True, capture_output=True, text=True, check=False
+        )
         if p.returncode == 0:
             with sessionmaker() as session:
                 parse_json(session, decrypted_path, json_out_file)
diff --git a/vector/fish_ai.py b/vector/fish_ai.py
index 2e62088..0d4075a 100644
--- a/vector/fish_ai.py
+++ b/vector/fish_ai.py
@@ -76,9 +76,7 @@ def execute(self, expected_timedelta):
                 groups.append(dict(t))
                 continue
             if groups[-1]["maxtime"] + self.event_grouping_timedelta >= t["mintime"]:
-                latertime = (
-                    max(groups[-1]["maxtime"], t["maxtime"])
-                )
+                latertime = max(groups[-1]["maxtime"], t["maxtime"])
                 groups[-1]["maxtime"] = latertime
             else:
                 groups.append(dict(t))
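
Note on the final hunk: the vector/fish_ai.py change only collapses a parenthesized max() call onto one line inside the event-grouping loop. For readers who want that grouping pattern in isolation, here is a minimal standalone sketch built from the mintime/maxtime keys and grouping-timedelta comparison visible in the hunk's context; the function name, signature, and default timedelta below are hypothetical and not taken from the repository.

from datetime import timedelta


def group_events(rows, event_grouping_timedelta=timedelta(minutes=5)):
    # rows: dicts with "mintime"/"maxtime" datetimes, assumed sorted by "mintime".
    groups = []
    for t in rows:
        if not groups:
            groups.append(dict(t))
            continue
        if groups[-1]["maxtime"] + event_grouping_timedelta >= t["mintime"]:
            # Close enough to the previous group: extend it instead of starting a new one.
            groups[-1]["maxtime"] = max(groups[-1]["maxtime"], t["maxtime"])
        else:
            groups.append(dict(t))
    return groups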