From b74ca971789a740bfc6f2bd024b52857e0e81155 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Thu, 11 Aug 2022 15:27:18 +0200 Subject: [PATCH 001/132] Close #89 --- config/config.default.json | 4 +- ...efault.json => helios.config.default.json} | 0 .../.gitkeep | 0 logs/{vbdsd => helios}/.gitkeep | 0 packages/core/main.py | 20 +-- packages/core/modules/__init__.py | 2 +- .../modules/{vbdsd.py => helios_thread.py} | 126 +++++++++--------- .../core/modules/measurement_conditions.py | 20 +-- .../utils/interfaces/config_validation.py | 8 +- .../core/utils/interfaces/state_interface.py | 2 +- .../measurement-decision-status.tsx | 17 +-- .../ui/src/components/configuration/index.ts | 4 +- .../sections/config-section-general.tsx | 2 +- ...on-vbdsd.tsx => config-section-helios.tsx} | 20 +-- .../config-section-measurement-triggers.tsx | 8 +- packages/ui/src/custom-types.ts | 12 +- packages/ui/src/tabs/configuration-tab.tsx | 4 +- .../functional-utils/parse-number-types.ts | 16 ++- run_headless_vbdsd_thread.py | 2 +- ...bdsd_images.py => remove_helios_images.py} | 2 +- ...sd_images.py => sparsify_helios_images.py} | 2 +- scripts/take_exposure_image_series.py | 63 +++++---- tests/cli/test_cli_config.py | 2 +- tests/{vbdsd => helios}/__init__.py | 0 tests/helios/test_helios.py | 10 ++ .../test_take_helios_picture.py} | 22 +-- tests/integrity/test_default_config.py | 8 +- tests/vbdsd/test_vbdsd.py | 12 -- 28 files changed, 189 insertions(+), 199 deletions(-) rename config/{vbdsd.config.default.json => helios.config.default.json} (100%) rename logs/{vbdsd-autoexposure => helios-autoexposure}/.gitkeep (100%) rename logs/{vbdsd => helios}/.gitkeep (100%) rename packages/core/modules/{vbdsd.py => helios_thread.py} (73%) rename packages/ui/src/components/configuration/sections/{config-section-vbdsd.tsx => config-section-helios.tsx} (87%) rename scripts/{remove_vbdsd_images.py => remove_helios_images.py} (85%) rename scripts/{sparsify_vbdsd_images.py => sparsify_helios_images.py} (92%) rename tests/{vbdsd => helios}/__init__.py (100%) create mode 100644 tests/helios/test_helios.py rename tests/{vbdsd/test_take_vbdsd_picture.py => helios/test_take_helios_picture.py} (70%) delete mode 100644 tests/vbdsd/test_vbdsd.py diff --git a/config/config.default.json b/config/config.default.json index 22e5196d..1a0c116e 100644 --- a/config/config.default.json +++ b/config/config.default.json @@ -34,7 +34,7 @@ "measurement_triggers": { "consider_time": true, "consider_sun_elevation": true, - "consider_vbdsd": false, + "consider_helios": false, "start_time": { "hour": 7, "minute": 0, @@ -48,5 +48,5 @@ "min_sun_elevation": 0 }, "tum_plc": null, - "vbdsd": null + "helios": null } diff --git a/config/vbdsd.config.default.json b/config/helios.config.default.json similarity index 100% rename from config/vbdsd.config.default.json rename to config/helios.config.default.json diff --git a/logs/vbdsd-autoexposure/.gitkeep b/logs/helios-autoexposure/.gitkeep similarity index 100% rename from logs/vbdsd-autoexposure/.gitkeep rename to logs/helios-autoexposure/.gitkeep diff --git a/logs/vbdsd/.gitkeep b/logs/helios/.gitkeep similarity index 100% rename from logs/vbdsd/.gitkeep rename to logs/helios/.gitkeep diff --git a/packages/core/main.py b/packages/core/main.py index f567384b..311d6b98 100644 --- a/packages/core/main.py +++ b/packages/core/main.py @@ -31,7 +31,7 @@ def run(): modules.opus_measurement.OpusMeasurement(_CONFIG), modules.system_checks.SystemChecks(_CONFIG), ] - vbdsd_thread = modules.vbdsd.VBDSD_Thread() + 
helios_thread_instance = modules.helios_thread.HeliosThread() current_exceptions = StateInterface.read(persistent=True)["current_exceptions"] @@ -47,18 +47,18 @@ def run(): continue if not _CONFIG["general"]["test_mode"]: - # Start or stop VBDSD in a thread - vbdsd_should_be_running = ( - _CONFIG["vbdsd"] is not None - and _CONFIG["measurement_triggers"]["consider_vbdsd"] + # Start or stop Helios in a thread + helios_should_be_running = ( + _CONFIG["helios"] is not None + and _CONFIG["measurement_triggers"]["consider_helios"] ) - if vbdsd_should_be_running and not vbdsd_thread.is_running(): - vbdsd_thread.start() - if not vbdsd_should_be_running and vbdsd_thread.is_running(): - vbdsd_thread.stop() + if helios_should_be_running and not helios_thread_instance.is_running(): + helios_thread_instance.start() + if not helios_should_be_running and helios_thread_instance.is_running(): + helios_thread_instance.stop() else: logger.info("pyra-core in test mode") - logger.debug("Skipping VBDSD_Thread in test mode") + logger.debug("Skipping HeliosThread in test mode") new_exception = None try: diff --git a/packages/core/modules/__init__.py b/packages/core/modules/__init__.py index 0a662298..6cdae050 100644 --- a/packages/core/modules/__init__.py +++ b/packages/core/modules/__init__.py @@ -1,8 +1,8 @@ from . import ( enclosure_control, + helios_thread, measurement_conditions, opus_measurement, sun_tracking, system_checks, - vbdsd, ) diff --git a/packages/core/modules/vbdsd.py b/packages/core/modules/helios_thread.py similarity index 73% rename from packages/core/modules/vbdsd.py rename to packages/core/modules/helios_thread.py index 63b2e0e8..d5a0cc79 100644 --- a/packages/core/modules/vbdsd.py +++ b/packages/core/modules/helios_thread.py @@ -14,12 +14,12 @@ ImageProcessing, ) -logger = Logger(origin="vbdsd") +logger = Logger(origin="helios") dir = os.path.dirname PROJECT_DIR = dir(dir(dir(dir(os.path.abspath(__file__))))) -IMG_DIR = os.path.join(PROJECT_DIR, "logs", "vbdsd") -AUTOEXPOSURE_IMG_DIR = os.path.join(PROJECT_DIR, "logs", "vbdsd-autoexposure") +IMG_DIR = os.path.join(PROJECT_DIR, "logs", "helios") +AUTOEXPOSURE_IMG_DIR = os.path.join(PROJECT_DIR, "logs", "helios-autoexposure") _CONFIG = None @@ -27,7 +27,7 @@ class CameraError(Exception): pass -class _VBDSD: +class _Helios: cam = None current_exposure = None last_autoexposure_time = 0 @@ -36,27 +36,27 @@ class _VBDSD: @staticmethod def init(camera_id: int, retries: int = 5): # TODO: Why is this necessary? 
- _VBDSD.cam = cv.VideoCapture(camera_id, cv.CAP_DSHOW) - _VBDSD.cam.release() + _Helios.cam = cv.VideoCapture(camera_id, cv.CAP_DSHOW) + _Helios.cam.release() for _ in range(retries): - _VBDSD.cam = cv.VideoCapture(camera_id, cv.CAP_DSHOW) - if _VBDSD.cam.isOpened(): + _Helios.cam = cv.VideoCapture(camera_id, cv.CAP_DSHOW) + if _Helios.cam.isOpened(): - if _VBDSD.available_exposures is None: - _VBDSD.available_exposures = _VBDSD.get_available_exposures() + if _Helios.available_exposures is None: + _Helios.available_exposures = _Helios.get_available_exposures() logger.debug( - f"determined available exposures: {_VBDSD.available_exposures}" + f"determined available exposures: {_Helios.available_exposures}" ) assert ( - len(_VBDSD.available_exposures) > 0 + len(_Helios.available_exposures) > 0 ), "did not find any available exposures" - _VBDSD.current_exposure = min(_VBDSD.available_exposures) - _VBDSD.update_camera_settings( + _Helios.current_exposure = min(_Helios.available_exposures) + _Helios.update_camera_settings( width=1280, height=720, - exposure=min(_VBDSD.available_exposures), + exposure=min(_Helios.available_exposures), brightness=64, contrast=64, saturation=0, @@ -70,16 +70,16 @@ def init(camera_id: int, retries: int = 5): @staticmethod def deinit(): - if _VBDSD.cam is not None: - _VBDSD.cam.release() - _VBDSD.cam = None + if _Helios.cam is not None: + _Helios.cam.release() + _Helios.cam = None @staticmethod def get_available_exposures() -> list[int]: possible_values = [] for exposure in range(-20, 20): - _VBDSD.cam.set(cv.CAP_PROP_EXPOSURE, exposure) - if _VBDSD.cam.get(cv.CAP_PROP_EXPOSURE) == exposure: + _Helios.cam.set(cv.CAP_PROP_EXPOSURE, exposure) + if _Helios.cam.get(cv.CAP_PROP_EXPOSURE) == exposure: possible_values.append(exposure) return possible_values @@ -109,9 +109,9 @@ def update_camera_settings( for property_name in properties: key, value = properties[property_name] if value is not None: - _VBDSD.cam.set(key, value) + _Helios.cam.set(key, value) if property_name not in ["width", "height"]: - new_value = _VBDSD.cam.get(key) + new_value = _Helios.cam.get(key) assert ( new_value == value ), f"could not set {property_name} to {value}, value is still at {new_value}" @@ -119,15 +119,15 @@ def update_camera_settings( # throw away some images after changing settings. 
I don't know # why this is necessary, but it resolved a lot of issues for _ in range(2): - _VBDSD.cam.read() + _Helios.cam.read() @staticmethod def take_image(retries: int = 10, trow_away_white_images: bool = True) -> cv.Mat: - assert _VBDSD.cam is not None, "camera is not initialized yet" - if not _VBDSD.cam.isOpened(): + assert _Helios.cam is not None, "camera is not initialized yet" + if not _Helios.cam.isOpened(): raise CameraError("camera is not open") for _ in range(retries + 1): - ret, frame = _VBDSD.cam.read() + ret, frame = _Helios.cam.read() if ret: if trow_away_white_images and np.mean(frame) > 240: # image is mostly white @@ -142,9 +142,9 @@ def adjust_exposure(): mean pixel value color is closest to 100 """ exposure_results = [] - for e in _VBDSD.available_exposures: - _VBDSD.update_camera_settings(exposure=e) - img = _VBDSD.take_image(trow_away_white_images=False) + for e in _Helios.available_exposures: + _Helios.update_camera_settings(exposure=e) + img = _Helios.take_image(trow_away_white_images=False) mean_color = round(np.mean(img), 3) exposure_results.append({"exposure": e, "mean": mean_color}) img = ImageProcessing.add_text_to_image( @@ -155,11 +155,11 @@ def adjust_exposure(): logger.debug(f"exposure results: {exposure_results}") new_exposure = min(exposure_results, key=lambda r: abs(r["mean"] - 50))["exposure"] - _VBDSD.update_camera_settings(exposure=new_exposure) + _Helios.update_camera_settings(exposure=new_exposure) - if new_exposure != _VBDSD.current_exposure: - logger.info(f"changing exposure: {_VBDSD.current_exposure} -> {new_exposure}") - _VBDSD.current_exposure = new_exposure + if new_exposure != _Helios.current_exposure: + logger.info(f"changing exposure: {_Helios.current_exposure} -> {new_exposure}") + _Helios.current_exposure = new_exposure @staticmethod def determine_frame_status(frame: cv.Mat, save_image: bool) -> int: @@ -190,7 +190,7 @@ def determine_frame_status(frame: cv.Mat, save_image: bool) -> int: # TODO: the values below should be adjusted by looking at the ifgs directly status = 1 if (edge_fraction > 0.02) else 0 - logger.debug(f"exposure = {_VBDSD.current_exposure}, edge_fraction = {edge_fraction}") + logger.debug(f"exposure = {_Helios.current_exposure}, edge_fraction = {edge_fraction}") if save_image: image_timestamp = datetime.now().strftime("%Y%m%d-%H%M%S") @@ -208,15 +208,15 @@ def determine_frame_status(frame: cv.Mat, save_image: bool) -> int: def run(save_image: bool) -> int: # run autoexposure function every 3 minutes now = time.time() - if (now - _VBDSD.last_autoexposure_time) > 180: - _VBDSD.adjust_exposure() - _VBDSD.last_autoexposure_time = now + if (now - _Helios.last_autoexposure_time) > 180: + _Helios.adjust_exposure() + _Helios.last_autoexposure_time = now - frame = _VBDSD.take_image() - return _VBDSD.determine_frame_status(frame, save_image) + frame = _Helios.take_image() + return _Helios.determine_frame_status(frame, save_image) -class VBDSD_Thread: +class HeliosThread: def __init__(self): self.__thread = None self.__shared_queue = queue.Queue() @@ -226,7 +226,7 @@ def start(self): Start a thread using the multiprocessing library """ logger.info("Starting thread") - self.__thread = threading.Thread(target=VBDSD_Thread.main, args=(self.__shared_queue,)) + self.__thread = threading.Thread(target=HeliosThread.main, args=(self.__shared_queue,)) self.__thread.start() def is_running(self): @@ -244,7 +244,7 @@ def stop(self): self.__thread.join() logger.debug('Setting state to "null"') - 
StateInterface.update({"vbdsd_indicates_good_conditions": None}) + StateInterface.update({"helios_indicates_good_conditions": None}) self.__thread = None @@ -255,10 +255,10 @@ def main(shared_queue: queue.Queue, infinite_loop: bool = True, headless: bool = # headless mode = don't use logger, just print messages to console, always save images if headless: - logger = Logger(origin="vbdsd", just_print=True) + logger = Logger(origin="helios", just_print=True) _CONFIG = ConfigInterface.read() - status_history = RingList(_CONFIG["vbdsd"]["evaluation_size"]) + status_history = RingList(_CONFIG["helios"]["evaluation_size"]) current_state = None repeated_camera_error_count = 0 @@ -267,7 +267,7 @@ def main(shared_queue: queue.Queue, infinite_loop: bool = True, headless: bool = # Check for termination try: if shared_queue.get(block=False) == "stop": - _VBDSD.deinit() + _Helios.deinit() break except queue.Empty: pass @@ -277,15 +277,15 @@ def main(shared_queue: queue.Queue, infinite_loop: bool = True, headless: bool = _CONFIG = ConfigInterface.read() # init camera connection - if _VBDSD.cam is None: - logger.info(f"Initializing VBDSD camera") - _VBDSD.init(_CONFIG["vbdsd"]["camera_id"]) + if _Helios.cam is None: + logger.info(f"Initializing Helios camera") + _Helios.init(_CONFIG["helios"]["camera_id"]) # reinit if parameter changes - new_size = _CONFIG["vbdsd"]["evaluation_size"] + new_size = _CONFIG["helios"]["evaluation_size"] if status_history.maxsize() != new_size: logger.debug( - "Size of VBDSD history has changed: " + "Size of Helios history has changed: " + f"{status_history.maxsize()} -> {new_size}" ) status_history.reinitialize(new_size) @@ -296,19 +296,19 @@ def main(shared_queue: queue.Queue, infinite_loop: bool = True, headless: bool = ): logger.debug("Current sun elevation below minimum: Waiting 5 minutes") if current_state != None: - StateInterface.update({"vbdsd_indicates_good_conditions": False}) + StateInterface.update({"helios_indicates_good_conditions": False}) current_state = None # reinit for next day - _VBDSD.reinit_settings() + _Helios.reinit_settings() time.sleep(300) continue # take a picture and process it: status is in [0, 1] # a CameraError is allowed to happen 3 times in a row # at the 4th time the camera is not able to take an image - # an Exception will be raised (and VBDSD will be restarted) + # an Exception will be raised (and Helios will be restarted) try: - status = _VBDSD.run(headless or _CONFIG["vbdsd"]["save_images"]) + status = _Helios.run(headless or _CONFIG["helios"]["save_images"]) repeated_camera_error_count = 0 except CameraError as e: repeated_camera_error_count += 1 @@ -317,34 +317,34 @@ def main(shared_queue: queue.Queue, infinite_loop: bool = True, headless: bool = else: logger.debug( f"camera occured ({repeated_camera_error_count} time(s) in a row). " - + "sleeping 15 seconds, reconnecting camera" + + "sleeping 15 seconds, reinitializing Helios" ) - _VBDSD.deinit() + _Helios.deinit() time.sleep(15) continue # append sun status to status history status_history.append(0 if (status == -1) else status) logger.debug( - f"New VBDSD status: {status}. Current history: {status_history.get()}" + f"New Helios status: {status}. 
Current history: {status_history.get()}" ) # evaluate sun state only if list is filled new_state = None if status_history.size() == status_history.maxsize(): score = status_history.sum() / status_history.size() - new_state = score > _CONFIG["vbdsd"]["measurement_threshold"] + new_state = score > _CONFIG["helios"]["measurement_threshold"] if current_state != new_state: logger.info( f"State change: {'BAD -> GOOD' if (new_state == True) else 'GOOD -> BAD'}" ) - StateInterface.update({"vbdsd_indicates_good_conditions": new_state}) + StateInterface.update({"helios_indicates_good_conditions": new_state}) current_state = new_state # wait rest of loop time elapsed_time = time.time() - start_time - time_to_wait = _CONFIG["vbdsd"]["seconds_per_interval"] - elapsed_time + time_to_wait = _CONFIG["helios"]["seconds_per_interval"] - elapsed_time if time_to_wait > 0: logger.debug( f"Finished iteration, waiting {round(time_to_wait, 2)} second(s)." @@ -356,8 +356,8 @@ def main(shared_queue: queue.Queue, infinite_loop: bool = True, headless: bool = except Exception as e: status_history.empty() - _VBDSD.deinit() + _Helios.deinit() - logger.error(f"error in VBDSD thread: {repr(e)}") - logger.info(f"sleeping 30 seconds, reinitializing VBDSD thread") + logger.error(f"error in HeliosThread: {repr(e)}") + logger.info(f"sleeping 30 seconds, reinitializing HeliosThread") time.sleep(30) diff --git a/packages/core/modules/measurement_conditions.py b/packages/core/modules/measurement_conditions.py index 377f0755..1e841779 100644 --- a/packages/core/modules/measurement_conditions.py +++ b/packages/core/modules/measurement_conditions.py @@ -51,14 +51,14 @@ def run(self, new_config: dict): def _get_automatic_decision(self) -> bool: triggers = self._CONFIG["measurement_triggers"] - if self._CONFIG["vbdsd"] is None: - triggers["consider_vbdsd"] = False + if self._CONFIG["helios"] is None: + triggers["consider_helios"] = False if not any( [ triggers["consider_sun_elevation"], triggers["consider_time"], - triggers["consider_vbdsd"], + triggers["consider_helios"], ] ): return False @@ -86,17 +86,17 @@ def _get_automatic_decision(self) -> bool: if not time_is_valid: return False - if triggers["consider_vbdsd"]: - logger.info("VBDSD as a trigger is considered.") - vbdsd_result = StateInterface.read()["vbdsd_indicates_good_conditions"] + if triggers["consider_helios"]: + logger.info("Helios as a trigger is considered.") + helios_result = StateInterface.read()["helios_indicates_good_conditions"] - if vbdsd_result is None: - logger.debug(f"VBDSD does not nave enough images yet.") + if helios_result is None: + logger.debug(f"Helios does not nave enough images yet.") return False logger.debug( - f"VBDSD indicates {'good' if vbdsd_result else 'bad'} sun conditions." + f"Helios indicates {'good' if helios_result else 'bad'} sun conditions." 
) - return vbdsd_result + return helios_result return True diff --git a/packages/core/utils/interfaces/config_validation.py b/packages/core/utils/interfaces/config_validation.py index 8d7b9836..b36d1699 100644 --- a/packages/core/utils/interfaces/config_validation.py +++ b/packages/core/utils/interfaces/config_validation.py @@ -89,7 +89,7 @@ class _Schemas: { "consider_time": {"type": "boolean"}, "consider_sun_elevation": {"type": "boolean"}, - "consider_vbdsd": {"type": "boolean"}, + "consider_helios": {"type": "boolean"}, "start_time": _Schemas.time_dict, "stop_time": _Schemas.time_dict, "min_sun_elevation": {"type": "number"}, @@ -102,7 +102,7 @@ class _Schemas: "controlled_by_user": {"type": "boolean"}, } ), - "vbdsd": NULLABLE_DICT_SCHEMA( + "helios": NULLABLE_DICT_SCHEMA( { "camera_id": {"type": "integer"}, "evaluation_size": {"type": "integer"}, @@ -160,7 +160,7 @@ class _Schemas: { "consider_time": _Schemas.boolean, "consider_sun_elevation": _Schemas.boolean, - "consider_vbdsd": _Schemas.boolean, + "consider_helios": _Schemas.boolean, "start_time": _Schemas.time_dict, "stop_time": _Schemas.time_dict, "min_sun_elevation": {"type": "number", "min": 0, "max": 90}, @@ -173,7 +173,7 @@ class _Schemas: "controlled_by_user": {"type": "boolean"}, } ), - "vbdsd": NULLABLE_DICT_SCHEMA( + "helios": NULLABLE_DICT_SCHEMA( { "camera_id": {"type": "integer", "min": 0, "max": 999999}, "evaluation_size": {"type": "integer", "min": 1, "max": 100}, diff --git a/packages/core/utils/interfaces/state_interface.py b/packages/core/utils/interfaces/state_interface.py index 592fc6ca..97bc528c 100644 --- a/packages/core/utils/interfaces/state_interface.py +++ b/packages/core/utils/interfaces/state_interface.py @@ -30,7 +30,7 @@ def initialize() -> None: # write initial state.json file new_state = { - "vbdsd_indicates_good_conditions": None, + "helios_indicates_good_conditions": None, "measurements_should_be_running": False, "enclosure_plc_readings": EMPTY_PLC_STATE.to_dict(), "os_state": { diff --git a/packages/ui/src/components/automation/measurement-decision-status.tsx b/packages/ui/src/components/automation/measurement-decision-status.tsx index 63a2f718..f83f8fe6 100644 --- a/packages/ui/src/components/automation/measurement-decision-status.tsx +++ b/packages/ui/src/components/automation/measurement-decision-status.tsx @@ -30,9 +30,9 @@ function MeasurementTriggerInfo() { const measurementTriggers = reduxUtils.useTypedSelector( (s) => s.config.central?.measurement_triggers ); - const centralConfigVBDSD = reduxUtils.useTypedSelector((s) => s.config.central?.vbdsd); + const centralConfigHelios = reduxUtils.useTypedSelector((s) => s.config.central?.helios); - if (measurementTriggers === undefined || centralConfigVBDSD === undefined) { + if (measurementTriggers === undefined || centralConfigHelios === undefined) { return <>; } return ( @@ -63,13 +63,14 @@ function MeasurementTriggerInfo() { , ], [ - measurementTriggers.consider_vbdsd, - 'VBDSD Result', + measurementTriggers.consider_helios, + 'Helios Result', <> - {!measurementTriggers.consider_vbdsd && 'ignored'} - {measurementTriggers.consider_vbdsd && centralConfigVBDSD === null && ( - not configured! - )} + {!measurementTriggers.consider_helios && 'ignored'} + {measurementTriggers.consider_helios && + centralConfigHelios === null && ( + not configured! 
+ )} , ], ].map((row: any, i) => ( diff --git a/packages/ui/src/components/configuration/index.ts b/packages/ui/src/components/configuration/index.ts index 87dc87f4..8ba00f2c 100644 --- a/packages/ui/src/components/configuration/index.ts +++ b/packages/ui/src/components/configuration/index.ts @@ -8,7 +8,7 @@ import ConfigSectionErrorEmail from './sections/config-section-error-email'; import ConfigSectionCamtracker from './sections/config-section-camtracker'; import ConfigSectionMeasurementTriggers from './sections/config-section-measurement-triggers'; import ConfigSectionOpus from './sections/config-section-opus'; -import ConfigSectionVbdsd from './sections/config-section-vbdsd'; +import ConfigSectionHelios from './sections/config-section-helios'; import ConfigSectionTumPlc from './sections/config-section-tum-plc'; export default { @@ -23,5 +23,5 @@ export default { ConfigSectionMeasurementTriggers, ConfigSectionOpus, ConfigSectionTumPlc, - ConfigSectionVbdsd, + ConfigSectionHelios, }; diff --git a/packages/ui/src/components/configuration/sections/config-section-general.tsx b/packages/ui/src/components/configuration/sections/config-section-general.tsx index 336423e1..2bc30cff 100644 --- a/packages/ui/src/components/configuration/sections/config-section-general.tsx +++ b/packages/ui/src/components/configuration/sections/config-section-general.tsx @@ -44,7 +44,7 @@ export default function ConfigSectionGeneral() { />
{ICONS.info}
The TUM PLC will start its
- operation one degree earlier. VBDSD will start at this angle.
+ operation one degree earlier. Helios will start at this angle.
); diff --git a/packages/ui/src/components/configuration/sections/config-section-vbdsd.tsx b/packages/ui/src/components/configuration/sections/config-section-helios.tsx similarity index 87% rename from packages/ui/src/components/configuration/sections/config-section-vbdsd.tsx rename to packages/ui/src/components/configuration/sections/config-section-helios.tsx index 48aee588..332e4e26 100644 --- a/packages/ui/src/components/configuration/sections/config-section-vbdsd.tsx +++ b/packages/ui/src/components/configuration/sections/config-section-helios.tsx @@ -2,9 +2,9 @@ import { customTypes } from '../../../custom-types'; import { configurationComponents, essentialComponents } from '../..'; import { reduxUtils } from '../../../utils'; -export default function ConfigSectionVbdsd() { - const centralSectionConfig = reduxUtils.useTypedSelector((s) => s.config.central?.vbdsd); - const localSectionConfig = reduxUtils.useTypedSelector((s) => s.config.local?.vbdsd); +export default function ConfigSectionHelios() { + const centralSectionConfig = reduxUtils.useTypedSelector((s) => s.config.central?.helios); + const localSectionConfig = reduxUtils.useTypedSelector((s) => s.config.local?.helios); const dispatch = reduxUtils.useTypedDispatch(); const update = (c: customTypes.partialConfig) => @@ -12,7 +12,7 @@ export default function ConfigSectionVbdsd() { function addDefault() { update({ - vbdsd: { + helios: { camera_id: 0, evaluation_size: 15, seconds_per_interval: 6, @@ -24,7 +24,7 @@ export default function ConfigSectionVbdsd() { function setNull() { update({ - vbdsd: null, + helios: null, }); } @@ -66,14 +66,14 @@ export default function ConfigSectionVbdsd() { update({ vbdsd: { camera_id: v } })} + setValue={(v: number) => update({ helios: { camera_id: v } })} oldValue={centralSectionConfig !== null ? centralSectionConfig.camera_id : 'null'} numeric /> update({ vbdsd: { seconds_per_interval: v } })} + setValue={(v: any) => update({ helios: { seconds_per_interval: v } })} oldValue={ centralSectionConfig !== null ? centralSectionConfig.seconds_per_interval @@ -85,7 +85,7 @@ export default function ConfigSectionVbdsd() { update({ vbdsd: { evaluation_size: v } })} + setValue={(v: any) => update({ helios: { evaluation_size: v } })} oldValue={ centralSectionConfig !== null ? centralSectionConfig.evaluation_size : 'null' } @@ -95,7 +95,7 @@ export default function ConfigSectionVbdsd() { update({ vbdsd: { measurement_threshold: v } })} + setValue={(v: any) => update({ helios: { measurement_threshold: v } })} oldValue={ centralSectionConfig !== null ? centralSectionConfig.measurement_threshold @@ -106,7 +106,7 @@ export default function ConfigSectionVbdsd() { update({ vbdsd: { save_images: v } })} + setValue={(v: boolean) => update({ helios: { save_images: v } })} oldValue={centralSectionConfig?.save_images === true} /> diff --git a/packages/ui/src/components/configuration/sections/config-section-measurement-triggers.tsx b/packages/ui/src/components/configuration/sections/config-section-measurement-triggers.tsx index 406c4cf9..a473b2e8 100644 --- a/packages/ui/src/components/configuration/sections/config-section-measurement-triggers.tsx +++ b/packages/ui/src/components/configuration/sections/config-section-measurement-triggers.tsx @@ -39,10 +39,10 @@ export default function ConfigSectionMeasurementTriggers() { />
update({ measurement_triggers: { consider_vbdsd: v } })} - oldValue={centralSectionConfig.consider_vbdsd} + title="Consider Helios" + value={localSectionConfig.consider_helios} + setValue={(v: boolean) => update({ measurement_triggers: { consider_helios: v } })} + oldValue={centralSectionConfig.consider_helios} />
s.config.central); @@ -109,7 +109,7 @@ export default function ConfigurationTab() { )} {activeKey === 'tum_plc' && } - {activeKey === 'vbdsd' && } + {activeKey === 'helios' && } {configIsDiffering && ( int: """ exposure_results = [] for e in range(-12, 0): - VBDSD.update_camera_settings(exposure=e) - image = VBDSD.take_image() + Helios.update_camera_settings(exposure=e) + image = Helios.take_image() exposure_results.append({"exposure": e, "mean": np.mean(image)}) print(exposure_results) return min(exposure_results, key=lambda r: abs(r["mean"] - 100))["exposure"] if __name__ == "__main__": - print(f"Initializing VBDSD camera") + print(f"Initializing Helios camera") - VBDSD.init_cam() + Helios.init_cam() print(f"successfully initialized camera") - best_exposure = VBDSD.get_best_exposure() + best_exposure = Helios.get_best_exposure() print(f"best_exposure = {best_exposure}") - VBDSD.update_camera_settings(exposure=best_exposure) + Helios.update_camera_settings(exposure=best_exposure) - sample_image = VBDSD.take_image() + sample_image = Helios.take_image() print(np.mean(sample_image)) cv.imwrite(f"sample-image-exposure-{best_exposure}.jpg", sample_image) - VBDSD.cam.release() + Helios.cam.release() cv.destroyAllWindows() diff --git a/tests/cli/test_cli_config.py b/tests/cli/test_cli_config.py index 1777661c..b5b2ec5a 100644 --- a/tests/cli/test_cli_config.py +++ b/tests/cli/test_cli_config.py @@ -102,7 +102,7 @@ def transform(o: dict, i: int): def test_add_default_config(original_config): - cases = {"vbdsd": None, "tum_plc": None} + cases = {"helios": None, "tum_plc": None} for c in cases: with open(os.path.join(PROJECT_DIR, "config", f"{c}.config.default.json"), "r") as f: diff --git a/tests/vbdsd/__init__.py b/tests/helios/__init__.py similarity index 100% rename from tests/vbdsd/__init__.py rename to tests/helios/__init__.py diff --git a/tests/helios/test_helios.py b/tests/helios/test_helios.py new file mode 100644 index 00000000..8cb1de2d --- /dev/null +++ b/tests/helios/test_helios.py @@ -0,0 +1,10 @@ +from packages.core.modules.helios_thread import HeliosThread +import time + + +def test_helios(): + """Pictures are saved in C:\pyra-4\runtime-data\helios""" + helios = HeliosThread() + helios.start() + time.sleep(30) + helios.stop() diff --git a/tests/vbdsd/test_take_vbdsd_picture.py b/tests/helios/test_take_helios_picture.py similarity index 70% rename from tests/vbdsd/test_take_vbdsd_picture.py rename to tests/helios/test_take_helios_picture.py index e0a3ee52..e6874808 100644 --- a/tests/vbdsd/test_take_vbdsd_picture.py +++ b/tests/helios/test_take_helios_picture.py @@ -1,25 +1,17 @@ import cv2 as cv -import os import time import datetime import astropy.units as astropy_units from packages.core.utils import ( ConfigInterface, - StateInterface, - Logger, - RingList, Astronomy, ) -from packages.core.utils.astronomy import Astronomy - - -from packages.core.utils import ConfigInterface def test_picture(): _CONFIG = ConfigInterface().read() - cam = cv.VideoCapture(_CONFIG["vbdsd"]["camera_id"]) # + cam = cv.VideoCapture(_CONFIG["helios"]["camera_id"]) # cam.set(3, 1280) # width cam.set(4, 720) # height @@ -29,7 +21,6 @@ def test_picture(): cam.set(12, 0) # saturation cam.set(14, 0) # gain - current_sun_angle = Astronomy.get_current_sun_elevation() diff = 0 if current_sun_angle < 4 * astropy_units.deg: @@ -40,18 +31,17 @@ def test_picture(): exp = -11 + diff else: exp = -12 + diff - - cam.set(15,exp) + + cam.set(15, exp) for i in range(5): ret, frame = cam.read() - - path = 
"C:\\pyra-4\\runtime-data\\vbdsd\\test_{}.jpg".format(str(datetime.datetime.now().strftime("%H-%M-%S"))) - + path = "C:\\pyra-4\\runtime-data\\helios\\test_{}.jpg".format( + str(datetime.datetime.now().strftime("%H-%M-%S")) + ) cv.imwrite(path, frame) time.sleep(5) cam.release() - \ No newline at end of file diff --git a/tests/integrity/test_default_config.py b/tests/integrity/test_default_config.py index c6d793d0..b5c7300f 100644 --- a/tests/integrity/test_default_config.py +++ b/tests/integrity/test_default_config.py @@ -9,7 +9,7 @@ DEFAULT_CONFIG_PATH_TUM_PLC = os.path.join( PROJECT_DIR, "config", "tum_plc.config.default.json" ) -DEFAULT_CONFIG_PATH_VBDSD = os.path.join(PROJECT_DIR, "config", "vbdsd.config.default.json") +DEFAULT_CONFIG_PATH_HELIOS = os.path.join(PROJECT_DIR, "config", "helios.config.default.json") def test_default_config(): @@ -19,8 +19,8 @@ def test_default_config(): with open(DEFAULT_CONFIG_PATH_TUM_PLC, "r") as f: DEFAULT_CONFIG_TUM_PLC: dict = json.load(f) - with open(DEFAULT_CONFIG_PATH_VBDSD, "r") as f: - DEFAULT_CONFIG_VBDSD: dict = json.load(f) + with open(DEFAULT_CONFIG_PATH_HELIOS, "r") as f: + DEFAULT_CONFIG_HELIOS: dict = json.load(f) for k1 in DEFAULT_CONFIG.keys(): if DEFAULT_CONFIG[k1] is not None: @@ -33,5 +33,5 @@ def test_default_config(): DEFAULT_CONFIG["tum_plc"] = DEFAULT_CONFIG_TUM_PLC Validation.check(DEFAULT_CONFIG) - DEFAULT_CONFIG["vbdsd"] = DEFAULT_CONFIG_VBDSD + DEFAULT_CONFIG["helios"] = DEFAULT_CONFIG_HELIOS Validation.check(DEFAULT_CONFIG) diff --git a/tests/vbdsd/test_vbdsd.py b/tests/vbdsd/test_vbdsd.py deleted file mode 100644 index f7e6c22c..00000000 --- a/tests/vbdsd/test_vbdsd.py +++ /dev/null @@ -1,12 +0,0 @@ -from packages.core.modules.vbdsd import VBDSD_Thread -import time - - -def test_vbdsd(): - """Pictures are saved in C:\pyra-4\runtime-data\vbdsd""" - vbdsd = VBDSD_Thread() - vbdsd.start() - time.sleep(30) - vbdsd.stop() - - From cf742efc1bd1b31549076e3f76b7df42556fd074 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Thu, 11 Aug 2022 16:02:19 +0200 Subject: [PATCH 002/132] #87 - add section about repo management --- README.md | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 15be3c1c..ddb1164c 100644 --- a/README.md +++ b/README.md @@ -12,7 +12,7 @@ Dependency management using https://python-poetry.org/. # create a virtual environment (copy of the python interpreter) python3.10 -m venv .venv -# activate virtual environment +# activate the virtual environment source .venv/bin/activate # unix .venv\Scripts\activate.bat # windows @@ -27,7 +27,7 @@ poetry install Two types of config files: -1. **`setup.json`** contains all information about the static setup: Which parts does the enclosure consist of? This should be written once and only changes when the hardware changes. +1. **`setup.json`** contains all information about the static setup: Which parts does the enclosure consist of? This should be written once and only change when the hardware changes. 2. **`parameters.json`** contains all dynamic parameters that can be set when operating pyra. This should be manipulated either via the CLI (coming soon) or the graphical user interface (coming soon, similar to Pyra version <= 3). For each file, there is a `*.default.json` file present in the repository. A full reference can be found here soon. @@ -55,4 +55,16 @@ _documentation coming soon_ Less Secure Apps have been deactivated. 
https://support.google.com/accounts/answer/6010255?hl=de&visit_id=637914296292859831-802637670&p=less-secure-apps&rd=1
-Solution: Use "App passwords", requires 2FA
+Solution: Use "App passwords", which requires 2FA
+
+ +## Repository Management & CI + +**Branches:** `development-...`, `integration-x.y.z`, `main`, `release`, `prerelease` + +**Hierarchy:** `development-...` contains stuff in active development and will be merged into `integration-x.y.z`. `integration-x.y.z`: Is used during active integration on the stations and will be merged into `main`. `main` contains the latest running version that passed the integration and will be merged into `release` once enough changes have accumulated. Any branch can be released into `prerelease` to run the CI-Pipeline on demand. `prerelease` will not be merged into anything else and is just used for development purposes. + +**Continuous Integration:** The CI-Pipeline runs every time a commit/a series of commits is added to the `release` branch. The CI compiles and bundles the frontend code into an installable windows-application. Then it creates a new release draft and attaches the `.msi` file to the draft. We can then manually add the release description and submit the release. + +**Testing (not in an active CI):** We could add automated tests to the main- and integration branches. However, most things we could test make use of OPUS, Camtracker, Helios, or the enclosure, hence we can only do a subset of our tests in an isolated CI environment without the system present. From bfd1cae71816aaaf8025058ffdd3903904cfb3af Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Thu, 11 Aug 2022 16:09:14 +0200 Subject: [PATCH 003/132] #87 - add paragraph about issues Closes #87 --- README.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index ddb1164c..a82b4b0c 100644 --- a/README.md +++ b/README.md @@ -55,7 +55,7 @@ _documentation coming soon_ Less Secure Apps have been deactivated. https://support.google.com/accounts/answer/6010255?hl=de&visit_id=637914296292859831-802637670&p=less-secure-apps&rd=1 -Solution: Use "App passwords", which requires 2FA +Solution: Use "App passwords", which require 2FA
@@ -68,3 +68,5 @@ Solution: Use "App passwords", which requires 2FA **Continuous Integration:** The CI-Pipeline runs every time a commit/a series of commits is added to the `release` branch. The CI compiles and bundles the frontend code into an installable windows-application. Then it creates a new release draft and attaches the `.msi` file to the draft. We can then manually add the release description and submit the release. **Testing (not in an active CI):** We could add automated tests to the main- and integration branches. However, most things we could test make use of OPUS, Camtracker, Helios, or the enclosure, hence we can only do a subset of our tests in an isolated CI environment without the system present. + +**Issues:** Things we work on are managed via issues - which are bundled into milestones (each milestone represents a release). The issues should be closed once they are on the `main` branch via commit messages ("closes #87", "fixes #70", etc. see [this list of keywords](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue#linking-a-pull-request-to-an-issue-using-a-keyword)). Issues that have been finished but are not on the `main` branch yet, can be labeled using the white label "implemented". This way, we can oversee incompleted issues, but don't forget to merge them. From ed547244132489703c3d4ba7b7b12e34fc407e0c Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Sat, 13 Aug 2022 12:25:23 +0200 Subject: [PATCH 004/132] #92 (1) - set up UploadThread class --- packages/core/main.py | 54 ++++++++++++++----- packages/core/modules/__init__.py | 1 + packages/core/modules/helios_thread.py | 4 +- packages/core/modules/upload_thread.py | 73 ++++++++++++++++++++++++++ 4 files changed, 117 insertions(+), 15 deletions(-) create mode 100644 packages/core/modules/upload_thread.py diff --git a/packages/core/main.py b/packages/core/main.py index 311d6b98..22770366 100644 --- a/packages/core/main.py +++ b/packages/core/main.py @@ -11,6 +11,43 @@ logger = Logger(origin="main") +def toggle_thread_states( + config: dict, + helios_thread_instance: modules.helios_thread.HeliosThread, + upload_thread_instance: modules.upload_thread.UploadThread, +): + helios_should_be_running = all( + [ + not config["general"]["test_mode"], + config["helios"] is not None, + config["measurement_triggers"]["consider_helios"], + ] + ) + upload_should_be_running = all( + [ + not config["general"]["test_mode"], + config["upload"] is not None, + config["upload"]["is_active"], + ] + ) + + if config["general"]["test_mode"]: + logger.info("pyra-core in test mode") + logger.debug("Skipping HeliosThread and UploadThread in test mode") + + # Start/stop HeliosThread + if helios_should_be_running and not helios_thread_instance.is_running(): + helios_thread_instance.start() + if not helios_should_be_running and helios_thread_instance.is_running(): + helios_thread_instance.stop() + + # Start/stop UploadThread + if upload_should_be_running and not upload_thread_instance.is_running(): + upload_thread_instance.start() + if not upload_should_be_running and upload_thread_instance.is_running(): + upload_thread_instance.stop() + + def run(): StateInterface.initialize() logger.info(f"Starting mainloop inside process with PID {os.getpid()}") @@ -32,6 +69,7 @@ def run(): modules.system_checks.SystemChecks(_CONFIG), ] helios_thread_instance = modules.helios_thread.HeliosThread() + upload_thread_instance = modules.upload_thread.UploadThread() current_exceptions = 
StateInterface.read(persistent=True)["current_exceptions"] @@ -46,19 +84,11 @@ def run(): time.sleep(10) continue - if not _CONFIG["general"]["test_mode"]: - # Start or stop Helios in a thread - helios_should_be_running = ( - _CONFIG["helios"] is not None - and _CONFIG["measurement_triggers"]["consider_helios"] - ) - if helios_should_be_running and not helios_thread_instance.is_running(): - helios_thread_instance.start() - if not helios_should_be_running and helios_thread_instance.is_running(): - helios_thread_instance.stop() - else: + toggle_thread_states(_CONFIG, helios_thread_instance, upload_thread_instance) + + if _CONFIG["general"]["test_mode"]: logger.info("pyra-core in test mode") - logger.debug("Skipping HeliosThread in test mode") + logger.debug("Skipping HeliosThread and UploadThread in test mode") new_exception = None try: diff --git a/packages/core/modules/__init__.py b/packages/core/modules/__init__.py index 6cdae050..10b3cebf 100644 --- a/packages/core/modules/__init__.py +++ b/packages/core/modules/__init__.py @@ -1,6 +1,7 @@ from . import ( enclosure_control, helios_thread, + upload_thread, measurement_conditions, opus_measurement, sun_tracking, diff --git a/packages/core/modules/helios_thread.py b/packages/core/modules/helios_thread.py index d5a0cc79..552f0492 100644 --- a/packages/core/modules/helios_thread.py +++ b/packages/core/modules/helios_thread.py @@ -242,11 +242,9 @@ def stop(self): logger.info("Waiting for thread to terminate") self.__thread.join() - - logger.debug('Setting state to "null"') StateInterface.update({"helios_indicates_good_conditions": None}) - self.__thread = None + logger.info("Stopped the thread") @staticmethod def main(shared_queue: queue.Queue, infinite_loop: bool = True, headless: bool = False): diff --git a/packages/core/modules/upload_thread.py b/packages/core/modules/upload_thread.py new file mode 100644 index 00000000..9918b625 --- /dev/null +++ b/packages/core/modules/upload_thread.py @@ -0,0 +1,73 @@ +import os +import queue +import threading +import time +from packages.core.utils import ( + ConfigInterface, + StateInterface, + Logger, +) + +logger = Logger(origin="upload") + +dir = os.path.dirname +PROJECT_DIR = dir(dir(dir(dir(os.path.abspath(__file__))))) + + +def get_directories_to_be_uploaded(ifg_src_dir): + pass + + +class UploadThread: + def __init__(self): + self.__thread = None + self.__shared_queue = queue.Queue() + + def start(self): + """ + Start the thread using the threading library + """ + logger.info("Starting thread") + self.__thread = threading.Thread(target=UploadThread.main, args=(self.__shared_queue,)) + self.__thread.start() + + def is_running(self): + return self.__thread is not None + + def stop(self): + """ + Send a stop-signal to the thread and wait for its termination + """ + + logger.info("Sending termination signal") + self.__shared_queue.put("stop") + + logger.info("Waiting for thread to terminate") + self.__thread.join() + self.__thread = None + + logger.info("Stopped the thread") + + @staticmethod + def main(shared_queue: queue.Queue): + while True: + # Check for termination + try: + if shared_queue.get(block=False) == "stop": + break + except queue.Empty: + pass + + start_time = time.time() + + # TODO: 1. add upload stuff to config + # TODO: 2. determine directories to be uploaded + # TODO: 3. loop over each directory and use the DirectoryUploadClient + # TODO: 4. make the client use threads -> still process one directory at a time but upload individual files in parallel + # TODO: 5. 
Figure out where ifgs lie on system + # TODO: 6. Implement datalogger upload + + elapsed_time = time.time() - start_time + time_to_wait = 5 - elapsed_time + if time_to_wait > 0: + time.sleep(time_to_wait) From 7fa13f74809a11678484d526703631458c9222a3 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Sat, 13 Aug 2022 14:34:29 +0200 Subject: [PATCH 005/132] #92 (2) - add upload stuff to config --- config/config.default.json | 3 +- config/upload.config.default.json | 8 + .../utils/interfaces/config_validation.py | 255 +++++++----------- 3 files changed, 111 insertions(+), 155 deletions(-) create mode 100644 config/upload.config.default.json diff --git a/config/config.default.json b/config/config.default.json index 1a0c116e..526d34bd 100644 --- a/config/config.default.json +++ b/config/config.default.json @@ -48,5 +48,6 @@ "min_sun_elevation": 0 }, "tum_plc": null, - "helios": null + "helios": null, + "upload": null } diff --git a/config/upload.config.default.json b/config/upload.config.default.json new file mode 100644 index 00000000..5d6a0d85 --- /dev/null +++ b/config/upload.config.default.json @@ -0,0 +1,8 @@ +{ + "is_active": false, + "host": "1.2.3.4", + "user": "...", + "password": "...", + "src_directory": "...", + "dst_directory": "..." +} diff --git a/packages/core/utils/interfaces/config_validation.py b/packages/core/utils/interfaces/config_validation.py index b36d1699..2a9834ca 100644 --- a/packages/core/utils/interfaces/config_validation.py +++ b/packages/core/utils/interfaces/config_validation.py @@ -1,6 +1,7 @@ import json import os from typing import Tuple +from xmlrpc.client import boolean import cerberus from packages.core.utils import Logger @@ -9,6 +10,11 @@ CONFIG_FILE_PATH = os.path.join(PROJECT_DIR, "config", "config.json") +def _directory_path_exists(field, value, error): + if not os.path.isfile(value): + error(field, "Path has to be an existing file") + + def _file_path_exists(field, value, error): if not os.path.isfile(value): error(field, "Path has to be an existing file") @@ -27,162 +33,103 @@ def _is_valid_ip_adress(field, value, error): NULLABLE_DICT_SCHEMA = lambda s: {"type": "dict", "schema": s, "nullable": True} -class _Schemas: - time_dict = { - "type": "dict", - "schema": { - "hour": {"type": "integer", "min": 0, "max": 23}, - "minute": {"type": "integer", "min": 0, "max": 59}, - "second": {"type": "integer", "min": 0, "max": 59}, +def get_config_file_schema(strict: boolean): + filtered_spec = lambda spec: {k: v for k, v in spec.items() if (k == "type" or strict)} + specs = { + "ip": filtered_spec({"type": "string", "check_with": _is_valid_ip_adress}), + "file": filtered_spec({"type": "string", "check_with": _file_path_exists}), + "directory": filtered_spec({"type": "string", "check_with": _directory_path_exists}), + "time": { + "type": "dict", + "schema": { + "hour": filtered_spec({"type": "integer", "min": 0, "max": 23}), + "minute": filtered_spec({"type": "integer", "min": 0, "max": 59}), + "second": filtered_spec({"type": "integer", "min": 0, "max": 59}), + }, }, } - string = {"type": "string"} - boolean = {"type": "boolean"} - ip = {"type": "string", "check_with": _is_valid_ip_adress} - file = {"type": "string", "check_with": _file_path_exists} - - -CONFIG_FILE_STRUCTURE_SCHEMA = { - "general": DICT_SCHEMA( - { - "seconds_per_core_interval": {"type": "number"}, - "test_mode": {"type": "boolean"}, - "station_id": {"type": "string"}, - "min_sun_elevation": {"type": "number"}, - } - ), - "opus": DICT_SCHEMA( - { - "em27_ip": {"type": "string"}, - 
"executable_path": {"type": "string"}, - "experiment_path": {"type": "string"}, - "macro_path": {"type": "string"}, - "username": {"type": "string"}, - "password": {"type": "string"}, - } - ), - "camtracker": DICT_SCHEMA( - { - "config_path": {"type": "string"}, - "executable_path": {"type": "string"}, - "learn_az_elev_path": {"type": "string"}, - "sun_intensity_path": {"type": "string"}, - "motor_offset_threshold": {"type": "number"}, - } - ), - "error_email": DICT_SCHEMA( - { - "sender_address": {"type": "string"}, - "sender_password": {"type": "string"}, - "notify_recipients": {"type": "boolean"}, - "recipients": {"type": "string"}, - } - ), - "measurement_decision": DICT_SCHEMA( - { - "mode": {"type": "string"}, - "manual_decision_result": {"type": "boolean"}, - "cli_decision_result": {"type": "boolean"}, - } - ), - "measurement_triggers": DICT_SCHEMA( - { - "consider_time": {"type": "boolean"}, - "consider_sun_elevation": {"type": "boolean"}, - "consider_helios": {"type": "boolean"}, - "start_time": _Schemas.time_dict, - "stop_time": _Schemas.time_dict, - "min_sun_elevation": {"type": "number"}, - } - ), - "tum_plc": NULLABLE_DICT_SCHEMA( - { - "ip": {"type": "string"}, - "version": {"type": "integer"}, - "controlled_by_user": {"type": "boolean"}, - } - ), - "helios": NULLABLE_DICT_SCHEMA( - { - "camera_id": {"type": "integer"}, - "evaluation_size": {"type": "integer"}, - "seconds_per_interval": {"type": "number"}, - "measurement_threshold": {"type": "number"}, - "save_images": {"type": "boolean"}, - } - ), -} - -CONFIG_FILE_SCHEMA = { - "general": DICT_SCHEMA( - { - "seconds_per_core_interval": {"type": "number", "min": 5, "max": 600}, - "test_mode": _Schemas.boolean, - "station_id": {"type": "string"}, - "min_sun_elevation": {"type": "number", "min": 0, "max": 90}, - } - ), - "opus": DICT_SCHEMA( - { - "em27_ip": _Schemas.ip, - "executable_path": _Schemas.file, - "experiment_path": _Schemas.file, - "macro_path": _Schemas.file, - "username": {"type": "string"}, - "password": {"type": "string"}, - } - ), - "camtracker": DICT_SCHEMA( - { - "config_path": _Schemas.file, - "executable_path": _Schemas.file, - "learn_az_elev_path": _Schemas.file, - "sun_intensity_path": _Schemas.file, - "motor_offset_threshold": {"type": "number", "min": -360, "max": 360}, - } - ), - "error_email": DICT_SCHEMA( - { - "sender_address": _Schemas.string, - "sender_password": _Schemas.string, - "notify_recipients": _Schemas.boolean, - "recipients": _Schemas.string, - } - ), - "measurement_decision": DICT_SCHEMA( - { - "mode": {"type": "string", "allowed": ["automatic", "manual", "cli"]}, - "manual_decision_result": _Schemas.boolean, - "cli_decision_result": _Schemas.boolean, - } - ), - "measurement_triggers": DICT_SCHEMA( - { - "consider_time": _Schemas.boolean, - "consider_sun_elevation": _Schemas.boolean, - "consider_helios": _Schemas.boolean, - "start_time": _Schemas.time_dict, - "stop_time": _Schemas.time_dict, - "min_sun_elevation": {"type": "number", "min": 0, "max": 90}, - } - ), - "tum_plc": NULLABLE_DICT_SCHEMA( - { - "ip": _Schemas.ip, - "version": {"type": "integer", "allowed": [1, 2]}, - "controlled_by_user": {"type": "boolean"}, - } - ), - "helios": NULLABLE_DICT_SCHEMA( - { - "camera_id": {"type": "integer", "min": 0, "max": 999999}, - "evaluation_size": {"type": "integer", "min": 1, "max": 100}, - "seconds_per_interval": {"type": "number", "min": 5, "max": 600}, - "measurement_threshold": {"type": "number", "min": 0.1, "max": 1}, - "save_images": _Schemas.boolean, - } - ), -} + return { + 
"general": DICT_SCHEMA( + { + "seconds_per_core_interval": filtered_spec( + {"type": "number", "min": 5, "max": 600} + ), + "test_mode": {"type": "boolean"}, + "station_id": {"type": "string"}, + "min_sun_elevation": filtered_spec({"type": "number", "min": 0, "max": 90}), + } + ), + "opus": DICT_SCHEMA( + { + "em27_ip": specs["ip"], + "executable_path": specs["file"], + "experiment_path": specs["file"], + "macro_path": specs["file"], + "username": {"type": "string"}, + "password": {"type": "string"}, + } + ), + "camtracker": DICT_SCHEMA( + { + "config_path": specs["file"], + "executable_path": specs["file"], + "learn_az_elev_path": specs["file"], + "sun_intensity_path": specs["file"], + "motor_offset_threshold": {"type": "number", "min": -360, "max": 360}, + } + ), + "error_email": DICT_SCHEMA( + { + "sender_address": {"type": "string"}, + "sender_password": {"type": "string"}, + "notify_recipients": {"type": "boolean"}, + "recipients": {"type": "string"}, + } + ), + "measurement_decision": DICT_SCHEMA( + { + "mode": {"type": "string", "allowed": ["automatic", "manual", "cli"]}, + "manual_decision_result": {"type": "boolean"}, + "cli_decision_result": {"type": "boolean"}, + } + ), + "measurement_triggers": DICT_SCHEMA( + { + "consider_time": {"type": "boolean"}, + "consider_sun_elevation": {"type": "boolean"}, + "consider_helios": {"type": "boolean"}, + "start_time": specs["time"], + "stop_time": specs["time"], + "min_sun_elevation": {"type": "number", "min": 0, "max": 90}, + } + ), + "tum_plc": NULLABLE_DICT_SCHEMA( + { + "ip": specs["ip"], + "version": {"type": "integer", "allowed": [1, 2]}, + "controlled_by_user": {"type": "boolean"}, + } + ), + "helios": NULLABLE_DICT_SCHEMA( + { + "camera_id": {"type": "integer", "min": 0, "max": 999999}, + "evaluation_size": {"type": "integer", "min": 1, "max": 100}, + "seconds_per_interval": {"type": "number", "min": 5, "max": 600}, + "measurement_threshold": {"type": "number", "min": 0.1, "max": 1}, + "save_images": {"type": "boolean"}, + } + ), + "upload": NULLABLE_DICT_SCHEMA( + { + "is_active": {"type": "boolean"}, + "host": specs["ip"], + "user": {"type": "string"}, + "password": {"type": "string"}, + "src_directory": specs["file"], + "dst_directory": {"type": "string"}, + } + ), + } class CerberusException(Exception): From 2c015a91c977d4c137eff266c6512eb7e627d40b Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Sat, 13 Aug 2022 14:45:18 +0200 Subject: [PATCH 006/132] #92 (3) - refine config validation code --- .../utils/interfaces/config_validation.py | 37 ++++++++++++------- 1 file changed, 23 insertions(+), 14 deletions(-) diff --git a/packages/core/utils/interfaces/config_validation.py b/packages/core/utils/interfaces/config_validation.py index 2a9834ca..63d61398 100644 --- a/packages/core/utils/interfaces/config_validation.py +++ b/packages/core/utils/interfaces/config_validation.py @@ -1,6 +1,6 @@ import json import os -from typing import Tuple +from typing import Any, Tuple from xmlrpc.client import boolean import cerberus from packages.core.utils import Logger @@ -34,29 +34,38 @@ def _is_valid_ip_adress(field, value, error): def get_config_file_schema(strict: boolean): - filtered_spec = lambda spec: {k: v for k, v in spec.items() if (k == "type" or strict)} + """ + Returns a cerberus schema for the config. With strict=false, + the checks whether file paths or directories exist will be + skipped. Strict-mode is used by the core, Loose-mode is used + by the CLI (which has to work even with invalid paths). 
+ """ + specs = { - "ip": filtered_spec({"type": "string", "check_with": _is_valid_ip_adress}), - "file": filtered_spec({"type": "string", "check_with": _file_path_exists}), - "directory": filtered_spec({"type": "string", "check_with": _directory_path_exists}), + "ip": {"type": "string", "check_with": _is_valid_ip_adress}, + "file": {"type": "string"}, + "directory": {"type": "string"}, "time": { "type": "dict", "schema": { - "hour": filtered_spec({"type": "integer", "min": 0, "max": 23}), - "minute": filtered_spec({"type": "integer", "min": 0, "max": 59}), - "second": filtered_spec({"type": "integer", "min": 0, "max": 59}), + "hour": {"type": "integer", "min": 0, "max": 23}, + "minute": {"type": "integer", "min": 0, "max": 59}, + "second": {"type": "integer", "min": 0, "max": 59}, }, }, } + + if strict: + specs["file"]["check_with"] = _file_path_exists + specs["directory"]["check_with"] = _directory_path_exists + return { "general": DICT_SCHEMA( { - "seconds_per_core_interval": filtered_spec( - {"type": "number", "min": 5, "max": 600} - ), + "seconds_per_core_interval": {"type": "number", "min": 5, "max": 600}, "test_mode": {"type": "boolean"}, "station_id": {"type": "string"}, - "min_sun_elevation": filtered_spec({"type": "number", "min": 0, "max": 90}), + "min_sun_elevation": {"type": "number", "min": 0, "max": 90}, } ), "opus": DICT_SCHEMA( @@ -148,7 +157,7 @@ def check( partial_validation: bool = False, ): validator = cerberus.Validator( - CONFIG_FILE_SCHEMA, require_all=(not partial_validation) + get_config_file_schema(strict=True), require_all=(not partial_validation) ) assert validator.validate(content_object), validator.errors # Add assertions that cannot be done with cerberus here @@ -160,7 +169,7 @@ def check_structure(content_object: dict): keys and the correct value-datatypes. Not validations like "file exists", etc. 
""" - validator = cerberus.Validator(CONFIG_FILE_STRUCTURE_SCHEMA, require_all=True) + validator = cerberus.Validator(get_config_file_schema(strict=False), require_all=True) assert validator.validate(content_object), validator.errors @staticmethod From 0ae11ab3da9e3b6a4563a3d15f76bd4f9fecf59f Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Sat, 13 Aug 2022 15:02:28 +0200 Subject: [PATCH 007/132] #92 (4) - add upload stuff to UI --- .../ui/src/components/configuration/index.ts | 4 +- .../sections/config-section-upload.tsx | 109 ++++++++++++++++++ packages/ui/src/custom-types.ts | 16 +++ .../functional-utils/parse-number-types.ts | 1 + 4 files changed, 129 insertions(+), 1 deletion(-) create mode 100644 packages/ui/src/components/configuration/sections/config-section-upload.tsx diff --git a/packages/ui/src/components/configuration/index.ts b/packages/ui/src/components/configuration/index.ts index 8ba00f2c..6c133346 100644 --- a/packages/ui/src/components/configuration/index.ts +++ b/packages/ui/src/components/configuration/index.ts @@ -8,8 +8,9 @@ import ConfigSectionErrorEmail from './sections/config-section-error-email'; import ConfigSectionCamtracker from './sections/config-section-camtracker'; import ConfigSectionMeasurementTriggers from './sections/config-section-measurement-triggers'; import ConfigSectionOpus from './sections/config-section-opus'; -import ConfigSectionHelios from './sections/config-section-helios'; import ConfigSectionTumPlc from './sections/config-section-tum-plc'; +import ConfigSectionHelios from './sections/config-section-helios'; +import ConfigSectionUpload from './sections/config-section-upload'; export default { LabeledRow, @@ -24,4 +25,5 @@ export default { ConfigSectionOpus, ConfigSectionTumPlc, ConfigSectionHelios, + ConfigSectionUpload, }; diff --git a/packages/ui/src/components/configuration/sections/config-section-upload.tsx b/packages/ui/src/components/configuration/sections/config-section-upload.tsx new file mode 100644 index 00000000..bd623ea1 --- /dev/null +++ b/packages/ui/src/components/configuration/sections/config-section-upload.tsx @@ -0,0 +1,109 @@ +import { customTypes } from '../../../custom-types'; +import { configurationComponents, essentialComponents } from '../..'; +import { reduxUtils } from '../../../utils'; + +export default function ConfigSectionUpload() { + const centralSectionConfig = reduxUtils.useTypedSelector((s) => s.config.central?.upload); + const localSectionConfig = reduxUtils.useTypedSelector((s) => s.config.local?.upload); + const dispatch = reduxUtils.useTypedDispatch(); + + const update = (c: customTypes.partialConfig) => + dispatch(reduxUtils.configActions.setLocalPartial(c)); + + function addDefault() { + update({ + upload: { + is_active: false, + host: '1.2.3.4', + user: '...', + password: '...', + src_directory: '...', + dst_directory: '...', + }, + }); + } + + function setNull() { + update({ + upload: null, + }); + } + + if (localSectionConfig === undefined || centralSectionConfig === undefined) { + return <>; + } + + if (localSectionConfig === null) { + return ( +
+
+ Not configured yet + + set up now + + {centralSectionConfig !== null && ( +
+ )} +
+ +
+ ); + } + + return ( + <> + + remove configuration + +
+ update({ upload: { is_active: v } })} + oldValue={centralSectionConfig?.is_active === true} + /> + update({ upload: { host: v } })} + oldValue={centralSectionConfig !== null ? centralSectionConfig.host : 'null'} + /> + update({ upload: { user: v } })} + oldValue={centralSectionConfig !== null ? centralSectionConfig.user : 'null'} + /> + update({ upload: { password: v } })} + oldValue={centralSectionConfig !== null ? centralSectionConfig.password : 'null'} + /> + update({ upload: { src_directory: v } })} + oldValue={ + centralSectionConfig !== null ? centralSectionConfig.src_directory : 'null' + } + /> + update({ upload: { dst_directory: v } })} + oldValue={ + centralSectionConfig !== null ? centralSectionConfig.dst_directory : 'null' + } + /> + + ); +} diff --git a/packages/ui/src/custom-types.ts b/packages/ui/src/custom-types.ts index 6e400425..d652dca2 100644 --- a/packages/ui/src/custom-types.ts +++ b/packages/ui/src/custom-types.ts @@ -89,6 +89,14 @@ export namespace customTypes { measurement_threshold: number; save_images: boolean; }; + upload: null | { + is_active: boolean; + host: string; + user: string; + password: string; + src_directory: string; + dst_directory: string; + }; }; // I have not found a more elegant way yet to generate a partialConfig type @@ -145,6 +153,14 @@ export namespace customTypes { measurement_threshold?: number; save_images?: boolean; }; + upload?: null | { + is_active?: boolean; + host?: string; + user?: string; + password?: string; + src_directory?: string; + dst_directory?: string; + }; }; export type enclosurePlcReadings = { diff --git a/packages/ui/src/utils/functional-utils/parse-number-types.ts b/packages/ui/src/utils/functional-utils/parse-number-types.ts index 5005ad4a..5e8b9002 100644 --- a/packages/ui/src/utils/functional-utils/parse-number-types.ts +++ b/packages/ui/src/utils/functional-utils/parse-number-types.ts @@ -53,5 +53,6 @@ export default function parseNumberTypes(newConfig: customTypes.config): customT ), save_images: newConfig.helios.save_images, }, + upload: newConfig.upload, }; } From b4f725f45736aa18b82f113545fae8146646ecb0 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Sat, 13 Aug 2022 15:18:49 +0200 Subject: [PATCH 008/132] #92 (5) - determine directories to be uploaded --- packages/core/modules/upload_thread.py | 32 +++++++++++++++++++++----- 1 file changed, 26 insertions(+), 6 deletions(-) diff --git a/packages/core/modules/upload_thread.py b/packages/core/modules/upload_thread.py index 9918b625..619f6c57 100644 --- a/packages/core/modules/upload_thread.py +++ b/packages/core/modules/upload_thread.py @@ -1,3 +1,4 @@ +from datetime import datetime import os import queue import threading @@ -14,8 +15,26 @@ PROJECT_DIR = dir(dir(dir(dir(os.path.abspath(__file__))))) -def get_directories_to_be_uploaded(ifg_src_dir): - pass +def is_valid_date(date_string: str): + try: + day_ending = datetime.strptime(f"{date_string} 23:59:59", "%Y%m%d %H:%M:%S") + seconds_since_day_ending = (datetime.now() - day_ending).total_seconds() + assert seconds_since_day_ending >= 3600 + return True + except (ValueError, AssertionError): + return False + + +def get_directories_to_be_uploaded(ifg_src_path) -> list[str]: + if not os.path.isdir(ifg_src_path): + return [] + + return list( + filter( + lambda f: os.path.isdir(os.path.join(ifg_src_path, f)) and is_valid_date(f), + os.listdir(ifg_src_path), + ) + ) class UploadThread: @@ -60,10 +79,11 @@ def main(shared_queue: queue.Queue): start_time = time.time() - # TODO: 1. 
add upload stuff to config - # TODO: 2. determine directories to be uploaded - # TODO: 3. loop over each directory and use the DirectoryUploadClient - # TODO: 4. make the client use threads -> still process one directory at a time but upload individual files in parallel + for d in get_directories_to_be_uploaded("fghj"): + pass + + # TODO: 3. load config in every loop + # TODO: 4. loop over each directory and use the DirectoryUploadClient # TODO: 5. Figure out where ifgs lie on system # TODO: 6. Implement datalogger upload From 6a031408147646cb807349ac68d3147917e927db Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Sat, 13 Aug 2022 15:26:25 +0200 Subject: [PATCH 009/132] #92 (6) - load config in every loop --- packages/core/modules/upload_thread.py | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/packages/core/modules/upload_thread.py b/packages/core/modules/upload_thread.py index 619f6c57..bce28b3a 100644 --- a/packages/core/modules/upload_thread.py +++ b/packages/core/modules/upload_thread.py @@ -40,13 +40,14 @@ def get_directories_to_be_uploaded(ifg_src_path) -> list[str]: class UploadThread: def __init__(self): self.__thread = None - self.__shared_queue = queue.Queue() + self.__shared_queue = None def start(self): """ Start the thread using the threading library """ logger.info("Starting thread") + self.__shared_queue = queue.Queue() self.__thread = threading.Thread(target=UploadThread.main, args=(self.__shared_queue,)) self.__thread.start() @@ -58,31 +59,39 @@ def stop(self): Send a stop-signal to the thread and wait for its termination """ + assert self.__shared_queue is not None + logger.info("Sending termination signal") self.__shared_queue.put("stop") logger.info("Waiting for thread to terminate") self.__thread.join() self.__thread = None + self.__shared_queue = None logger.info("Stopped the thread") @staticmethod def main(shared_queue: queue.Queue): while True: + config = ConfigInterface.read() + # Check for termination try: - if shared_queue.get(block=False) == "stop": + if ( + (config["upload"] is None) + or (not config["upload"]["is_active"]) + or (shared_queue.get(block=False) == "stop") + ): break except queue.Empty: pass start_time = time.time() - for d in get_directories_to_be_uploaded("fghj"): + for d in get_directories_to_be_uploaded(config["upload"]["src_directory"]): pass - # TODO: 3. load config in every loop # TODO: 4. loop over each directory and use the DirectoryUploadClient # TODO: 5. Figure out where ifgs lie on system # TODO: 6. 
Implement datalogger upload From 7710e49e394eea0b7f067dfe5c66e25c3a2e4f16 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Sat, 13 Aug 2022 15:35:42 +0200 Subject: [PATCH 010/132] #92 (7) - implement upload logic --- packages/core/modules/upload_thread.py | 154 +++++++++++++++++- poetry.lock | 215 ++++++++++++++++++++++++- pyproject.toml | 1 + 3 files changed, 362 insertions(+), 8 deletions(-) diff --git a/packages/core/modules/upload_thread.py b/packages/core/modules/upload_thread.py index bce28b3a..2ff93a14 100644 --- a/packages/core/modules/upload_thread.py +++ b/packages/core/modules/upload_thread.py @@ -1,11 +1,14 @@ from datetime import datetime +import json import os import queue +import shutil +import paramiko import threading import time +import fabric from packages.core.utils import ( ConfigInterface, - StateInterface, Logger, ) @@ -15,6 +18,134 @@ PROJECT_DIR = dir(dir(dir(dir(os.path.abspath(__file__))))) +class InvalidUploadState(Exception): + pass + + +class DirectoryUploadClient: + def __init__(self, dirname: str, config: dict): + self.connection = fabric.connection.Connection( + f"{config['upload']['user']}@{config['upload']['host']}", + connect_kwargs={"password": config["upload"]["password"]}, + connect_timeout=5, + ) + self.transfer_process = fabric.transfer.Transfer(self.connection) + + self.src_dir_path = os.path.join(config["upload"]["src_directory"], dirname) + self.src_meta_path = os.path.join(self.src_dir_path, "upload-meta.json") + assert os.path.isdir(self.src_dir_path), f"{self.src_dir_path} is not a directory" + + self.dst_dir_path = f"{config['upload']['dst_directory']}/{dirname}" + self.dst_meta_path = f"{self.dst_dir_path}/upload-meta.json" + assert self.transfer_process.is_remote_dir( + config["upload"]["dst_directory"] + ), f"remote {config['upload']['dst_directory']} is not a directory" + + self.meta_content: dict | None = None + + def create_remote_dir(self): + self.connection.run(f"mkdir {self.dst_dir_path}") + with open(self.src_meta_path, "w") as f: + json.dump( + { + "complete": False, + "fileList": [], + "createdTime": round(time.time(), 3), + "lastModifiedTime": round(time.time(), 3), + }, + f, + indent=4, + ) + self.transfer_process.put(self.src_meta_path, self.dst_meta_path) + + def fetch_meta(self): + if os.path.isfile(self.src_meta_path): + os.remove(self.src_meta_path) + self.transfer_process.get(self.dst_meta_path, self.src_meta_path) + try: + assert os.path.isfile(self.src_meta_path) + with open(self.src_meta_path, "r") as f: + self.meta_content = json.load(f) + except (AssertionError, json.JSONDecodeError) as e: + # TODO: log/report this exception and continue with other directories + raise InvalidUploadState(str(e)) + + def update_meta(self, new_meta_content: dict): + new_meta_content = { + **new_meta_content, + "lastModifiedTime": round(time.time(), 3), + } + with open(self.src_meta_path, "w") as f: + json.dump(new_meta_content, f, indent=4) + self.transfer_process.put(self.src_meta_path, self.dst_meta_path) + self.meta_content = new_meta_content + + def run(self): + # possibly initialize remote dir, fetch remote meta + if not self.transfer_process.is_remote_dir(self.dst_dir_path): + self.create_remote_dir() + self.fetch_meta() + assert self.meta_content is not None + + # determine files missing in dst + src_file_set = set(os.listdir(self.src_dir_path)) + src_file_set.remove("upload-meta.json") + dst_file_set = set(self.meta_content["fileList"]) + files_missing_in_dst = src_file_set.difference(dst_file_set) + + # if there are files 
that have not been uploaded, + # assert that the remote meta also indicates an + # incomplete upload state + if len(files_missing_in_dst) != 0: + if self.meta_content["complete"]: + raise InvalidUploadState( + "there are missing files but remote " + "meta contains complete=True" + ) + + # upload every file that is missing in the remote + # meta but present in the local directory. Every 25 + # files, upload the remote meta file on which files + # have been uploaded + upload_count = 0 + uploaded_files: list[str] = [] + while True: + try: + f = files_missing_in_dst.pop() + except KeyError: + break + self.transfer_process.put( + os.path.join(self.src_dir_path, f), f"{self.dst_dir_path}/{f}" + ) + uploaded_files.append(f) + upload_count += 1 + if upload_count % 25 == 0: + self.update_meta( + { + **self.meta_content, + "fileList": [*(self.meta_content["fileList"]), *uploaded_files], + } + ) + uploaded_files = [] + + # update remote meta with the final files and set + # "complete" to True. This indicates that + self.update_meta( + { + **self.meta_content, + "complete": True, + "fileList": [*(self.meta_content["fileList"]), *uploaded_files], + } + ) + + # TODO: make sure all copying was successful - maybe + # use a checksum over a temporary tarball + # TODO: make the deletion of src optional (boolean in config) + # shutil.rmtree(self.src_dir_path) + + # close ssh and scp connection + self.connection.close() + + def is_valid_date(date_string: str): try: day_ending = datetime.strptime(f"{date_string} 23:59:59", "%Y%m%d %H:%M:%S") @@ -89,12 +220,21 @@ def main(shared_queue: queue.Queue): start_time = time.time() - for d in get_directories_to_be_uploaded(config["upload"]["src_directory"]): - pass - - # TODO: 4. loop over each directory and use the DirectoryUploadClient - # TODO: 5. Figure out where ifgs lie on system - # TODO: 6. Implement datalogger upload + for src_date_string in get_directories_to_be_uploaded( + config["upload"]["src_directory"] + ): + try: + DirectoryUploadClient(src_date_string).run() + logger.info(f"successfully uploaded data from {src_date_string}") + except TimeoutError as e: + logger.error(f"could not reach host (uploading {src_date_string}): {e}") + except paramiko.ssh_exception.AuthenticationException as e: + logger.error(f"failed to authenticate (uploading {src_date_string}): {e}") + except InvalidUploadState as e: + logger.error(f"stuck in invalid state (uploading {src_date_string}): {e}") + + # TODO: 6. Figure out where ifgs lie on systems + # TODO: 7. 
Implement datalogger upload elapsed_time = time.time() - start_time time_to_wait = 5 - elapsed_time diff --git a/poetry.lock b/poetry.lock index 3bab5c96..3d616389 100644 --- a/poetry.lock +++ b/poetry.lock @@ -41,6 +41,21 @@ docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] +[[package]] +name = "bcrypt" +version = "3.2.2" +description = "Modern password hashing for your software and your servers" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +cffi = ">=1.1" + +[package.extras] +tests = ["pytest (>=3.2.1,!=3.3.0)"] +typecheck = ["mypy"] + [[package]] name = "black" version = "22.3.0" @@ -70,6 +85,17 @@ category = "main" optional = false python-versions = ">=2.7" +[[package]] +name = "cffi" +version = "1.15.1" +description = "Foreign Function Interface for Python calling C code." +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +pycparser = "*" + [[package]] name = "click" version = "8.1.3" @@ -89,6 +115,25 @@ category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +[[package]] +name = "cryptography" +version = "37.0.4" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +cffi = ">=1.12" + +[package.extras] +docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"] +docstest = ["pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"] +pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"] +sdist = ["setuptools_rust (>=0.11.4)"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,!=3.79.2)"] + [[package]] name = "deepdiff" version = "5.8.1" @@ -103,6 +148,23 @@ ordered-set = ">=4.1.0,<4.2.0" [package.extras] cli = ["click (==8.0.3)", "pyyaml (==5.4.1)", "toml (==0.10.2)", "clevercsv (==0.7.1)"] +[[package]] +name = "fabric" +version = "2.7.1" +description = "High level SSH command execution" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +invoke = ">=1.3,<2.0" +paramiko = ">=2.4" +pathlib2 = "*" + +[package.extras] +testing = ["mock (>=2.0.0,<3.0)"] +pytest = ["pytest (>=3.2.5,<4.0)", "mock (>=2.0.0,<3.0)"] + [[package]] name = "filelock" version = "3.6.0" @@ -123,6 +185,14 @@ category = "dev" optional = false python-versions = "*" +[[package]] +name = "invoke" +version = "1.7.1" +description = "Pythonic task execution" +category = "main" +optional = false +python-versions = "*" + [[package]] name = "jdcal" version = "1.4.1" @@ -185,6 +255,37 @@ python-versions = ">=3.6" [package.dependencies] pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" +[[package]] +name = "paramiko" +version = "2.11.0" +description = "SSH2 protocol library" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +bcrypt = ">=3.1.3" +cryptography = ">=2.5" +pynacl = ">=1.0.1" +six = "*" + +[package.extras] +all = ["pyasn1 (>=0.1.7)", "pynacl (>=1.0.1)", "bcrypt (>=3.1.3)", "invoke (>=1.3)", "gssapi 
(>=1.4.1)", "pywin32 (>=2.1.8)"] +ed25519 = ["pynacl (>=1.0.1)", "bcrypt (>=3.1.3)"] +gssapi = ["pyasn1 (>=0.1.7)", "gssapi (>=1.4.1)", "pywin32 (>=2.1.8)"] +invoke = ["invoke (>=1.3)"] + +[[package]] +name = "pathlib2" +version = "2.3.7.post1" +description = "Object-oriented filesystem paths" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +six = "*" + [[package]] name = "pathspec" version = "0.9.0" @@ -236,6 +337,14 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + [[package]] name = "pyerfa" version = "2.0.0.1" @@ -251,6 +360,21 @@ numpy = ">=1.17" docs = ["sphinx-astropy (>=1.3)"] test = ["pytest", "pytest-doctestplus (>=0.7)"] +[[package]] +name = "pynacl" +version = "1.5.0" +description = "Python binding to the Networking and Cryptography (NaCl) library" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +cffi = ">=1.4.1" + +[package.extras] +docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"] +tests = ["pytest (>=3.2.1,!=3.3.0)", "hypothesis (>=3.27.0)"] + [[package]] name = "pyparsing" version = "3.0.9" @@ -311,6 +435,14 @@ category = "main" optional = false python-versions = ">=3.6" +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" + [[package]] name = "tomli" version = "2.0.1" @@ -322,7 +454,7 @@ python-versions = ">=3.7" [metadata] lock-version = "1.1" python-versions = "^3.10" -content-hash = "dbc1c40abfb604bf62e6394e8dba341ce8b59caf8fbf9cdf8503dad036fe80e9" +content-hash = "abeac95184e7c7f394e0a51f1820148898111773675c16d809f2e1a7f5064db4" [metadata.files] astropy = [ @@ -353,6 +485,7 @@ attrs = [ {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, ] +bcrypt = [] black = [ {file = "black-22.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2497f9c2386572e28921fa8bec7be3e51de6801f7459dffd6e62492531c47e09"}, {file = "black-22.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5795a0375eb87bfe902e80e0c8cfaedf8af4d49694d69161e5bd3206c18618bb"}, @@ -381,6 +514,72 @@ black = [ cerberus = [ {file = "Cerberus-1.3.4.tar.gz", hash = "sha256:d1b21b3954b2498d9a79edf16b3170a3ac1021df88d197dc2ce5928ba519237c"}, ] +cffi = [ + {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, + {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, + {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, + 
{file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, + {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, + {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, + {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, + {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, + {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, + {file = 
"cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, + {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, + {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, + {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, + {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, + {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, + {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, + {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, + {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = 
"sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, + {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, + {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, + {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, +] click = [ {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, @@ -389,10 +588,12 @@ colorama = [ {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, ] +cryptography = [] deepdiff = [ {file = "deepdiff-5.8.1-py3-none-any.whl", hash = "sha256:e9aea49733f34fab9a0897038d8f26f9d94a97db1790f1b814cced89e9e0d2b7"}, {file = "deepdiff-5.8.1.tar.gz", hash = "sha256:8d4eb2c4e6cbc80b811266419cb71dd95a157094a3947ccf937a94d44943c7b8"}, ] +fabric = [] filelock = [ {file = "filelock-3.6.0-py3-none-any.whl", hash = "sha256:f8314284bfffbdcfa0ff3d7992b023d4c628ced6feb957351d4c48d059f56bc0"}, {file = "filelock-3.6.0.tar.gz", hash = "sha256:9cd540a9352e432c7246a48fe4e8712b10acb1df2ad1f30e8c070b82ae1fed85"}, @@ -401,6 +602,7 @@ iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, ] +invoke = [] jdcal = [ {file = "jdcal-1.4.1-py2.py3-none-any.whl", hash = "sha256:1abf1305fce18b4e8aa248cf8fe0c56ce2032392bc64bbd61b5dff2a19ec8bba"}, {file = "jdcal-1.4.1.tar.gz", hash = "sha256:472872e096eb8df219c23f2689fc336668bdb43d194094b5cc1707e1640acfc8"}, @@ -448,6 +650,8 @@ packaging = [ {file = 
"packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, ] +paramiko = [] +pathlib2 = [] pathspec = [ {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, @@ -498,6 +702,10 @@ py = [ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, ] +pycparser = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] pyerfa = [ {file = "pyerfa-2.0.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:278832de7803f2fb0ef4b14263200f98dfdb3eaa78dc63835d93796fd8fc42c6"}, {file = "pyerfa-2.0.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:629248cebc8626a52e80f69d4e2f30cc6e751f57803f5ba7ec99edd09785d181"}, @@ -529,6 +737,7 @@ pyerfa = [ {file = "pyerfa-2.0.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:63a83c35cea8c5d50d53c18089f1e625c0ffc59a7a5b8d44e0f1b3ec5288f183"}, {file = "pyerfa-2.0.0.1.tar.gz", hash = "sha256:2fd4637ffe2c1e6ede7482c13f583ba7c73119d78bef90175448ce506a0ede30"}, ] +pynacl = [] pyparsing = [ {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, @@ -593,6 +802,10 @@ pyyaml = [ {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, ] +six = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] tomli = [ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, diff --git a/pyproject.toml b/pyproject.toml index 9c776bd0..0baf5802 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,6 +16,7 @@ opencv-python = "4.5.5.64" astropy = "5.0.4" jdcal = "1.4.1" psutil = "5.9.1" +fabric = "^2.7.1" [tool.poetry.dev-dependencies] pytest = "7.1.2" From a71dc46602d5076e5193bac602e7ded715edb81a Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Sat, 13 Aug 2022 17:12:31 +0200 Subject: [PATCH 011/132] #92 (8) - make removal of source optional --- config/upload.config.default.json | 3 +- packages/core/modules/upload_thread.py | 38 ++++++++++++------- .../utils/interfaces/config_validation.py | 1 + .../sections/config-section-upload.tsx | 7 ++++ packages/ui/src/custom-types.ts | 5 ++- packages/ui/src/tabs/configuration-tab.tsx | 2 + 6 files changed, 40 insertions(+), 16 deletions(-) diff --git 
a/config/upload.config.default.json b/config/upload.config.default.json index 5d6a0d85..1f9e4cc6 100644 --- a/config/upload.config.default.json +++ b/config/upload.config.default.json @@ -4,5 +4,6 @@ "user": "...", "password": "...", "src_directory": "...", - "dst_directory": "..." + "dst_directory": "...", + "remove_src_after_upload": false } diff --git a/packages/core/modules/upload_thread.py b/packages/core/modules/upload_thread.py index 2ff93a14..e62e1e31 100644 --- a/packages/core/modules/upload_thread.py +++ b/packages/core/modules/upload_thread.py @@ -42,6 +42,7 @@ def __init__(self, dirname: str, config: dict): ), f"remote {config['upload']['dst_directory']} is not a directory" self.meta_content: dict | None = None + self.remove_src_after_upload: bool = config["upload"]["remove_src_after_upload"] def create_remote_dir(self): self.connection.run(f"mkdir {self.dst_dir_path}") @@ -70,9 +71,10 @@ def fetch_meta(self): # TODO: log/report this exception and continue with other directories raise InvalidUploadState(str(e)) - def update_meta(self, new_meta_content: dict): + def update_meta(self, new_meta_content_partial: dict): new_meta_content = { - **new_meta_content, + **self.meta_content, + **new_meta_content_partial, "lastModifiedTime": round(time.time(), 3), } with open(self.src_meta_path, "w") as f: @@ -121,7 +123,6 @@ def run(self): if upload_count % 25 == 0: self.update_meta( { - **self.meta_content, "fileList": [*(self.meta_content["fileList"]), *uploaded_files], } ) @@ -131,18 +132,29 @@ def run(self): # "complete" to True. This indicates that self.update_meta( { - **self.meta_content, - "complete": True, "fileList": [*(self.meta_content["fileList"]), *uploaded_files], } ) - # TODO: make sure all copying was successful - maybe - # use a checksum over a temporary tarball - # TODO: make the deletion of src optional (boolean in config) - # shutil.rmtree(self.src_dir_path) + if self.remove_src_after_upload: + # TODO: make sure all copying was successful - calculate + # a checksum over all files in the fileList + local_checksum = "..." + remote_checksum = "......" + if local_checksum == remote_checksum: + self.update_meta({"complete": True}) + shutil.rmtree(self.src_dir_path) + logger.debug("successfully removed source") + else: + raise InvalidUploadState( + f"checksums do not match, local={local_checksum} " + + f"remote={remote_checksum}" + ) + else: + logger.debug("skipping removal of source") - # close ssh and scp connection + def teardown(self): + """close ssh and scp connection""" self.connection.close() @@ -224,7 +236,7 @@ def main(shared_queue: queue.Queue): config["upload"]["src_directory"] ): try: - DirectoryUploadClient(src_date_string).run() + client = DirectoryUploadClient(src_date_string).run() logger.info(f"successfully uploaded data from {src_date_string}") except TimeoutError as e: logger.error(f"could not reach host (uploading {src_date_string}): {e}") @@ -232,9 +244,7 @@ def main(shared_queue: queue.Queue): logger.error(f"failed to authenticate (uploading {src_date_string}): {e}") except InvalidUploadState as e: logger.error(f"stuck in invalid state (uploading {src_date_string}): {e}") - - # TODO: 6. Figure out where ifgs lie on systems - # TODO: 7. 
Implement datalogger upload + client.teardown() elapsed_time = time.time() - start_time time_to_wait = 5 - elapsed_time diff --git a/packages/core/utils/interfaces/config_validation.py b/packages/core/utils/interfaces/config_validation.py index 63d61398..6d23de90 100644 --- a/packages/core/utils/interfaces/config_validation.py +++ b/packages/core/utils/interfaces/config_validation.py @@ -136,6 +136,7 @@ def get_config_file_schema(strict: boolean): "password": {"type": "string"}, "src_directory": specs["file"], "dst_directory": {"type": "string"}, + "remove_src_after_upload": {"type": "boolean"}, } ), } diff --git a/packages/ui/src/components/configuration/sections/config-section-upload.tsx b/packages/ui/src/components/configuration/sections/config-section-upload.tsx index bd623ea1..a60af770 100644 --- a/packages/ui/src/components/configuration/sections/config-section-upload.tsx +++ b/packages/ui/src/components/configuration/sections/config-section-upload.tsx @@ -19,6 +19,7 @@ export default function ConfigSectionUpload() { password: '...', src_directory: '...', dst_directory: '...', + remove_src_after_upload: false, }, }); } @@ -104,6 +105,12 @@ export default function ConfigSectionUpload() { centralSectionConfig !== null ? centralSectionConfig.dst_directory : 'null' } /> + update({ upload: { remove_src_after_upload: v } })} + oldValue={centralSectionConfig?.remove_src_after_upload === true} + /> ); } diff --git a/packages/ui/src/custom-types.ts b/packages/ui/src/custom-types.ts index d652dca2..4dab2dbf 100644 --- a/packages/ui/src/custom-types.ts +++ b/packages/ui/src/custom-types.ts @@ -35,7 +35,8 @@ export namespace customTypes { | 'error_email' | 'measurement_triggers' | 'tum_plc' - | 'helios'; + | 'helios' + | 'upload'; export type config = { general: { seconds_per_core_interval: number; @@ -96,6 +97,7 @@ export namespace customTypes { password: string; src_directory: string; dst_directory: string; + remove_src_after_upload: boolean; }; }; @@ -160,6 +162,7 @@ export namespace customTypes { password?: string; src_directory?: string; dst_directory?: string; + remove_src_after_upload?: boolean; }; }; diff --git a/packages/ui/src/tabs/configuration-tab.tsx b/packages/ui/src/tabs/configuration-tab.tsx index 20b00531..30e3a358 100644 --- a/packages/ui/src/tabs/configuration-tab.tsx +++ b/packages/ui/src/tabs/configuration-tab.tsx @@ -14,6 +14,7 @@ const sections: { key: customTypes.configSectionKey; label: string }[] = [ { key: 'measurement_triggers', label: 'Triggers' }, { key: 'tum_plc', label: 'TUM PLC' }, { key: 'helios', label: 'Helios' }, + { key: 'upload', label: 'Upload' }, ]; export default function ConfigurationTab() { const centralConfig = reduxUtils.useTypedSelector((s) => s.config.central); @@ -110,6 +111,7 @@ export default function ConfigurationTab() { )} {activeKey === 'tum_plc' && } {activeKey === 'helios' && } + {activeKey === 'upload' && } {configIsDiffering && ( Date: Sat, 13 Aug 2022 17:13:27 +0200 Subject: [PATCH 012/132] #92 (9) - Add script to calculate an upload-checksum --- scripts/get_upload_dir_checksum.py | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) create mode 100644 scripts/get_upload_dir_checksum.py diff --git a/scripts/get_upload_dir_checksum.py b/scripts/get_upload_dir_checksum.py new file mode 100644 index 00000000..52411f39 --- /dev/null +++ b/scripts/get_upload_dir_checksum.py @@ -0,0 +1,30 @@ +import sys +import os +import json +import hashlib + +assert len(sys.argv) == 2, 'call this script with "python "' +assert 
sys.version.startswith("3.10"), "script requires Python 3.10" + +# check whether upload directory and meta file exist +upload_directory = sys.argv[1] +upload_meta_path = os.path.join(upload_directory, "upload-meta.json") +assert os.path.isdir(upload_directory), f'"{upload_directory}" is not a directory' +assert os.path.isfile(upload_meta_path), f'"{upload_meta_path}" is not a file' + +# get and validate fileList +with open(upload_meta_path) as f: + upload_meta = json.load(f) +file_list = upload_meta["fileList"] +assert isinstance(file_list, list), f"upload_meta.fileList is not a list" + +# calculate checksum over all files (sorted) +hasher = hashlib.md5() +for filename in sorted(file_list): + filepath = os.path.join(upload_directory, filename) + with open(filepath, "rb") as f: + hasher.update(f.read()) + +# output hashsum - with a status code of 0 the programs +# stdout is a checksome, otherwise it is a traceback +print(hasher.hexdigest()) From 559026dc867685512cc9d600efc31620e97b8aeb Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Sat, 13 Aug 2022 17:41:31 +0200 Subject: [PATCH 013/132] #92 (10) - Use a checksum to validate whether upload has been successfull --- packages/core/modules/upload_thread.py | 44 +++++++++++++++++++++++--- scripts/get_upload_dir_checksum.py | 2 +- 2 files changed, 40 insertions(+), 6 deletions(-) diff --git a/packages/core/modules/upload_thread.py b/packages/core/modules/upload_thread.py index e62e1e31..922a66cb 100644 --- a/packages/core/modules/upload_thread.py +++ b/packages/core/modules/upload_thread.py @@ -1,8 +1,10 @@ from datetime import datetime +import hashlib import json import os import queue import shutil +import invoke import paramiko import threading import time @@ -59,6 +61,40 @@ def create_remote_dir(self): ) self.transfer_process.put(self.src_meta_path, self.dst_meta_path) + def get_remote_directory_checksum(self): + local_script_path = os.path.join(PROJECT_DIR, "scripts", "get_upload_dir_checksum.py") + remote_script_path = ( + self.config["upload"]["src_directory"] + "/get_upload_dir_checksum.py" + ) + self.transfer_process.put(local_script_path, remote_script_path) + + try: + self.connection.run("python3.10 --version", hide=True) + except invoke.exceptions.UnexpectedExit: + raise InvalidUploadState("python3.10 is not installed on the server") + + try: + remote_command = f"python3.10 {remote_script_path} {self.src_dir_path}" + a: invoke.runners.Result = self.connection.run(remote_command, hide=True) + assert a.exited == 0 + return a.stdout.strip() + except (invoke.exceptions.UnexpectedExit, AssertionError) as e: + raise InvalidUploadState( + f"could not execute remote command on server ({remote_command}): {e}" + ) + + def get_local_directory_checksum(self): + # calculate checksum over all files (sorted) + hasher = hashlib.md5() + for filename in sorted(self.meta_content["fileList"]): + filepath = os.path.join(self.src_dir_path, filename) + with open(filepath, "rb") as f: + hasher.update(f.read()) + + # output hashsum - with a status code of 0 the programs + # stdout is a checksum, otherwise it is a traceback + return hasher.hexdigest() + def fetch_meta(self): if os.path.isfile(self.src_meta_path): os.remove(self.src_meta_path) @@ -137,11 +173,9 @@ def run(self): ) if self.remove_src_after_upload: - # TODO: make sure all copying was successful - calculate - # a checksum over all files in the fileList - local_checksum = "..." - remote_checksum = "......" 
- if local_checksum == remote_checksum: + remote_checksum = self.get_remote_directory_checksum() + local_checksum = self.get_local_directory_checksum() + if remote_checksum == local_checksum: self.update_meta({"complete": True}) shutil.rmtree(self.src_dir_path) logger.debug("successfully removed source") diff --git a/scripts/get_upload_dir_checksum.py b/scripts/get_upload_dir_checksum.py index 52411f39..c3003258 100644 --- a/scripts/get_upload_dir_checksum.py +++ b/scripts/get_upload_dir_checksum.py @@ -26,5 +26,5 @@ hasher.update(f.read()) # output hashsum - with a status code of 0 the programs -# stdout is a checksome, otherwise it is a traceback +# stdout is a checksum, otherwise it is a traceback print(hasher.hexdigest()) From a2da575f11e150312a6346584fb1bb16dc30ebbe Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Sat, 13 Aug 2022 17:53:14 +0200 Subject: [PATCH 014/132] #92 (11) - improve health checks while uploading --- packages/core/modules/upload_thread.py | 23 +++++++++++++++++------ 1 file changed, 17 insertions(+), 6 deletions(-) diff --git a/packages/core/modules/upload_thread.py b/packages/core/modules/upload_thread.py index 922a66cb..f6e7e42f 100644 --- a/packages/core/modules/upload_thread.py +++ b/packages/core/modules/upload_thread.py @@ -9,6 +9,7 @@ import threading import time import fabric +import re from packages.core.utils import ( ConfigInterface, Logger, @@ -25,7 +26,7 @@ class InvalidUploadState(Exception): class DirectoryUploadClient: - def __init__(self, dirname: str, config: dict): + def __init__(self, date_string: str, config: dict): self.connection = fabric.connection.Connection( f"{config['upload']['user']}@{config['upload']['host']}", connect_kwargs={"password": config["upload"]["password"]}, @@ -33,11 +34,12 @@ def __init__(self, dirname: str, config: dict): ) self.transfer_process = fabric.transfer.Transfer(self.connection) - self.src_dir_path = os.path.join(config["upload"]["src_directory"], dirname) + self.date_string + self.src_dir_path = os.path.join(config["upload"]["src_directory"], date_string) self.src_meta_path = os.path.join(self.src_dir_path, "upload-meta.json") assert os.path.isdir(self.src_dir_path), f"{self.src_dir_path} is not a directory" - self.dst_dir_path = f"{config['upload']['dst_directory']}/{dirname}" + self.dst_dir_path = f"{config['upload']['dst_directory']}/{date_string}" self.dst_meta_path = f"{self.dst_dir_path}/upload-meta.json" assert self.transfer_process.is_remote_dir( config["upload"]["dst_directory"] @@ -125,11 +127,20 @@ def run(self): self.fetch_meta() assert self.meta_content is not None - # determine files missing in dst - src_file_set = set(os.listdir(self.src_dir_path)) - src_file_set.remove("upload-meta.json") + # determine files present in src and dst directory + ifg_file_patter = re.compile("^.*" + self.date_string + ".*\.\d{4}$") + src_file_set = set( + [f for f in os.listdir(self.src_dir_path) if ifg_file_patter.match(f)] + ) dst_file_set = set(self.meta_content["fileList"]) + + # determine file differences between src and dst files_missing_in_dst = src_file_set.difference(dst_file_set) + files_missing_in_src = dst_file_set.difference(src_file_set) + if len(files_missing_in_src) > 0: + raise InvalidUploadState( + f"files present in dst are missing in src: {files_missing_in_src}" + ) # if there are files that have not been uploaded, # assert that the remote meta also indicates an From b2235342da7c2de650a813801630aac93cc6ba41 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Tue, 16 Aug 2022 15:13:22 
+0200 Subject: [PATCH 015/132] Extract some mainloop parts into functions --- packages/core/main.py | 56 +++++++++++++++++++++++++------------------ 1 file changed, 33 insertions(+), 23 deletions(-) diff --git a/packages/core/main.py b/packages/core/main.py index 22770366..91c519d1 100644 --- a/packages/core/main.py +++ b/packages/core/main.py @@ -48,6 +48,34 @@ def toggle_thread_states( upload_thread_instance.stop() +def update_exception_state( + config: dict, current_exceptions: list[str], new_exception: Exception +): + try: + new_current_exceptions = [*current_exceptions] + + if new_exception is not None: + if type(new_exception).__name__ not in current_exceptions: + new_current_exceptions.append(type(new_exception).__name__) + ExceptionEmailClient.handle_occured_exception(config, new_exception) + if len(current_exceptions) == 0: + Logger.log_activity_event("error-occured") + else: + if len(current_exceptions) > 0: + new_current_exceptions = [] + ExceptionEmailClient.handle_resolved_exception(config) + logger.info(f"All exceptions have been resolved.") + Logger.log_activity_event("errors-resolved") + + # if no errors until now + current_exceptions = [*new_current_exceptions] + StateInterface.update({"current_exceptions": current_exceptions}, persistent=True) + except Exception as e: + logger.exception(e) + + return current_exceptions + + def run(): StateInterface.initialize() logger.info(f"Starting mainloop inside process with PID {os.getpid()}") @@ -98,31 +126,13 @@ def run(): new_exception = e logger.exception(new_exception) - try: - new_current_exceptions = [*current_exceptions] - - if new_exception is not None: - if type(new_exception).__name__ not in current_exceptions: - new_current_exceptions.append(type(new_exception).__name__) - ExceptionEmailClient.handle_occured_exception(_CONFIG, new_exception) - if len(current_exceptions) == 0: - Logger.log_activity_event("error-occured") - else: - if len(current_exceptions) > 0: - new_current_exceptions = [] - ExceptionEmailClient.handle_resolved_exception(_CONFIG) - logger.info(f"All exceptions have been resolved.") - Logger.log_activity_event("errors-resolved") - - # if no errors until now - current_exceptions = [*new_current_exceptions] - StateInterface.update({"current_exceptions": current_exceptions}, persistent=True) - except Exception as e: - logger.exception(e) - - logger.info("Ending iteration") + # update the list of currently present exceptions + # send error emails on new exceptions, send resolved + # emails when no errors are present anymore + current_exceptions = update_exception_state(_CONFIG, current_exceptions, new_exception) # wait rest of loop time + logger.info("Ending iteration") elapsed_time = time.time() - start_time time_to_wait = _CONFIG["general"]["seconds_per_core_interval"] - elapsed_time if time_to_wait > 0: From cf910a38f1d48f48d1aafade3c6f0c9e8ef8bdb3 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Tue, 16 Aug 2022 15:14:27 +0200 Subject: [PATCH 016/132] Add todos --- packages/core/main.py | 4 +++- packages/core/modules/upload_thread.py | 9 ++++++++- 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/packages/core/main.py b/packages/core/main.py index 91c519d1..3462a55b 100644 --- a/packages/core/main.py +++ b/packages/core/main.py @@ -10,7 +10,7 @@ logger = Logger(origin="main") - +# TODO: document def toggle_thread_states( config: dict, helios_thread_instance: modules.helios_thread.HeliosThread, @@ -48,6 +48,7 @@ def toggle_thread_states( upload_thread_instance.stop() +# TODO: document 
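# documentation sketch for the TODO above (assumed wording, summarising the
# function introduced in the previous commit): update_exception_state keeps the
# list of currently active exception types in sync. A newly raised exception
# type triggers an error email and an "error-occured" activity event; an
# iteration without any exception clears the list, sends a resolved email and
# logs "errors-resolved". The list is written to the persistent state via
# StateInterface.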
def update_exception_state( config: dict, current_exceptions: list[str], new_exception: Exception ): @@ -112,6 +113,7 @@ def run(): time.sleep(10) continue + # TODO: add comment toggle_thread_states(_CONFIG, helios_thread_instance, upload_thread_instance) if _CONFIG["general"]["test_mode"]: diff --git a/packages/core/modules/upload_thread.py b/packages/core/modules/upload_thread.py index f6e7e42f..8c724a00 100644 --- a/packages/core/modules/upload_thread.py +++ b/packages/core/modules/upload_thread.py @@ -34,7 +34,7 @@ def __init__(self, date_string: str, config: dict): ) self.transfer_process = fabric.transfer.Transfer(self.connection) - self.date_string + self.date_string = date_string self.src_dir_path = os.path.join(config["upload"]["src_directory"], date_string) self.src_meta_path = os.path.join(self.src_dir_path, "upload-meta.json") assert os.path.isdir(self.src_dir_path), f"{self.src_dir_path} is not a directory" @@ -225,6 +225,10 @@ def get_directories_to_be_uploaded(ifg_src_path) -> list[str]: ) +# TODO: draw architecture of the upload +# TODO: simplify the whole file! + + class UploadThread: def __init__(self): self.__thread = None @@ -265,6 +269,8 @@ def main(shared_queue: queue.Queue): config = ConfigInterface.read() # Check for termination + # FIXME: right now, this checks seems to be in multiple places + # TODO: extract this into a function try: if ( (config["upload"] is None) @@ -277,6 +283,7 @@ def main(shared_queue: queue.Queue): start_time = time.time() + # TODO: check for termination between loop iterations for src_date_string in get_directories_to_be_uploaded( config["upload"]["src_directory"] ): From b15c4ae593e52914f0706b43ae319c3c717e0067 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Tue, 16 Aug 2022 22:21:58 +0200 Subject: [PATCH 017/132] Refactoring - simplify start/stop of the upload thread --- packages/core/main.py | 21 ++------ packages/core/modules/upload_thread.py | 69 ++++++++++---------------- 2 files changed, 31 insertions(+), 59 deletions(-) diff --git a/packages/core/main.py b/packages/core/main.py index 3462a55b..a5fda4ae 100644 --- a/packages/core/main.py +++ b/packages/core/main.py @@ -12,9 +12,7 @@ # TODO: document def toggle_thread_states( - config: dict, - helios_thread_instance: modules.helios_thread.HeliosThread, - upload_thread_instance: modules.upload_thread.UploadThread, + config: dict, helios_thread_instance: modules.helios_thread.HeliosThread ): helios_should_be_running = all( [ @@ -23,13 +21,6 @@ def toggle_thread_states( config["measurement_triggers"]["consider_helios"], ] ) - upload_should_be_running = all( - [ - not config["general"]["test_mode"], - config["upload"] is not None, - config["upload"]["is_active"], - ] - ) if config["general"]["test_mode"]: logger.info("pyra-core in test mode") @@ -41,12 +32,6 @@ def toggle_thread_states( if not helios_should_be_running and helios_thread_instance.is_running(): helios_thread_instance.stop() - # Start/stop UploadThread - if upload_should_be_running and not upload_thread_instance.is_running(): - upload_thread_instance.start() - if not upload_should_be_running and upload_thread_instance.is_running(): - upload_thread_instance.stop() - # TODO: document def update_exception_state( @@ -114,7 +99,9 @@ def run(): continue # TODO: add comment - toggle_thread_states(_CONFIG, helios_thread_instance, upload_thread_instance) + upload_thread_instance.update_thread_state() + + toggle_thread_states(_CONFIG, helios_thread_instance) if _CONFIG["general"]["test_mode"]: logger.info("pyra-core in test 
mode") diff --git a/packages/core/modules/upload_thread.py b/packages/core/modules/upload_thread.py index 8c724a00..45c03c53 100644 --- a/packages/core/modules/upload_thread.py +++ b/packages/core/modules/upload_thread.py @@ -232,56 +232,44 @@ def get_directories_to_be_uploaded(ifg_src_path) -> list[str]: class UploadThread: def __init__(self): self.__thread = None - self.__shared_queue = None - def start(self): + def update_thread_state(self, config: dict): """ - Start the thread using the threading library + Make sure that the upload loop is (not) running, based on config.upload """ - logger.info("Starting thread") - self.__shared_queue = queue.Queue() - self.__thread = threading.Thread(target=UploadThread.main, args=(self.__shared_queue,)) - self.__thread.start() + is_running = self.__thread.is_alive() + should_be_running = UploadThread.should_be_running(config) - def is_running(self): - return self.__thread is not None + if should_be_running and (not is_running): + logger.info("Starting the thread") + self.__thread = threading.Thread(target=UploadThread.main) + self.__thread.start() - def stop(self): - """ - Send a stop-signal to the thread and wait for its termination - """ - - assert self.__shared_queue is not None - - logger.info("Sending termination signal") - self.__shared_queue.put("stop") - - logger.info("Waiting for thread to terminate") - self.__thread.join() - self.__thread = None - self.__shared_queue = None + if (self.__thread is not None) and (not is_running): + logger.info("Thread has stopped") + self.__thread.join() + self.__thread = None - logger.info("Stopped the thread") + @staticmethod + def should_be_running(config: dict) -> bool: + """Should the thread be running? (based on config.upload)""" + return ( + (not config["general"]["test_mode"]) + and ("upload" in config.keys()) + and (config["upload"]["is_active"]) + ) @staticmethod - def main(shared_queue: queue.Queue): + def main(): + """ + Main entry point for the upload process + """ while True: config = ConfigInterface.read() # Check for termination - # FIXME: right now, this checks seems to be in multiple places - # TODO: extract this into a function - try: - if ( - (config["upload"] is None) - or (not config["upload"]["is_active"]) - or (shared_queue.get(block=False) == "stop") - ): - break - except queue.Empty: - pass - - start_time = time.time() + if not UploadThread.should_be_running(config): + return # TODO: check for termination between loop iterations for src_date_string in get_directories_to_be_uploaded( @@ -298,7 +286,4 @@ def main(shared_queue: queue.Queue): logger.error(f"stuck in invalid state (uploading {src_date_string}): {e}") client.teardown() - elapsed_time = time.time() - start_time - time_to_wait = 5 - elapsed_time - if time_to_wait > 0: - time.sleep(time_to_wait) + time.sleep(60) From 5ca3c67f395f93d641a1ca120c2381fc2891ac46 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Tue, 16 Aug 2022 22:25:57 +0200 Subject: [PATCH 018/132] Refactoring - split modules and threads --- packages/core/__init__.py | 3 +-- packages/core/main.py | 8 ++++---- packages/core/modules/__init__.py | 2 -- packages/core/threads/__init__.py | 4 ++++ packages/core/{modules => threads}/helios_thread.py | 0 packages/core/{modules => threads}/upload_thread.py | 1 - run_headless_vbdsd_thread.py | 4 ++-- tests/helios/test_helios.py | 2 +- 8 files changed, 12 insertions(+), 12 deletions(-) create mode 100644 packages/core/threads/__init__.py rename packages/core/{modules => threads}/helios_thread.py (100%) rename 
packages/core/{modules => threads}/upload_thread.py (99%) diff --git a/packages/core/__init__.py b/packages/core/__init__.py index 33e52140..913c063d 100644 --- a/packages/core/__init__.py +++ b/packages/core/__init__.py @@ -1,3 +1,2 @@ from .utils.interfaces import config_validation -from . import modules -from . import main +from . import modules, main, threads diff --git a/packages/core/main.py b/packages/core/main.py index a5fda4ae..3544a82c 100644 --- a/packages/core/main.py +++ b/packages/core/main.py @@ -1,6 +1,6 @@ import os import time -from packages.core import modules +from packages.core import modules, threads from packages.core.utils import ( ConfigInterface, StateInterface, @@ -12,7 +12,7 @@ # TODO: document def toggle_thread_states( - config: dict, helios_thread_instance: modules.helios_thread.HeliosThread + config: dict, helios_thread_instance: threads.helios_thread.HeliosThread ): helios_should_be_running = all( [ @@ -82,8 +82,8 @@ def run(): modules.opus_measurement.OpusMeasurement(_CONFIG), modules.system_checks.SystemChecks(_CONFIG), ] - helios_thread_instance = modules.helios_thread.HeliosThread() - upload_thread_instance = modules.upload_thread.UploadThread() + helios_thread_instance = threads.helios_thread.HeliosThread() + upload_thread_instance = threads.upload_thread.UploadThread() current_exceptions = StateInterface.read(persistent=True)["current_exceptions"] diff --git a/packages/core/modules/__init__.py b/packages/core/modules/__init__.py index 10b3cebf..ef90122c 100644 --- a/packages/core/modules/__init__.py +++ b/packages/core/modules/__init__.py @@ -1,7 +1,5 @@ from . import ( enclosure_control, - helios_thread, - upload_thread, measurement_conditions, opus_measurement, sun_tracking, diff --git a/packages/core/threads/__init__.py b/packages/core/threads/__init__.py new file mode 100644 index 00000000..45db511a --- /dev/null +++ b/packages/core/threads/__init__.py @@ -0,0 +1,4 @@ +from . 
import ( + helios_thread, + upload_thread, +) diff --git a/packages/core/modules/helios_thread.py b/packages/core/threads/helios_thread.py similarity index 100% rename from packages/core/modules/helios_thread.py rename to packages/core/threads/helios_thread.py diff --git a/packages/core/modules/upload_thread.py b/packages/core/threads/upload_thread.py similarity index 99% rename from packages/core/modules/upload_thread.py rename to packages/core/threads/upload_thread.py index 45c03c53..926a4d0c 100644 --- a/packages/core/modules/upload_thread.py +++ b/packages/core/threads/upload_thread.py @@ -2,7 +2,6 @@ import hashlib import json import os -import queue import shutil import invoke import paramiko diff --git a/run_headless_vbdsd_thread.py b/run_headless_vbdsd_thread.py index a884328f..fd0ee7e6 100644 --- a/run_headless_vbdsd_thread.py +++ b/run_headless_vbdsd_thread.py @@ -1,6 +1,6 @@ import queue -from packages.core.modules.helios_thread import VBDSD_Thread +from packages.core.threads.helios_thread import HeliosThread if __name__ == "__main__": shared_queue = queue.Queue() - VBDSD_Thread.main(shared_queue, headless=True) + HeliosThread.main(shared_queue, headless=True) diff --git a/tests/helios/test_helios.py b/tests/helios/test_helios.py index 8cb1de2d..92f3b83a 100644 --- a/tests/helios/test_helios.py +++ b/tests/helios/test_helios.py @@ -1,4 +1,4 @@ -from packages.core.modules.helios_thread import HeliosThread +from packages.core.threads.helios_thread import HeliosThread import time From 27be5bcc0aa5dc764328e037f5b935be5aa419d2 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Tue, 16 Aug 2022 22:50:25 +0200 Subject: [PATCH 019/132] Refactoring - Create an abstract base class for threads --- packages/core/threads/__init__.py | 1 + packages/core/threads/abstract_thread_base.py | 44 +++++++++++++++++++ packages/core/threads/upload_thread.py | 37 ++++------------ 3 files changed, 53 insertions(+), 29 deletions(-) create mode 100644 packages/core/threads/abstract_thread_base.py diff --git a/packages/core/threads/__init__.py b/packages/core/threads/__init__.py index 45db511a..df7c9e2d 100644 --- a/packages/core/threads/__init__.py +++ b/packages/core/threads/__init__.py @@ -1,4 +1,5 @@ from . 
import ( + abstract_thread_base, helios_thread, upload_thread, ) diff --git a/packages/core/threads/abstract_thread_base.py b/packages/core/threads/abstract_thread_base.py new file mode 100644 index 00000000..29a9907a --- /dev/null +++ b/packages/core/threads/abstract_thread_base.py @@ -0,0 +1,44 @@ +import abc +import threading +from typing import Callable +from packages.core.utils.functions.logger import Logger + + +class AbstractThreadBase(abc.ABC): + """ + An abstract base class for thread classes used in PYRA + """ + + def __init__(self, logger_origin: str): + self.__thread = None + self.__logger = Logger(origin=logger_origin) + + def update_thread_state(self, config: dict): + """ + Make sure that the thread loop is (not) running, + based on config.upload + """ + self.config = config + + is_running = (self.__thread is not None) and self.__thread.is_alive() + should_be_running = self.should_be_running() + + if should_be_running and (not is_running): + self.__logger.info("Starting the thread") + self.__thread = threading.Thread(target=self.main) + self.__thread.start() + + if (self.__thread is not None) and (not is_running): + self.__logger.info("Thread has stopped") + self.__thread.join() + self.__thread = None + + @abc.abstractmethod + def should_be_running(self, config: dict) -> bool: + """Should the thread be running? (based on config.upload)""" + pass + + @abc.abstractmethod + def main(self): + """Main entrypoint of the thread""" + pass diff --git a/packages/core/threads/upload_thread.py b/packages/core/threads/upload_thread.py index 926a4d0c..effd5d0c 100644 --- a/packages/core/threads/upload_thread.py +++ b/packages/core/threads/upload_thread.py @@ -5,7 +5,6 @@ import shutil import invoke import paramiko -import threading import time import fabric import re @@ -13,6 +12,7 @@ ConfigInterface, Logger, ) +from packages.core.threads.abstract_thread_base import AbstractThreadBase logger = Logger(origin="upload") @@ -228,29 +228,11 @@ def get_directories_to_be_uploaded(ifg_src_path) -> list[str]: # TODO: simplify the whole file! -class UploadThread: +class UploadThread(AbstractThreadBase): def __init__(self): - self.__thread = None - - def update_thread_state(self, config: dict): - """ - Make sure that the upload loop is (not) running, based on config.upload - """ - is_running = self.__thread.is_alive() - should_be_running = UploadThread.should_be_running(config) - - if should_be_running and (not is_running): - logger.info("Starting the thread") - self.__thread = threading.Thread(target=UploadThread.main) - self.__thread.start() - - if (self.__thread is not None) and (not is_running): - logger.info("Thread has stopped") - self.__thread.join() - self.__thread = None - - @staticmethod - def should_be_running(config: dict) -> bool: + super().__init__(logger) + + def should_be_running(self, config: dict) -> bool: """Should the thread be running? 
(based on config.upload)""" return ( (not config["general"]["test_mode"]) @@ -258,16 +240,13 @@ def should_be_running(config: dict) -> bool: and (config["upload"]["is_active"]) ) - @staticmethod - def main(): - """ - Main entry point for the upload process - """ + def main(self): + """Main entrypoint of the thread""" while True: config = ConfigInterface.read() # Check for termination - if not UploadThread.should_be_running(config): + if not self.should_be_running(config): return # TODO: check for termination between loop iterations From e25ee3c387a45469a9fc38f88dc965fb7dd07031 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Tue, 16 Aug 2022 22:55:18 +0200 Subject: [PATCH 020/132] Refactoring - use abstract thread class in helios --- packages/core/main.py | 5 +-- packages/core/threads/helios_thread.py | 53 +++++++++----------------- 2 files changed, 19 insertions(+), 39 deletions(-) diff --git a/packages/core/main.py b/packages/core/main.py index 3544a82c..2c19ff1c 100644 --- a/packages/core/main.py +++ b/packages/core/main.py @@ -10,7 +10,7 @@ logger = Logger(origin="main") -# TODO: document +# TODO: remove def toggle_thread_states( config: dict, helios_thread_instance: threads.helios_thread.HeliosThread ): @@ -99,10 +99,9 @@ def run(): continue # TODO: add comment + helios_thread_instance.update_thread_state() upload_thread_instance.update_thread_state() - toggle_thread_states(_CONFIG, helios_thread_instance) - if _CONFIG["general"]["test_mode"]: logger.info("pyra-core in test mode") logger.debug("Skipping HeliosThread and UploadThread in test mode") diff --git a/packages/core/threads/helios_thread.py b/packages/core/threads/helios_thread.py index 552f0492..aaeb3e51 100644 --- a/packages/core/threads/helios_thread.py +++ b/packages/core/threads/helios_thread.py @@ -13,6 +13,7 @@ Astronomy, ImageProcessing, ) +from packages.core.threads.abstract_thread_base import AbstractThreadBase logger = Logger(origin="helios") @@ -216,38 +217,22 @@ def run(save_image: bool) -> int: return _Helios.determine_frame_status(frame, save_image) -class HeliosThread: +class HeliosThread(AbstractThreadBase): def __init__(self): - self.__thread = None - self.__shared_queue = queue.Queue() - - def start(self): - """ - Start a thread using the multiprocessing library - """ - logger.info("Starting thread") - self.__thread = threading.Thread(target=HeliosThread.main, args=(self.__shared_queue,)) - self.__thread.start() - - def is_running(self): - return self.__thread is not None - - def stop(self): - """ - Stop the thread and set the state to 'null' - """ - - logger.info("Sending termination signal") - self.__shared_queue.put("stop") - - logger.info("Waiting for thread to terminate") - self.__thread.join() - StateInterface.update({"helios_indicates_good_conditions": None}) - self.__thread = None - logger.info("Stopped the thread") + super().__init__(logger) + + # FIXME: update this logic (the code has just been copied from upload) + def should_be_running(self, config: dict) -> bool: + """Should the thread be running? 
(based on config.upload)""" + return ( + (not config["general"]["test_mode"]) + and ("upload" in config.keys()) + and (config["upload"]["is_active"]) + ) - @staticmethod - def main(shared_queue: queue.Queue, infinite_loop: bool = True, headless: bool = False): + # TODO: Update tests/headless mode to comply with new class structure + def main(self, infinite_loop: bool = True, headless: bool = False): + """Main entrypoint of the thread""" global logger global _CONFIG @@ -263,12 +248,8 @@ def main(shared_queue: queue.Queue, infinite_loop: bool = True, headless: bool = while True: # Check for termination - try: - if shared_queue.get(block=False) == "stop": - _Helios.deinit() - break - except queue.Empty: - pass + if not self.should_be_running(_CONFIG): + return try: start_time = time.time() From 4a14e9f81de245d4f67ff2671e739d032b2fb130 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Tue, 16 Aug 2022 22:59:40 +0200 Subject: [PATCH 021/132] Refactoring - remove old start-/stop-thread logic --- packages/core/main.py | 26 ++------------------------ packages/core/threads/helios_thread.py | 5 ++--- packages/core/threads/upload_thread.py | 2 +- 3 files changed, 5 insertions(+), 28 deletions(-) diff --git a/packages/core/main.py b/packages/core/main.py index 2c19ff1c..1559120b 100644 --- a/packages/core/main.py +++ b/packages/core/main.py @@ -10,29 +10,6 @@ logger = Logger(origin="main") -# TODO: remove -def toggle_thread_states( - config: dict, helios_thread_instance: threads.helios_thread.HeliosThread -): - helios_should_be_running = all( - [ - not config["general"]["test_mode"], - config["helios"] is not None, - config["measurement_triggers"]["consider_helios"], - ] - ) - - if config["general"]["test_mode"]: - logger.info("pyra-core in test mode") - logger.debug("Skipping HeliosThread and UploadThread in test mode") - - # Start/stop HeliosThread - if helios_should_be_running and not helios_thread_instance.is_running(): - helios_thread_instance.start() - if not helios_should_be_running and helios_thread_instance.is_running(): - helios_thread_instance.stop() - - # TODO: document def update_exception_state( config: dict, current_exceptions: list[str], new_exception: Exception @@ -98,7 +75,8 @@ def run(): time.sleep(10) continue - # TODO: add comment + # check whether the two threads are (not) running + # possibly (re)start each thread helios_thread_instance.update_thread_state() upload_thread_instance.update_thread_state() diff --git a/packages/core/threads/helios_thread.py b/packages/core/threads/helios_thread.py index aaeb3e51..e31dcbbf 100644 --- a/packages/core/threads/helios_thread.py +++ b/packages/core/threads/helios_thread.py @@ -221,13 +221,12 @@ class HeliosThread(AbstractThreadBase): def __init__(self): super().__init__(logger) - # FIXME: update this logic (the code has just been copied from upload) def should_be_running(self, config: dict) -> bool: """Should the thread be running? 
(based on config.upload)""" return ( (not config["general"]["test_mode"]) - and ("upload" in config.keys()) - and (config["upload"]["is_active"]) + and (config["helios"] is not None) + and (config["measurement_triggers"]["consider_helios"]) ) # TODO: Update tests/headless mode to comply with new class structure diff --git a/packages/core/threads/upload_thread.py b/packages/core/threads/upload_thread.py index effd5d0c..132b373a 100644 --- a/packages/core/threads/upload_thread.py +++ b/packages/core/threads/upload_thread.py @@ -236,7 +236,7 @@ def should_be_running(self, config: dict) -> bool: """Should the thread be running? (based on config.upload)""" return ( (not config["general"]["test_mode"]) - and ("upload" in config.keys()) + and (config["upload"] is not None) and (config["upload"]["is_active"]) ) From 0632f2363060d3cd4ddf280ea843b054a36d189e Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Tue, 16 Aug 2022 23:11:52 +0200 Subject: [PATCH 022/132] Refactoring - add more comments to mainloop --- packages/core/main.py | 44 ++++++++++++++++++++++++++++++------------- 1 file changed, 31 insertions(+), 13 deletions(-) diff --git a/packages/core/main.py b/packages/core/main.py index 1559120b..95d732b9 100644 --- a/packages/core/main.py +++ b/packages/core/main.py @@ -40,25 +40,39 @@ def update_exception_state( def run(): + """ + The mainloop of PYRA Core. This function will loop infinitely. + It loads the config file, validates it runs every module one by + one, and possibly restarts the upload- and helios-thread. + """ StateInterface.initialize() logger.info(f"Starting mainloop inside process with PID {os.getpid()}") + # Loop until a valid config has been found. Without + # an invalid config, the mainloop cannot initialize while True: try: - _CONFIG = ConfigInterface.read() + config = ConfigInterface.read() break except AssertionError as e: logger.error(f"{e}") logger.error(f"Invalid config, waiting 10 seconds") time.sleep(10) - _modules = [ - modules.measurement_conditions.MeasurementConditions(_CONFIG), - modules.enclosure_control.EnclosureControl(_CONFIG), - modules.sun_tracking.SunTracking(_CONFIG), - modules.opus_measurement.OpusMeasurement(_CONFIG), - modules.system_checks.SystemChecks(_CONFIG), + # these modules will be executed one by one in each + # mainloop iteration + mainloop_modules = [ + modules.measurement_conditions.MeasurementConditions(config), + modules.enclosure_control.EnclosureControl(config), + modules.sun_tracking.SunTracking(config), + modules.opus_measurement.OpusMeasurement(config), + modules.system_checks.SystemChecks(config), ] + + # these thread classes always exist and start their + # dedicated mainloop in a parallel thread if the + # respective service is configured. 
The threads itself + # load the config periodically and stop themselves helios_thread_instance = threads.helios_thread.HeliosThread() upload_thread_instance = threads.upload_thread.UploadThread() @@ -68,8 +82,9 @@ def run(): start_time = time.time() logger.info("Starting iteration") + # load config at the beginning of each mainloop iteration try: - _CONFIG = ConfigInterface.read() + config = ConfigInterface.read() except AssertionError as e: logger.error(f"Invalid config, waiting 10 seconds") time.sleep(10) @@ -80,14 +95,17 @@ def run(): helios_thread_instance.update_thread_state() upload_thread_instance.update_thread_state() - if _CONFIG["general"]["test_mode"]: + if config["general"]["test_mode"]: logger.info("pyra-core in test mode") logger.debug("Skipping HeliosThread and UploadThread in test mode") + # loop over every module, when one of the modules + # encounters an exception, this inner loop stops + # and the exception will be processed (logs, emails) new_exception = None try: - for module in _modules: - module.run(_CONFIG) + for m in mainloop_modules: + m.run(config) except Exception as e: new_exception = e logger.exception(new_exception) @@ -95,12 +113,12 @@ def run(): # update the list of currently present exceptions # send error emails on new exceptions, send resolved # emails when no errors are present anymore - current_exceptions = update_exception_state(_CONFIG, current_exceptions, new_exception) + current_exceptions = update_exception_state(config, current_exceptions, new_exception) # wait rest of loop time logger.info("Ending iteration") elapsed_time = time.time() - start_time - time_to_wait = _CONFIG["general"]["seconds_per_core_interval"] - elapsed_time + time_to_wait = config["general"]["seconds_per_core_interval"] - elapsed_time if time_to_wait > 0: logger.debug(f"Waiting {round(time_to_wait, 2)} second(s)") time.sleep(time_to_wait) From fe952d86c5661a136f1dce2fa1a216fd982d3861 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Tue, 16 Aug 2022 23:22:15 +0200 Subject: [PATCH 023/132] Refactoring - add even more comments to mainloop --- packages/core/main.py | 29 +++++++++++++++++++++-------- 1 file changed, 21 insertions(+), 8 deletions(-) diff --git a/packages/core/main.py b/packages/core/main.py index 95d732b9..a95bb06f 100644 --- a/packages/core/main.py +++ b/packages/core/main.py @@ -10,33 +10,46 @@ logger = Logger(origin="main") -# TODO: document + def update_exception_state( config: dict, current_exceptions: list[str], new_exception: Exception ): + """ + Take a list of current_exceptions (all exceptions that are + present from the last mainloop iteration, possibly empty) and + a new_exception (the one that happened in this loop, possibly + None). + + If the new_exception is None, all exceptions have been resolved + resolved: send a "resolved" email in case the current_exceptions + was not empty yet. + + If the new_exception is not None, if it is not already in the + list of current_exceptions: append it to that list and send a + "new error occured" email. 
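The bookkeeping described in this docstring is easier to follow as a small worked example. The sketch below is not the module itself - the email client, logging and persistence are stubbed out - it only replays the list transitions over three mainloop iterations:

def track_exceptions(current: list[str], new_exception: Exception | None) -> list[str]:
    # simplified restatement of the rules above, keeping only the list logic
    updated = [*current]
    if new_exception is not None:
        name = type(new_exception).__name__
        if name not in current:
            updated.append(name)  # here the real code sends a "new error occurred" email
    elif len(current) > 0:
        updated = []              # here the real code sends a "resolved" email
    return updated

exceptions: list[str] = []
exceptions = track_exceptions(exceptions, TimeoutError())  # -> ["TimeoutError"], error email
exceptions = track_exceptions(exceptions, TimeoutError())  # -> ["TimeoutError"], no duplicate email
exceptions = track_exceptions(exceptions, None)            # -> [], resolved email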
+ """ try: - new_current_exceptions = [*current_exceptions] - + updated_current_exceptions = [*current_exceptions] if new_exception is not None: if type(new_exception).__name__ not in current_exceptions: - new_current_exceptions.append(type(new_exception).__name__) + updated_current_exceptions.append(type(new_exception).__name__) ExceptionEmailClient.handle_occured_exception(config, new_exception) if len(current_exceptions) == 0: Logger.log_activity_event("error-occured") else: if len(current_exceptions) > 0: - new_current_exceptions = [] + updated_current_exceptions = [] ExceptionEmailClient.handle_resolved_exception(config) logger.info(f"All exceptions have been resolved.") Logger.log_activity_event("errors-resolved") # if no errors until now - current_exceptions = [*new_current_exceptions] StateInterface.update({"current_exceptions": current_exceptions}, persistent=True) + return updated_current_exceptions + except Exception as e: logger.exception(e) - - return current_exceptions + return current_exceptions def run(): From 2af6fe585e560d2db73a21c3a8703f17aa4194a6 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Wed, 17 Aug 2022 00:10:39 +0200 Subject: [PATCH 024/132] Refactoring - code quality and documentation of upload thread --- packages/core/threads/upload_thread.py | 263 +++++++++++++++---------- 1 file changed, 163 insertions(+), 100 deletions(-) diff --git a/packages/core/threads/upload_thread.py b/packages/core/threads/upload_thread.py index 132b373a..54fbe763 100644 --- a/packages/core/threads/upload_thread.py +++ b/packages/core/threads/upload_thread.py @@ -25,6 +25,12 @@ class InvalidUploadState(Exception): class DirectoryUploadClient: + """ + This is the client that is concerned with uploading one + specific directory (YYYYMMDD). "self.run()" will perform + the actual upload process. + """ + def __init__(self, date_string: str, config: dict): self.connection = fabric.connection.Connection( f"{config['upload']['user']}@{config['upload']['host']}", @@ -47,22 +53,43 @@ def __init__(self, date_string: str, config: dict): self.meta_content: dict | None = None self.remove_src_after_upload: bool = config["upload"]["remove_src_after_upload"] - def create_remote_dir(self): - self.connection.run(f"mkdir {self.dst_dir_path}") - with open(self.src_meta_path, "w") as f: - json.dump( - { - "complete": False, - "fileList": [], - "createdTime": round(time.time(), 3), - "lastModifiedTime": round(time.time(), 3), - }, - f, - indent=4, - ) - self.transfer_process.put(self.src_meta_path, self.dst_meta_path) - - def get_remote_directory_checksum(self): + def __initialize_remote_dir(self): + """ + If the respective dst directory does not exist, + create the directory and add a fresh upload-meta.json + file to it looking like this: { + "complete": false, + "fileList": [], + "createdTime": , + "lastModifiedTime": + } + """ + if not self.transfer_process.is_remote_dir(self.dst_dir_path): + self.connection.run(f"mkdir {self.dst_dir_path}") + with open(self.src_meta_path, "w") as f: + json.dump( + { + "complete": False, + "fileList": [], + "createdTime": round(time.time(), 3), + "lastModifiedTime": round(time.time(), 3), + }, + f, + indent=4, + ) + self.transfer_process.put(self.src_meta_path, self.dst_meta_path) + + def __get_remote_directory_checksum(self): + """ + Calculate checksum over all files listed in the + upload-meta.json file. The same logic will run + on the local machine - which also has a meta file + in its src directory with the same contents. 
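The scripts/get_upload_dir_checksum.py file referenced here is not part of this diff. A plausible sketch of such a script - assuming it receives the directory path as its only command line argument and mirrors the local hashing logic exactly:

# sketch of scripts/get_upload_dir_checksum.py (assumed layout, not the actual file)
import hashlib
import json
import os
import sys

if __name__ == "__main__":
    dir_path = sys.argv[1]
    with open(os.path.join(dir_path, "upload-meta.json")) as f:
        meta = json.load(f)

    hasher = hashlib.md5()
    for filename in sorted(meta["fileList"]):
        with open(os.path.join(dir_path, filename), "rb") as f:
            hasher.update(f.read())

    # the caller treats stdout as the checksum; anything else is parsed as a traceback
    print(hasher.hexdigest())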
+ + This script requires the server to have Python + 3.10 installed and will raise an exception if its + not present. + """ local_script_path = os.path.join(PROJECT_DIR, "scripts", "get_upload_dir_checksum.py") remote_script_path = ( self.config["upload"]["src_directory"] + "/get_upload_dir_checksum.py" @@ -84,8 +111,13 @@ def get_remote_directory_checksum(self): f"could not execute remote command on server ({remote_command}): {e}" ) - def get_local_directory_checksum(self): - # calculate checksum over all files (sorted) + def __get_local_directory_checksum(self): + """ + Calculate checksum over all files listed in the + upload-meta.json file. The same logic will run + on the server - which also has a meta file in + its dst directory with the same contents + """ hasher = hashlib.md5() for filename in sorted(self.meta_content["fileList"]): filepath = os.path.join(self.src_dir_path, filename) @@ -96,7 +128,10 @@ def get_local_directory_checksum(self): # stdout is a checksum, otherwise it is a traceback return hasher.hexdigest() - def fetch_meta(self): + def __fetch_meta(self): + """ + Download the remote meta file to the local src directory + """ if os.path.isfile(self.src_meta_path): os.remove(self.src_meta_path) self.transfer_process.get(self.dst_meta_path, self.src_meta_path) @@ -105,10 +140,13 @@ def fetch_meta(self): with open(self.src_meta_path, "r") as f: self.meta_content = json.load(f) except (AssertionError, json.JSONDecodeError) as e: - # TODO: log/report this exception and continue with other directories raise InvalidUploadState(str(e)) - def update_meta(self, new_meta_content_partial: dict): + def __update_meta(self, new_meta_content_partial: dict): + """ + Update the local upload-meta.json file and overwrite + the meta file on the server + """ new_meta_content = { **self.meta_content, **new_meta_content_partial, @@ -120,16 +158,30 @@ def update_meta(self, new_meta_content_partial: dict): self.meta_content = new_meta_content def run(self): - # possibly initialize remote dir, fetch remote meta - if not self.transfer_process.is_remote_dir(self.dst_dir_path): - self.create_remote_dir() - self.fetch_meta() + """ + Perform the whole upload process for a given directory. + + 1. If the respective remote directory doesn't exist, create it + 2. Download the current upload-meta.json file from the server + 3. Determine which files have not been uploaded yet + 4. Upload every file that is found locally but not in the remote + meta. Update the remote meta every 25 uploaded files (reduces + load and traffic). + 5. Test whether the checksums of "ifgs on server" and "local ifgs" + are equal, raise an exception (and end the function) if the differ + 6. Indicate that the upload process is complete in remote meta + 7. Optionally remove local ifgs + """ + + self.__initialize_remote_dir() + self.__fetch_meta() assert self.meta_content is not None # determine files present in src and dst directory - ifg_file_patter = re.compile("^.*" + self.date_string + ".*\.\d{4}$") + # ifg files should be named like "YYYYMMDD." 
+ ifg_file_pattern = re.compile("^.*" + self.date_string + ".*\.\d{2,6}$") src_file_set = set( - [f for f in os.listdir(self.src_dir_path) if ifg_file_patter.match(f)] + [f for f in os.listdir(self.src_dir_path) if ifg_file_pattern.match(f)] ) dst_file_set = set(self.meta_content["fileList"]) @@ -144,56 +196,46 @@ def run(self): # if there are files that have not been uploaded, # assert that the remote meta also indicates an # incomplete upload state - if len(files_missing_in_dst) != 0: - if self.meta_content["complete"]: - raise InvalidUploadState( - "there are missing files but remote " + "meta contains complete=True" - ) + if (len(files_missing_in_dst) != 0) and self.meta_content["complete"]: + raise InvalidUploadState( + "missing files on dst but remote meta contains complete=True" + ) # upload every file that is missing in the remote # meta but present in the local directory. Every 25 # files, upload the remote meta file on which files # have been uploaded - upload_count = 0 - uploaded_files: list[str] = [] - while True: + upload_is_finished = False + while not upload_is_finished: try: f = files_missing_in_dst.pop() + self.transfer_process.put( + os.path.join(self.src_dir_path, f), f"{self.dst_dir_path}/{f}" + ) + self.meta_content["fileList"].append(f) except KeyError: - break - self.transfer_process.put( - os.path.join(self.src_dir_path, f), f"{self.dst_dir_path}/{f}" + upload_is_finished = True + + if (self.meta_content["fileList"] % 25 == 0) or upload_is_finished: + self.__update_meta({"fileList": self.meta_content["fileList"]}) + + # raise an exception if the checksums do not match + remote_checksum = self.__get_remote_directory_checksum() + local_checksum = self.__get_local_directory_checksum() + if remote_checksum != local_checksum: + raise InvalidUploadState( + f"checksums do not match, local={local_checksum} " + + f"remote={remote_checksum}" ) - uploaded_files.append(f) - upload_count += 1 - if upload_count % 25 == 0: - self.update_meta( - { - "fileList": [*(self.meta_content["fileList"]), *uploaded_files], - } - ) - uploaded_files = [] - - # update remote meta with the final files and set - # "complete" to True. 
This indicates that - self.update_meta( - { - "fileList": [*(self.meta_content["fileList"]), *uploaded_files], - } - ) + # only set meta.complet to True, when the checksums match + self.__update_meta({"complete": True}) + logger.debug(f"successfully uploaded {self.date_string}") + + # only remove src if configured and checksums match if self.remove_src_after_upload: - remote_checksum = self.get_remote_directory_checksum() - local_checksum = self.get_local_directory_checksum() - if remote_checksum == local_checksum: - self.update_meta({"complete": True}) - shutil.rmtree(self.src_dir_path) - logger.debug("successfully removed source") - else: - raise InvalidUploadState( - f"checksums do not match, local={local_checksum} " - + f"remote={remote_checksum}" - ) + shutil.rmtree(self.src_dir_path) + logger.debug("successfully removed source") else: logger.debug("skipping removal of source") @@ -201,34 +243,52 @@ def teardown(self): """close ssh and scp connection""" self.connection.close() - -def is_valid_date(date_string: str): - try: - day_ending = datetime.strptime(f"{date_string} 23:59:59", "%Y%m%d %H:%M:%S") - seconds_since_day_ending = (datetime.now() - day_ending).total_seconds() - assert seconds_since_day_ending >= 3600 - return True - except (ValueError, AssertionError): - return False - - -def get_directories_to_be_uploaded(ifg_src_path) -> list[str]: - if not os.path.isdir(ifg_src_path): - return [] - - return list( - filter( - lambda f: os.path.isdir(os.path.join(ifg_src_path, f)) and is_valid_date(f), - os.listdir(ifg_src_path), + @staticmethod + def __is_valid_date(date_string: str): + try: + day_ending = datetime.strptime(f"{date_string} 23:59:59", "%Y%m%d %H:%M:%S") + seconds_since_day_ending = (datetime.now() - day_ending).total_seconds() + assert seconds_since_day_ending >= 3600 + return True + except (ValueError, AssertionError): + return False + + @staticmethod + def get_directories_to_be_uploaded(ifg_src_path) -> list[str]: + if not os.path.isdir(ifg_src_path): + return [] + + return list( + filter( + lambda f: os.path.isdir(os.path.join(ifg_src_path, f)) + and DirectoryUploadClient.__is_valid_date(f), + os.listdir(ifg_src_path), + ) ) - ) - - -# TODO: draw architecture of the upload -# TODO: simplify the whole file! class UploadThread(AbstractThreadBase): + """ + Thread for uloading all interferograms from a specific + directory to a server via SSH. The local files will only + be removed (optional) if the files on the server generate + the same MD5 checksum as the local files. + + The source directory (where OPUS puts the interferograms) + can be configured with config.upload.src_directory. OPUS's + dst directory should be configured inside the macro file. + + The expected file structure looks like this: + 📁 + 📁 + 📄 + 📄 + 📁 + 📄 + 📄 + 📁 ... 
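For orientation, a hypothetical config.upload block built only from the keys this thread actually reads in the diff above. The destination path and any credential settings are left out because they are not visible in this patch, and every value shown is made up:

# hypothetical example values - not the project's actual schema
upload_config = {
    "is_active": True,
    "host": "203.0.113.10",
    "user": "pyra-upload",
    "src_directory": "C:\\ifg-upload",
    "remove_src_after_upload": False,
}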
+ """ + def __init__(self): super().__init__(logger) @@ -245,23 +305,26 @@ def main(self): while True: config = ConfigInterface.read() - # Check for termination - if not self.should_be_running(config): - return - - # TODO: check for termination between loop iterations - for src_date_string in get_directories_to_be_uploaded( + src_dates_strings = DirectoryUploadClient.get_directories_to_be_uploaded( config["upload"]["src_directory"] - ): + ) + for src_date_string in src_dates_strings: + + # check for termination before processing each directory + if not self.should_be_running(config): + return + try: - client = DirectoryUploadClient(src_date_string).run() - logger.info(f"successfully uploaded data from {src_date_string}") + client = DirectoryUploadClient(src_date_string) + client.run() except TimeoutError as e: logger.error(f"could not reach host (uploading {src_date_string}): {e}") except paramiko.ssh_exception.AuthenticationException as e: logger.error(f"failed to authenticate (uploading {src_date_string}): {e}") except InvalidUploadState as e: logger.error(f"stuck in invalid state (uploading {src_date_string}): {e}") + client.teardown() - time.sleep(60) + # Wait 10 minutes until checking all directories again + time.sleep(600) From 573794ce9481771fa118c165a74ba7a5dcbd99f5 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Wed, 17 Aug 2022 00:29:45 +0200 Subject: [PATCH 025/132] Refactoring - documentation of helios thread --- packages/core/threads/helios_thread.py | 73 ++++++++++++++++++++++---- packages/core/threads/upload_thread.py | 2 +- 2 files changed, 65 insertions(+), 10 deletions(-) diff --git a/packages/core/threads/helios_thread.py b/packages/core/threads/helios_thread.py index e31dcbbf..82855928 100644 --- a/packages/core/threads/helios_thread.py +++ b/packages/core/threads/helios_thread.py @@ -1,7 +1,5 @@ from datetime import datetime import os -import queue -import threading import time import cv2 as cv import numpy as np @@ -71,12 +69,20 @@ def init(camera_id: int, retries: int = 5): @staticmethod def deinit(): + """ + Possibly release the camera (linked over cv2.VideoCapture) + """ if _Helios.cam is not None: _Helios.cam.release() _Helios.cam = None @staticmethod def get_available_exposures() -> list[int]: + """ + Loop over every integer in [-20, ..., +20] and try to set + the camera exposure to each value. Return a list of integers + that the camera accepted as an exposure setting. + """ possible_values = [] for exposure in range(-20, 20): _Helios.cam.set(cv.CAP_PROP_EXPOSURE, exposure) @@ -95,9 +101,11 @@ def update_camera_settings( width: int = None, height: int = None, ): - # which settings are available depends on the camera model. - # however, this function will throw an AssertionError, when - # the value could not be changed + """ + Update the settings of the connected camera. Which settings are + available depends on the camera model. However, this function will + throw an AssertionError, when the value could not be changed. + """ properties = { "width": (cv.CAP_PROP_FRAME_WIDTH, width), "height": (cv.CAP_PROP_FRAME_HEIGHT, height), @@ -118,12 +126,20 @@ def update_camera_settings( ), f"could not set {property_name} to {value}, value is still at {new_value}" # throw away some images after changing settings. 
I don't know - # why this is necessary, but it resolved a lot of issues + # why this is necessary, but it resolves a lot of issues for _ in range(2): _Helios.cam.read() @staticmethod def take_image(retries: int = 10, trow_away_white_images: bool = True) -> cv.Mat: + """ + Take an image using the initialized camera. Raises an + AssertionError if camera has not been set up. + + Retries up to n times (camera can say "not possible") + and throws away all mostly white images (overexposed) + except when specified not to (used in autoexposure). + """ assert _Helios.cam is not None, "camera is not initialized yet" if not _Helios.cam.isOpened(): raise CameraError("camera is not open") @@ -139,8 +155,10 @@ def take_image(retries: int = 10, trow_away_white_images: bool = True) -> cv.Mat @staticmethod def adjust_exposure(): """ - set exposure to the value where the overall - mean pixel value color is closest to 100 + This function will loop over all available exposures and + take one image for each exposure. Then it sets exposure + to the value where the overall mean pixel value color is + closest to 50. """ exposure_results = [] for e in _Helios.available_exposures: @@ -164,6 +182,19 @@ def adjust_exposure(): @staticmethod def determine_frame_status(frame: cv.Mat, save_image: bool) -> int: + """ + For a given frame, determine whether the conditions are + good (direct sunlight, returns 1) or bad (diffuse light + or darkness, returns 0). + + 1. Downscale image (faster processing) + 2. Convert to grayscale image + 3. Determine position and size of circular opening + 4. Determine edges in image (canny edge filter) + 5. Only consider edges inside 0.9 * circleradius + 6. If number of edge-pixels is > x: return 1; else: return 0; + """ + # transform image from 1280x720 to 640x360 downscaled_image = cv.resize(frame, None, fx=0.5, fy=0.5) @@ -207,7 +238,11 @@ def determine_frame_status(frame: cv.Mat, save_image: bool) -> int: @staticmethod def run(save_image: bool) -> int: - # run autoexposure function every 3 minutes + """ + Take an image and evaluate the sun conditions. + + Run autoexposure function every 3 minutes. + """ now = time.time() if (now - _Helios.last_autoexposure_time) > 180: _Helios.adjust_exposure() @@ -218,6 +253,26 @@ def run(save_image: bool) -> int: class HeliosThread(AbstractThreadBase): + """ + Thread for determining the current sun conditions in a + parallel mainloop. + + "Good" sun conditions with respect to EM27 measurements + means direct sunlight, i.e. no clouds in front of the + sun. Interferograms recored in diffuse light conditions + result in a concentration timeseries (after retrieval) + with a very large standard deviation. + + Direct sunlight can be determined by "hard" shadows, i.e. + quick transitions between light and dark surfaces. This + thread periodically takes images in a special camera setup + and uses edge detected to determine how many hard shadows + it can find in the image. + + The result of this constant sunlight evaluation is written + to the StateInterface. 
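A condensed sketch of the six evaluation steps listed above, written against plain OpenCV. The real implementation delegates parts of this work to ImageProcessing, locates the circular opening instead of assuming it is centered, and uses tuned thresholds - the numbers below are placeholders:

import cv2 as cv
import numpy as np

def frame_status_sketch(frame: np.ndarray, edge_fraction_threshold: float = 0.02) -> int:
    # expects a BGR frame as returned by cv.VideoCapture.read()
    small = cv.resize(frame, None, fx=0.5, fy=0.5)                 # 1. downscale
    gray = cv.cvtColor(small, cv.COLOR_BGR2GRAY)                   # 2. grayscale
    height, width = gray.shape
    cx, cy, r = width // 2, height // 2, min(width, height) // 2   # 3. circle position/size (assumed centered)
    edges = cv.Canny(gray, 40, 120)                                # 4. edge detection
    mask = np.zeros_like(edges)
    cv.circle(mask, (cx, cy), int(r * 0.9), 255, -1)               # 5. keep edges inside 0.9 * radius
    edge_fraction = np.count_nonzero(edges & mask) / np.count_nonzero(mask)
    return 1 if edge_fraction > edge_fraction_threshold else 0     # 6. enough hard shadows -> good conditions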
+ """ + def __init__(self): super().__init__(logger) diff --git a/packages/core/threads/upload_thread.py b/packages/core/threads/upload_thread.py index 54fbe763..7b1bdfb2 100644 --- a/packages/core/threads/upload_thread.py +++ b/packages/core/threads/upload_thread.py @@ -269,7 +269,7 @@ def get_directories_to_be_uploaded(ifg_src_path) -> list[str]: class UploadThread(AbstractThreadBase): """ - Thread for uloading all interferograms from a specific + Thread for uploading all interferograms from a specific directory to a server via SSH. The local files will only be removed (optional) if the files on the server generate the same MD5 checksum as the local files. From b67d7100bc5d47d7660991d6811a12c47470ea1e Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Wed, 17 Aug 2022 10:14:23 +0200 Subject: [PATCH 026/132] Refactoring - documentation of interfaces (1) --- packages/core/modules/system_checks.py | 3 +- packages/core/threads/__init__.py | 1 - packages/core/threads/helios_thread.py | 2 +- packages/core/threads/upload_thread.py | 2 +- .../core/utils/interfaces/config_interface.py | 8 +- .../utils/interfaces/config_validation.py | 86 ++++++++++++++----- .../core/utils/interfaces/os_interface.py | 58 ++++++++----- .../core/utils/interfaces/plc_interface.py | 22 ++++- .../utils/interfaces/plc_specification.py | 21 ++--- .../core/utils/interfaces/state_interface.py | 76 +++++++++++----- 10 files changed, 191 insertions(+), 88 deletions(-) diff --git a/packages/core/modules/system_checks.py b/packages/core/modules/system_checks.py index c10f8a13..542b331f 100644 --- a/packages/core/modules/system_checks.py +++ b/packages/core/modules/system_checks.py @@ -1,5 +1,4 @@ -from packages.core.utils import Logger, OSInterface -from packages.core.utils.interfaces.state_interface import StateInterface +from packages.core.utils import Logger, OSInterface, StateInterface logger = Logger(origin="system-checks") diff --git a/packages/core/threads/__init__.py b/packages/core/threads/__init__.py index df7c9e2d..45db511a 100644 --- a/packages/core/threads/__init__.py +++ b/packages/core/threads/__init__.py @@ -1,5 +1,4 @@ from . 
import ( - abstract_thread_base, helios_thread, upload_thread, ) diff --git a/packages/core/threads/helios_thread.py b/packages/core/threads/helios_thread.py index 82855928..ff585513 100644 --- a/packages/core/threads/helios_thread.py +++ b/packages/core/threads/helios_thread.py @@ -11,7 +11,7 @@ Astronomy, ImageProcessing, ) -from packages.core.threads.abstract_thread_base import AbstractThreadBase +from .abstract_thread_base import AbstractThreadBase logger = Logger(origin="helios") diff --git a/packages/core/threads/upload_thread.py b/packages/core/threads/upload_thread.py index 7b1bdfb2..5fb0b354 100644 --- a/packages/core/threads/upload_thread.py +++ b/packages/core/threads/upload_thread.py @@ -12,7 +12,7 @@ ConfigInterface, Logger, ) -from packages.core.threads.abstract_thread_base import AbstractThreadBase +from .abstract_thread_base import AbstractThreadBase logger = Logger(origin="upload") diff --git a/packages/core/utils/interfaces/config_interface.py b/packages/core/utils/interfaces/config_interface.py index bc509f15..4384bc72 100644 --- a/packages/core/utils/interfaces/config_interface.py +++ b/packages/core/utils/interfaces/config_interface.py @@ -10,13 +10,15 @@ CONFIG_LOCK_PATH = os.path.join(PROJECT_DIR, "config", ".config.lock") -# TODO: Make config interface statically typed - - class ConfigInterface: @staticmethod @with_filelock(CONFIG_LOCK_PATH) def read() -> dict: + """ + Read the contents of the current config.json file. + The function will validate its integrity and raises + an AssertionError if the file is not valid. + """ file_is_valid, validation_exception = ConfigValidation.check_current_config_file() assert file_is_valid, str(validation_exception) with open(CONFIG_FILE_PATH, "r") as f: diff --git a/packages/core/utils/interfaces/config_validation.py b/packages/core/utils/interfaces/config_validation.py index 6d23de90..46d42766 100644 --- a/packages/core/utils/interfaces/config_validation.py +++ b/packages/core/utils/interfaces/config_validation.py @@ -62,7 +62,11 @@ def get_config_file_schema(strict: boolean): return { "general": DICT_SCHEMA( { - "seconds_per_core_interval": {"type": "number", "min": 5, "max": 600}, + "seconds_per_core_interval": { + "type": "number", + "min": 5, + "max": 600, + }, "test_mode": {"type": "boolean"}, "station_id": {"type": "string"}, "min_sun_elevation": {"type": "number", "min": 0, "max": 90}, @@ -84,7 +88,11 @@ def get_config_file_schema(strict: boolean): "executable_path": specs["file"], "learn_az_elev_path": specs["file"], "sun_intensity_path": specs["file"], - "motor_offset_threshold": {"type": "number", "min": -360, "max": 360}, + "motor_offset_threshold": { + "type": "number", + "min": -360, + "max": 360, + }, } ), "error_email": DICT_SCHEMA( @@ -97,7 +105,10 @@ def get_config_file_schema(strict: boolean): ), "measurement_decision": DICT_SCHEMA( { - "mode": {"type": "string", "allowed": ["automatic", "manual", "cli"]}, + "mode": { + "type": "string", + "allowed": ["automatic", "manual", "cli"], + }, "manual_decision_result": {"type": "boolean"}, "cli_decision_result": {"type": "boolean"}, } @@ -123,8 +134,16 @@ def get_config_file_schema(strict: boolean): { "camera_id": {"type": "integer", "min": 0, "max": 999999}, "evaluation_size": {"type": "integer", "min": 1, "max": 100}, - "seconds_per_interval": {"type": "number", "min": 5, "max": 600}, - "measurement_threshold": {"type": "number", "min": 0.1, "max": 1}, + "seconds_per_interval": { + "type": "number", + "min": 5, + "max": 600, + }, + "measurement_threshold": { + 
"type": "number", + "min": 0.1, + "max": 1, + }, "save_images": {"type": "boolean"}, } ), @@ -150,31 +169,48 @@ class CerberusException(Exception): class ConfigValidation: + """ + Functions used to validate config objects/files. + + All functions in here do not used filelocks because + higher level functions should do that. + """ + logging_handler = logger.error @staticmethod - def check( - content_object: dict, - partial_validation: bool = False, - ): + def check_dict( + content_object: dict, partial_validation: bool = False, validate_paths: bool = True + ) -> None: + """ + For a given config object, check its integrity. + + "partial_validation" means that keys can be missing. + This is used when updating the config via CLI, since + the errors given when updating should only concern + the passed properties). + + "validate_paths" means that paths (files and directories) + contained in the config object should be checked too - + whether they exist. This path-validation is skipped when + reading the config via CLI because the UI can and should + deal with invalid paths but not with an invalid structure. + + Does not return anything, only raises AssertionErrors. + """ validator = cerberus.Validator( - get_config_file_schema(strict=True), require_all=(not partial_validation) + get_config_file_schema(strict=validate_paths), + require_all=(not partial_validation), ) assert validator.validate(content_object), validator.errors # Add assertions that cannot be done with cerberus here @staticmethod - def check_structure(content_object: dict): + def check_current_config_file() -> Tuple[bool, str]: """ - Only validates whether the object has all required - keys and the correct value-datatypes. Not validations - like "file exists", etc. + Load the contents of the current config file and + validate its full integrity (with filepaths). """ - validator = cerberus.Validator(get_config_file_schema(strict=False), require_all=True) - assert validator.validate(content_object), validator.errors - - @staticmethod - def check_current_config_file() -> Tuple[bool, str]: try: assert os.path.isfile(CONFIG_FILE_PATH), "file does not exist" with open(CONFIG_FILE_PATH, "r") as f: @@ -183,21 +219,25 @@ def check_current_config_file() -> Tuple[bool, str]: except: raise AssertionError("file not in a valid json format") - ConfigValidation.check(content_object, partial_validation=False) + ConfigValidation.check_dict(content_object, partial_validation=False) return True, "" except Exception as e: ConfigValidation.logging_handler(f"Error in current config file: {e}") return False, e @staticmethod - def check_partial_config_string(content: str): + def check_partial_config_string(content: str) -> bool: + """ + For a given string, check whether its is a valid + partial config object. Used in CLI. 
+ """ try: try: - content_object = json.loads(content) + content_dict = json.loads(content) except: raise AssertionError("content not in a valid json format") - ConfigValidation.check(content_object, partial_validation=True) + ConfigValidation.check_dict(content_dict, partial_validation=True) return True except Exception as e: ConfigValidation.logging_handler(f"Error in new config string: {e}") diff --git a/packages/core/utils/interfaces/os_interface.py b/packages/core/utils/interfaces/os_interface.py index 9e9a9a5d..9148d99f 100644 --- a/packages/core/utils/interfaces/os_interface.py +++ b/packages/core/utils/interfaces/os_interface.py @@ -1,3 +1,4 @@ +from typing import Literal import psutil import datetime @@ -12,7 +13,7 @@ class StorageError(Exception): class OSInterface: @staticmethod - def get_cpu_usage() -> list: + def get_cpu_usage() -> list[float]: """returns cpu_percent for all cores -> list [cpu1%, cpu2%,...]""" return psutil.cpu_percent(interval=1, percpu=True) @@ -35,16 +36,19 @@ def get_disk_space() -> float: def validate_disk_space(): """Raises an error if the diskspace is less than 10%""" if OSInterface.get_disk_space() > 90: - raise StorageError("Disk space is less than 10%. This is bad for the OS stability.") + raise StorageError( + "Disk space is less than 10%. This is bad for the OS stability." + ) + # TODO: function is not working as expected. Needs revision. @staticmethod - def get_connection_status(ip: str) -> str: - """Checks the ip connection. - Takes IP as input as str: i.e. 10.10.0.4 - and returns status i.e. ESTABLISHED, CLOSED, SYN_SENT - returns NO_INFO if IP is not found. + def get_connection_status( + ip: str, + ) -> Literal["ESTABLISHED", "CLOSED", "SYN_SENT", "NO_INFO"]: + """ + Takes ip address as input str: i.e. 10.10.0.4 + Checks the ip connection for that address. """ - # TODO: function is not working as expected. Needs revision. connections = psutil.net_connections(kind="inet4") @@ -59,12 +63,14 @@ def get_connection_status(ip: str) -> str: return "NO_INFO" @staticmethod - def get_system_battery() -> float: - """Returns system battery in percent as a float. - Returns 100.0 if no battery is in the device.""" - if psutil.sensors_battery(): + def get_system_battery() -> int: + """ + Returns system battery in percent as an integer (1-100). + Returns 100 if device has no battery. + """ + if psutil.sensors_battery() is not None: return psutil.sensors_battery().percent - return 100.0 + return 100 @staticmethod def validate_system_battery(): @@ -76,16 +82,28 @@ def validate_system_battery(): ) @staticmethod - def get_last_boot_time(): + def get_last_boot_time() -> str: """Returns last OS boot time.""" - return datetime.datetime.fromtimestamp(psutil.boot_time()).strftime("%Y-%m-%d %H:%M:%S") + return datetime.datetime.fromtimestamp(psutil.boot_time()).strftime( + "%Y-%m-%d %H:%M:%S" + ) @staticmethod - def get_process_status(process_name: str) -> str: - """Takes a process name "*.exe" and returns its OS process status: - “running”, “paused”, “start_pending”, “pause_pending”, “continue_pending”, - “stop_pending” or “stopped”. - returns "not_found" if process is not found. + def get_process_status( + process_name: str, + ) -> Literal[ + "running", + "paused", + "start_pending", + "pause_pending", + "continue_pending", + "stop_pending", + "stopped", + "not_found", + ]: + """ + Takes a process name "*.exe" and returns its OS process + status (see return types). 
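Because the error types are now nested inside OSInterface, callers catch them through the class. A minimal sketch of how a caller such as the system-checks module might use the two validators (it would presumably report via its Logger rather than printing):

from packages.core.utils import OSInterface

try:
    OSInterface.validate_disk_space()
    OSInterface.validate_system_battery()
except OSInterface.StorageError as e:
    print(f"disk too full: {e}")
except OSInterface.LowEnergyError as e:
    print(f"battery too low: {e}")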
""" for p in psutil.process_iter(): if p.name() == process_name: diff --git a/packages/core/utils/interfaces/plc_interface.py b/packages/core/utils/interfaces/plc_interface.py index f18a1927..f70d7b7e 100644 --- a/packages/core/utils/interfaces/plc_interface.py +++ b/packages/core/utils/interfaces/plc_interface.py @@ -82,6 +82,7 @@ def to_dict(self): return out +# used when initializing the state.json file EMPTY_PLC_STATE = PLCState( actors=PLCActorsState(), control=PLCControlState(), @@ -116,13 +117,21 @@ def __init__(self, config: dict): # CONNECTION def update_config(self, new_config: dict): + """ + Update the internally used config (executed at the) + beginning of enclosure-control's run-function. + + Reconnecting to PLC, when IP has changed. + """ if self.config["tum_plc"]["ip"] != new_config["tum_plc"]["ip"]: logger.debug("PLC ip has changed, reconnecting now") + self.disconnect() + self.connect() self.config = new_config def connect(self) -> None: """ - Connects to the PLC Snap7 + Connects to the PLC Snap7. Times out after 30 seconds. """ self.plc = snap7.client.Client() start_time = time.time() @@ -158,7 +167,7 @@ def disconnect(self) -> None: self.plc.destroy() logger.debug("Disconnected ungracefully from PLC.") - def _is_connected(self) -> bool: + def __is_connected(self) -> bool: """ Checks whether PLC is connected """ @@ -172,12 +181,21 @@ def is_responsive(self) -> bool: # return self._read_bool(self.specification.state.rain) def cover_is_closed(self) -> bool: + """ + Reads the single value "state.cover_closed" + """ return self._read_bool(self.specification.state.cover_closed) def reset_is_needed(self) -> bool: + """ + Reads the single value "state.reset_needed" + """ return self._read_bool(self.specification.state.reset_needed) def get_cover_angle(self) -> int: + """ + Reads the single value "actors.current_angle" + """ return self._read_int(self.specification.actors.current_angle) def read(self) -> PLCState: diff --git a/packages/core/utils/interfaces/plc_specification.py b/packages/core/utils/interfaces/plc_specification.py index 0f1328ea..f45be8d4 100644 --- a/packages/core/utils/interfaces/plc_specification.py +++ b/packages/core/utils/interfaces/plc_specification.py @@ -61,9 +61,7 @@ class PLCSpecification: connections: PLCConnectionsSpecification -# TODO: Add correct variables for PLC -# TODO: Think of a better naming for this class/file - +# these are the pins used on the TUM-PLC for all functionality PLC_SPECIFICATION_VERSIONS: dict[int, PLCSpecification] = { 1: PLCSpecification( actors=PLCActorsSpecification( @@ -116,23 +114,20 @@ class PLCSpecification: reset=[3, 4, 1, 5], sync_to_tracker=[8, 8, 1, 1], ), - sensors=PLCSensorsSpecification( - humidity=[8, 22, 2], - temperature=[8, 16, 2] - ), + sensors=PLCSensorsSpecification(humidity=[8, 22, 2], temperature=[8, 16, 2]), state=PLCStateSpecification( cover_closed=[6, 16, 1, 1], - motor_failed= None, + motor_failed=None, rain=[3, 0, 1, 0], reset_needed=[3, 2, 1, 2], ups_alert=[8, 13, 1, 6], ), power=PLCPowerSpecification( - camera=[8, 8, 1, 4], #K5 Relay - computer= None, - heater=[8, 12, 1, 7], #K3 Relay - router= None, #not allowed - spectrometer=[8, 8, 1, 2], #K4 Relay + camera=[8, 8, 1, 4], # K5 Relay + computer=None, + heater=[8, 12, 1, 7], # K3 Relay + router=None, # not allowed + spectrometer=[8, 8, 1, 2], # K4 Relay ), connections=PLCConnectionsSpecification( camera=None, diff --git a/packages/core/utils/interfaces/state_interface.py b/packages/core/utils/interfaces/state_interface.py index 97bc528c..67c0fff8 
100644 --- a/packages/core/utils/interfaces/state_interface.py +++ b/packages/core/utils/interfaces/state_interface.py @@ -1,5 +1,6 @@ import json import os +import shutil from packages.core.utils import with_filelock, update_dict_recursively from .plc_interface import EMPTY_PLC_STATE @@ -16,42 +17,68 @@ PERSISTENT_STATE_FILE_PATH = os.path.join(PROJECT_DIR, "logs", "persistent-state.json") -# TODO: Rename as CoreStateInterface -# TODO: Documentation +EMPTY_STATE_OBJECT = { + "helios_indicates_good_conditions": None, + "measurements_should_be_running": False, + "enclosure_plc_readings": EMPTY_PLC_STATE.to_dict(), + "os_state": { + "cpu_usage": None, + "memory_usage": None, + "last_boot_time": None, + "filled_disk_space_fraction": None, + }, +} + +EMPTY_PERSISTENT_STATE_OBJECT = { + "active_opus_macro_id": None, + "current_exceptions": [], +} + +# TODO: Validate structure with cerberus (assertion) class StateInterface: @staticmethod @with_filelock(STATE_LOCK_PATH) - def initialize() -> None: - # possibly create runtime_data directory - if not os.path.exists(RUNTIME_DATA_PATH): - os.mkdir(RUNTIME_DATA_PATH) - - # write initial state.json file - new_state = { - "helios_indicates_good_conditions": None, - "measurements_should_be_running": False, - "enclosure_plc_readings": EMPTY_PLC_STATE.to_dict(), - "os_state": { - "cpu_usage": None, - "memory_usage": None, - "last_boot_time": None, - "filled_disk_space_fraction": None, - }, + def initialize(): + """ + This will create two files: + + 1. runtime-data/state.json: { + "helios_indicates_good_conditions": ..., + "measurements_should_be_running": ..., + "enclosure_plc_readings": {...}, + "os_state": {...} + } + + 2. logs/persistent-state.json: { + "active_opus_macro_id": ..., + "current_exceptions": [] } + + The state.json file will be cleared with every restart + of PYRA Core. The persistent-state.json will only be + created, when it does not exist yet. + """ + + # clear runtime-data directory + if os.path.exists(RUNTIME_DATA_PATH): + shutil.rmtree(RUNTIME_DATA_PATH) + os.mkdir(RUNTIME_DATA_PATH) + + # create the state file with open(STATE_FILE_PATH, "w") as f: - json.dump(new_state, f, indent=4) + json.dump(EMPTY_STATE_OBJECT, f, indent=4) - # persistent state will not be overwritten with a restart of pyra-core + # possibly create the persistent state file if not os.path.isfile(PERSISTENT_STATE_FILE_PATH): - new_persistent_state = {"active_opus_macro_id": None, "current_exceptions": []} with open(PERSISTENT_STATE_FILE_PATH, "w") as f: - json.dump(new_persistent_state, f, indent=4) + json.dump(EMPTY_PERSISTENT_STATE_OBJECT, f, indent=4) @staticmethod @with_filelock(STATE_LOCK_PATH) def read(persistent: bool = False) -> dict: + """Read the (persistent) state file and return its content""" file_path = PERSISTENT_STATE_FILE_PATH if persistent else STATE_FILE_PATH with open(file_path, "r") as f: return json.load(f) @@ -59,6 +86,11 @@ def read(persistent: bool = False) -> dict: @staticmethod @with_filelock(STATE_LOCK_PATH) def update(update: dict, persistent: bool = False): + """ + Update the (persistent) state file and return its content. + The update object should only include the properties to be + changed in contrast to containing the whole file. 
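Two examples of the partial updates described above - only the touched keys are passed, and everything else in the respective file is preserved by the recursive merge:

from packages.core.utils import StateInterface

# merge a new CPU reading into state.json without touching the other os_state keys
StateInterface.update({"os_state": {"cpu_usage": [12.5, 8.0, 3.1, 40.2]}})

# clear the exception list in the persistent file
StateInterface.update({"current_exceptions": []}, persistent=True)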
+ """ file_path = PERSISTENT_STATE_FILE_PATH if persistent else STATE_FILE_PATH with open(file_path, "r") as f: From ad52ff839697774fe46845be04c9dbb82a3e9193 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Wed, 17 Aug 2022 11:52:47 +0200 Subject: [PATCH 027/132] Refactoring - documentation of interfaces (2) --- packages/core/main.py | 4 +- packages/core/modules/enclosure_control.py | 26 +- .../core/modules/measurement_conditions.py | 5 +- packages/core/threads/abstract_thread_base.py | 3 +- packages/core/utils/interfaces/__init__.py | 5 +- .../core/utils/interfaces/os_interface.py | 21 +- .../core/utils/interfaces/plc_interface.py | 244 ++++++++++-------- 7 files changed, 171 insertions(+), 137 deletions(-) diff --git a/packages/core/main.py b/packages/core/main.py index a95bb06f..58b9fdfb 100644 --- a/packages/core/main.py +++ b/packages/core/main.py @@ -105,8 +105,8 @@ def run(): # check whether the two threads are (not) running # possibly (re)start each thread - helios_thread_instance.update_thread_state() - upload_thread_instance.update_thread_state() + helios_thread_instance.update_thread_state(config) + upload_thread_instance.update_thread_state(config) if config["general"]["test_mode"]: logger.info("pyra-core in test mode") diff --git a/packages/core/modules/enclosure_control.py b/packages/core/modules/enclosure_control.py index 0fa9e957..cf3524b7 100644 --- a/packages/core/modules/enclosure_control.py +++ b/packages/core/modules/enclosure_control.py @@ -1,23 +1,23 @@ import time from snap7.exceptions import Snap7Exception -from packages.core.utils import StateInterface, Logger, Astronomy, PLCInterface, PLCError +from packages.core.utils import StateInterface, Logger, Astronomy, PLCInterface logger = Logger(origin="enclosure-control") -class CoverError(Exception): - pass - - -class MotorFailedError(Exception): - pass - - class EnclosureControl: """ https://buildmedia.readthedocs.org/media/pdf/python-snap7/latest/python-snap7.pdf """ + @staticmethod + class CoverError(Exception): + pass + + @staticmethod + class MotorFailedError(Exception): + pass + def __init__(self, initial_config: dict): self.config = initial_config self.initialized = False @@ -80,7 +80,9 @@ def run(self, new_config: dict) -> None: self.auto_set_power_spectrometer() if self.plc_state.state.motor_failed: - raise MotorFailedError("URGENT: stop all actions, check cover in person") + raise EnclosureControl.MotorFailedError( + "URGENT: stop all actions, check cover in person" + ) # check PLC ip connection (single ping) if self.plc_interface.is_responsive(): @@ -112,7 +114,7 @@ def run(self, new_config: dict) -> None: now = time.time() seconds_since_error_occured = now - self.last_plc_connection_time if seconds_since_error_occured > 600: - raise PLCError("Snap7Exception persisting for 10+ minutes") + raise PLCInterface.PLCError("Snap7Exception persisting for 10+ minutes") else: logger.info( f"Snap7Exception persisting for {round(seconds_since_error_occured/60, 2)}" @@ -161,7 +163,7 @@ def wait_for_cover_closing(self, throw_error=True) -> None: elapsed_time = time.time() - start_time if elapsed_time > 60: if throw_error: - raise CoverError("Enclosure cover might be stuck.") + raise EnclosureControl.CoverError("Enclosure cover might be stuck.") break def auto_set_power_spectrometer(self) -> None: diff --git a/packages/core/modules/measurement_conditions.py b/packages/core/modules/measurement_conditions.py index 1e841779..955a320b 100644 --- a/packages/core/modules/measurement_conditions.py +++ 
b/packages/core/modules/measurement_conditions.py @@ -1,11 +1,12 @@ import datetime +from typing import Literal from packages.core.utils import Astronomy, StateInterface, Logger logger = Logger(origin="measurement-conditions") -def get_times_from_tuples(triggers: any): - +# TODO: add type annotation +def get_times_from_tuples(triggers) -> tuple[datetime.time, datetime.time, datetime.time]: now = datetime.datetime.now() current_time = datetime.time(now.hour, now.minute, now.second) start_time = datetime.time(**triggers["start_time"]) diff --git a/packages/core/threads/abstract_thread_base.py b/packages/core/threads/abstract_thread_base.py index 29a9907a..b48e4b67 100644 --- a/packages/core/threads/abstract_thread_base.py +++ b/packages/core/threads/abstract_thread_base.py @@ -1,6 +1,5 @@ import abc import threading -from typing import Callable from packages.core.utils.functions.logger import Logger @@ -21,7 +20,7 @@ def update_thread_state(self, config: dict): self.config = config is_running = (self.__thread is not None) and self.__thread.is_alive() - should_be_running = self.should_be_running() + should_be_running = self.should_be_running(config) if should_be_running and (not is_running): self.__logger.info("Starting the thread") diff --git a/packages/core/utils/interfaces/__init__.py b/packages/core/utils/interfaces/__init__.py index f1b2d30b..b8210a9c 100644 --- a/packages/core/utils/interfaces/__init__.py +++ b/packages/core/utils/interfaces/__init__.py @@ -1,5 +1,4 @@ from .state_interface import StateInterface from .config_interface import ConfigInterface -from .config_validation import ConfigValidation -from .plc_interface import PLCInterface, PLCError -from .os_interface import OSInterface, LowEnergyError, StorageError +from .plc_interface import PLCInterface +from .os_interface import OSInterface diff --git a/packages/core/utils/interfaces/os_interface.py b/packages/core/utils/interfaces/os_interface.py index 9148d99f..1cbe2844 100644 --- a/packages/core/utils/interfaces/os_interface.py +++ b/packages/core/utils/interfaces/os_interface.py @@ -3,15 +3,15 @@ import datetime -class LowEnergyError(Exception): - pass - - -class StorageError(Exception): - pass +class OSInterface: + @staticmethod + class StorageError(Exception): + """Raised when storage is more than 90% full""" + @staticmethod + class LowEnergyError(Exception): + """Raised when battery is less than 20% full""" -class OSInterface: @staticmethod def get_cpu_usage() -> list[float]: """returns cpu_percent for all cores -> list [cpu1%, cpu2%,...]""" @@ -36,7 +36,7 @@ def get_disk_space() -> float: def validate_disk_space(): """Raises an error if the diskspace is less than 10%""" if OSInterface.get_disk_space() > 90: - raise StorageError( + raise OSInterface.StorageError( "Disk space is less than 10%. This is bad for the OS stability." ) @@ -77,7 +77,7 @@ def validate_system_battery(): """Raises LowEnergyError if system battery runs lower than 20%.""" if psutil.sensors_battery(): if psutil.sensors_battery().percent < 20.0: - raise LowEnergyError( + raise OSInterface.LowEnergyError( "The battery of the system is below 20%. Please check the power supply." 
) @@ -110,3 +110,6 @@ def get_process_status( return p.status() return "not_found" + + +OSInterface.StorageError diff --git a/packages/core/utils/interfaces/plc_interface.py b/packages/core/utils/interfaces/plc_interface.py index f70d7b7e..0002fd1f 100644 --- a/packages/core/utils/interfaces/plc_interface.py +++ b/packages/core/utils/interfaces/plc_interface.py @@ -1,10 +1,9 @@ import dataclasses -import json import snap7 -from snap7.exceptions import Snap7Exception import time import os -from packages.core.utils import Logger, with_filelock, update_dict_recursively +from snap7.exceptions import Snap7Exception +from packages.core.utils import Logger, StateInterface from .plc_specification import PLC_SPECIFICATION_VERSIONS logger = Logger(origin="plc-interface") @@ -12,8 +11,7 @@ PROJECT_DIR = dir(dir(dir(dir(dir(os.path.abspath(__file__)))))) -class PLCError(Exception): - pass +# TODO: possibly rewrite this using typeddict @dataclasses.dataclass @@ -93,28 +91,24 @@ def to_dict(self): ) -# This duplication is required in order top prevent circular imports -STATE_LOCK_PATH = os.path.join(PROJECT_DIR, "config", ".state.lock") -RUNTIME_DATA_PATH = os.path.join(PROJECT_DIR, "runtime-data") -STATE_FILE_PATH = os.path.join(RUNTIME_DATA_PATH, "state.json") - - -@with_filelock(STATE_LOCK_PATH) -def update_state_file(update: dict): - with open(STATE_FILE_PATH, "r") as f: - current_state = json.load(f) - - new_state = update_dict_recursively(current_state, update) - with open(STATE_FILE_PATH, "w") as f: - json.dump(new_state, f, indent=4) +class PLCInterface: + @staticmethod + class PLCError(Exception): + """ + Raised when updating a boolean value on the + plc did not change its internal value. + Can originate from: + * set_power_camera/_computer/_heater/_router/_spectrometer + * set_sync_to_tracker/_manual_control + * set_auto_temperature/_manual_temperature + """ -class PLCInterface: def __init__(self, config: dict): self.config = config self.specification = PLC_SPECIFICATION_VERSIONS[config["tum_plc"]["version"]] - # CONNECTION + # CONNECTION/CLASS MANAGEMENT def update_config(self, new_config: dict): """ @@ -167,18 +161,14 @@ def disconnect(self) -> None: self.plc.destroy() logger.debug("Disconnected ungracefully from PLC.") - def __is_connected(self) -> bool: - """ - Checks whether PLC is connected - """ - return self.plc.get_connected() - def is_responsive(self) -> bool: """Pings the PLC""" return os.system("ping -n 1 " + self.config["tum_plc"]["ip"]) == 0 - # def rain_is_detected(self) -> bool: - # return self._read_bool(self.specification.state.rain) + # DIRECT READ FUNCTIONS + + def rain_is_detected(self) -> bool: + return self._read_bool(self.specification.state.rain) def cover_is_closed(self) -> bool: """ @@ -196,13 +186,17 @@ def get_cover_angle(self) -> int: """ Reads the single value "actors.current_angle" """ - return self._read_int(self.specification.actors.current_angle) + return self.__read_int(self.specification.actors.current_angle) + + # BULK READ def read(self) -> PLCState: """ Read the whole state of the PLC """ + # TODO: self.plc.read_multi_vars() + plc_db_content = {} if self.config["tum_plc"]["version"] == 1: plc_db_size = {3: 6, 8: 26, 25: 10} @@ -211,7 +205,7 @@ def read(self) -> PLCState: for db_index, db_size in plc_db_size.items(): plc_db_content[db_index] = self.plc.db_read(db_index, 0, db_size) - self._sleep_while_cpu_is_busy() + self.__sleep_while_cpu_is_busy() logger.debug(f"new plc bulk read: {plc_db_content}") @@ -223,50 +217,59 @@ def _get_bool(spec: list[int] | 
None) -> bool: return None return snap7.util.get_bool(plc_db_content[spec[0]], spec[1], spec[3]) + s = self.specification + return PLCState( actors=PLCActorsState( - fan_speed=_get_int(self.specification.actors.fan_speed), - current_angle=_get_int(self.specification.actors.current_angle), + fan_speed=_get_int(s.actors.fan_speed), + current_angle=_get_int(s.actors.current_angle), ), control=PLCControlState( - auto_temp_mode=_get_bool(self.specification.control.auto_temp_mode), - manual_control=_get_bool(self.specification.control.manual_control), - manual_temp_mode=_get_bool(self.specification.control.manual_temp_mode), - sync_to_tracker=_get_bool(self.specification.control.sync_to_tracker), + auto_temp_mode=_get_bool(s.control.auto_temp_mode), + manual_control=_get_bool(s.control.manual_control), + manual_temp_mode=_get_bool(s.control.manual_temp_mode), + sync_to_tracker=_get_bool(s.control.sync_to_tracker), ), sensors=PLCSensorsState( - humidity=_get_int(self.specification.sensors.humidity), - temperature=_get_int(self.specification.sensors.temperature), + humidity=_get_int(s.sensors.humidity), + temperature=_get_int(s.sensors.temperature), ), state=PLCStateState( - cover_closed=_get_bool(self.specification.state.cover_closed), - motor_failed=_get_bool(self.specification.state.motor_failed), - rain=_get_bool(self.specification.state.rain), - reset_needed=_get_bool(self.specification.state.reset_needed), - ups_alert=_get_bool(self.specification.state.ups_alert), + cover_closed=_get_bool(s.state.cover_closed), + motor_failed=_get_bool(s.state.motor_failed), + rain=_get_bool(s.state.rain), + reset_needed=_get_bool(s.state.reset_needed), + ups_alert=_get_bool(s.state.ups_alert), ), power=PLCPowerState( - camera=_get_bool(self.specification.power.camera), - computer=_get_bool(self.specification.power.computer), - heater=_get_bool(self.specification.power.heater), - router=_get_bool(self.specification.power.router), - spectrometer=_get_bool(self.specification.power.spectrometer), + camera=_get_bool(s.power.camera), + computer=_get_bool(s.power.computer), + heater=_get_bool(s.power.heater), + router=_get_bool(s.power.router), + spectrometer=_get_bool(s.power.spectrometer), ), connections=PLCConnectionsState( - camera=_get_bool(self.specification.connections.camera), - computer=_get_bool(self.specification.connections.computer), - heater=_get_bool(self.specification.connections.heater), - router=_get_bool(self.specification.connections.router), - spectrometer=_get_bool(self.specification.connections.spectrometer), + camera=_get_bool(s.connections.camera), + computer=_get_bool(s.connections.computer), + heater=_get_bool(s.connections.heater), + router=_get_bool(s.connections.router), + spectrometer=_get_bool(s.connections.spectrometer), ), ) - def _sleep_while_cpu_is_busy(self) -> None: + # LOW LEVEL READ FUNCTIONS + + def __sleep_while_cpu_is_busy(self) -> None: + """ + Initially sleeps 0.5 seconds. The checks every 2 seconds + whether the CPU of the PLC is still busy. End function + if the CPU is idle again. 
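A rough, self-contained illustration of what the _get_int/_get_bool helpers in the bulk read above boil down to; the DB buffer sizes and offsets here are made-up values, not taken from any PLC specification:

import snap7.util

# one raw byte buffer per DB, as returned by plc.db_read(db_index, 0, db_size)
plc_db_content = {8: bytearray(26)}

rain_spec = [8, 6, 1, 2]   # illustrative [db_number, start, size, bool_index]
temp_spec = [8, 16, 2]     # illustrative [db_number, start, size]

rain = snap7.util.get_bool(plc_db_content[rain_spec[0]], rain_spec[1], rain_spec[3])
temperature = snap7.util.get_int(plc_db_content[temp_spec[0]], temp_spec[1])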
+ """ time.sleep(0.5) if str(self.plc.get_cpu_state()) == "S7CpuStatusRun": time.sleep(2) - def _read_int(self, action: list[int]) -> int: + def __read_int(self, action: list[int]) -> int: """Reads an INT value in the PLC database.""" assert len(action) == 3 db_number, start, size = action @@ -274,11 +277,11 @@ def _read_int(self, action: list[int]) -> int: msg = self.plc.db_read(db_number, start, size) value = snap7.util.get_int(msg, 0) - self._sleep_while_cpu_is_busy() + self.__sleep_while_cpu_is_busy() return value - def _write_int(self, action: list[int], value: int) -> None: + def __write_int(self, action: list[int], value: int) -> None: """Changes an INT value in the PLC database.""" assert len(action) == 3 db_number, start, size = action @@ -287,9 +290,9 @@ def _write_int(self, action: list[int], value: int) -> None: snap7.util.set_int(msg, 0, value) self.plc.db_write(db_number, start, msg) - self._sleep_while_cpu_is_busy() + self.__sleep_while_cpu_is_busy() - def _read_bool(self, action: list[int]) -> bool: + def __read_bool(self, action: list[int]) -> bool: """Reads a BOOL value in the PLC database.""" assert len(action) == 4 db_number, start, size, bool_index = action @@ -297,11 +300,11 @@ def _read_bool(self, action: list[int]) -> bool: msg = self.plc.db_read(db_number, start, size) value = snap7.util.get_bool(msg, 0, bool_index) - self._sleep_while_cpu_is_busy() + self.__sleep_while_cpu_is_busy() return value - def _write_bool(self, action: list[int], value: bool) -> None: + def __write_bool(self, action: list[int], value: bool) -> None: """Changes a BOOL value in the PLC database.""" assert len(action) == 4 db_number, start, size, bool_index = action @@ -310,80 +313,107 @@ def _write_bool(self, action: list[int], value: bool) -> None: snap7.util.set_bool(msg, 0, bool_index, value) self.plc.db_write(db_number, start, msg) - self._sleep_while_cpu_is_busy() + self.__sleep_while_cpu_is_busy() # PLC.POWER SETTERS + def __update_bool(self, new_state: bool, spec: list[int], partial_plc_state: dict) -> None: + """ + 1. low-level direct-write new_state to PLC according to spec + 2. low-level direct-read of plc's value according to spec + 3. raise PLCInterface.PLCError if value is different + 4. 
write update to StateInterface if update was successful + """ + self.__write_bool(spec, new_state) + if self.__read_bool(spec) != new_state: + raise PLCInterface.PLCError("PLC state did not change") + + # TODO: check whether this results in a circular import + StateInterface.update({"enclosure_plc_readings": partial_plc_state}) + def set_power_camera(self, new_state: bool) -> None: - self._write_bool(self.specification.power.camera, new_state) - if self._read_bool(self.specification.power.camera) != new_state: - raise PLCError("PLC state did not change") - update_state_file({"enclosure_plc_readings": {"power": {"camera": new_state}}}) + """Raises PLCInterface.PLCError, if value hasn't been changed""" + self.__update_bool( + new_state, + self.specification.power.camera, + {"power": {"camera": new_state}}, + ) def set_power_computer(self, new_state: bool) -> None: - self._write_bool(self.specification.power.computer, new_state) - if self._read_bool(self.specification.power.computer) != new_state: - raise PLCError("PLC state did not change") - update_state_file({"enclosure_plc_readings": {"power": {"computer": new_state}}}) + """Raises PLCInterface.PLCError, if value hasn't been changed""" + self.__update_bool( + new_state, + self.specification.power.computer, + {"power": {"computer": new_state}}, + ) def set_power_heater(self, new_state: bool) -> None: - self._write_bool(self.specification.power.heater, new_state) - if self._read_bool(self.specification.power.heater) != new_state: - raise PLCError("PLC state did not change") - update_state_file({"enclosure_plc_readings": {"power": {"heater": new_state}}}) + """Raises PLCInterface.PLCError, if value hasn't been changed""" + self.__update_bool( + new_state, + self.specification.power.heater, + {"power": {"heater": new_state}}, + ) - def set_power_router(self, new_state: bool) -> None: - self._write_bool(self.specification.power.router, new_state) - if self._read_bool(self.specification.power.router) != new_state: - raise PLCError("PLC state did not change") - update_state_file({"enclosure_plc_readings": {"power": {"router": new_state}}}) + def set__power_router(self, new_state: bool) -> None: + """Raises PLCInterface.PLCError, if value hasn't been changed""" + self.__update_bool( + new_state, + self.specification.power.router, + {"power": {"router": new_state}}, + ) def set_power_spectrometer(self, new_state: bool) -> None: - self._write_bool(self.specification.power.spectrometer, new_state) - if self._read_bool(self.specification.power.spectrometer) != new_state: - raise PLCError("PLC state did not change") - update_state_file({"enclosure_plc_readings": {"power": {"spectrometer": new_state}}}) + """Raises PLCInterface.PLCError, if value hasn't been changed""" + self.__update_bool( + new_state, + self.specification.power.spectrometer, + {"power": {"spectrometer": new_state}}, + ) # PLC.CONTROL SETTERS def set_sync_to_tracker(self, new_state: bool) -> None: - self._write_bool(self.specification.control.sync_to_tracker, new_state) - if self._read_bool(self.specification.control.sync_to_tracker) != new_state: - raise PLCError("PLC state did not change") - update_state_file( - {"enclosure_plc_readings": {"control": {"sync_to_tracker": new_state}}} + """Raises PLCInterface.PLCError, if value hasn't been changed""" + self.__update_bool( + new_state, + self.specification.control.sync_to_tracker, + {"control": {"sync_to_tracker": new_state}}, ) def set_manual_control(self, new_state: bool) -> None: - 
self._write_bool(self.specification.control.manual_control, new_state) - if self._read_bool(self.specification.control.manual_control) != new_state: - raise PLCError("PLC state did not change") - update_state_file( - {"enclosure_plc_readings": {"control": {"manual_control": new_state}}} + """Raises PLCInterface.PLCError, if value hasn't been changed""" + self.__update_bool( + new_state, + self.specification.control.manual_control, + {"control": {"manual_control": new_state}}, ) def set_auto_temperature(self, new_state: bool) -> None: - self._write_bool(self.specification.control.auto_temp_mode, new_state) - if self._read_bool(self.specification.control.auto_temp_mode) != new_state: - raise PLCError("PLC state did not change") - update_state_file( - {"enclosure_plc_readings": {"control": {"auto_temp_mode": new_state}}} + """Raises PLCInterface.PLCError, if value hasn't been changed""" + self.__update_bool( + new_state, + self.specification.control.auto_temp_mode, + {"control": {"auto_temp_mode": new_state}}, ) def set_manual_temperature(self, new_state: bool) -> None: - self._write_bool(self.specification.control.manual_temp_mode, new_state) - if self._read_bool(self.specification.control.manual_temp_mode) != new_state: - raise PLCError("PLC state did not change") - update_state_file( - {"enclosure_plc_readings": {"control": {"manual_temp_mode": new_state}}} + """Raises PLCInterface.PLCError, if value hasn't been changed""" + self.__update_bool( + new_state, + self.specification.control.manual_temp_mode, + {"control": {"manual_temp_mode": new_state}}, ) def reset(self) -> None: + """Does not check, whether the value has been changed""" if self.config["tum_plc"]["version"] == 1: - self._write_bool(self.specification.control.reset, False) + self.__write_bool(self.specification.control.reset, False) else: - self._write_bool(self.specification.control.reset, True) + self.__write_bool(self.specification.control.reset, True) # PLC.ACTORS SETTERS + def set_cover_angle(self, value: int) -> None: - self._write_int(self.specification.actors.move_cover, value) + """Does not check, whether the value has been changed""" + self.__write_int(self.specification.actors.move_cover, value) From f4aa78f0fcdfbf1b5716153f2b9cc5b48e866de9 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Wed, 17 Aug 2022 12:14:43 +0200 Subject: [PATCH 028/132] #96 (1) - add dependencies --- poetry.lock | 69 +++++++++++++++++++++++++++++++++++- pyproject.toml | 4 +++ scripts/run_type_analysis.sh | 1 + 3 files changed, 73 insertions(+), 1 deletion(-) create mode 100644 scripts/run_type_analysis.sh diff --git a/poetry.lock b/poetry.lock index 3d616389..5dccf65d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -201,6 +201,24 @@ category = "main" optional = false python-versions = "*" +[[package]] +name = "mypy" +version = "0.971" +description = "Optional static typing for Python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +mypy-extensions = ">=0.4.3" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=3.10" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +python2 = ["typed-ast (>=1.4.0,<2)"] +reports = ["lxml"] + [[package]] name = "mypy-extensions" version = "0.4.3" @@ -451,10 +469,53 @@ category = "dev" optional = false python-versions = ">=3.7" +[[package]] +name = "types-cryptography" +version = "3.3.22" +description = "Typing stubs for cryptography" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = 
"types-invoke" +version = "1.7.3" +description = "Typing stubs for invoke" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "types-paramiko" +version = "2.11.3" +description = "Typing stubs for paramiko" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +types-cryptography = "*" + +[[package]] +name = "types-psutil" +version = "5.9.5" +description = "Typing stubs for psutil" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "typing-extensions" +version = "4.3.0" +description = "Backported and Experimental Type Hints for Python 3.7+" +category = "dev" +optional = false +python-versions = ">=3.7" + [metadata] lock-version = "1.1" python-versions = "^3.10" -content-hash = "abeac95184e7c7f394e0a51f1820148898111773675c16d809f2e1a7f5064db4" +content-hash = "41558403cd5eef318cc8bcf8d8350a6ead45476d6e12caf5cd0eff76a5a5ae2b" [metadata.files] astropy = [ @@ -607,6 +668,7 @@ jdcal = [ {file = "jdcal-1.4.1-py2.py3-none-any.whl", hash = "sha256:1abf1305fce18b4e8aa248cf8fe0c56ce2032392bc64bbd61b5dff2a19ec8bba"}, {file = "jdcal-1.4.1.tar.gz", hash = "sha256:472872e096eb8df219c23f2689fc336668bdb43d194094b5cc1707e1640acfc8"}, ] +mypy = [] mypy-extensions = [ {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, @@ -810,3 +872,8 @@ tomli = [ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] +types-cryptography = [] +types-invoke = [] +types-paramiko = [] +types-psutil = [] +typing-extensions = [] diff --git a/pyproject.toml b/pyproject.toml index 0baf5802..adbaee0c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,6 +22,10 @@ fabric = "^2.7.1" pytest = "7.1.2" black = "22.3.0" deepdiff = "^5.8.1" +mypy = "^0.971" +types-paramiko = "^2.11.3" +types-invoke = "^1.7.3" +types-psutil = "^5.9.5" [build-system] requires = ["poetry-core>=1.0.0"] diff --git a/scripts/run_type_analysis.sh b/scripts/run_type_analysis.sh new file mode 100644 index 00000000..a0a9d46f --- /dev/null +++ b/scripts/run_type_analysis.sh @@ -0,0 +1 @@ +python -m mypy run-pyra-core.py \ No newline at end of file From d565fd87dfbc28281023fa4c5b18e335cd36dd70 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Wed, 17 Aug 2022 12:15:13 +0200 Subject: [PATCH 029/132] #96 (2) - ignore incomplete library type sets --- packages/core/modules/enclosure_control.py | 2 +- packages/core/modules/sun_tracking.py | 2 +- packages/core/threads/helios_thread.py | 2 +- packages/core/threads/upload_thread.py | 2 +- packages/core/utils/functions/astronomy.py | 6 +-- .../utils/interfaces/config_validation.py | 4 +- .../core/utils/interfaces/plc_interface.py | 47 ++++++++++--------- 7 files changed, 33 insertions(+), 32 deletions(-) diff --git a/packages/core/modules/enclosure_control.py b/packages/core/modules/enclosure_control.py index cf3524b7..e3892019 100644 --- a/packages/core/modules/enclosure_control.py +++ b/packages/core/modules/enclosure_control.py @@ -1,5 +1,5 @@ import time -from snap7.exceptions import Snap7Exception +from snap7.exceptions import Snap7Exception # type: ignore from packages.core.utils import StateInterface, Logger, Astronomy, PLCInterface 
logger = Logger(origin="enclosure-control") diff --git a/packages/core/modules/sun_tracking.py b/packages/core/modules/sun_tracking.py index ad65b17b..51723a66 100644 --- a/packages/core/modules/sun_tracking.py +++ b/packages/core/modules/sun_tracking.py @@ -5,7 +5,7 @@ import os import sys import time -import jdcal +import jdcal # type: ignore import datetime from packages.core.utils import StateInterface, Logger, OSInterface diff --git a/packages/core/threads/helios_thread.py b/packages/core/threads/helios_thread.py index ff585513..49d833f0 100644 --- a/packages/core/threads/helios_thread.py +++ b/packages/core/threads/helios_thread.py @@ -1,7 +1,7 @@ from datetime import datetime import os import time -import cv2 as cv +import cv2 as cv # type: ignore import numpy as np from packages.core.utils import ( ConfigInterface, diff --git a/packages/core/threads/upload_thread.py b/packages/core/threads/upload_thread.py index 5fb0b354..d67aee73 100644 --- a/packages/core/threads/upload_thread.py +++ b/packages/core/threads/upload_thread.py @@ -6,7 +6,7 @@ import invoke import paramiko import time -import fabric +import fabric # type: ignore import re from packages.core.utils import ( ConfigInterface, diff --git a/packages/core/utils/functions/astronomy.py b/packages/core/utils/functions/astronomy.py index a19523e3..3cf80416 100644 --- a/packages/core/utils/functions/astronomy.py +++ b/packages/core/utils/functions/astronomy.py @@ -1,6 +1,6 @@ -import astropy.coordinates as astropy_coordinates -import astropy.time as astropy_time -import astropy.units as astropy_units +import astropy.coordinates as astropy_coordinates # type: ignore +import astropy.time as astropy_time # type: ignore +import astropy.units as astropy_units # type: ignore class Astronomy: diff --git a/packages/core/utils/interfaces/config_validation.py b/packages/core/utils/interfaces/config_validation.py index 46d42766..ab999e23 100644 --- a/packages/core/utils/interfaces/config_validation.py +++ b/packages/core/utils/interfaces/config_validation.py @@ -1,8 +1,8 @@ import json import os -from typing import Any, Tuple +from typing import Tuple from xmlrpc.client import boolean -import cerberus +import cerberus # type: ignore from packages.core.utils import Logger dir = os.path.dirname diff --git a/packages/core/utils/interfaces/plc_interface.py b/packages/core/utils/interfaces/plc_interface.py index 0002fd1f..98e9e85f 100644 --- a/packages/core/utils/interfaces/plc_interface.py +++ b/packages/core/utils/interfaces/plc_interface.py @@ -1,8 +1,9 @@ import dataclasses -import snap7 +from typing import Optional +import snap7 # type: ignore import time import os -from snap7.exceptions import Snap7Exception +from snap7.exceptions import Snap7Exception # type: ignore from packages.core.utils import Logger, StateInterface from .plc_specification import PLC_SPECIFICATION_VERSIONS @@ -16,16 +17,16 @@ @dataclasses.dataclass class PLCActorsState: - current_angle: int = None - fan_speed: int = None + current_angle: Optional[int] = None + fan_speed: Optional[int] = None @dataclasses.dataclass class PLCControlState: - auto_temp_mode: bool = None - manual_control: bool = None - manual_temp_mode: bool = None - sync_to_tracker: bool = None + auto_temp_mode: Optional[bool] = None + manual_control: Optional[bool] = None + manual_temp_mode: Optional[bool] = None + sync_to_tracker: Optional[bool] = None @dataclasses.dataclass @@ -36,29 +37,29 @@ class PLCSensorsState: @dataclasses.dataclass class PLCStateState: - cover_closed: bool = None - 
motor_failed: bool | None = None - rain: bool = None - reset_needed: bool = None - ups_alert: bool = None + cover_closed: Optional[bool] = None + motor_failed: Optional[bool] = None + rain: Optional[bool] = None + reset_needed: Optional[bool] = None + ups_alert: Optional[bool] = None @dataclasses.dataclass class PLCPowerState: - camera: bool = None - computer: bool = None - heater: bool = None - router: bool = None - spectrometer: bool = None + camera: Optional[bool] = None + computer: Optional[bool] = None + heater: Optional[bool] = None + router: Optional[bool] = None + spectrometer: Optional[bool] = None @dataclasses.dataclass class PLCConnectionsState: - camera: bool | None = None - computer: bool = None - heater: bool = None - router: bool = None - spectrometer: bool | None = None + camera: Optional[bool] = None + computer: Optional[bool] = None + heater: Optional[bool] = None + router: Optional[bool] = None + spectrometer: Optional[bool] = None @dataclasses.dataclass From 4553a9b5445addff2718745a9e4361aac57f5a38 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Wed, 17 Aug 2022 13:14:46 +0200 Subject: [PATCH 030/132] #96 (3) - fix typing issues --- packages/core/threads/abstract_thread_base.py | 16 ++++---- packages/core/threads/helios_thread.py | 38 ++++++++++++------- packages/core/threads/upload_thread.py | 8 ++-- packages/core/utils/__init__.py | 6 +-- packages/core/utils/functions/astronomy.py | 17 ++++++--- .../core/utils/functions/image_processing.py | 2 +- packages/core/utils/functions/logger.py | 1 + .../utils/interfaces/config_validation.py | 4 +- .../core/utils/interfaces/os_interface.py | 31 +++++++++------ .../core/utils/interfaces/plc_interface.py | 20 ++++++---- .../utils/interfaces/plc_specification.py | 15 +++++--- .../core/utils/interfaces/state_interface.py | 4 +- 12 files changed, 100 insertions(+), 62 deletions(-) diff --git a/packages/core/threads/abstract_thread_base.py b/packages/core/threads/abstract_thread_base.py index b48e4b67..9a4cfbf8 100644 --- a/packages/core/threads/abstract_thread_base.py +++ b/packages/core/threads/abstract_thread_base.py @@ -1,5 +1,6 @@ import abc import threading +from typing import Optional from packages.core.utils.functions.logger import Logger @@ -8,19 +9,20 @@ class AbstractThreadBase(abc.ABC): An abstract base class for thread classes used in PYRA """ - def __init__(self, logger_origin: str): - self.__thread = None - self.__logger = Logger(origin=logger_origin) + def __init__(self, config: dict, logger_origin: str): + self.__thread: Optional[threading.Thread] = None + self.__logger: Logger = Logger(origin=logger_origin) + self.config: dict = config - def update_thread_state(self, config: dict): + def update_thread_state(self, new_config: dict): """ Make sure that the thread loop is (not) running, based on config.upload """ - self.config = config + self.config = new_config is_running = (self.__thread is not None) and self.__thread.is_alive() - should_be_running = self.should_be_running(config) + should_be_running = self.should_be_running() if should_be_running and (not is_running): self.__logger.info("Starting the thread") @@ -33,7 +35,7 @@ def update_thread_state(self, config: dict): self.__thread = None @abc.abstractmethod - def should_be_running(self, config: dict) -> bool: + def should_be_running(self) -> bool: """Should the thread be running? 
(based on config.upload)""" pass diff --git a/packages/core/threads/helios_thread.py b/packages/core/threads/helios_thread.py index 49d833f0..fd9792f2 100644 --- a/packages/core/threads/helios_thread.py +++ b/packages/core/threads/helios_thread.py @@ -1,6 +1,7 @@ from datetime import datetime import os import time +from typing import Any, Literal, Optional import cv2 as cv # type: ignore import numpy as np from packages.core.utils import ( @@ -27,21 +28,23 @@ class CameraError(Exception): class _Helios: - cam = None + cam: Optional[Any] = None current_exposure = None - last_autoexposure_time = 0 + last_autoexposure_time = 0.0 available_exposures = None @staticmethod def init(camera_id: int, retries: int = 5): # TODO: Why is this necessary? _Helios.cam = cv.VideoCapture(camera_id, cv.CAP_DSHOW) + assert _Helios.cam is not None _Helios.cam.release() for _ in range(retries): _Helios.cam = cv.VideoCapture(camera_id, cv.CAP_DSHOW) - if _Helios.cam.isOpened(): + assert _Helios.cam is not None + if _Helios.cam.isOpened(): if _Helios.available_exposures is None: _Helios.available_exposures = _Helios.get_available_exposures() logger.debug( @@ -83,6 +86,8 @@ def get_available_exposures() -> list[int]: the camera exposure to each value. Return a list of integers that the camera accepted as an exposure setting. """ + assert _Helios.cam is not None, "camera is not initialized yet" + possible_values = [] for exposure in range(-20, 20): _Helios.cam.set(cv.CAP_PROP_EXPOSURE, exposure) @@ -106,6 +111,8 @@ def update_camera_settings( available depends on the camera model. However, this function will throw an AssertionError, when the value could not be changed. """ + assert _Helios.cam is not None, "camera is not initialized yet" + properties = { "width": (cv.CAP_PROP_FRAME_WIDTH, width), "height": (cv.CAP_PROP_FRAME_HEIGHT, height), @@ -141,6 +148,7 @@ def take_image(retries: int = 10, trow_away_white_images: bool = True) -> cv.Mat except when specified not to (used in autoexposure). """ assert _Helios.cam is not None, "camera is not initialized yet" + if not _Helios.cam.isOpened(): raise CameraError("camera is not open") for _ in range(retries + 1): @@ -181,7 +189,7 @@ def adjust_exposure(): _Helios.current_exposure = new_exposure @staticmethod - def determine_frame_status(frame: cv.Mat, save_image: bool) -> int: + def determine_frame_status(frame: cv.Mat, save_image: bool) -> Literal[1, 0]: """ For a given frame, determine whether the conditions are good (direct sunlight, returns 1) or bad (diffuse light @@ -217,10 +225,11 @@ def determine_frame_status(frame: cv.Mat, save_image: bool) -> int: ) # determine how many pixels inside the circle are made up of "edge pixels" - edge_fraction = round((np.sum(edges_only_dilated) / 255) / np.sum(binary_mask), 6) - - # TODO: the values below should be adjusted by looking at the ifgs directly - status = 1 if (edge_fraction > 0.02) else 0 + pixels_inside_circle: int = np.sum(binary_mask) + status: Literal[1, 0] = 0 + if pixels_inside_circle != 0: + edge_fraction = round((np.sum(edges_only_dilated) / 255) / pixels_inside_circle, 6) + status = 1 if (edge_fraction > 0.02) else 0 logger.debug(f"exposure = {_Helios.current_exposure}, edge_fraction = {edge_fraction}") @@ -276,12 +285,12 @@ class HeliosThread(AbstractThreadBase): def __init__(self): super().__init__(logger) - def should_be_running(self, config: dict) -> bool: + def should_be_running(self) -> bool: """Should the thread be running? 
(based on config.upload)""" return ( - (not config["general"]["test_mode"]) - and (config["helios"] is not None) - and (config["measurement_triggers"]["consider_helios"]) + (not self.config["general"]["test_mode"]) + and (self.config["helios"] is not None) + and (self.config["measurement_triggers"]["consider_helios"]) ) # TODO: Update tests/headless mode to comply with new class structure @@ -294,6 +303,7 @@ def main(self, infinite_loop: bool = True, headless: bool = False): if headless: logger = Logger(origin="helios", just_print=True) _CONFIG = ConfigInterface.read() + self.config = _CONFIG status_history = RingList(_CONFIG["helios"]["evaluation_size"]) current_state = None @@ -302,7 +312,7 @@ def main(self, infinite_loop: bool = True, headless: bool = False): while True: # Check for termination - if not self.should_be_running(_CONFIG): + if not self.should_be_running(): return try: @@ -332,7 +342,7 @@ def main(self, infinite_loop: bool = True, headless: bool = False): StateInterface.update({"helios_indicates_good_conditions": False}) current_state = None # reinit for next day - _Helios.reinit_settings() + _Helios.deinit() time.sleep(300) continue diff --git a/packages/core/threads/upload_thread.py b/packages/core/threads/upload_thread.py index d67aee73..4d9a984e 100644 --- a/packages/core/threads/upload_thread.py +++ b/packages/core/threads/upload_thread.py @@ -292,12 +292,12 @@ class UploadThread(AbstractThreadBase): def __init__(self): super().__init__(logger) - def should_be_running(self, config: dict) -> bool: + def should_be_running(self) -> bool: """Should the thread be running? (based on config.upload)""" return ( - (not config["general"]["test_mode"]) - and (config["upload"] is not None) - and (config["upload"]["is_active"]) + (not self.config["general"]["test_mode"]) + and (self.config["upload"] is not None) + and (self.config["upload"]["is_active"]) ) def main(self): diff --git a/packages/core/utils/__init__.py b/packages/core/utils/__init__.py index 05ee3ed0..6ba1c2fa 100644 --- a/packages/core/utils/__init__.py +++ b/packages/core/utils/__init__.py @@ -8,7 +8,7 @@ from .decorators import with_filelock -from .interfaces import ConfigInterface, ConfigValidation +from .interfaces import ConfigInterface from .interfaces import StateInterface -from .interfaces import PLCInterface, PLCError -from .interfaces import OSInterface, LowEnergyError, StorageError +from .interfaces import PLCInterface +from .interfaces import OSInterface diff --git a/packages/core/utils/functions/astronomy.py b/packages/core/utils/functions/astronomy.py index 3cf80416..92798ba3 100644 --- a/packages/core/utils/functions/astronomy.py +++ b/packages/core/utils/functions/astronomy.py @@ -1,10 +1,14 @@ +from typing import Optional import astropy.coordinates as astropy_coordinates # type: ignore import astropy.time as astropy_time # type: ignore import astropy.units as astropy_units # type: ignore +# TODO: pass config via functions instea of indirectly +# more code but way simpler + class Astronomy: - CONFIG: dict = None + CONFIG: Optional[dict] = None units = astropy_units @staticmethod @@ -22,7 +26,7 @@ def get_current_sun_elevation(): return sun_angle_deg @staticmethod - def __get_location_from_camtracker_config() -> tuple[float]: + def __get_location_from_camtracker_config() -> tuple[float, float, float]: """Reads the config.txt file of the CamTracker application to receive the latest tracker position. 
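For context, the kind of astropy call chain a helper like get_current_sun_elevation typically wraps; the coordinates below are illustrative placeholders, not values read from any CamTracker config:

import astropy.units as u
from astropy.coordinates import AltAz, EarthLocation, get_sun
from astropy.time import Time

now = Time.now()
location = EarthLocation(lat=48.15 * u.deg, lon=11.57 * u.deg, height=520 * u.m)
sun_elevation = get_sun(now).transform_to(AltAz(obstime=now, location=location)).alt
print(sun_elevation.to(u.deg))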
@@ -30,6 +34,8 @@ def __get_location_from_camtracker_config() -> tuple[float]: tracker_position as a python list """ + assert Astronomy.CONFIG is not None, "astronomy has no config yet" + with open(Astronomy.CONFIG["camtracker"]["config_path"], "r") as f: _lines = f.readlines() @@ -42,9 +48,10 @@ def __get_location_from_camtracker_config() -> tuple[float]: assert _marker_line_index is not None, "Camtracker config file is not valid" # (latitude, longitude, altitude) - return tuple( - [float(_lines[_marker_line_index + n].replace("\n", "")) for n in [1, 2, 3]] - ) + lat = float(_lines[_marker_line_index + 1].strip()) + lon = float(_lines[_marker_line_index + 2].strip()) + alt = float(_lines[_marker_line_index + 3].strip()) + return (lat, lon, alt) @staticmethod def __get_astropy_location(): diff --git a/packages/core/utils/functions/image_processing.py b/packages/core/utils/functions/image_processing.py index afd52480..a393da24 100644 --- a/packages/core/utils/functions/image_processing.py +++ b/packages/core/utils/functions/image_processing.py @@ -1,4 +1,4 @@ -import cv2 as cv +import cv2 as cv # type: ignore import numpy as np diff --git a/packages/core/utils/functions/logger.py b/packages/core/utils/functions/logger.py index f0e85d58..8decccc6 100644 --- a/packages/core/utils/functions/logger.py +++ b/packages/core/utils/functions/logger.py @@ -72,6 +72,7 @@ def _write_log_line(self, level: str, message: str): Logger.archive(keep_last_hour=True) Logger.last_archive_time = now + @staticmethod def archive(keep_last_hour=False): with filelock.FileLock(LOG_FILES_LOCK): with open(DEBUG_LOG_FILE, "r") as f: diff --git a/packages/core/utils/interfaces/config_validation.py b/packages/core/utils/interfaces/config_validation.py index ab999e23..61a0b4e7 100644 --- a/packages/core/utils/interfaces/config_validation.py +++ b/packages/core/utils/interfaces/config_validation.py @@ -206,7 +206,7 @@ def check_dict( # Add assertions that cannot be done with cerberus here @staticmethod - def check_current_config_file() -> Tuple[bool, str]: + def check_current_config_file() -> Tuple[bool, Exception]: """ Load the contents of the current config file and validate its full integrity (with filepaths). @@ -220,7 +220,7 @@ def check_current_config_file() -> Tuple[bool, str]: raise AssertionError("file not in a valid json format") ConfigValidation.check_dict(content_object, partial_validation=False) - return True, "" + return True, Exception("") except Exception as e: ConfigValidation.logging_handler(f"Error in current config file: {e}") return False, e diff --git a/packages/core/utils/interfaces/os_interface.py b/packages/core/utils/interfaces/os_interface.py index 1cbe2844..519646cf 100644 --- a/packages/core/utils/interfaces/os_interface.py +++ b/packages/core/utils/interfaces/os_interface.py @@ -1,3 +1,4 @@ +from ctypes import Union from typing import Literal import psutil import datetime @@ -15,7 +16,7 @@ class LowEnergyError(Exception): @staticmethod def get_cpu_usage() -> list[float]: """returns cpu_percent for all cores -> list [cpu1%, cpu2%,...]""" - return psutil.cpu_percent(interval=1, percpu=True) + return psutil.cpu_percent(interval=1, percpu=True) # type: ignore @staticmethod def get_memory_usage() -> float: @@ -44,7 +45,7 @@ def validate_disk_space(): @staticmethod def get_connection_status( ip: str, - ) -> Literal["ESTABLISHED", "CLOSED", "SYN_SENT", "NO_INFO"]: + ) -> str: """ Takes ip address as input str: i.e. 10.10.0.4 Checks the ip connection for that address. 
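A hypothetical sketch of how such a per-IP connection check can be implemented with psutil; the body of get_connection_status is not shown in this hunk, so the details below are assumptions rather than the actual implementation:

import psutil

def connection_status_for(ip: str) -> str:
    # scan open TCP connections and report the status of the first match
    for conn in psutil.net_connections(kind="tcp"):
        if conn.raddr and conn.raddr.ip == ip:
            return conn.status  # e.g. "ESTABLISHED", "SYN_SENT", ...
    return "NO_INFO"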
@@ -68,8 +69,9 @@ def get_system_battery() -> int: Returns system battery in percent as an integer (1-100). Returns 100 if device has no battery. """ - if psutil.sensors_battery() is not None: - return psutil.sensors_battery().percent + battery_state = psutil.sensors_battery() + if battery_state is not None: + return battery_state.percent return 100 @staticmethod @@ -93,13 +95,20 @@ def get_process_status( process_name: str, ) -> Literal[ "running", - "paused", - "start_pending", - "pause_pending", - "continue_pending", - "stop_pending", + "sleeping", + "disk-sleep", "stopped", - "not_found", + "tracing-stop", + "zombie", + "dead", + "wake-kill", + "waking", + "idle", + "locked", + "waiting", + "suspended", + "parked", + "not-found", ]: """ Takes a process name "*.exe" and returns its OS process @@ -109,7 +118,7 @@ def get_process_status( if p.name() == process_name: return p.status() - return "not_found" + return "not-found" OSInterface.StorageError diff --git a/packages/core/utils/interfaces/plc_interface.py b/packages/core/utils/interfaces/plc_interface.py index 98e9e85f..5c057edd 100644 --- a/packages/core/utils/interfaces/plc_interface.py +++ b/packages/core/utils/interfaces/plc_interface.py @@ -31,8 +31,8 @@ class PLCControlState: @dataclasses.dataclass class PLCSensorsState: - humidity: int = None - temperature: int = None + humidity: Optional[int] = None + temperature: Optional[int] = None @dataclasses.dataclass @@ -169,19 +169,19 @@ def is_responsive(self) -> bool: # DIRECT READ FUNCTIONS def rain_is_detected(self) -> bool: - return self._read_bool(self.specification.state.rain) + return self.__read_bool(self.specification.state.rain) def cover_is_closed(self) -> bool: """ Reads the single value "state.cover_closed" """ - return self._read_bool(self.specification.state.cover_closed) + return self.__read_bool(self.specification.state.cover_closed) def reset_is_needed(self) -> bool: """ Reads the single value "state.reset_needed" """ - return self._read_bool(self.specification.state.reset_needed) + return self.__read_bool(self.specification.state.reset_needed) def get_cover_angle(self) -> int: """ @@ -210,10 +210,12 @@ def read(self) -> PLCState: logger.debug(f"new plc bulk read: {plc_db_content}") - def _get_int(spec: list[int]) -> int: + def _get_int(spec: Optional[list[int]]) -> Optional[int]: + if spec is None: + return None return snap7.util.get_int(plc_db_content[spec[0]], spec[1]) - def _get_bool(spec: list[int] | None) -> bool: + def _get_bool(spec: Optional[list[int]]) -> Optional[bool]: if spec is None: return None return snap7.util.get_bool(plc_db_content[spec[0]], spec[1], spec[3]) @@ -342,6 +344,7 @@ def set_power_camera(self, new_state: bool) -> None: def set_power_computer(self, new_state: bool) -> None: """Raises PLCInterface.PLCError, if value hasn't been changed""" + assert self.specification.power.computer is not None self.__update_bool( new_state, self.specification.power.computer, @@ -356,8 +359,9 @@ def set_power_heater(self, new_state: bool) -> None: {"power": {"heater": new_state}}, ) - def set__power_router(self, new_state: bool) -> None: + def set_power_router(self, new_state: bool) -> None: """Raises PLCInterface.PLCError, if value hasn't been changed""" + assert self.specification.power.router is not None self.__update_bool( new_state, self.specification.power.router, diff --git a/packages/core/utils/interfaces/plc_specification.py b/packages/core/utils/interfaces/plc_specification.py index f45be8d4..936c4370 100644 --- 
a/packages/core/utils/interfaces/plc_specification.py +++ b/packages/core/utils/interfaces/plc_specification.py @@ -1,4 +1,9 @@ from dataclasses import dataclass +from typing import Optional + + +# TODO: use typeddict +# TODO: use tuples (3 ints vs 4 ints) @dataclass @@ -27,7 +32,7 @@ class PLCSensorsSpecification: @dataclass class PLCStateSpecification: cover_closed: list[int] - motor_failed: list[int] | None + motor_failed: Optional[list[int]] rain: list[int] reset_needed: list[int] ups_alert: list[int] @@ -36,19 +41,19 @@ class PLCStateSpecification: @dataclass class PLCPowerSpecification: camera: list[int] - computer: list[int] + computer: Optional[list[int]] heater: list[int] - router: list[int] + router: Optional[list[int]] spectrometer: list[int] @dataclass class PLCConnectionsSpecification: - camera: list[int] | None + camera: Optional[list[int]] computer: list[int] heater: list[int] router: list[int] - spectrometer: list[int] | None + spectrometer: Optional[list[int]] @dataclass diff --git a/packages/core/utils/interfaces/state_interface.py b/packages/core/utils/interfaces/state_interface.py index 67c0fff8..6b56f585 100644 --- a/packages/core/utils/interfaces/state_interface.py +++ b/packages/core/utils/interfaces/state_interface.py @@ -17,7 +17,7 @@ PERSISTENT_STATE_FILE_PATH = os.path.join(PROJECT_DIR, "logs", "persistent-state.json") -EMPTY_STATE_OBJECT = { +EMPTY_STATE_OBJECT: dict = { "helios_indicates_good_conditions": None, "measurements_should_be_running": False, "enclosure_plc_readings": EMPTY_PLC_STATE.to_dict(), @@ -29,7 +29,7 @@ }, } -EMPTY_PERSISTENT_STATE_OBJECT = { +EMPTY_PERSISTENT_STATE_OBJECT: dict = { "active_opus_macro_id": None, "current_exceptions": [], } From bf51a8c57e8be177f878295e9392cd9102091b7d Mon Sep 17 00:00:00 2001 From: patrickaigner Date: Wed, 17 Aug 2022 13:51:37 +0200 Subject: [PATCH 031/132] #91 Added detailed code documentation to "enclosure_control.py" --- packages/core/modules/enclosure_control.py | 87 ++++++++++++++++------ 1 file changed, 65 insertions(+), 22 deletions(-) diff --git a/packages/core/modules/enclosure_control.py b/packages/core/modules/enclosure_control.py index 0fa9e957..5cf1edf9 100644 --- a/packages/core/modules/enclosure_control.py +++ b/packages/core/modules/enclosure_control.py @@ -30,6 +30,7 @@ def __init__(self, initial_config: dict): return def _initialize(self): + """Initializes the default PLC settings at startup or activation in config.""" self.plc_interface = PLCInterface(self.config) self.plc_interface.connect() self.plc_interface.set_auto_temperature(True) @@ -39,18 +40,31 @@ def _initialize(self): self.initialized = True def run(self, new_config: dict) -> None: + """Called in every cycle of the main loop. + Updates enclosure state based on the current automation status. + """ self.config = new_config + # Skips the rest of run if module not activated in config if self.config["tum_plc"] is None: logger.debug("Skipping EnclosureControl without a TUM PLC") return + # Allows to run Pyra-4 without measurement system hardware present if self.config["general"]["test_mode"]: logger.debug("Skipping EnclosureControl in test mode") return logger.info("Running EnclosureControl") + # Check for current measurement status + self.measurements_should_be_running = StateInterface.read()[ + "measurements_should_be_running" + ] + + # Updates the current loop to the latest config. + # Performs a connect to the PLC for the duration of this loop. + # Initializes if first call of the module. 
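        # (Overview of the block below: connect to the PLC, bulk-read its state,
        #  publish the readings via the StateInterface, sync and verify the cover,
        #  then disconnect. Snap7Exceptions are tolerated for up to 10 minutes
        #  before an error is raised.)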
try: if not self.initialized: self._initialize() @@ -60,53 +74,55 @@ def run(self, new_config: dict) -> None: # TODO: possibly end function if plc is not connected - # get the latest PLC interface state and update with current config + # Reads and returns the latest PLC database states try: self.plc_state = self.plc_interface.read() except Snap7Exception: logger.warning("Could not read PLC state in this loop.") - # read current state of actors and sensors in enclosure + # Push the latest readout of the PLC state to the StateInterface logger.info("New continuous readings.") StateInterface.update({"enclosure_plc_readings": self.plc_state.to_dict()}) + # Skip writing to the PLC as the user took over control from the automation if self.config["tum_plc"]["controlled_by_user"]: - logger.debug( - "Skipping EnclosureControl because enclosure is controlled by user" - ) + logger.debug("Skipping EnclosureControl because enclosure is controlled by user") return - # dawn/dusk detection: powerup/down spectrometer + # Dawn/dusk detection: powerup/down spectrometer at a defined sun angle self.auto_set_power_spectrometer() + # Check for critial error: Motor Failed Flag in PLC + # In case of present motor failed flag the cover might not be closed in bad weather conditions. + # Potentially putting the measurement instrument at risk. if self.plc_state.state.motor_failed: raise MotorFailedError("URGENT: stop all actions, check cover in person") - # check PLC ip connection (single ping) + # Check PLC ip connection (single ping). if self.plc_interface.is_responsive(): logger.debug("Successful ping to PLC.") else: logger.warning("Could not ping PLC.") - # check for automation state flank changes - self.measurements_should_be_running = StateInterface.read()[ - "measurements_should_be_running" - ] + # Syncs the cover to the current automation status present self.sync_cover_to_measurement_status() - # save the automation status for the next run - self.last_cycle_automation_status = self.measurements_should_be_running - - # verify cover position for every loop. Close cover if supposed to be closed. + # Verify functions will handle desync caused by a user taking over control in previous loops + # Verify cover position for every loop. Close cover if supposed to be closed. self.verify_cover_position() - # verify that sync_to_cover status is still synced with measurement status + # Verify that sync_to_cover status is still synced with measurement status self.verify_cover_sync() - # disconnect from PLC + # Save the automation status for the next run. This allows for flank detection from previous completed loops. + self.last_cycle_automation_status = self.measurements_should_be_running + + # Disconnect from PLC self.plc_interface.disconnect() self.last_plc_connection_time = time.time() + # Allows for PLC connection downtime of 10 minutes before an error is raised. + # Allows PLC connection to heal itself. except Snap7Exception as e: logger.exception(e) now = time.time() @@ -121,7 +137,13 @@ def run(self, new_config: dict) -> None: # PLC.ACTORS SETTERS - def move_cover(self, value) -> None: + def move_cover(self, value: int) -> None: + """Moves the cover attached on top of the enclosure. The cover is moved by a electrical + motor controlled by the PLC. The cover functions as weather protection for the measurement + instrument. In case of bad weather the PLC takes over control and closes the cover itself. 
+ A movement of the cover during bad weather conditions shall not be allowed as instrument + saefty is priotized higher than maximization of overall measurement uptime. + """ logger.debug(f"Received request to move cover to position {value} degrees.") # rain check before moving cover. PLC will deny cover requests during rain anyway @@ -133,6 +155,12 @@ def move_cover(self, value) -> None: self.plc_interface.set_manual_control(False) def force_cover_close(self) -> None: + """Emergency option to call to ensure that the cover is closed immediately. Accounts for + possible blocking conditions caused by the PLC internals: + 1. Reset needed + 2. Sync to tracker still active + 3. Manual control not active + """ if not self.initialized: self._initialize() @@ -144,11 +172,11 @@ def force_cover_close(self) -> None: self.plc_interface.set_cover_angle(0) self.plc_interface.set_manual_control(False) - def wait_for_cover_closing(self, throw_error=True) -> None: - """Waits steps of 5s for the enclosure cover to close. + def wait_for_cover_closing(self, throw_error: bool = True) -> None: + """Validates the progress of a cover closing call. Continues when cover is closed. + Validation is done every 5s with a maximum waiting time of 60s. - Raises the custom error CoverError if clover doesn't close in a given - period of time. + Raises the custom error CoverError if clover doesn't close in time. """ start_time = time.time() @@ -185,6 +213,10 @@ def auto_set_power_spectrometer(self) -> None: logger.info("Powering down the spectrometer.") def sync_cover_to_measurement_status(self) -> None: + """Checks for flank changes in parameter measurement_should_be_running. + Positive flank: Set sync_cover flag in PLC to start matching the Camtracker mirror position. + Negative flank: Remove sync_cover flag in PLC and move cover to position 0. + """ if self.last_cycle_automation_status != self.measurements_should_be_running: if self.measurements_should_be_running: # flank change 0 -> 1: set cover mode: sync to tracker @@ -196,12 +228,20 @@ def sync_cover_to_measurement_status(self) -> None: logger.info("Syncing Cover to Tracker.") else: # flank change 1 -> 0: remove cover mode: sync to tracker, close cover + if self.plc_state.state.reset_needed: + self.plc_interface.reset() + time.sleep(10) self.plc_interface.set_sync_to_tracker(False) self.move_cover(0) logger.info("Closing Cover.") self.wait_for_cover_closing(throw_error=False) def verify_cover_position(self) -> None: + """Verifies that the cover is closed when measurements are currently not set to be running. + Closed the cover in case of a mismatch. + + This functions allows to detect desync caused by previous user controlled decisions. It + also functions as a failsafe to ensure weather protection of the instrument.""" if (not self.measurements_should_be_running) & (not self.plc_state.state.rain): if not self.plc_state.state.cover_closed: logger.info("Cover is still open. Trying to close again.") @@ -209,6 +249,9 @@ def verify_cover_position(self) -> None: self.wait_for_cover_closing() def verify_cover_sync(self) -> None: + """Syncs the current cover_sync flag in the PLC with the present measurement status. 
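A hypothetical usage sketch of the emergency-close helpers documented above; the object construction is shown only for illustration:

from packages.core.modules.enclosure_control import EnclosureControl
from packages.core.utils import ConfigInterface

enclosure = EnclosureControl(ConfigInterface.read())
enclosure.force_cover_close()                       # reset, drop tracker sync, move to 0
enclosure.wait_for_cover_closing(throw_error=False) # poll every 5 s, up to 60 s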
+ + This functions allows to detect desync caused by previous user controlled decisions.""" if self.measurements_should_be_running & (not self.plc_state.control.sync_to_tracker): logger.debug("Set sync to tracker to True to match measurement status.") self.plc_interface.set_sync_to_tracker(True) From dce4d23e4517a97d72c1006cbc123deb3ca11fdf Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Wed, 17 Aug 2022 14:04:04 +0200 Subject: [PATCH 032/132] #96 (4) - fix strict typing issues --- packages/core/__init__.py | 1 + packages/core/main.py | 13 ++-- packages/core/modules/enclosure_control.py | 6 +- .../core/modules/measurement_conditions.py | 2 +- packages/core/modules/opus_measurement.py | 57 +++++++-------- packages/core/threads/helios_thread.py | 34 ++++----- packages/core/threads/upload_thread.py | 73 ++++++++----------- packages/core/utils/__init__.py | 2 + .../utils/interfaces/config_validation.py | 30 ++++---- packages/core/utils/types.py | 15 ++++ 10 files changed, 122 insertions(+), 111 deletions(-) create mode 100644 packages/core/utils/types.py diff --git a/packages/core/__init__.py b/packages/core/__init__.py index 913c063d..bd3c66f5 100644 --- a/packages/core/__init__.py +++ b/packages/core/__init__.py @@ -1,2 +1,3 @@ +from .utils import types from .utils.interfaces import config_validation from . import modules, main, threads diff --git a/packages/core/main.py b/packages/core/main.py index 58b9fdfb..d73eecce 100644 --- a/packages/core/main.py +++ b/packages/core/main.py @@ -1,5 +1,6 @@ import os import time +from typing import Any, Optional from packages.core import modules, threads from packages.core.utils import ( ConfigInterface, @@ -12,8 +13,8 @@ def update_exception_state( - config: dict, current_exceptions: list[str], new_exception: Exception -): + config: dict, current_exceptions: list[str], new_exception: Optional[Exception] +) -> list[str]: """ Take a list of current_exceptions (all exceptions that are present from the last mainloop iteration, possibly empty) and @@ -52,7 +53,7 @@ def update_exception_state( return current_exceptions -def run(): +def run() -> None: """ The mainloop of PYRA Core. This function will loop infinitely. It loads the config file, validates it runs every module one by @@ -74,7 +75,7 @@ def run(): # these modules will be executed one by one in each # mainloop iteration - mainloop_modules = [ + mainloop_modules: Any = [ modules.measurement_conditions.MeasurementConditions(config), modules.enclosure_control.EnclosureControl(config), modules.sun_tracking.SunTracking(config), @@ -86,8 +87,8 @@ def run(): # dedicated mainloop in a parallel thread if the # respective service is configured. 
The threads itself # load the config periodically and stop themselves - helios_thread_instance = threads.helios_thread.HeliosThread() - upload_thread_instance = threads.upload_thread.UploadThread() + helios_thread_instance = threads.helios_thread.HeliosThread(config) + upload_thread_instance = threads.upload_thread.UploadThread(config) current_exceptions = StateInterface.read(persistent=True)["current_exceptions"] diff --git a/packages/core/modules/enclosure_control.py b/packages/core/modules/enclosure_control.py index e3892019..f0239d34 100644 --- a/packages/core/modules/enclosure_control.py +++ b/packages/core/modules/enclosure_control.py @@ -29,7 +29,7 @@ def __init__(self, initial_config: dict): logger.debug("Skipping EnclosureControl without a TUM PLC") return - def _initialize(self): + def __initialize(self) -> None: self.plc_interface = PLCInterface(self.config) self.plc_interface.connect() self.plc_interface.set_auto_temperature(True) @@ -53,7 +53,7 @@ def run(self, new_config: dict) -> None: try: if not self.initialized: - self._initialize() + self.__initialize() else: self.plc_interface.update_config(self.config) self.plc_interface.connect() @@ -136,7 +136,7 @@ def move_cover(self, value) -> None: def force_cover_close(self) -> None: if not self.initialized: - self._initialize() + self.__initialize() if self.plc_state.state.reset_needed: self.plc_interface.reset() diff --git a/packages/core/modules/measurement_conditions.py b/packages/core/modules/measurement_conditions.py index 955a320b..6e1a00ea 100644 --- a/packages/core/modules/measurement_conditions.py +++ b/packages/core/modules/measurement_conditions.py @@ -18,7 +18,7 @@ class MeasurementConditions: def __init__(self, initial_config: dict): self._CONFIG = initial_config - def run(self, new_config: dict): + def run(self, new_config: dict) -> None: self._CONFIG = new_config if self._CONFIG["general"]["test_mode"]: logger.debug("Skipping MeasurementConditions in test mode") diff --git a/packages/core/modules/opus_measurement.py b/packages/core/modules/opus_measurement.py index 637e1b44..b2934f75 100644 --- a/packages/core/modules/opus_measurement.py +++ b/packages/core/modules/opus_measurement.py @@ -27,12 +27,12 @@ def __init__(self, initial_config: dict): self._CONFIG = initial_config self.initialized = False self.current_experiment = self._CONFIG["opus"]["experiment_path"] - if self._CONFIG["general"]["test_mode"]: + if self._CONFIG["general"]["test_mode"] or (sys.platform != "win32"): return - self._initialize() + self.__initialize() - def _initialize(self): + def __initialize(self) -> None: # note: dde servers talk to dde servers self.server = dde.CreateServer() self.server.Create("Client") @@ -40,7 +40,7 @@ def _initialize(self): self.last_cycle_automation_status = 0 self.initialized = True - def run(self, new_config: dict): + def run(self, new_config: dict) -> None: self._CONFIG = new_config if self._CONFIG["general"]["test_mode"]: logger.debug("Skipping OpusMeasurement in test mode") @@ -56,7 +56,7 @@ def run(self, new_config: dict): return if not self.initialized: - self._initialize() + self.__initialize() # start or stops opus.exe depending on sun angle self.automated_process_handling() @@ -70,7 +70,9 @@ def run(self, new_config: dict): logger.info("EM27 seems to be disconnected.") # check for automation state flank changes - measurements_should_be_running = StateInterface.read()["measurements_should_be_running"] + measurements_should_be_running = StateInterface.read()[ + "measurements_should_be_running" + ] if 
self.last_cycle_automation_status != measurements_should_be_running: if measurements_should_be_running: # flank change 0 -> 1: load experiment, start macro @@ -84,7 +86,7 @@ def run(self, new_config: dict): # save the automation status for the next run self.last_cycle_automation_status = measurements_should_be_running - def __connect_to_dde_opus(self): + def __connect_to_dde_opus(self) -> None: try: self.conversation.ConnectTo("OPUS", "OPUS/System") logger.info("Connected to OPUS DDE Server.") @@ -92,7 +94,7 @@ def __connect_to_dde_opus(self): logger.info("Could not connect to OPUS DDE Server.") @property - def __test_dde_connection(self): + def __test_dde_connection(self) -> bool: """Tests the DDE connection. Tries to reinitialize the DDE socket if connection test fails. """ @@ -112,9 +114,9 @@ def __test_dde_connection(self): self.__connect_to_dde_opus() # retest DDE connection - return self.conversation.Connected() == 1 + return self.conversation.Connected() == 1 # type: ignore - def load_experiment(self): + def load_experiment(self) -> None: """Loads a new experiment in OPUS over DDE connection.""" self.__connect_to_dde_opus() experiment_path = self._CONFIG["opus"]["experiment_path"] @@ -134,7 +136,7 @@ def load_experiment(self): logger.info("Could not load OPUS experiment as expected.") """ - def start_macro(self): + def start_macro(self) -> None: """Starts a new macro in OPUS over DDE connection.""" self.__connect_to_dde_opus() if not self.__test_dde_connection: @@ -155,7 +157,7 @@ def start_macro(self): logger.info(f"Could not start OPUS macro with id: {active_macro_id} as expected.") """ - def stop_macro(self): + def stop_macro(self) -> None: """Stops the currently running macro in OPUS over DDE connection.""" self.__connect_to_dde_opus() macro_path = os.path.basename(self._CONFIG["opus"]["macro_path"]) @@ -174,7 +176,7 @@ def stop_macro(self): logger.info(f"Could not stop OPUS macro with id: {active_macro_id} as expected.") """ - def close_opus(self): + def close_opus(self) -> None: """Closes OPUS via DDE.""" self.__connect_to_dde_opus() @@ -191,17 +193,17 @@ def close_opus(self): logger.info("No response for OPUS.exe close request.") """ - def __shutdown_dde_server(self): + def __shutdown_dde_server(self) -> None: """Note the underlying DDE object (ie, Server, Topics and Items) are not cleaned up by this call. """ self.server.Shutdown() - def __destroy_dde_server(self): + def __destroy_dde_server(self) -> None: """Destroys the underlying C++ object.""" self.server.Destroy() - def __is_em27_responsive(self): + def __is_em27_responsive(self) -> bool: """Pings the EM27 and returns: True -> Connected @@ -209,7 +211,7 @@ def __is_em27_responsive(self): response = os.system("ping -n 1 " + self._CONFIG["em27"]["ip"]) return response == 0 - def start_opus(self): + def start_opus(self) -> None: """Uses os.startfile() to start up OPUS This simulates a user click on the opus.exe. """ @@ -227,7 +229,7 @@ def start_opus(self): show_cmd=2, ) - def opus_application_running(self): + def opus_application_running(self) -> bool: """Checks if OPUS is already running by identifying the window. 
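The comments above describe the macro handling as purely edge triggered: a flank change from 0 to 1 loads the experiment and starts the macro, a change from 1 to 0 stops the macro, and nothing happens while the flag stays constant. A reduced sketch of that edge detection, with print statements standing in for the real DDE calls:

class MacroSwitch:
    def __init__(self) -> None:
        self.last_status = False

    def handle(self, should_be_running: bool) -> None:
        if self.last_status != should_be_running:
            if should_be_running:
                print("flank 0 -> 1: load experiment, start macro")
            else:
                print("flank 1 -> 0: stop macro")
        # remember the status for the next mainloop iteration
        self.last_status = should_be_running

switch = MacroSwitch()
for flag in [False, True, True, False]:
    switch.handle(flag)  # prints only on the two flank changes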
False if Application is currently not running on OS @@ -249,7 +251,7 @@ def opus_application_running(self): except win32ui.error: return False - def test_setup(self): + def test_setup(self) -> None: if sys.platform != "win32": return @@ -274,18 +276,15 @@ def test_setup(self): self.stop_macro() - def low_sun_angle_present(self): + def low_sun_angle_present(self) -> bool: """OPUS closes at the end of the day to start up fresh the next day.""" - # sleep while sun angle is too low - if Astronomy.get_current_sun_elevation().is_within_bounds( + sun_angle_is_low: bool = Astronomy.get_current_sun_elevation().is_within_bounds( None, self._CONFIG["general"]["min_sun_elevation"] * Astronomy.units.deg - ): - return True - else: - return False + ) + return sun_angle_is_low - def automated_process_handling(self): + def automated_process_handling(self) -> None: """Start OPUS.exe if not running and sun angle conditions satisfied. Shuts down OPUS.exe if running and sun angle conditions not satisfied. """ @@ -310,7 +309,7 @@ def automated_process_handling(self): time.sleep(5) self.close_opus() - def wait_for_opus_startup(self): + def wait_for_opus_startup(self) -> None: """Checks for OPUS to be running. Breaks out of the loop after a defined time.""" start_time = time.time() while True: @@ -321,7 +320,7 @@ def wait_for_opus_startup(self): if time.time() - start_time > 60: break - def check_for_experiment_change(self): + def check_for_experiment_change(self) -> None: """Compares the experiment in the config with the current active experiment. To reload an experiment during an active macro the macro needs to be stopped first. """ diff --git a/packages/core/threads/helios_thread.py b/packages/core/threads/helios_thread.py index fd9792f2..01802dcc 100644 --- a/packages/core/threads/helios_thread.py +++ b/packages/core/threads/helios_thread.py @@ -34,7 +34,7 @@ class _Helios: available_exposures = None @staticmethod - def init(camera_id: int, retries: int = 5): + def init(camera_id: int, retries: int = 5) -> None: # TODO: Why is this necessary? _Helios.cam = cv.VideoCapture(camera_id, cv.CAP_DSHOW) assert _Helios.cam is not None @@ -71,7 +71,7 @@ def init(camera_id: int, retries: int = 5): raise CameraError("could not initialize camera") @staticmethod - def deinit(): + def deinit() -> None: """ Possibly release the camera (linked over cv2.VideoCapture) """ @@ -98,14 +98,14 @@ def get_available_exposures() -> list[int]: @staticmethod def update_camera_settings( - exposure: int = None, - brightness: int = None, - contrast: int = None, - saturation: int = None, - gain: int = None, - width: int = None, - height: int = None, - ): + exposure: Optional[int] = None, + brightness: Optional[int] = None, + contrast: Optional[int] = None, + saturation: Optional[int] = None, + gain: Optional[int] = None, + width: Optional[int] = None, + height: Optional[int] = None, + ) -> None: """ Update the settings of the connected camera. Which settings are available depends on the camera model. However, this function will @@ -161,7 +161,7 @@ def take_image(retries: int = 10, trow_away_white_images: bool = True) -> cv.Mat raise CameraError("could not take image") @staticmethod - def adjust_exposure(): + def adjust_exposure() -> None: """ This function will loop over all available exposures and take one image for each exposure. 
Then it sets exposure @@ -189,7 +189,7 @@ def adjust_exposure(): _Helios.current_exposure = new_exposure @staticmethod - def determine_frame_status(frame: cv.Mat, save_image: bool) -> Literal[1, 0]: + def determine_frame_status(frame: cv.Mat, save_image: bool) -> Literal[0, 1]: """ For a given frame, determine whether the conditions are good (direct sunlight, returns 1) or bad (diffuse light @@ -246,7 +246,7 @@ def determine_frame_status(frame: cv.Mat, save_image: bool) -> Literal[1, 0]: return status @staticmethod - def run(save_image: bool) -> int: + def run(save_image: bool) -> Literal[0, 1]: """ Take an image and evaluate the sun conditions. @@ -282,8 +282,8 @@ class HeliosThread(AbstractThreadBase): to the StateInterface. """ - def __init__(self): - super().__init__(logger) + def __init__(self, config: dict): + super().__init__(config, "helios") def should_be_running(self) -> bool: """Should the thread be running? (based on config.upload)""" @@ -294,7 +294,7 @@ def should_be_running(self) -> bool: ) # TODO: Update tests/headless mode to comply with new class structure - def main(self, infinite_loop: bool = True, headless: bool = False): + def main(self, infinite_loop: bool = True, headless: bool = False) -> None: """Main entrypoint of the thread""" global logger global _CONFIG @@ -367,7 +367,7 @@ def main(self, infinite_loop: bool = True, headless: bool = False): continue # append sun status to status history - status_history.append(0 if (status == -1) else status) + status_history.append(status) logger.debug( f"New Helios status: {status}. Current history: {status_history.get()}" ) diff --git a/packages/core/threads/upload_thread.py b/packages/core/threads/upload_thread.py index 4d9a984e..d20f6e34 100644 --- a/packages/core/threads/upload_thread.py +++ b/packages/core/threads/upload_thread.py @@ -3,15 +3,13 @@ import json import os import shutil +from typing import Optional import invoke import paramiko import time import fabric # type: ignore import re -from packages.core.utils import ( - ConfigInterface, - Logger, -) +from packages.core.utils import ConfigInterface, Logger, types from .abstract_thread_base import AbstractThreadBase logger = Logger(origin="upload") @@ -50,36 +48,27 @@ def __init__(self, date_string: str, config: dict): config["upload"]["dst_directory"] ), f"remote {config['upload']['dst_directory']} is not a directory" - self.meta_content: dict | None = None + self.meta_content: types.UploadMetaDict = { + "complete": False, + "fileList": [], + "createdTime": round(time.time(), 3), + "lastModifiedTime": round(time.time(), 3), + } self.remove_src_after_upload: bool = config["upload"]["remove_src_after_upload"] - def __initialize_remote_dir(self): + def __initialize_remote_dir(self) -> None: """ If the respective dst directory does not exist, create the directory and add a fresh upload-meta.json - file to it looking like this: { - "complete": false, - "fileList": [], - "createdTime": , - "lastModifiedTime": - } + file to it. 
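The upload-meta.json file described above carries exactly the four keys of the UploadMetaDict introduced in this patch: complete, fileList, createdTime and lastModifiedTime. A small standalone sketch of creating such a file and applying a partial update; the file path and the file names are example values, and the real client additionally copies the file to the remote host.

import json
import time

meta = {
    "complete": False,
    "fileList": [],
    "createdTime": round(time.time(), 3),
    "lastModifiedTime": round(time.time(), 3),
}

meta_path = "upload-meta.json"  # placeholder location for this sketch
with open(meta_path, "w") as f:
    json.dump(meta, f, indent=4)

# partial update: record uploaded files and bump the modification time
meta.update({"fileList": ["ifg-001", "ifg-002"]})  # example entries
meta.update({"lastModifiedTime": round(time.time(), 3)})
with open(meta_path, "w") as f:
    json.dump(meta, f, indent=4)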
""" if not self.transfer_process.is_remote_dir(self.dst_dir_path): self.connection.run(f"mkdir {self.dst_dir_path}") with open(self.src_meta_path, "w") as f: - json.dump( - { - "complete": False, - "fileList": [], - "createdTime": round(time.time(), 3), - "lastModifiedTime": round(time.time(), 3), - }, - f, - indent=4, - ) + json.dump(self.meta_content, f, indent=4) self.transfer_process.put(self.src_meta_path, self.dst_meta_path) - def __get_remote_directory_checksum(self): + def __get_remote_directory_checksum(self) -> str: """ Calculate checksum over all files listed in the upload-meta.json file. The same logic will run @@ -111,7 +100,7 @@ def __get_remote_directory_checksum(self): f"could not execute remote command on server ({remote_command}): {e}" ) - def __get_local_directory_checksum(self): + def __get_local_directory_checksum(self) -> str: """ Calculate checksum over all files listed in the upload-meta.json file. The same logic will run @@ -128,7 +117,7 @@ def __get_local_directory_checksum(self): # stdout is a checksum, otherwise it is a traceback return hasher.hexdigest() - def __fetch_meta(self): + def __fetch_meta(self) -> None: """ Download the remote meta file to the local src directory """ @@ -138,26 +127,25 @@ def __fetch_meta(self): try: assert os.path.isfile(self.src_meta_path) with open(self.src_meta_path, "r") as f: + # TODO: validate this with cerberus self.meta_content = json.load(f) except (AssertionError, json.JSONDecodeError) as e: raise InvalidUploadState(str(e)) - def __update_meta(self, new_meta_content_partial: dict): + def __update_meta(self, new_meta_content_partial: types.PartialUploadMetaDict) -> None: """ Update the local upload-meta.json file and overwrite the meta file on the server """ - new_meta_content = { - **self.meta_content, - **new_meta_content_partial, - "lastModifiedTime": round(time.time(), 3), - } + assert self.meta_content is not None + self.meta_content.update(new_meta_content_partial) + self.meta_content.update({"lastModifiedTime": round(time.time(), 3)}) + with open(self.src_meta_path, "w") as f: - json.dump(new_meta_content, f, indent=4) + json.dump(self.meta_content, f, indent=4) self.transfer_process.put(self.src_meta_path, self.dst_meta_path) - self.meta_content = new_meta_content - def run(self): + def run(self) -> None: """ Perform the whole upload process for a given directory. @@ -216,7 +204,7 @@ def run(self): except KeyError: upload_is_finished = True - if (self.meta_content["fileList"] % 25 == 0) or upload_is_finished: + if (len(self.meta_content["fileList"]) % 25 == 0) or upload_is_finished: self.__update_meta({"fileList": self.meta_content["fileList"]}) # raise an exception if the checksums do not match @@ -239,12 +227,12 @@ def run(self): else: logger.debug("skipping removal of source") - def teardown(self): + def teardown(self) -> None: """close ssh and scp connection""" self.connection.close() @staticmethod - def __is_valid_date(date_string: str): + def __is_valid_date(date_string: str) -> bool: try: day_ending = datetime.strptime(f"{date_string} 23:59:59", "%Y%m%d %H:%M:%S") seconds_since_day_ending = (datetime.now() - day_ending).total_seconds() @@ -289,8 +277,8 @@ class UploadThread(AbstractThreadBase): 📁 ... """ - def __init__(self): - super().__init__(logger) + def __init__(self, config: dict): + super().__init__(config, "upload") def should_be_running(self) -> bool: """Should the thread be running? 
(based on config.upload)""" @@ -300,10 +288,11 @@ def should_be_running(self) -> bool: and (self.config["upload"]["is_active"]) ) - def main(self): + def main(self) -> None: """Main entrypoint of the thread""" while True: config = ConfigInterface.read() + self.config = config src_dates_strings = DirectoryUploadClient.get_directories_to_be_uploaded( config["upload"]["src_directory"] @@ -311,11 +300,11 @@ def main(self): for src_date_string in src_dates_strings: # check for termination before processing each directory - if not self.should_be_running(config): + if not self.should_be_running(): return try: - client = DirectoryUploadClient(src_date_string) + client = DirectoryUploadClient(src_date_string, config) client.run() except TimeoutError as e: logger.error(f"could not reach host (uploading {src_date_string}): {e}") diff --git a/packages/core/utils/__init__.py b/packages/core/utils/__init__.py index 6ba1c2fa..9bcca9be 100644 --- a/packages/core/utils/__init__.py +++ b/packages/core/utils/__init__.py @@ -12,3 +12,5 @@ from .interfaces import StateInterface from .interfaces import PLCInterface from .interfaces import OSInterface + +from . import types diff --git a/packages/core/utils/interfaces/config_validation.py b/packages/core/utils/interfaces/config_validation.py index 61a0b4e7..9d0039ac 100644 --- a/packages/core/utils/interfaces/config_validation.py +++ b/packages/core/utils/interfaces/config_validation.py @@ -10,12 +10,12 @@ CONFIG_FILE_PATH = os.path.join(PROJECT_DIR, "config", "config.json") -def _directory_path_exists(field, value, error): +def _directory_path_exists(field, value, error): # type: ignore if not os.path.isfile(value): error(field, "Path has to be an existing file") -def _file_path_exists(field, value, error): +def _file_path_exists(field, value, error): # type: ignore if not os.path.isfile(value): error(field, "Path has to be an existing file") @@ -29,8 +29,12 @@ def _is_valid_ip_adress(field, value, error): error(field, "String has to be a valid IPv4 address") -DICT_SCHEMA = lambda s: {"type": "dict", "schema": s} -NULLABLE_DICT_SCHEMA = lambda s: {"type": "dict", "schema": s, "nullable": True} +def get_dict_schema(s: dict) -> dict: + return {"type": "dict", "schema": s} + + +def get_nullable_dict_schema(s: dict) -> dict: + return {"type": "dict", "schema": s, "nullable": True} def get_config_file_schema(strict: boolean): @@ -60,7 +64,7 @@ def get_config_file_schema(strict: boolean): specs["directory"]["check_with"] = _directory_path_exists return { - "general": DICT_SCHEMA( + "general": get_dict_schema( { "seconds_per_core_interval": { "type": "number", @@ -72,7 +76,7 @@ def get_config_file_schema(strict: boolean): "min_sun_elevation": {"type": "number", "min": 0, "max": 90}, } ), - "opus": DICT_SCHEMA( + "opus": get_dict_schema( { "em27_ip": specs["ip"], "executable_path": specs["file"], @@ -82,7 +86,7 @@ def get_config_file_schema(strict: boolean): "password": {"type": "string"}, } ), - "camtracker": DICT_SCHEMA( + "camtracker": get_dict_schema( { "config_path": specs["file"], "executable_path": specs["file"], @@ -95,7 +99,7 @@ def get_config_file_schema(strict: boolean): }, } ), - "error_email": DICT_SCHEMA( + "error_email": get_dict_schema( { "sender_address": {"type": "string"}, "sender_password": {"type": "string"}, @@ -103,7 +107,7 @@ def get_config_file_schema(strict: boolean): "recipients": {"type": "string"}, } ), - "measurement_decision": DICT_SCHEMA( + "measurement_decision": get_dict_schema( { "mode": { "type": "string", @@ -113,7 +117,7 @@ def 
get_config_file_schema(strict: boolean): "cli_decision_result": {"type": "boolean"}, } ), - "measurement_triggers": DICT_SCHEMA( + "measurement_triggers": get_dict_schema( { "consider_time": {"type": "boolean"}, "consider_sun_elevation": {"type": "boolean"}, @@ -123,14 +127,14 @@ def get_config_file_schema(strict: boolean): "min_sun_elevation": {"type": "number", "min": 0, "max": 90}, } ), - "tum_plc": NULLABLE_DICT_SCHEMA( + "tum_plc": get_nullable_dict_schema( { "ip": specs["ip"], "version": {"type": "integer", "allowed": [1, 2]}, "controlled_by_user": {"type": "boolean"}, } ), - "helios": NULLABLE_DICT_SCHEMA( + "helios": get_nullable_dict_schema( { "camera_id": {"type": "integer", "min": 0, "max": 999999}, "evaluation_size": {"type": "integer", "min": 1, "max": 100}, @@ -147,7 +151,7 @@ def get_config_file_schema(strict: boolean): "save_images": {"type": "boolean"}, } ), - "upload": NULLABLE_DICT_SCHEMA( + "upload": get_nullable_dict_schema( { "is_active": {"type": "boolean"}, "host": specs["ip"], diff --git a/packages/core/utils/types.py b/packages/core/utils/types.py new file mode 100644 index 00000000..76316d9d --- /dev/null +++ b/packages/core/utils/types.py @@ -0,0 +1,15 @@ +from typing import TypedDict + + +class UploadMetaDict(TypedDict): + complete: bool + fileList: list[str] + createdTime: float + lastModifiedTime: float + + +class PartialUploadMetaDict(TypedDict, total=False): + complete: bool + fileList: list[str] + createdTime: float + lastModifiedTime: float From 667af339c6e7df2f21a980bcfc462cdd4a863ab4 Mon Sep 17 00:00:00 2001 From: patrickaigner Date: Wed, 17 Aug 2022 14:51:37 +0200 Subject: [PATCH 033/132] Moved the link to the Snap7 API to the correct place. --- .../core/utils/interfaces/plc_interface.py | 18 ++++++------------ 1 file changed, 6 insertions(+), 12 deletions(-) diff --git a/packages/core/utils/interfaces/plc_interface.py b/packages/core/utils/interfaces/plc_interface.py index f18a1927..f5df5292 100644 --- a/packages/core/utils/interfaces/plc_interface.py +++ b/packages/core/utils/interfaces/plc_interface.py @@ -109,6 +109,8 @@ def update_state_file(update: dict): class PLCInterface: + """https://buildmedia.readthedocs.org/media/pdf/python-snap7/latest/python-snap7.pdf""" + def __init__(self, config: dict): self.config = config self.specification = PLC_SPECIFICATION_VERSIONS[config["tum_plc"]["version"]] @@ -332,33 +334,25 @@ def set_sync_to_tracker(self, new_state: bool) -> None: self._write_bool(self.specification.control.sync_to_tracker, new_state) if self._read_bool(self.specification.control.sync_to_tracker) != new_state: raise PLCError("PLC state did not change") - update_state_file( - {"enclosure_plc_readings": {"control": {"sync_to_tracker": new_state}}} - ) + update_state_file({"enclosure_plc_readings": {"control": {"sync_to_tracker": new_state}}}) def set_manual_control(self, new_state: bool) -> None: self._write_bool(self.specification.control.manual_control, new_state) if self._read_bool(self.specification.control.manual_control) != new_state: raise PLCError("PLC state did not change") - update_state_file( - {"enclosure_plc_readings": {"control": {"manual_control": new_state}}} - ) + update_state_file({"enclosure_plc_readings": {"control": {"manual_control": new_state}}}) def set_auto_temperature(self, new_state: bool) -> None: self._write_bool(self.specification.control.auto_temp_mode, new_state) if self._read_bool(self.specification.control.auto_temp_mode) != new_state: raise PLCError("PLC state did not change") - update_state_file( - 
{"enclosure_plc_readings": {"control": {"auto_temp_mode": new_state}}} - ) + update_state_file({"enclosure_plc_readings": {"control": {"auto_temp_mode": new_state}}}) def set_manual_temperature(self, new_state: bool) -> None: self._write_bool(self.specification.control.manual_temp_mode, new_state) if self._read_bool(self.specification.control.manual_temp_mode) != new_state: raise PLCError("PLC state did not change") - update_state_file( - {"enclosure_plc_readings": {"control": {"manual_temp_mode": new_state}}} - ) + update_state_file({"enclosure_plc_readings": {"control": {"manual_temp_mode": new_state}}}) def reset(self) -> None: if self.config["tum_plc"]["version"] == 1: From 1a4ffe6acf590efb3d9b39bc5563072f1e2453ac Mon Sep 17 00:00:00 2001 From: patrickaigner Date: Wed, 17 Aug 2022 15:02:35 +0200 Subject: [PATCH 034/132] #91 Added a class docstring to EnclosureControl --- packages/core/modules/enclosure_control.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/packages/core/modules/enclosure_control.py b/packages/core/modules/enclosure_control.py index 5cf1edf9..47f9d41d 100644 --- a/packages/core/modules/enclosure_control.py +++ b/packages/core/modules/enclosure_control.py @@ -15,7 +15,18 @@ class MotorFailedError(Exception): class EnclosureControl: """ - https://buildmedia.readthedocs.org/media/pdf/python-snap7/latest/python-snap7.pdf + EnclosureControl allows to interact with TUM made weather protected enclosures that allow a + 24/7 deployment of the FTIR spectrometer EM27/Sun in the field. The class takes the latest + decision from measurement_conditions.py (StateInterface: measurements_should_be_running) and + communicates with the enclosure's built in Siemens S7 PLC to read and write parameters to its + database (PLCInterface). Additionally, it powers down the spectrometer during dusk to extend + the overall spectrometer lifetime. During dawn, it powers up the spectrometer to prepare and + warm it up for the next measurement day. At initialization, the PLC is set to control the + ambient enclosure temperature in automatic mode. + During flank changes of measurements_should_be_running it either tells the enclosure to + open up the cover to allow direct sunlight to hit the CamTracker mirrors or close the cover + to protect the instrument. Instrument protection from bad weather conditions is always + prioritised over a slight maximization of measurement uptime. 
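The update_state_file calls above only hand over the nested keys that actually changed (for example control.auto_temp_mode), so the JSON state has to be merged recursively instead of being replaced. The project ships an update_dict_recursively helper for this; the function below is only an illustration of the same idea, not that helper.

def merge_recursively(old: dict, update: dict) -> dict:
    # merge nested dicts key by key, overwrite everything else
    merged = dict(old)
    for key, value in update.items():
        if isinstance(value, dict) and isinstance(merged.get(key), dict):
            merged[key] = merge_recursively(merged[key], value)
        else:
            merged[key] = value
    return merged

state = {"enclosure_plc_readings": {"control": {"auto_temp_mode": False, "manual_control": False}}}
print(merge_recursively(state, {"enclosure_plc_readings": {"control": {"auto_temp_mode": True}}}))
# manual_control stays False, only auto_temp_mode flips to True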
""" def __init__(self, initial_config: dict): From 2e10e5abc2778204973af3ef0f3dd3b920bbc692 Mon Sep 17 00:00:00 2001 From: patrickaigner Date: Wed, 17 Aug 2022 15:37:08 +0200 Subject: [PATCH 035/132] #91: Added extensive documentation to MeasurementConditions --- .../core/modules/measurement_conditions.py | 53 ++++++++++++++----- 1 file changed, 40 insertions(+), 13 deletions(-) diff --git a/packages/core/modules/measurement_conditions.py b/packages/core/modules/measurement_conditions.py index 1e841779..a9a4911c 100644 --- a/packages/core/modules/measurement_conditions.py +++ b/packages/core/modules/measurement_conditions.py @@ -3,8 +3,9 @@ logger = Logger(origin="measurement-conditions") - +# TODO add typing for return val def get_times_from_tuples(triggers: any): + """Returns time tuples (now, start_time, stop_time) for the automatic mode trigger: Time.""" now = datetime.datetime.now() current_time = datetime.time(now.hour, now.minute, now.second) @@ -14,11 +15,34 @@ def get_times_from_tuples(triggers: any): class MeasurementConditions: + """MeasurementConditions allows operation in three different modes: Manual, Automatic, Manual, + and CLI. Whenever a decision is made the parameter measurements_should_be_running in + StateInterface is updated. + + In Manual mode, the user has full control over whether measurements should be active. The user- + controlled state can be controlled by the Pyra UI. + + In Automatic mode, three different triggers are considered: Sun Elevation, Time, and Helios + State. These triggers may also be active in any combination at the same time. Measurements are + only set to be running if all triggers agree, while measurements will be set to be not active + if at least one of the active triggers decides to stop measurements. + + In CLI mode, triggers from external sources can be considered. This option is available for + custom-built systems or sensors not part of Pyra-4. It is also possible in this mode to move the + measurement control to remote systems i.e. by ssh.""" + def __init__(self, initial_config: dict): self._CONFIG = initial_config def run(self, new_config: dict): + """Called in every cycle of the main loop. + Updates StateInterface: measurements_should_be_running based on the selected mode, triggers + and present conditions.""" + + # Read the latest config self._CONFIG = new_config + + # Skip rest of the function if test mode is active if self._CONFIG["general"]["test_mode"]: logger.debug("Skipping MeasurementConditions in test mode") return @@ -27,6 +51,7 @@ def run(self, new_config: dict): decision = self._CONFIG["measurement_decision"] logger.debug(f"Decision mode for measurements is: {decision['mode']}.") + # Selection and evaluation of the current set measurement mode if decision["mode"] == "manual": measurements_should_be_running = decision["manual_decision_result"] if decision["mode"] == "cli": @@ -42,18 +67,20 @@ def run(self, new_config: dict): "start-measurements" if measurements_should_be_running else "stop-measurements" ) - logger.info( - f"Measurements should be running is set to: {measurements_should_be_running}." 
- ) - StateInterface.update( - {"measurements_should_be_running": measurements_should_be_running} - ) + logger.info(f"Measurements should be running is set to: {measurements_should_be_running}.") + # Update of the StateInterface with the latest measurement decision + StateInterface.update({"measurements_should_be_running": measurements_should_be_running}) def _get_automatic_decision(self) -> bool: + """Evaluates the activated automatic mode triggers (Sun Angle, Time, Helios). + Reads the config to consider activated measurement triggers. Evaluates active measurement + triggers and combines their states by logical conjunction. + """ triggers = self._CONFIG["measurement_triggers"] if self._CONFIG["helios"] is None: triggers["consider_helios"] = False + # If not triggers are considered during automatic mode return False if not any( [ triggers["consider_sun_elevation"], @@ -63,21 +90,21 @@ def _get_automatic_decision(self) -> bool: ): return False + # Evaluate sun elevation if trigger is active if triggers["consider_sun_elevation"]: logger.info("Sun elevation as a trigger is considered.") current_sun_elevation = Astronomy.get_current_sun_elevation() min_sun_elevation = max( self._CONFIG["general"]["min_sun_elevation"], triggers["min_sun_elevation"] ) - sun_above_threshold = current_sun_elevation > ( - min_sun_elevation * Astronomy.units.deg - ) + sun_above_threshold = current_sun_elevation > (min_sun_elevation * Astronomy.units.deg) if sun_above_threshold: logger.debug("Sun angle is above threshold.") else: logger.debug("Sun angle is below threshold.") return False + # Evaluate time if trigger is active if triggers["consider_time"]: logger.info("Time as a trigger is considered.") current_time, start_time, end_time = get_times_from_tuples(triggers) @@ -86,6 +113,8 @@ def _get_automatic_decision(self) -> bool: if not time_is_valid: return False + # Read latest Helios decision from StateInterface if trigger is active + # Helios runs in a thread and evaluates the sun conditions consistanly during day. if triggers["consider_helios"]: logger.info("Helios as a trigger is considered.") helios_result = StateInterface.read()["helios_indicates_good_conditions"] @@ -94,9 +123,7 @@ def _get_automatic_decision(self) -> bool: logger.debug(f"Helios does not nave enough images yet.") return False - logger.debug( - f"Helios indicates {'good' if helios_result else 'bad'} sun conditions." - ) + logger.debug(f"Helios indicates {'good' if helios_result else 'bad'} sun conditions.") return helios_result return True From ebea8548dcc48af0c8cf184c9c024d8b1ebbb76c Mon Sep 17 00:00:00 2001 From: patrickaigner Date: Wed, 17 Aug 2022 16:04:06 +0200 Subject: [PATCH 036/132] #91: Added documentation to SystemChecks --- packages/core/modules/system_checks.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/packages/core/modules/system_checks.py b/packages/core/modules/system_checks.py index c10f8a13..cf62b719 100644 --- a/packages/core/modules/system_checks.py +++ b/packages/core/modules/system_checks.py @@ -5,6 +5,11 @@ class SystemChecks: + """SystemChecks interacts with the present Operating System through OSInterface. It checks and + logs important parameters (CPU, memory, disk space) to give insight into the overall system + stability. It raises custom errors when the disk runs out of storage or the energy supply is + not ensured. 
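As documented above, the automatic decision is a pure conjunction: if no trigger is activated the result is False, and if any activated trigger (sun elevation, time window, Helios) is not satisfied the result is False as well. Stripped of the astronomy and state lookups, the logic looks roughly like this; the boolean inputs stand in for the real sensor readings.

def automatic_decision(
    consider_sun_elevation: bool,
    consider_time: bool,
    consider_helios: bool,
    sun_is_high: bool,
    time_is_valid: bool,
    helios_indicates_good_conditions: bool,
) -> bool:
    if not any([consider_sun_elevation, consider_time, consider_helios]):
        return False  # nothing to base a decision on
    if consider_sun_elevation and not sun_is_high:
        return False
    if consider_time and not time_is_valid:
        return False
    if consider_helios and not helios_indicates_good_conditions:
        return False
    return True

# sun and time triggers active and satisfied, Helios not considered
print(automatic_decision(True, True, False, True, True, False))  # True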
SystemChecks writes the latest readout into StateInterface.""" + def __init__(self, initial_config: dict): self._CONFIG = initial_config From c68df2054a3cce60f3bdec57fabb68205c3c305f Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Wed, 17 Aug 2022 16:51:15 +0200 Subject: [PATCH 037/132] #96 (5) - fix strict typing issues --- packages/core/modules/enclosure_control.py | 8 +- .../core/modules/measurement_conditions.py | 1 - packages/core/modules/opus_measurement.py | 63 ++++--- packages/core/modules/sun_tracking.py | 164 +++++++----------- packages/core/modules/system_checks.py | 2 +- packages/core/threads/abstract_thread_base.py | 2 +- packages/core/threads/helios_thread.py | 12 +- packages/core/threads/upload_thread.py | 3 +- packages/core/utils/functions/ring_list.py | 5 +- .../utils/interfaces/config_validation.py | 4 +- .../core/utils/interfaces/os_interface.py | 9 +- .../core/utils/interfaces/state_interface.py | 4 +- scripts/run_type_analysis.sh | 2 +- 13 files changed, 140 insertions(+), 139 deletions(-) diff --git a/packages/core/modules/enclosure_control.py b/packages/core/modules/enclosure_control.py index f0239d34..fb7626d8 100644 --- a/packages/core/modules/enclosure_control.py +++ b/packages/core/modules/enclosure_control.py @@ -211,9 +211,13 @@ def verify_cover_position(self) -> None: self.wait_for_cover_closing() def verify_cover_sync(self) -> None: - if self.measurements_should_be_running & (not self.plc_state.control.sync_to_tracker): + if self.measurements_should_be_running and ( + not self.plc_state.control.sync_to_tracker + ): logger.debug("Set sync to tracker to True to match measurement status.") self.plc_interface.set_sync_to_tracker(True) - if (not self.measurements_should_be_running) & self.plc_state.control.sync_to_tracker: + if ( + not self.measurements_should_be_running + ) and self.plc_state.control.sync_to_tracker: logger.debug("Set sync to tracker to False to match measurement status.") self.plc_interface.set_sync_to_tracker(False) diff --git a/packages/core/modules/measurement_conditions.py b/packages/core/modules/measurement_conditions.py index 6e1a00ea..e8febdab 100644 --- a/packages/core/modules/measurement_conditions.py +++ b/packages/core/modules/measurement_conditions.py @@ -1,5 +1,4 @@ import datetime -from typing import Literal from packages.core.utils import Astronomy, StateInterface, Logger logger = Logger(origin="measurement-conditions") diff --git a/packages/core/modules/opus_measurement.py b/packages/core/modules/opus_measurement.py index b2934f75..26701a2a 100644 --- a/packages/core/modules/opus_measurement.py +++ b/packages/core/modules/opus_measurement.py @@ -13,10 +13,6 @@ logger = Logger(origin="opus-measurement") -class SpectrometerError(Exception): - pass - - class OpusMeasurement: """Creates a working DDE connection to the OPUS DDE Server. 
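SystemChecks, as described above, periodically reads CPU load, memory usage, disk space and battery level and raises dedicated errors when thresholds are violated; the project accesses these values through its OSInterface wrapper around psutil. A compact sketch of such a readout, where the 90 percent disk and 20 percent battery limits mirror the checks visible later in this patch series and the exception types are placeholders:

import psutil

def check_system_stability() -> None:
    cpu_percent = psutil.cpu_percent(interval=1)
    memory_percent = psutil.virtual_memory().percent
    disk_percent = psutil.disk_usage("/").percent  # use a drive letter on Windows
    print(f"cpu {cpu_percent}%, memory {memory_percent}%, disk {disk_percent}%")

    if disk_percent > 90:
        raise RuntimeError("less than 10% disk space left")

    battery = psutil.sensors_battery()  # None on machines without a battery
    if (battery is not None) and (battery.percent < 20):
        raise RuntimeError("battery below 20%, check the power supply")

check_system_stability()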
Allows to remotely control experiments and macros in OPUS over the @@ -33,6 +29,8 @@ def __init__(self, initial_config: dict): self.__initialize() def __initialize(self) -> None: + assert sys.platform == "win32" + # note: dde servers talk to dde servers self.server = dde.CreateServer() self.server.Create("Client") @@ -42,8 +40,8 @@ def __initialize(self) -> None: def run(self, new_config: dict) -> None: self._CONFIG = new_config - if self._CONFIG["general"]["test_mode"]: - logger.debug("Skipping OpusMeasurement in test mode") + if self._CONFIG["general"]["test_mode"] or (sys.platform != "win32"): + logger.debug("Skipping OpusMeasurement in test mode and on non-windows systems") return logger.info("Running OpusMeasurement") @@ -87,17 +85,18 @@ def run(self, new_config: dict) -> None: self.last_cycle_automation_status = measurements_should_be_running def __connect_to_dde_opus(self) -> None: + assert sys.platform == "win32" try: self.conversation.ConnectTo("OPUS", "OPUS/System") logger.info("Connected to OPUS DDE Server.") except: logger.info("Could not connect to OPUS DDE Server.") - @property def __test_dde_connection(self) -> bool: """Tests the DDE connection. Tries to reinitialize the DDE socket if connection test fails. """ + assert sys.platform == "win32" # conversation.Connected() returns 1 if connected if self.conversation.Connected() == 1: @@ -118,10 +117,12 @@ def __test_dde_connection(self) -> bool: def load_experiment(self) -> None: """Loads a new experiment in OPUS over DDE connection.""" + assert sys.platform == "win32" + self.__connect_to_dde_opus() experiment_path = self._CONFIG["opus"]["experiment_path"] - if not self.__test_dde_connection: + if not self.__test_dde_connection(): return answer = self.conversation.Request("LOAD_EXPERIMENT " + experiment_path) logger.info(f"Loaded new OPUS experiment: {experiment_path}") @@ -138,8 +139,10 @@ def load_experiment(self) -> None: def start_macro(self) -> None: """Starts a new macro in OPUS over DDE connection.""" + assert sys.platform == "win32" + self.__connect_to_dde_opus() - if not self.__test_dde_connection: + if not self.__test_dde_connection(): return macro_path = self._CONFIG["opus"]["macro_path"] @@ -159,10 +162,12 @@ def start_macro(self) -> None: def stop_macro(self) -> None: """Stops the currently running macro in OPUS over DDE connection.""" + assert sys.platform == "win32" + self.__connect_to_dde_opus() macro_path = os.path.basename(self._CONFIG["opus"]["macro_path"]) - if not self.__test_dde_connection: + if not self.__test_dde_connection(): return answer = self.conversation.Request("KILL_MACRO " + macro_path) logger.info(f"Stopped OPUS macro: {macro_path}") @@ -178,9 +183,11 @@ def stop_macro(self) -> None: def close_opus(self) -> None: """Closes OPUS via DDE.""" + assert sys.platform == "win32" + self.__connect_to_dde_opus() - if not self.__test_dde_connection: + if not self.__test_dde_connection(): return answer = self.conversation.Request("CLOSE_OPUS") logger.info("Stopped OPUS.exe") @@ -197,10 +204,13 @@ def __shutdown_dde_server(self) -> None: """Note the underlying DDE object (ie, Server, Topics and Items) are not cleaned up by this call. 
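All of the OPUS commands above go through a single pywin32 DDE conversation: connect to the "OPUS" service on the "OPUS/System" topic, check Connected(), then send command strings such as LOAD_EXPERIMENT via Request(). The snippet below compresses that round trip into one script; it only does anything useful on Windows with OPUS open, and the experiment path is a made-up placeholder.

import sys

if sys.platform == "win32":
    import dde  # ships with pywin32, Windows only

    server = dde.CreateServer()
    server.Create("Client")
    conversation = dde.CreateConversation(server)
    try:
        conversation.ConnectTo("OPUS", "OPUS/System")
        if conversation.Connected() == 1:
            # placeholder experiment path, not a real file
            answer = conversation.Request("LOAD_EXPERIMENT C:\\experiments\\demo.xpm")
            print(f"OPUS answered: {answer}")
    except Exception as e:
        print(f"could not talk to OPUS: {e}")
    finally:
        server.Shutdown()
else:
    print("DDE requests are only possible on Windows")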
""" + assert sys.platform == "win32" + self.server.Shutdown() def __destroy_dde_server(self) -> None: """Destroys the underlying C++ object.""" + assert sys.platform == "win32" self.server.Destroy() def __is_em27_responsive(self) -> bool: @@ -208,6 +218,8 @@ def __is_em27_responsive(self) -> bool: True -> Connected False -> Not Connected""" + assert sys.platform == "win32" + response = os.system("ping -n 1 " + self._CONFIG["em27"]["ip"]) return response == 0 @@ -215,6 +227,7 @@ def start_opus(self) -> None: """Uses os.startfile() to start up OPUS This simulates a user click on the opus.exe. """ + assert sys.platform == "win32" opus_path = self._CONFIG["opus"]["executable_path"] opus_username = self._CONFIG["opus"]["username"] @@ -222,12 +235,15 @@ def start_opus(self) -> None: # works only > python3.10 # without cwd CT will have trouble loading its internal database) - os.startfile( - os.path.basename(opus_path), - cwd=os.path.dirname(opus_path), - arguments=f"/LANGUAGE=ENGLISH /DIRECTLOGINPASSWORD={opus_username}@{opus_password}", - show_cmd=2, - ) + try: + os.startfile( # type: ignore + os.path.basename(opus_path), + cwd=os.path.dirname(opus_path), + arguments=f"/LANGUAGE=ENGLISH /DIRECTLOGINPASSWORD={opus_username}@{opus_password}", + show_cmd=2, + ) + except AttributeError: + pass def opus_application_running(self) -> bool: """Checks if OPUS is already running by identifying the window. @@ -235,6 +251,8 @@ def opus_application_running(self) -> bool: False if Application is currently not running on OS True if Application is currently running on OS """ + assert sys.platform == "win32" + # FindWindow(className, windowName) # className: String, The window class name to find, else None # windowName: String, The window name (ie,title) to find, else None @@ -248,12 +266,12 @@ def opus_application_running(self) -> bool: opus_windows_name, ): return True + return False except win32ui.error: return False def test_setup(self) -> None: - if sys.platform != "win32": - return + assert sys.platform == "win32" opus_is_running = self.opus_application_running() if not opus_is_running: @@ -266,7 +284,7 @@ def test_setup(self) -> None: time.sleep(6) assert self.opus_application_running() - assert self.__test_dde_connection + assert self.__test_dde_connection() self.load_experiment() time.sleep(2) @@ -278,6 +296,7 @@ def test_setup(self) -> None: def low_sun_angle_present(self) -> bool: """OPUS closes at the end of the day to start up fresh the next day.""" + assert sys.platform == "win32" sun_angle_is_low: bool = Astronomy.get_current_sun_elevation().is_within_bounds( None, self._CONFIG["general"]["min_sun_elevation"] * Astronomy.units.deg @@ -288,6 +307,7 @@ def automated_process_handling(self) -> None: """Start OPUS.exe if not running and sun angle conditions satisfied. Shuts down OPUS.exe if running and sun angle conditions not satisfied. """ + assert sys.platform == "win32" if not self.low_sun_angle_present(): # start OPUS if not currently running @@ -311,6 +331,8 @@ def automated_process_handling(self) -> None: def wait_for_opus_startup(self) -> None: """Checks for OPUS to be running. Breaks out of the loop after a defined time.""" + assert sys.platform == "win32" + start_time = time.time() while True: if self.opus_application_running(): @@ -324,6 +346,7 @@ def check_for_experiment_change(self) -> None: """Compares the experiment in the config with the current active experiment. To reload an experiment during an active macro the macro needs to be stopped first. 
""" + assert sys.platform == "win32" if self._CONFIG["opus"]["experiment_path"] != self.current_experiment: if StateInterface.read(persistent=True)["active_opus_macro_id"] == None: diff --git a/packages/core/modules/sun_tracking.py b/packages/core/modules/sun_tracking.py index 51723a66..ce0fab2c 100644 --- a/packages/core/modules/sun_tracking.py +++ b/packages/core/modules/sun_tracking.py @@ -5,6 +5,7 @@ import os import sys import time +from typing import Any import jdcal # type: ignore import datetime from packages.core.utils import StateInterface, Logger, OSInterface @@ -20,7 +21,7 @@ def __init__(self, initial_config: dict): if self._CONFIG["general"]["test_mode"]: return - def run(self, new_config: dict): + def run(self, new_config: dict) -> None: self._CONFIG = new_config if self._CONFIG["general"]["test_mode"]: logger.debug("Skipping SunTracking in test mode") @@ -55,7 +56,7 @@ def run(self, new_config: dict): logger.info("Stop CamTracker. Preparing for reinitialization.") self.stop_sun_tracking_automation() - def ct_application_running(self): + def ct_application_running(self) -> bool: """Checks if CamTracker is already running by identifying the window. False if Application is currently not running on OS @@ -73,12 +74,13 @@ def ct_application_running(self): "paused", ] - def start_sun_tracking_automation(self): + def start_sun_tracking_automation(self) -> None: """Uses os.startfile() to start up the CamTracker executable with additional parameter -automation. The paramter - automation will instruct CamTracker to automatically move the mirrors to the expected sun position during startup. """ + # delete stop.txt file in camtracker folder if present self.clean_stop_file() @@ -86,14 +88,17 @@ def start_sun_tracking_automation(self): # works only > python3.10 # without cwd CT will have trouble loading its internal database) - os.startfile( - os.path.basename(ct_path), - cwd=os.path.dirname(ct_path), - arguments="-autostart", - show_cmd=2, - ) - - def stop_sun_tracking_automation(self): + try: + os.startfile( # type: ignore + os.path.basename(ct_path), + cwd=os.path.dirname(ct_path), + arguments="-autostart", + show_cmd=2, + ) + except AttributeError: + pass + + def stop_sun_tracking_automation(self) -> None: """Tells the CamTracker application to end program and move mirrors to parking position. @@ -107,7 +112,7 @@ def stop_sun_tracking_automation(self): f = open(os.path.join(camtracker_directory, "stop.txt"), "w") f.close() - def clean_stop_file(self): + def clean_stop_file(self) -> None: """CamTracker needs a stop.txt file to safely shutdown. This file needs to be removed after CamTracker shutdown. """ @@ -118,80 +123,53 @@ def clean_stop_file(self): if os.path.exists(stop_file_path): os.remove(stop_file_path) - def read_ct_log_learn_az_elev(self): - """Reads the CamTracker Logfile: LEARN_Az_Elev.dat. - - Returns a list of string parameter: - [ - Julian Date, - Tracker Elevation, - Tracker Azimuth, - Elev Offset from Astro, - Az Offset from Astro, - Ellipse distance/px + def read_ct_log_learn_az_elev(self) -> tuple[float, float, float, float, float, float]: + """ + Reads the CamTracker Logfile: LEARN_Az_Elev.dat. 
+ + Returns a list of string parameter: [ + Julian Date, + Tracker Elevation, + Tracker Azimuth, + Elev Offset from Astro, + Az Offset from Astro, + Ellipse distance/px ] + + Raises AssertionError if log file is invalid """ - # read azimuth and elevation motor offsets from camtracker logfiles - target = self._CONFIG["camtracker"]["learn_az_elev_path"] - if not os.path.isfile(target): - return [None, None, None, None, None, None] + # read azimuth and elevation motor offsets from camtracker logfiles + ct_logfile_path = self._CONFIG["camtracker"]["learn_az_elev_path"] + assert os.path.isfile(ct_logfile_path), "camtracker logfile not found" - f = open(target, "r") - last_line = f.readlines()[-1] - f.close() + with open(ct_logfile_path) as f: + last_line = f.readlines()[-1] # last_line: [Julian Date, Tracker Elevation, Tracker Azimuth, # Elev Offset from Astro, Az Offset from Astro, Ellipse distance/px] - last_line = last_line.replace(" ", "").replace("\n", "").split(",") - - # convert julian day to greg calendar as tuple (Year, Month, Day) - jddate = jdcal.jd2gcal(float(last_line[0]), 0)[:3] - - # get current date(example below) - # date = (Year, Month, Day) - now = datetime.datetime.now() - date = (now.year, now.month, now.day) - - # if the in the log file read date is up-to-date - if date == jddate: - return last_line - else: - return [None, None, None, None, None, None] + str_values = last_line.replace(" ", "").replace("\n", "").split(",") - def __read_ct_log_sunintensity(self): - """Reads the CamTracker Logile: SunIntensity.dat. - - Returns the sun intensity as either 'good', 'bad', 'None'. - """ - # check sun status logged by camtracker - target = self._CONFIG["camtracker"]["sun_intensity_path"] - - if not os.path.isfile(target): - return - - f = open(target, "r") - last_line = f.readlines()[-1] - f.close() - - sun_intensity = last_line.split(",")[3].replace(" ", "").replace("\n", "") + try: + assert len(str_values) == 6 + float_values: Any = tuple([float(v) for v in str_values]) + except (AssertionError, ValueError): + raise AssertionError(f'invalid last logfile line "{last_line}"') # convert julian day to greg calendar as tuple (Year, Month, Day) - jddate = jdcal.jd2gcal( - float(last_line.replace(" ", "").replace("\n", "").split(",")[0]), 0 - )[:3] + jddate = jdcal.jd2gcal(float(last_line[0]), 0)[:3] - # get current date(example below) - # date = (Year, Month, Day) + # assert that the log file is up-to-date now = datetime.datetime.now() - date = (now.year, now.month, now.day) + assert jddate == ( + now.year, + now.month, + now.day, + ), f'date in file is too old: "{last_line}"' - # if file is up to date - if date == jddate: - # returns either 'good' or 'bad' - return sun_intensity + return float_values - def validate_tracker_position(self): + def validate_tracker_position(self) -> bool: """Reads motor offsets and compares it with defined threshold. 
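read_ct_log_learn_az_elev above strips and splits the last line of LEARN_Az_Elev.dat into six floats and uses jdcal to verify that the Julian date in the first column refers to today (note that the refactored method still feeds last_line[0], the first character of the raw line, into jd2gcal; float_values[0] is presumably the intended argument). The parsing step in isolation, on a made-up example line:

import datetime
import jdcal  # the same library the module above imports

# made-up example: julian date, elevation, azimuth, elev offset, az offset, ellipse px
last_line = "2459809.5, 34.1, 181.2, 0.002, -0.001, 12.0\n"

str_values = last_line.replace(" ", "").replace("\n", "").split(",")
assert len(str_values) == 6
julian_date, elevation, azimuth, elev_offset, az_offset, ellipse_px = [
    float(v) for v in str_values
]

# jd2gcal returns (year, month, day, fraction of the day)
log_date = jdcal.jd2gcal(julian_date, 0)[:3]
now = datetime.datetime.now()
print(log_date, log_date == (now.year, now.month, now.day))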
Returns @@ -199,41 +177,27 @@ def validate_tracker_position(self): False -> CamTracker lost sun position """ + # fails if file integrity is broken tracker_status = self.read_ct_log_learn_az_elev() - if None in tracker_status: - return - - elev_offset = float(tracker_status[3]) - az_offeset = float(tracker_status[4]) - threshold = float(self._CONFIG["camtracker"]["motor_offset_threshold"]) + elev_offset = tracker_status[3] + az_offeset = tracker_status[4] + threshold = self._CONFIG["camtracker"]["motor_offset_threshold"] - if (abs(elev_offset) > threshold) or (abs(az_offeset) > threshold): - return False + return (abs(elev_offset) <= threshold) and (abs(az_offeset) <= threshold) - return True - - def test_setup(self): - if sys.platform != "win32": - return - - ct_is_running = self.ct_application_running - if not ct_is_running: + def test_setup(self) -> None: + """ + Test whether starting and stopping of CamTracker works + """ + if not self.ct_application_running(): self.start_sun_tracking_automation() - try_count = 0 - while try_count < 10: - if self.ct_application_running: + for _ in range(10): + if self.ct_application_running(): break - try_count += 1 time.sleep(6) - assert self.ct_application_running - - # time.sleep(20) - + assert self.ct_application_running() self.stop_sun_tracking_automation() time.sleep(10) - - assert not self.ct_application_running - - assert False + assert not self.ct_application_running() diff --git a/packages/core/modules/system_checks.py b/packages/core/modules/system_checks.py index 542b331f..7b2a61ac 100644 --- a/packages/core/modules/system_checks.py +++ b/packages/core/modules/system_checks.py @@ -7,7 +7,7 @@ class SystemChecks: def __init__(self, initial_config: dict): self._CONFIG = initial_config - def run(self, new_config: dict): + def run(self, new_config: dict) -> None: self._CONFIG = new_config logger.info("Running SystemChecks") diff --git a/packages/core/threads/abstract_thread_base.py b/packages/core/threads/abstract_thread_base.py index 9a4cfbf8..e298a459 100644 --- a/packages/core/threads/abstract_thread_base.py +++ b/packages/core/threads/abstract_thread_base.py @@ -40,6 +40,6 @@ def should_be_running(self) -> bool: pass @abc.abstractmethod - def main(self): + def main(self) -> None: """Main entrypoint of the thread""" pass diff --git a/packages/core/threads/helios_thread.py b/packages/core/threads/helios_thread.py index 01802dcc..cf93b3ff 100644 --- a/packages/core/threads/helios_thread.py +++ b/packages/core/threads/helios_thread.py @@ -168,6 +168,9 @@ def adjust_exposure() -> None: to the value where the overall mean pixel value color is closest to 50. 
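The autoexposure routine described above takes one frame per available exposure value and keeps the exposure whose mean pixel value is closest to the target of 50; the selection via min over abs(mean - 50) follows below. Reduced to that selection step, with fabricated measurements instead of camera frames:

# fabricated (exposure, mean pixel value) pairs instead of real frames
exposure_results = [
    {"exposure": -12, "mean": 9.5},
    {"exposure": -9, "mean": 41.0},
    {"exposure": -6, "mean": 66.0},
    {"exposure": -3, "mean": 174.0},
]

new_exposure = min(exposure_results, key=lambda r: abs(r["mean"] - 50))["exposure"]
print(new_exposure)  # -9, because a mean of 41.0 is closest to the target of 50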
""" + assert _Helios.available_exposures is not None + assert len(_Helios.available_exposures) > 0 + exposure_results = [] for e in _Helios.available_exposures: _Helios.update_camera_settings(exposure=e) @@ -180,8 +183,8 @@ def adjust_exposure() -> None: cv.imwrite(os.path.join(AUTOEXPOSURE_IMG_DIR, f"exposure-{e}.jpg"), img) logger.debug(f"exposure results: {exposure_results}") - new_exposure = min(exposure_results, key=lambda r: abs(r["mean"] - 50))["exposure"] + new_exposure = min(exposure_results, key=lambda r: abs(r["mean"] - 50))["exposure"] _Helios.update_camera_settings(exposure=new_exposure) if new_exposure != _Helios.current_exposure: @@ -214,7 +217,9 @@ def determine_frame_status(frame: cv.Mat, save_image: bool) -> Literal[0, 1]: circle_cx, circle_cy, circle_r = ImageProcessing.get_circle_location(binary_mask) # only consider edges and make them bold - edges_only = np.array(cv.Canny(single_valued_pixels, 40, 40), dtype=np.float32) + edges_only: np.ndarray = np.array( + cv.Canny(single_valued_pixels, 40, 40), dtype=np.float32 + ) edges_only_dilated = cv.dilate( edges_only, cv.getStructuringElement(cv.MORPH_ELLIPSE, (5, 5)) ) @@ -395,7 +400,8 @@ def main(self, infinite_loop: bool = True, headless: bool = False) -> None: time.sleep(time_to_wait) if not infinite_loop: - return status_history + break + # return status_history except Exception as e: status_history.empty() diff --git a/packages/core/threads/upload_thread.py b/packages/core/threads/upload_thread.py index d20f6e34..1d89b53b 100644 --- a/packages/core/threads/upload_thread.py +++ b/packages/core/threads/upload_thread.py @@ -3,7 +3,6 @@ import json import os import shutil -from typing import Optional import invoke import paramiko import time @@ -37,7 +36,9 @@ def __init__(self, date_string: str, config: dict): ) self.transfer_process = fabric.transfer.Transfer(self.connection) + self.config = config self.date_string = date_string + self.src_dir_path = os.path.join(config["upload"]["src_directory"], date_string) self.src_meta_path = os.path.join(self.src_dir_path, "upload-meta.json") assert os.path.isdir(self.src_dir_path), f"{self.src_dir_path} is not a directory" diff --git a/packages/core/utils/functions/ring_list.py b/packages/core/utils/functions/ring_list.py index dea63eb6..3eea0f94 100644 --- a/packages/core/utils/functions/ring_list.py +++ b/packages/core/utils/functions/ring_list.py @@ -1,3 +1,6 @@ +# TODO add static types + + class RingList: """ Base code created by Flavio Catalani on Tue, 5 Jul 2005 (PSF). 
@@ -27,7 +30,7 @@ def append(self, x): def get(self): return self.__data__ - def remove(self): + def remove(self) -> None: if self.__cur__ > 0: del self.__data__[self.__cur__ - 1] self.__cur__ -= 1 diff --git a/packages/core/utils/interfaces/config_validation.py b/packages/core/utils/interfaces/config_validation.py index 9d0039ac..c21f3736 100644 --- a/packages/core/utils/interfaces/config_validation.py +++ b/packages/core/utils/interfaces/config_validation.py @@ -29,11 +29,11 @@ def _is_valid_ip_adress(field, value, error): error(field, "String has to be a valid IPv4 address") -def get_dict_schema(s: dict) -> dict: +def get_dict_schema(s: dict) -> dict: # type: ignore return {"type": "dict", "schema": s} -def get_nullable_dict_schema(s: dict) -> dict: +def get_nullable_dict_schema(s: dict) -> dict: # type: ignore return {"type": "dict", "schema": s, "nullable": True} diff --git a/packages/core/utils/interfaces/os_interface.py b/packages/core/utils/interfaces/os_interface.py index 519646cf..7017122f 100644 --- a/packages/core/utils/interfaces/os_interface.py +++ b/packages/core/utils/interfaces/os_interface.py @@ -34,7 +34,7 @@ def get_disk_space() -> float: return disk.percent @staticmethod - def validate_disk_space(): + def validate_disk_space() -> None: """Raises an error if the diskspace is less than 10%""" if OSInterface.get_disk_space() > 90: raise OSInterface.StorageError( @@ -75,10 +75,11 @@ def get_system_battery() -> int: return 100 @staticmethod - def validate_system_battery(): + def validate_system_battery() -> None: """Raises LowEnergyError if system battery runs lower than 20%.""" - if psutil.sensors_battery(): - if psutil.sensors_battery().percent < 20.0: + battery_state = psutil.sensors_battery() + if battery_state is not None: + if battery_state.percent < 20: raise OSInterface.LowEnergyError( "The battery of the system is below 20%. Please check the power supply." ) diff --git a/packages/core/utils/interfaces/state_interface.py b/packages/core/utils/interfaces/state_interface.py index 6b56f585..24a1148e 100644 --- a/packages/core/utils/interfaces/state_interface.py +++ b/packages/core/utils/interfaces/state_interface.py @@ -40,7 +40,7 @@ class StateInterface: @staticmethod @with_filelock(STATE_LOCK_PATH) - def initialize(): + def initialize() -> None: """ This will create two files: @@ -85,7 +85,7 @@ def read(persistent: bool = False) -> dict: @staticmethod @with_filelock(STATE_LOCK_PATH) - def update(update: dict, persistent: bool = False): + def update(update: dict, persistent: bool = False) -> None: """ Update the (persistent) state file and return its content. 
The update object should only include the properties to be diff --git a/scripts/run_type_analysis.sh b/scripts/run_type_analysis.sh index a0a9d46f..1d3d5680 100644 --- a/scripts/run_type_analysis.sh +++ b/scripts/run_type_analysis.sh @@ -1 +1 @@ -python -m mypy run-pyra-core.py \ No newline at end of file +python -m mypy run-pyra-core.py --strict \ No newline at end of file From a5312f7b7124d6ef43e2a280cfaa2e8139ce6f1a Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Wed, 17 Aug 2022 17:04:50 +0200 Subject: [PATCH 038/132] #96 (6) - fix strict typing issues --- packages/core/main.py | 4 +- packages/core/modules/opus_measurement.py | 2 +- .../core/utils/interfaces/state_interface.py | 42 ++++++++++++++----- packages/core/utils/types.py | 12 +++++- scripts/play.py | 22 ++++++++++ 5 files changed, 68 insertions(+), 14 deletions(-) create mode 100644 scripts/play.py diff --git a/packages/core/main.py b/packages/core/main.py index d73eecce..d287cf33 100644 --- a/packages/core/main.py +++ b/packages/core/main.py @@ -45,7 +45,7 @@ def update_exception_state( Logger.log_activity_event("errors-resolved") # if no errors until now - StateInterface.update({"current_exceptions": current_exceptions}, persistent=True) + StateInterface.update_persistent({"current_exceptions": current_exceptions}) return updated_current_exceptions except Exception as e: @@ -90,7 +90,7 @@ def run() -> None: helios_thread_instance = threads.helios_thread.HeliosThread(config) upload_thread_instance = threads.upload_thread.UploadThread(config) - current_exceptions = StateInterface.read(persistent=True)["current_exceptions"] + current_exceptions = StateInterface.read_persistent()["current_exceptions"] while True: start_time = time.time() diff --git a/packages/core/modules/opus_measurement.py b/packages/core/modules/opus_measurement.py index 26701a2a..53e294ed 100644 --- a/packages/core/modules/opus_measurement.py +++ b/packages/core/modules/opus_measurement.py @@ -349,7 +349,7 @@ def check_for_experiment_change(self) -> None: assert sys.platform == "win32" if self._CONFIG["opus"]["experiment_path"] != self.current_experiment: - if StateInterface.read(persistent=True)["active_opus_macro_id"] == None: + if StateInterface.read_persistent()["active_opus_macro_id"] == None: self.load_experiment() else: self.stop_macro() diff --git a/packages/core/utils/interfaces/state_interface.py b/packages/core/utils/interfaces/state_interface.py index 24a1148e..679faefe 100644 --- a/packages/core/utils/interfaces/state_interface.py +++ b/packages/core/utils/interfaces/state_interface.py @@ -1,7 +1,7 @@ import json import os import shutil -from packages.core.utils import with_filelock, update_dict_recursively +from packages.core.utils import types, with_filelock, update_dict_recursively from .plc_interface import EMPTY_PLC_STATE dir = os.path.dirname @@ -29,12 +29,13 @@ }, } -EMPTY_PERSISTENT_STATE_OBJECT: dict = { +EMPTY_PERSISTENT_STATE_OBJECT: types.PersistentStateDict = { "active_opus_macro_id": None, "current_exceptions": [], } # TODO: Validate structure with cerberus (assertion) +# we could possibly use pydantic for that class StateInterface: @@ -77,25 +78,46 @@ def initialize() -> None: @staticmethod @with_filelock(STATE_LOCK_PATH) - def read(persistent: bool = False) -> dict: - """Read the (persistent) state file and return its content""" - file_path = PERSISTENT_STATE_FILE_PATH if persistent else STATE_FILE_PATH - with open(file_path, "r") as f: + def read() -> dict: + """Read the state file and return its content""" + with 
open(STATE_FILE_PATH, "r") as f: return json.load(f) @staticmethod @with_filelock(STATE_LOCK_PATH) - def update(update: dict, persistent: bool = False) -> None: + def read_persistent() -> types.PersistentStateDict: + """Read the persistent state file and return its content""" + with open(PERSISTENT_STATE_FILE_PATH, "r") as f: + return json.load(f) + + @staticmethod + @with_filelock(STATE_LOCK_PATH) + def update(update: dict) -> None: + """ + Update the (persistent) state file and return its content. + The update object should only include the properties to be + changed in contrast to containing the whole file. + """ + + with open(STATE_FILE_PATH, "r") as f: + current_state = json.load(f) + + new_state = update_dict_recursively(current_state, update) + with open(STATE_FILE_PATH, "w") as f: + json.dump(new_state, f, indent=4) + + @staticmethod + @with_filelock(STATE_LOCK_PATH) + def update_persistent(update: types.PartialPersistentStateDict) -> None: """ Update the (persistent) state file and return its content. The update object should only include the properties to be changed in contrast to containing the whole file. """ - file_path = PERSISTENT_STATE_FILE_PATH if persistent else STATE_FILE_PATH - with open(file_path, "r") as f: + with open(PERSISTENT_STATE_FILE_PATH, "r") as f: current_state = json.load(f) new_state = update_dict_recursively(current_state, update) - with open(file_path, "w") as f: + with open(PERSISTENT_STATE_FILE_PATH, "w") as f: json.dump(new_state, f, indent=4) diff --git a/packages/core/utils/types.py b/packages/core/utils/types.py index 76316d9d..f40dc8dc 100644 --- a/packages/core/utils/types.py +++ b/packages/core/utils/types.py @@ -1,4 +1,4 @@ -from typing import TypedDict +from typing import Optional, TypedDict class UploadMetaDict(TypedDict): @@ -13,3 +13,13 @@ class PartialUploadMetaDict(TypedDict, total=False): fileList: list[str] createdTime: float lastModifiedTime: float + + +class PersistentStateDict(TypedDict): + active_opus_macro_id: Optional[int] + current_exceptions: list[str] + + +class PartialPersistentStateDict(TypedDict, total=False): + active_opus_macro_id: Optional[int] + current_exceptions: list[str] diff --git a/scripts/play.py b/scripts/play.py new file mode 100644 index 00000000..6ceaa1f4 --- /dev/null +++ b/scripts/play.py @@ -0,0 +1,22 @@ +from typing import TypedDict + + +class UploadMetaDict(TypedDict): + complete: bool + fileList: list[str] + createdTime: float + lastModifiedTime: float + + +class PartialUploadMetaDict(TypedDict, total=False): + complete: bool + fileList: list[str] + createdTime: float + lastModifiedTime: float + + +a: PartialUploadMetaDict = { + "complete": False, + "createdTime": 0.0, + "lastModifiedTime": 0.0, +} From 13e704b4163f6a48d0b3ea43a934b13786a4d361 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Wed, 17 Aug 2022 18:00:28 +0200 Subject: [PATCH 039/132] #96 (7) - strict typing issues, validate upload meta at runtime --- packages/core/__init__.py | 2 +- packages/core/threads/upload_thread.py | 17 +++++---- packages/core/utils/__init__.py | 3 +- .../core/utils/decorators/with_filelock.py | 9 +++-- packages/core/utils/functions/logger.py | 16 ++++---- packages/core/utils/functions/ring_list.py | 26 +++++++------ .../core/utils/interfaces/state_interface.py | 9 +++-- packages/core/utils/types.py | 25 ------------ packages/core/utils/types/__init__.py | 2 + packages/core/utils/types/persistent_state.py | 11 ++++++ packages/core/utils/types/upload_meta.py | 38 +++++++++++++++++++ poetry.lock | 20 +++++++++- 
pyproject.toml | 1 + scripts/run_type_analysis.sh | 2 +- 14 files changed, 117 insertions(+), 64 deletions(-) delete mode 100644 packages/core/utils/types.py create mode 100644 packages/core/utils/types/__init__.py create mode 100644 packages/core/utils/types/persistent_state.py create mode 100644 packages/core/utils/types/upload_meta.py diff --git a/packages/core/__init__.py b/packages/core/__init__.py index bd3c66f5..8ba720f3 100644 --- a/packages/core/__init__.py +++ b/packages/core/__init__.py @@ -1,3 +1,3 @@ -from .utils import types +from .utils.types import upload_meta from .utils.interfaces import config_validation from . import modules, main, threads diff --git a/packages/core/threads/upload_thread.py b/packages/core/threads/upload_thread.py index 1d89b53b..35eeac01 100644 --- a/packages/core/threads/upload_thread.py +++ b/packages/core/threads/upload_thread.py @@ -8,7 +8,9 @@ import time import fabric # type: ignore import re -from packages.core.utils import ConfigInterface, Logger, types +import pydantic +from packages.core.utils import ConfigInterface, Logger +from packages.core.utils.types import UploadMetaTypes from .abstract_thread_base import AbstractThreadBase logger = Logger(origin="upload") @@ -49,7 +51,7 @@ def __init__(self, date_string: str, config: dict): config["upload"]["dst_directory"] ), f"remote {config['upload']['dst_directory']} is not a directory" - self.meta_content: types.UploadMetaDict = { + self.meta_content: UploadMetaTypes.Dict = { "complete": False, "fileList": [], "createdTime": round(time.time(), 3), @@ -128,12 +130,13 @@ def __fetch_meta(self) -> None: try: assert os.path.isfile(self.src_meta_path) with open(self.src_meta_path, "r") as f: - # TODO: validate this with cerberus - self.meta_content = json.load(f) - except (AssertionError, json.JSONDecodeError) as e: + new_meta_content = json.load(f) + UploadMetaTypes.validate_object(new_meta_content) + self.meta_content = new_meta_content + except (AssertionError, json.JSONDecodeError, pydantic.ValidationError) as e: raise InvalidUploadState(str(e)) - def __update_meta(self, new_meta_content_partial: types.PartialUploadMetaDict) -> None: + def __update_meta(self, new_meta_content_partial: UploadMetaTypes.PartialDict) -> None: """ Update the local upload-meta.json file and overwrite the meta file on the server @@ -243,7 +246,7 @@ def __is_valid_date(date_string: str) -> bool: return False @staticmethod - def get_directories_to_be_uploaded(ifg_src_path) -> list[str]: + def get_directories_to_be_uploaded(ifg_src_path: str) -> list[str]: if not os.path.isdir(ifg_src_path): return [] diff --git a/packages/core/utils/__init__.py b/packages/core/utils/__init__.py index 9bcca9be..051138b6 100644 --- a/packages/core/utils/__init__.py +++ b/packages/core/utils/__init__.py @@ -13,4 +13,5 @@ from .interfaces import PLCInterface from .interfaces import OSInterface -from . import types +from .types import UploadMetaTypes +from .types import PersistentStateTypes diff --git a/packages/core/utils/decorators/with_filelock.py b/packages/core/utils/decorators/with_filelock.py index 6ecc8f5a..11e2fdcc 100644 --- a/packages/core/utils/decorators/with_filelock.py +++ b/packages/core/utils/decorators/with_filelock.py @@ -1,4 +1,5 @@ import filelock +from typing import Any, Callable, TypeVar, cast # FileLock = Mark, that a file is being used and other programs # should not interfere. 
A file "*.lock" will be created and the @@ -7,13 +8,15 @@ # A timeout of -1 means that the code waits forever +F = TypeVar("F", bound=Callable[..., Any]) -def with_filelock(file_lock_path, timeout=-1): - def with_fixed_filelock(function): + +def with_filelock(file_lock_path: str, timeout: float = -1): + def with_fixed_filelock(function: F) -> F: def locked_function(*args, **kwargs): with filelock.FileLock(file_lock_path, timeout=timeout): return function(*args, **kwargs) - return locked_function + return cast(F, locked_function) return with_fixed_filelock diff --git a/packages/core/utils/functions/logger.py b/packages/core/utils/functions/logger.py index 8decccc6..1ef632b8 100644 --- a/packages/core/utils/functions/logger.py +++ b/packages/core/utils/functions/logger.py @@ -31,23 +31,23 @@ def __init__(self, origin="pyra.core", just_print: bool = False): self.origin = origin self.just_print = just_print - def debug(self, message: str): + def debug(self, message: str) -> None: self._write_log_line("DEBUG", message) - def info(self, message: str): + def info(self, message: str) -> None: self._write_log_line("INFO", message) - def warning(self, message: str): + def warning(self, message: str) -> None: self._write_log_line("WARNING", message) - def error(self, message: str): + def error(self, message: str) -> None: self._write_log_line("ERROR", message) - def exception(self, e: Exception): + def exception(self, e: Exception) -> None: tb = "\n".join(traceback.format_exception(e)) self._write_log_line("EXCEPTION", f"{type(e).__name__} occured: {tb}") - def _write_log_line(self, level: str, message: str): + def _write_log_line(self, level: str, message: str) -> None: now = datetime.now() utc_offset = round((datetime.now() - datetime.utcnow()).total_seconds() / 3600, 1) if round(utc_offset) == utc_offset: @@ -73,7 +73,7 @@ def _write_log_line(self, level: str, message: str): Logger.last_archive_time = now @staticmethod - def archive(keep_last_hour=False): + def archive(keep_last_hour: bool = False) -> None: with filelock.FileLock(LOG_FILES_LOCK): with open(DEBUG_LOG_FILE, "r") as f: log_lines_in_file = f.readlines() @@ -118,7 +118,7 @@ def archive(keep_last_hour=False): f.writelines(archive_log_date_groups[date][t] + [""]) @staticmethod - def log_activity_event(event_label: str): + def log_activity_event(event_label: str) -> None: """ Log things like: * start-measurements diff --git a/packages/core/utils/functions/ring_list.py b/packages/core/utils/functions/ring_list.py index 3eea0f94..d092cec0 100644 --- a/packages/core/utils/functions/ring_list.py +++ b/packages/core/utils/functions/ring_list.py @@ -7,16 +7,18 @@ class RingList: Added sum() and reinitialize() functions. 
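For context, RingList is a fixed-capacity integer buffer: while it is not yet full, append simply grows the list, and once size() equals maxsize() each further append shifts the oldest value out. A short usage sketch, assuming the repository root is on the Python path and the project dependencies are installed (the import path mirrors the file location in this diff):

    from packages.core.utils.functions.ring_list import RingList

    ring = RingList(3)                  # keep at most the 3 newest integers
    for value in [1, 2, 3, 4]:
        ring.append(value)
    print(ring.get())                   # [2, 3, 4] - the oldest value was shifted out
    print(ring.sum())                   # 9
    print(ring.size(), ring.maxsize())  # 3 3
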
""" - def __init__(self, length): - self.__max__ = length - self.empty() + def __init__(self, length: int): + self.__max__: int = length + self.__data__: list[int] = [] + self.__full__: int = 0 + self.__cur__: int = 0 - def empty(self): + def empty(self) -> None: self.__data__ = [] self.__full__ = 0 self.__cur__ = 0 - def append(self, x): + def append(self, x: int) -> None: if self.__full__ == 1: for i in range(0, self.__cur__ - 1): self.__data__[i] = self.__data__[i + 1] @@ -27,7 +29,7 @@ def append(self, x): if self.__cur__ == self.__max__: self.__full__ = 1 - def get(self): + def get(self) -> list[int]: return self.__data__ def remove(self) -> None: @@ -35,16 +37,16 @@ def remove(self) -> None: del self.__data__[self.__cur__ - 1] self.__cur__ -= 1 - def size(self): + def size(self) -> int: return self.__cur__ - def maxsize(self): + def maxsize(self) -> int: return self.__max__ - def sum(self): - return float(sum(self.get())) + def sum(self) -> int: + return sum(self.get()) - def reinitialize(self, length): + def reinitialize(self, length: int) -> None: self.__max__ = length self.__full__ = 0 self.__cur__ = 0 @@ -54,5 +56,5 @@ def reinitialize(self, length): for item in handover_list: self.append(item) - def __str__(self): + def __str__(self) -> str: return str(self.__data__) diff --git a/packages/core/utils/interfaces/state_interface.py b/packages/core/utils/interfaces/state_interface.py index 679faefe..f2f14416 100644 --- a/packages/core/utils/interfaces/state_interface.py +++ b/packages/core/utils/interfaces/state_interface.py @@ -1,7 +1,8 @@ import json import os import shutil -from packages.core.utils import types, with_filelock, update_dict_recursively +from packages.core.utils import with_filelock, update_dict_recursively +from packages.core.utils.types import upload_meta from .plc_interface import EMPTY_PLC_STATE dir = os.path.dirname @@ -29,7 +30,7 @@ }, } -EMPTY_PERSISTENT_STATE_OBJECT: types.PersistentStateDict = { +EMPTY_PERSISTENT_STATE_OBJECT: upload_meta.PersistentStateDict = { "active_opus_macro_id": None, "current_exceptions": [], } @@ -85,7 +86,7 @@ def read() -> dict: @staticmethod @with_filelock(STATE_LOCK_PATH) - def read_persistent() -> types.PersistentStateDict: + def read_persistent() -> upload_meta.PersistentStateDict: """Read the persistent state file and return its content""" with open(PERSISTENT_STATE_FILE_PATH, "r") as f: return json.load(f) @@ -108,7 +109,7 @@ def update(update: dict) -> None: @staticmethod @with_filelock(STATE_LOCK_PATH) - def update_persistent(update: types.PartialPersistentStateDict) -> None: + def update_persistent(update: upload_meta.PartialPersistentStateDict) -> None: """ Update the (persistent) state file and return its content. 
The update object should only include the properties to be diff --git a/packages/core/utils/types.py b/packages/core/utils/types.py deleted file mode 100644 index f40dc8dc..00000000 --- a/packages/core/utils/types.py +++ /dev/null @@ -1,25 +0,0 @@ -from typing import Optional, TypedDict - - -class UploadMetaDict(TypedDict): - complete: bool - fileList: list[str] - createdTime: float - lastModifiedTime: float - - -class PartialUploadMetaDict(TypedDict, total=False): - complete: bool - fileList: list[str] - createdTime: float - lastModifiedTime: float - - -class PersistentStateDict(TypedDict): - active_opus_macro_id: Optional[int] - current_exceptions: list[str] - - -class PartialPersistentStateDict(TypedDict, total=False): - active_opus_macro_id: Optional[int] - current_exceptions: list[str] diff --git a/packages/core/utils/types/__init__.py b/packages/core/utils/types/__init__.py new file mode 100644 index 00000000..bbc98682 --- /dev/null +++ b/packages/core/utils/types/__init__.py @@ -0,0 +1,2 @@ +from .upload_meta import UploadMetaTypes +from .persistent_state import PersistentStateTypes diff --git a/packages/core/utils/types/persistent_state.py b/packages/core/utils/types/persistent_state.py new file mode 100644 index 00000000..3d61597f --- /dev/null +++ b/packages/core/utils/types/persistent_state.py @@ -0,0 +1,11 @@ +from typing import Optional, TypedDict + + +class PersistentStateTypes: + class Dict(TypedDict): + active_opus_macro_id: Optional[int] + current_exceptions: list[str] + + class PartialDict(TypedDict, total=False): + active_opus_macro_id: Optional[int] + current_exceptions: list[str] diff --git a/packages/core/utils/types/upload_meta.py b/packages/core/utils/types/upload_meta.py new file mode 100644 index 00000000..3a8a8b33 --- /dev/null +++ b/packages/core/utils/types/upload_meta.py @@ -0,0 +1,38 @@ +from typing import Optional, TypedDict + +import pydantic + + +class UploadMetaTypes: + @staticmethod + class Dict(TypedDict): + complete: bool + fileList: list[str] + createdTime: float + lastModifiedTime: float + + @staticmethod + class PartialDict(TypedDict, total=False): + complete: bool + fileList: list[str] + createdTime: float + lastModifiedTime: float + + @staticmethod + def validate_object(o: dict, partial: bool = False) -> None: + """ + Check, whether a given object is a correct UploadMetaDict + Raises a pydantic.ValidationError if the object is invalid. + + This should always be used when loading the object from a + JSON file! 
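This validation pattern works because pydantic v1 (pinned in this commit's pyproject.toml) accepts TypedDicts as field annotations: wrapping the dict in an Optional field of a small BaseModel gives a runtime structure check without giving up the TypedDict for mypy. A self-contained sketch of the same pattern, using a made-up two-key dict instead of the real upload metadata:

    from typing import Optional, TypedDict

    import pydantic


    class DemoMetaDict(TypedDict):
        complete: bool
        fileList: list[str]


    class _DemoValidationModel(pydantic.BaseModel):
        regular: Optional[DemoMetaDict]


    # passes: the dict matches the TypedDict
    _DemoValidationModel(regular={"complete": True, "fileList": ["a.txt"]})

    # raises pydantic.ValidationError, since "fileList" is not a list here
    try:
        _DemoValidationModel(regular={"complete": True, "fileList": "a.txt"})
    except pydantic.ValidationError as e:
        print(e)
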
+ """ + if partial: + _ValidationModel(partial=o) + else: + _ValidationModel(regular=o) + + +class _ValidationModel(pydantic.BaseModel): + regular: Optional[UploadMetaTypes.Dict] + partial: Optional[UploadMetaTypes.PartialDict] diff --git a/poetry.lock b/poetry.lock index 5dccf65d..08dd886f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -363,6 +363,21 @@ category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +[[package]] +name = "pydantic" +version = "1.9.2" +description = "Data validation and settings management using python type hints" +category = "main" +optional = false +python-versions = ">=3.6.1" + +[package.dependencies] +typing-extensions = ">=3.7.4.3" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + [[package]] name = "pyerfa" version = "2.0.0.1" @@ -508,14 +523,14 @@ python-versions = "*" name = "typing-extensions" version = "4.3.0" description = "Backported and Experimental Type Hints for Python 3.7+" -category = "dev" +category = "main" optional = false python-versions = ">=3.7" [metadata] lock-version = "1.1" python-versions = "^3.10" -content-hash = "41558403cd5eef318cc8bcf8d8350a6ead45476d6e12caf5cd0eff76a5a5ae2b" +content-hash = "7f108e4291d261ccafe43af936d8915adde43154dd69f36d547a212f72097211" [metadata.files] astropy = [ @@ -768,6 +783,7 @@ pycparser = [ {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] +pydantic = [] pyerfa = [ {file = "pyerfa-2.0.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:278832de7803f2fb0ef4b14263200f98dfdb3eaa78dc63835d93796fd8fc42c6"}, {file = "pyerfa-2.0.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:629248cebc8626a52e80f69d4e2f30cc6e751f57803f5ba7ec99edd09785d181"}, diff --git a/pyproject.toml b/pyproject.toml index adbaee0c..b78bf958 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -17,6 +17,7 @@ astropy = "5.0.4" jdcal = "1.4.1" psutil = "5.9.1" fabric = "^2.7.1" +pydantic = "^1.9.2" [tool.poetry.dev-dependencies] pytest = "7.1.2" diff --git a/scripts/run_type_analysis.sh b/scripts/run_type_analysis.sh index 1d3d5680..a82147ab 100644 --- a/scripts/run_type_analysis.sh +++ b/scripts/run_type_analysis.sh @@ -1 +1 @@ -python -m mypy run-pyra-core.py --strict \ No newline at end of file +python -m mypy run-pyra-core.py --strict --implicit-reexport --no-warn-unused-ignores \ No newline at end of file From 7951b46774267aee6aff6520c95cda51e179d9b1 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Wed, 17 Aug 2022 18:24:55 +0200 Subject: [PATCH 040/132] #96 (8) - add static typing to higher level decorators --- packages/core/modules/sun_tracking.py | 4 +-- packages/core/threads/helios_thread.py | 8 +++-- .../core/utils/decorators/with_filelock.py | 29 ++++++++++++++----- .../core/utils/functions/image_processing.py | 18 +++++++----- .../functions/update_dict_recursively.py | 5 +++- .../utils/interfaces/config_validation.py | 2 +- .../core/utils/interfaces/state_interface.py | 20 ++++++++----- packages/core/utils/types/persistent_state.py | 20 +++++++++++++ 8 files changed, 77 insertions(+), 29 deletions(-) diff --git a/packages/core/modules/sun_tracking.py b/packages/core/modules/sun_tracking.py index ce0fab2c..a0404a7f 100644 --- a/packages/core/modules/sun_tracking.py +++ b/packages/core/modules/sun_tracking.py 
@@ -152,7 +152,7 @@ def read_ct_log_learn_az_elev(self) -> tuple[float, float, float, float, float, try: assert len(str_values) == 6 - float_values: Any = tuple([float(v) for v in str_values]) + float_values = tuple([float(v) for v in str_values]) except (AssertionError, ValueError): raise AssertionError(f'invalid last logfile line "{last_line}"') @@ -167,7 +167,7 @@ def read_ct_log_learn_az_elev(self) -> tuple[float, float, float, float, float, now.day, ), f'date in file is too old: "{last_line}"' - return float_values + return float_values # type: ignore def validate_tracker_position(self) -> bool: """Reads motor offsets and compares it with defined threshold. diff --git a/packages/core/threads/helios_thread.py b/packages/core/threads/helios_thread.py index cf93b3ff..8581d62b 100644 --- a/packages/core/threads/helios_thread.py +++ b/packages/core/threads/helios_thread.py @@ -184,7 +184,9 @@ def adjust_exposure() -> None: logger.debug(f"exposure results: {exposure_results}") - new_exposure = min(exposure_results, key=lambda r: abs(r["mean"] - 50))["exposure"] + new_exposure: Any = min(exposure_results, key=lambda r: abs(r["mean"] - 50))[ + "exposure" + ] _Helios.update_camera_settings(exposure=new_exposure) if new_exposure != _Helios.current_exposure: @@ -220,13 +222,13 @@ def determine_frame_status(frame: cv.Mat, save_image: bool) -> Literal[0, 1]: edges_only: np.ndarray = np.array( cv.Canny(single_valued_pixels, 40, 40), dtype=np.float32 ) - edges_only_dilated = cv.dilate( + edges_only_dilated: cv.Mat = cv.dilate( edges_only, cv.getStructuringElement(cv.MORPH_ELLIPSE, (5, 5)) ) # blacken the outer 10% of the circle radius edges_only_dilated *= ImageProcessing.get_circle_mask( - edges_only_dilated.shape, circle_r * 0.9, circle_cx, circle_cy + edges_only_dilated.shape, round(circle_r * 0.9), circle_cx, circle_cy ) # determine how many pixels inside the circle are made up of "edge pixels" diff --git a/packages/core/utils/decorators/with_filelock.py b/packages/core/utils/decorators/with_filelock.py index 11e2fdcc..c537ac32 100644 --- a/packages/core/utils/decorators/with_filelock.py +++ b/packages/core/utils/decorators/with_filelock.py @@ -1,5 +1,6 @@ import filelock from typing import Any, Callable, TypeVar, cast +from functools import wraps # FileLock = Mark, that a file is being used and other programs # should not interfere. 
A file "*.lock" will be created and the @@ -8,15 +9,29 @@ # A timeout of -1 means that the code waits forever +# def with_filelock(file_lock_path: str, timeout: float = -1): +# def with_fixed_filelock(f): +# def locked_function(*args, **kwargs): +# with filelock.FileLock(file_lock_path, timeout=timeout): +# return function(*args, **kwargs) +# return locked_function +# return with_fixed_filelock +# +# typing of higher level decorators: +# https://github.com/python/mypy/issues/1551#issuecomment-253978622 + F = TypeVar("F", bound=Callable[..., Any]) -def with_filelock(file_lock_path: str, timeout: float = -1): - def with_fixed_filelock(function: F) -> F: - def locked_function(*args, **kwargs): - with filelock.FileLock(file_lock_path, timeout=timeout): - return function(*args, **kwargs) +class with_filelock: + def __init__(self, file_lock_path: str, timeout: float = -1) -> None: + self.file_lock_path = file_lock_path + self.timeout = timeout - return cast(F, locked_function) + def __call__(self, f: F) -> F: + @wraps(f) + def wrapper(*args, **kwargs) -> Any: + with filelock.FileLock(self.file_lock_path, timeout=self.timeout): + return function(*args, **kwargs) - return with_fixed_filelock + return cast(F, wrapper) diff --git a/packages/core/utils/functions/image_processing.py b/packages/core/utils/functions/image_processing.py index a393da24..b09db495 100644 --- a/packages/core/utils/functions/image_processing.py +++ b/packages/core/utils/functions/image_processing.py @@ -6,7 +6,9 @@ class ImageProcessing: # circle code adapted from https://stackoverflow.com/a/39074620/8255842 @staticmethod - def get_circle_mask(img_shape: tuple[int, int], radius: int, center_x: int, center_y: int): + def get_circle_mask( + img_shape: tuple[int, int], radius: int, center_x: int, center_y: int + ) -> cv.Mat: """ input: image width/height, circle radius/center_x/center_y @@ -26,13 +28,13 @@ def get_circle_mask(img_shape: tuple[int, int], radius: int, center_x: int, cent return (np.abs(np.hypot(center_x - x, center_y - y)) < radius).astype(np.uint8) @staticmethod - def moving_average(xs, n=3): + def moving_average(xs: list[float], n: int = 3) -> float: ret = np.cumsum(xs) ret[n:] = ret[n:] - ret[:-n] return ret[n - 1 :] / n @staticmethod - def get_binary_mask(frame): + def get_binary_mask(frame: cv.Mat) -> cv.Mat: """ input: gray image matrix (2D matrix) with integer values for each pixel output: binary mask (same shape) that has 0s for dark pixels and 1s for bright pixels @@ -60,7 +62,7 @@ def get_binary_mask(frame): return binary_mask @staticmethod - def get_circle_location(binary_mask): + def get_circle_location(binary_mask: cv.Mat) -> tuple[int, int, int]: """ input: binary mask (2D array) like [[0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0] @@ -93,15 +95,17 @@ def get_circle_location(binary_mask): @staticmethod def add_markings_to_image( - img, edge_fraction: int, circle_cx: int, circle_cy: int, circle_r - ): + img: cv.Mat, edge_fraction: int, circle_cx: int, circle_cy: int, circle_r: int + ) -> cv.Mat: img = cv.circle(img, (circle_cx, circle_cy), circle_r, (100, 0, 0), 2) img = cv.circle(img, (circle_cx, circle_cy), round(circle_r * 0.9), (100, 0, 0), 2) img = ImageProcessing.add_text_to_image(img, f"{round(edge_fraction * 100, 2)}%") return img @staticmethod - def add_text_to_image(img, text, color=(200, 0, 0)): + def add_text_to_image( + img: cv.Mat, text: str, color: tuple[int, int, int] = (200, 0, 0) + ) -> cv.Mat: cv.putText( img, text=text, diff --git a/packages/core/utils/functions/update_dict_recursively.py 
b/packages/core/utils/functions/update_dict_recursively.py index fe1cf317..f25a3677 100644 --- a/packages/core/utils/functions/update_dict_recursively.py +++ b/packages/core/utils/functions/update_dict_recursively.py @@ -1,4 +1,7 @@ -def update_dict_recursively(old_object, new_object): +from typing import Any + + +def update_dict_recursively(old_object: Any, new_object: Any) -> Any: if old_object is None or new_object is None: return new_object diff --git a/packages/core/utils/interfaces/config_validation.py b/packages/core/utils/interfaces/config_validation.py index c21f3736..18578e4a 100644 --- a/packages/core/utils/interfaces/config_validation.py +++ b/packages/core/utils/interfaces/config_validation.py @@ -20,7 +20,7 @@ def _file_path_exists(field, value, error): # type: ignore error(field, "Path has to be an existing file") -def _is_valid_ip_adress(field, value, error): +def _is_valid_ip_adress(field, value, error): # type: ignore try: assert len(value.split(".")) == 4 assert all([n.isnumeric() for n in value.split(".")]) diff --git a/packages/core/utils/interfaces/state_interface.py b/packages/core/utils/interfaces/state_interface.py index f2f14416..618d8a30 100644 --- a/packages/core/utils/interfaces/state_interface.py +++ b/packages/core/utils/interfaces/state_interface.py @@ -1,8 +1,7 @@ import json import os import shutil -from packages.core.utils import with_filelock, update_dict_recursively -from packages.core.utils.types import upload_meta +from packages.core.utils import with_filelock, update_dict_recursively, PersistentStateTypes from .plc_interface import EMPTY_PLC_STATE dir = os.path.dirname @@ -18,7 +17,7 @@ PERSISTENT_STATE_FILE_PATH = os.path.join(PROJECT_DIR, "logs", "persistent-state.json") -EMPTY_STATE_OBJECT: dict = { +EMPTY_STATE_OBJECT: PersistentStateTypes.Dict = { "helios_indicates_good_conditions": None, "measurements_should_be_running": False, "enclosure_plc_readings": EMPTY_PLC_STATE.to_dict(), @@ -30,7 +29,7 @@ }, } -EMPTY_PERSISTENT_STATE_OBJECT: upload_meta.PersistentStateDict = { +EMPTY_PERSISTENT_STATE_OBJECT: PersistentStateTypes.PartialDict = { "active_opus_macro_id": None, "current_exceptions": [], } @@ -82,14 +81,18 @@ def initialize() -> None: def read() -> dict: """Read the state file and return its content""" with open(STATE_FILE_PATH, "r") as f: - return json.load(f) + new_object = json.load(f) + # TODO: PersistentStateTypes.validate_object(new_object) + return new_object @staticmethod @with_filelock(STATE_LOCK_PATH) - def read_persistent() -> upload_meta.PersistentStateDict: + def read_persistent() -> PersistentStateTypes.Dict: """Read the persistent state file and return its content""" with open(PERSISTENT_STATE_FILE_PATH, "r") as f: - return json.load(f) + new_object = json.load(f) + PersistentStateTypes.validate_object(new_object) + return new_object @staticmethod @with_filelock(STATE_LOCK_PATH) @@ -109,7 +112,7 @@ def update(update: dict) -> None: @staticmethod @with_filelock(STATE_LOCK_PATH) - def update_persistent(update: upload_meta.PartialPersistentStateDict) -> None: + def update_persistent(update: PersistentStateTypes.PartialDict) -> None: """ Update the (persistent) state file and return its content. 
The update object should only include the properties to be @@ -118,6 +121,7 @@ def update_persistent(update: upload_meta.PartialPersistentStateDict) -> None: with open(PERSISTENT_STATE_FILE_PATH, "r") as f: current_state = json.load(f) + PersistentStateTypes.validate_object(current_state) new_state = update_dict_recursively(current_state, update) with open(PERSISTENT_STATE_FILE_PATH, "w") as f: diff --git a/packages/core/utils/types/persistent_state.py b/packages/core/utils/types/persistent_state.py index 3d61597f..d410f64a 100644 --- a/packages/core/utils/types/persistent_state.py +++ b/packages/core/utils/types/persistent_state.py @@ -1,4 +1,5 @@ from typing import Optional, TypedDict +import pydantic class PersistentStateTypes: @@ -9,3 +10,22 @@ class Dict(TypedDict): class PartialDict(TypedDict, total=False): active_opus_macro_id: Optional[int] current_exceptions: list[str] + + @staticmethod + def validate_object(o: dict, partial: bool = False) -> None: + """ + Check, whether a given object is a correct PersistentStateDict + Raises a pydantic.ValidationError if the object is invalid. + + This should always be used when loading the object from a + JSON file! + """ + if partial: + _ValidationModel(partial=o) + else: + _ValidationModel(regular=o) + + +class _ValidationModel(pydantic.BaseModel): + regular: Optional[PersistentStateTypes.Dict] + partial: Optional[PersistentStateTypes.PartialDict] From 6f16b683dd733de223a884511861509a2387fb6c Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Wed, 17 Aug 2022 18:50:43 +0200 Subject: [PATCH 041/132] #96 (9) - fix strict typing issues --- packages/core/modules/sun_tracking.py | 2 +- packages/core/threads/abstract_thread_base.py | 4 +-- packages/core/threads/helios_thread.py | 10 +++----- packages/core/threads/upload_thread.py | 4 +-- .../core/utils/decorators/with_filelock.py | 6 ++--- packages/core/utils/functions/astronomy.py | 8 +++--- .../utils/functions/exception_email_client.py | 15 +++++------ .../core/utils/functions/image_processing.py | 2 +- packages/core/utils/functions/logger.py | 6 ++--- .../utils/interfaces/config_validation.py | 2 +- .../core/utils/interfaces/plc_interface.py | 25 +++++++++++-------- .../core/utils/interfaces/state_interface.py | 4 +-- packages/core/utils/types/persistent_state.py | 4 +-- packages/core/utils/types/upload_meta.py | 4 +-- 14 files changed, 49 insertions(+), 47 deletions(-) diff --git a/packages/core/modules/sun_tracking.py b/packages/core/modules/sun_tracking.py index a0404a7f..d9b9d732 100644 --- a/packages/core/modules/sun_tracking.py +++ b/packages/core/modules/sun_tracking.py @@ -182,7 +182,7 @@ def validate_tracker_position(self) -> bool: elev_offset = tracker_status[3] az_offeset = tracker_status[4] - threshold = self._CONFIG["camtracker"]["motor_offset_threshold"] + threshold: float = self._CONFIG["camtracker"]["motor_offset_threshold"] return (abs(elev_offset) <= threshold) and (abs(az_offeset) <= threshold) diff --git a/packages/core/threads/abstract_thread_base.py b/packages/core/threads/abstract_thread_base.py index e298a459..99a7cc91 100644 --- a/packages/core/threads/abstract_thread_base.py +++ b/packages/core/threads/abstract_thread_base.py @@ -9,12 +9,12 @@ class AbstractThreadBase(abc.ABC): An abstract base class for thread classes used in PYRA """ - def __init__(self, config: dict, logger_origin: str): + def __init__(self, config: dict, logger_origin: str) -> None: self.__thread: Optional[threading.Thread] = None self.__logger: Logger = Logger(origin=logger_origin) 
self.config: dict = config - def update_thread_state(self, new_config: dict): + def update_thread_state(self, new_config: dict) -> None: """ Make sure that the thread loop is (not) running, based on config.upload diff --git a/packages/core/threads/helios_thread.py b/packages/core/threads/helios_thread.py index 8581d62b..f1c7006d 100644 --- a/packages/core/threads/helios_thread.py +++ b/packages/core/threads/helios_thread.py @@ -184,9 +184,7 @@ def adjust_exposure() -> None: logger.debug(f"exposure results: {exposure_results}") - new_exposure: Any = min(exposure_results, key=lambda r: abs(r["mean"] - 50))[ - "exposure" - ] + new_exposure = min(exposure_results, key=lambda r: abs(r["mean"] - 50))["exposure"] # type: ignore _Helios.update_camera_settings(exposure=new_exposure) if new_exposure != _Helios.current_exposure: @@ -219,9 +217,7 @@ def determine_frame_status(frame: cv.Mat, save_image: bool) -> Literal[0, 1]: circle_cx, circle_cy, circle_r = ImageProcessing.get_circle_location(binary_mask) # only consider edges and make them bold - edges_only: np.ndarray = np.array( - cv.Canny(single_valued_pixels, 40, 40), dtype=np.float32 - ) + edges_only: cv.Mat = np.array(cv.Canny(single_valued_pixels, 40, 40), dtype=np.float32) edges_only_dilated: cv.Mat = cv.dilate( edges_only, cv.getStructuringElement(cv.MORPH_ELLIPSE, (5, 5)) ) @@ -289,7 +285,7 @@ class HeliosThread(AbstractThreadBase): to the StateInterface. """ - def __init__(self, config: dict): + def __init__(self, config: dict) -> None: super().__init__(config, "helios") def should_be_running(self) -> bool: diff --git a/packages/core/threads/upload_thread.py b/packages/core/threads/upload_thread.py index 35eeac01..0c4ab94b 100644 --- a/packages/core/threads/upload_thread.py +++ b/packages/core/threads/upload_thread.py @@ -30,7 +30,7 @@ class DirectoryUploadClient: the actual upload process. """ - def __init__(self, date_string: str, config: dict): + def __init__(self, date_string: str, config: dict) -> None: self.connection = fabric.connection.Connection( f"{config['upload']['user']}@{config['upload']['host']}", connect_kwargs={"password": config["upload"]["password"]}, @@ -281,7 +281,7 @@ class UploadThread(AbstractThreadBase): 📁 ... 
""" - def __init__(self, config: dict): + def __init__(self, config: dict) -> None: super().__init__(config, "upload") def should_be_running(self) -> bool: diff --git a/packages/core/utils/decorators/with_filelock.py b/packages/core/utils/decorators/with_filelock.py index c537ac32..f5166c8c 100644 --- a/packages/core/utils/decorators/with_filelock.py +++ b/packages/core/utils/decorators/with_filelock.py @@ -25,12 +25,12 @@ class with_filelock: def __init__(self, file_lock_path: str, timeout: float = -1) -> None: - self.file_lock_path = file_lock_path - self.timeout = timeout + self.file_lock_path: str = file_lock_path + self.timeout: float = timeout def __call__(self, f: F) -> F: @wraps(f) - def wrapper(*args, **kwargs) -> Any: + def wrapper(*args: tuple[Any], **kwargs: dict[str, Any]) -> Any: with filelock.FileLock(self.file_lock_path, timeout=self.timeout): return function(*args, **kwargs) diff --git a/packages/core/utils/functions/astronomy.py b/packages/core/utils/functions/astronomy.py index 92798ba3..08b71b78 100644 --- a/packages/core/utils/functions/astronomy.py +++ b/packages/core/utils/functions/astronomy.py @@ -1,4 +1,4 @@ -from typing import Optional +from typing import Any, Optional import astropy.coordinates as astropy_coordinates # type: ignore import astropy.time as astropy_time # type: ignore import astropy.units as astropy_units # type: ignore @@ -6,13 +6,15 @@ # TODO: pass config via functions instea of indirectly # more code but way simpler +# TODO: add static typic (simplify code while doing that (less astropy stuff)) + class Astronomy: CONFIG: Optional[dict] = None units = astropy_units @staticmethod - def get_current_sun_elevation(): + def get_current_sun_elevation() -> Any: """calc_sun_angle_deg(location loc): Computes and returns the current sun angle in degree, based on the location loc, computed by get_tracker_position(), and current time. Therefore, the pack- ages time and astrophy are required. 
@@ -54,7 +56,7 @@ def __get_location_from_camtracker_config() -> tuple[float, float, float]: return (lat, lon, alt) @staticmethod - def __get_astropy_location(): + def __get_astropy_location() -> Any: """ get_tracker_position(): Reads out the height, the longitude and the latitude of the system from CamTrackerConfig.txt, and computes the location diff --git a/packages/core/utils/functions/exception_email_client.py b/packages/core/utils/functions/exception_email_client.py index fc8b2ab1..07fbeb3d 100644 --- a/packages/core/utils/functions/exception_email_client.py +++ b/packages/core/utils/functions/exception_email_client.py @@ -10,14 +10,14 @@ PROJECT_DIR = dir(dir(dir(dir(dir(os.path.abspath(__file__)))))) -def get_pyra_version(): +def get_pyra_version() -> str: with open(os.path.join(PROJECT_DIR, "packages", "ui", "package.json")) as f: pyra_version: str = json.load(f)["version"] assert pyra_version.startswith("4.") return pyra_version -def get_commit_sha(): +def get_commit_sha() -> str: commit_sha_process = subprocess.run( ["git", "rev-parse", "--verify", "HEAD", "--short"], stdout=subprocess.PIPE, @@ -29,7 +29,7 @@ def get_commit_sha(): return commit_sha -def get_current_log_lines(): +def get_current_log_lines() -> list[str]: with open(f"{PROJECT_DIR}/logs/info.log") as f: latest_log_lines = f.readlines() @@ -40,12 +40,13 @@ def get_current_log_lines(): included_iterations += 1 log_lines_in_email.append(l) if included_iterations == 2: - return log_lines_in_email[::-1] + break + return log_lines_in_email[::-1] class ExceptionEmailClient: @staticmethod - def _send_email(config: dict, text: str, html: str, subject: str): + def _send_email(config: dict, text: str, html: str, subject: str) -> None: sender_email = config["error_email"]["sender_address"] sender_password = config["error_email"]["sender_password"] recipients = config["error_email"]["recipients"].replace(" ", "").split(",") @@ -68,7 +69,7 @@ def _send_email(config: dict, text: str, html: str, subject: str): ) @staticmethod - def handle_resolved_exception(config: dict): + def handle_resolved_exception(config: dict) -> None: if not config["error_email"]["notify_recipients"]: return @@ -103,7 +104,7 @@ def handle_resolved_exception(config: dict): ExceptionEmailClient._send_email(config, text, html, subject) @staticmethod - def handle_occured_exception(config: dict, exception: Exception): + def handle_occured_exception(config: dict, exception: Exception) -> None: if not config["error_email"]["notify_recipients"]: return diff --git a/packages/core/utils/functions/image_processing.py b/packages/core/utils/functions/image_processing.py index b09db495..bdfce44f 100644 --- a/packages/core/utils/functions/image_processing.py +++ b/packages/core/utils/functions/image_processing.py @@ -28,7 +28,7 @@ def get_circle_mask( return (np.abs(np.hypot(center_x - x, center_y - y)) < radius).astype(np.uint8) @staticmethod - def moving_average(xs: list[float], n: int = 3) -> float: + def moving_average(xs: list[float], n: int = 3) -> np.ndarray: ret = np.cumsum(xs) ret[n:] = ret[n:] - ret[:-n] return ret[n - 1 :] / n diff --git a/packages/core/utils/functions/logger.py b/packages/core/utils/functions/logger.py index 1ef632b8..5577cb4f 100644 --- a/packages/core/utils/functions/logger.py +++ b/packages/core/utils/functions/logger.py @@ -15,7 +15,7 @@ # manually. 
Doesn't really make a performance difference -def log_line_has_time(log_line: str): +def log_line_has_time(log_line: str) -> bool: try: assert len(log_line) >= 10 datetime.strptime(log_line[:10], "%Y-%m-%d") @@ -27,7 +27,7 @@ def log_line_has_time(log_line: str): class Logger: last_archive_time = datetime.now() - def __init__(self, origin="pyra.core", just_print: bool = False): + def __init__(self, origin="pyra.core", just_print: bool = False) -> None: self.origin = origin self.just_print = just_print @@ -100,7 +100,7 @@ def archive(keep_last_hour: bool = False) -> None: if len(lines_to_be_archived) == 0: return - archive_log_date_groups = {} + archive_log_date_groups: dict[str, dict[str, list[str]]] = {} line_date = lines_to_be_archived[0][:10].replace("-", "") for line in lines_to_be_archived: if log_line_has_time(line): diff --git a/packages/core/utils/interfaces/config_validation.py b/packages/core/utils/interfaces/config_validation.py index 18578e4a..cd672fe0 100644 --- a/packages/core/utils/interfaces/config_validation.py +++ b/packages/core/utils/interfaces/config_validation.py @@ -37,7 +37,7 @@ def get_nullable_dict_schema(s: dict) -> dict: # type: ignore return {"type": "dict", "schema": s, "nullable": True} -def get_config_file_schema(strict: boolean): +def get_config_file_schema(strict: bool) -> dict: """ Returns a cerberus schema for the config. With strict=false, the checks whether file paths or directories exist will be diff --git a/packages/core/utils/interfaces/plc_interface.py b/packages/core/utils/interfaces/plc_interface.py index 5c057edd..c75ddac8 100644 --- a/packages/core/utils/interfaces/plc_interface.py +++ b/packages/core/utils/interfaces/plc_interface.py @@ -1,5 +1,5 @@ import dataclasses -from typing import Optional +from typing import Any, Optional import snap7 # type: ignore import time import os @@ -71,7 +71,7 @@ class PLCState: power: PLCPowerState connections: PLCConnectionsState - def to_dict(self): + def to_dict(self) -> dict[str, Any]: out = {} for field in dataclasses.fields(self): field_value = getattr(self, field.name) @@ -198,7 +198,7 @@ def read(self) -> PLCState: # TODO: self.plc.read_multi_vars() - plc_db_content = {} + plc_db_content: dict[int, int] = {} if self.config["tum_plc"]["version"] == 1: plc_db_size = {3: 6, 8: 26, 25: 10} else: @@ -213,12 +213,12 @@ def read(self) -> PLCState: def _get_int(spec: Optional[list[int]]) -> Optional[int]: if spec is None: return None - return snap7.util.get_int(plc_db_content[spec[0]], spec[1]) + return snap7.util.get_int(plc_db_content[spec[0]], spec[1]) # type: ignore def _get_bool(spec: Optional[list[int]]) -> Optional[bool]: if spec is None: return None - return snap7.util.get_bool(plc_db_content[spec[0]], spec[1], spec[3]) + return snap7.util.get_bool(plc_db_content[spec[0]], spec[1], spec[3]) # type: ignore s = self.specification @@ -273,12 +273,15 @@ def __sleep_while_cpu_is_busy(self) -> None: time.sleep(2) def __read_int(self, action: list[int]) -> int: - """Reads an INT value in the PLC database.""" + """ + Reads an INT value in the PLC database. 
+ + action is tuple: db_number, start, size + """ assert len(action) == 3 - db_number, start, size = action - msg = self.plc.db_read(db_number, start, size) - value = snap7.util.get_int(msg, 0) + msg: bytearray = self.plc.db_read(*action) + value: int = snap7.util.get_int(msg, 0) self.__sleep_while_cpu_is_busy() @@ -300,8 +303,8 @@ def __read_bool(self, action: list[int]) -> bool: assert len(action) == 4 db_number, start, size, bool_index = action - msg = self.plc.db_read(db_number, start, size) - value = snap7.util.get_bool(msg, 0, bool_index) + msg: bytearray = self.plc.db_read(db_number, start, size) + value: bool = snap7.util.get_bool(msg, 0, bool_index) self.__sleep_while_cpu_is_busy() diff --git a/packages/core/utils/interfaces/state_interface.py b/packages/core/utils/interfaces/state_interface.py index 618d8a30..793a488d 100644 --- a/packages/core/utils/interfaces/state_interface.py +++ b/packages/core/utils/interfaces/state_interface.py @@ -17,7 +17,7 @@ PERSISTENT_STATE_FILE_PATH = os.path.join(PROJECT_DIR, "logs", "persistent-state.json") -EMPTY_STATE_OBJECT: PersistentStateTypes.Dict = { +EMPTY_STATE_OBJECT: dict = { "helios_indicates_good_conditions": None, "measurements_should_be_running": False, "enclosure_plc_readings": EMPTY_PLC_STATE.to_dict(), @@ -90,7 +90,7 @@ def read() -> dict: def read_persistent() -> PersistentStateTypes.Dict: """Read the persistent state file and return its content""" with open(PERSISTENT_STATE_FILE_PATH, "r") as f: - new_object = json.load(f) + new_object: PersistentStateTypes.Dict = json.load(f) PersistentStateTypes.validate_object(new_object) return new_object diff --git a/packages/core/utils/types/persistent_state.py b/packages/core/utils/types/persistent_state.py index d410f64a..81b7b569 100644 --- a/packages/core/utils/types/persistent_state.py +++ b/packages/core/utils/types/persistent_state.py @@ -1,4 +1,4 @@ -from typing import Optional, TypedDict +from typing import Any, Optional, TypedDict import pydantic @@ -12,7 +12,7 @@ class PartialDict(TypedDict, total=False): current_exceptions: list[str] @staticmethod - def validate_object(o: dict, partial: bool = False) -> None: + def validate_object(o: Any, partial: bool = False) -> None: """ Check, whether a given object is a correct PersistentStateDict Raises a pydantic.ValidationError if the object is invalid. diff --git a/packages/core/utils/types/upload_meta.py b/packages/core/utils/types/upload_meta.py index 3a8a8b33..8536ede7 100644 --- a/packages/core/utils/types/upload_meta.py +++ b/packages/core/utils/types/upload_meta.py @@ -1,4 +1,4 @@ -from typing import Optional, TypedDict +from typing import Any, Optional, TypedDict import pydantic @@ -19,7 +19,7 @@ class PartialDict(TypedDict, total=False): lastModifiedTime: float @staticmethod - def validate_object(o: dict, partial: bool = False) -> None: + def validate_object(o: Any, partial: bool = False) -> None: """ Check, whether a given object is a correct UploadMetaDict Raises a pydantic.ValidationError if the object is invalid. 
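The reason all of these types come as a Dict/PartialDict pair is mypy: total=False makes every key optional, so callers such as StateInterface.update_persistent can pass only the fields they want to change while mypy still checks key names and value types. A small illustration with invented type names:

    from typing import Optional, TypedDict


    class DemoDict(TypedDict):
        active_id: Optional[int]
        exceptions: list[str]


    class DemoDictPartial(TypedDict, total=False):
        active_id: Optional[int]
        exceptions: list[str]


    # fine: total=False lets callers pass only the keys they want to change
    update: DemoDictPartial = {"exceptions": ["CameraError"]}

    # both of these are rejected by mypy, even though they would run at runtime:
    # wrong_key: DemoDictPartial = {"active_idd": 1}
    # wrong_type: DemoDictPartial = {"active_id": "seven"}
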
From c46575db36d8d4d3c528493d1329525469748ff1 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Wed, 17 Aug 2022 18:53:53 +0200 Subject: [PATCH 042/132] #96 (10) - fix strict typing issues --- packages/core/modules/enclosure_control.py | 4 ++-- packages/core/modules/opus_measurement.py | 3 +++ packages/core/utils/functions/logger.py | 6 +++--- packages/core/utils/interfaces/plc_interface.py | 4 ++-- 4 files changed, 10 insertions(+), 7 deletions(-) diff --git a/packages/core/modules/enclosure_control.py b/packages/core/modules/enclosure_control.py index fb7626d8..52c3f0eb 100644 --- a/packages/core/modules/enclosure_control.py +++ b/packages/core/modules/enclosure_control.py @@ -123,7 +123,7 @@ def run(self, new_config: dict) -> None: # PLC.ACTORS SETTERS - def move_cover(self, value) -> None: + def move_cover(self, value: int) -> None: logger.debug(f"Received request to move cover to position {value} degrees.") # rain check before moving cover. PLC will deny cover requests during rain anyway @@ -146,7 +146,7 @@ def force_cover_close(self) -> None: self.plc_interface.set_cover_angle(0) self.plc_interface.set_manual_control(False) - def wait_for_cover_closing(self, throw_error=True) -> None: + def wait_for_cover_closing(self, throw_error: bool = True) -> None: """Waits steps of 5s for the enclosure cover to close. Raises the custom error CoverError if clover doesn't close in a given diff --git a/packages/core/modules/opus_measurement.py b/packages/core/modules/opus_measurement.py index 53e294ed..a832ad05 100644 --- a/packages/core/modules/opus_measurement.py +++ b/packages/core/modules/opus_measurement.py @@ -1,10 +1,13 @@ import os import sys import time +from typing import Any from packages.core.utils import Logger, StateInterface, Astronomy # these imports are provided by pywin32 +win32ui: Any = None +dde: Any = None if sys.platform == "win32": import win32ui # type: ignore import dde # type: ignore diff --git a/packages/core/utils/functions/logger.py b/packages/core/utils/functions/logger.py index 5577cb4f..8c281d1c 100644 --- a/packages/core/utils/functions/logger.py +++ b/packages/core/utils/functions/logger.py @@ -27,9 +27,9 @@ def log_line_has_time(log_line: str) -> bool: class Logger: last_archive_time = datetime.now() - def __init__(self, origin="pyra.core", just_print: bool = False) -> None: - self.origin = origin - self.just_print = just_print + def __init__(self, origin: str = "pyra.core", just_print: bool = False) -> None: + self.origin: str = origin + self.just_print: bool = just_print def debug(self, message: str) -> None: self._write_log_line("DEBUG", message) diff --git a/packages/core/utils/interfaces/plc_interface.py b/packages/core/utils/interfaces/plc_interface.py index c75ddac8..17a4454c 100644 --- a/packages/core/utils/interfaces/plc_interface.py +++ b/packages/core/utils/interfaces/plc_interface.py @@ -105,13 +105,13 @@ class PLCError(Exception): * set_auto_temperature/_manual_temperature """ - def __init__(self, config: dict): + def __init__(self, config: dict) -> None: self.config = config self.specification = PLC_SPECIFICATION_VERSIONS[config["tum_plc"]["version"]] # CONNECTION/CLASS MANAGEMENT - def update_config(self, new_config: dict): + def update_config(self, new_config: dict) -> None: """ Update the internally used config (executed at the) beginning of enclosure-control's run-function. 
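For reference, __read_int and __read_bool above are thin wrappers around python-snap7: db_read returns a bytearray and snap7.util decodes an integer or a single bit from it. A minimal sketch with made-up connection details and offsets; the real DB numbers and byte offsets come from the PLC specification objects:

    import snap7
    import snap7.util

    plc = snap7.client.Client()
    plc.connect("10.10.0.4", 0, 1)  # made-up IP address, rack 0, slot 1

    # read a 2-byte INT starting at byte 0 of DB 8
    msg: bytearray = plc.db_read(8, 0, 2)
    some_int = snap7.util.get_int(msg, 0)

    # read bit 3 of the byte at offset 4 in DB 8
    msg = plc.db_read(8, 4, 1)
    some_bool = snap7.util.get_bool(msg, 0, 3)

    print(some_int, some_bool)
    plc.disconnect()
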
From 1a94c6ad1e270dc33697d0932518097ee653ea0c Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Thu, 18 Aug 2022 15:56:02 +0200 Subject: [PATCH 043/132] #96 (11) - improve type structure --- packages/core/threads/upload_thread.py | 9 ++- packages/core/utils/__init__.py | 3 +- .../core/utils/interfaces/state_interface.py | 24 ++++---- packages/core/utils/types/__init__.py | 10 +++- packages/core/utils/types/persistent_state.py | 48 +++++++-------- packages/core/utils/types/state.py | 42 ++++++++++++++ packages/core/utils/types/upload_meta.py | 58 +++++++++---------- 7 files changed, 119 insertions(+), 75 deletions(-) create mode 100644 packages/core/utils/types/state.py diff --git a/packages/core/threads/upload_thread.py b/packages/core/threads/upload_thread.py index 0c4ab94b..b0f17616 100644 --- a/packages/core/threads/upload_thread.py +++ b/packages/core/threads/upload_thread.py @@ -9,8 +9,7 @@ import fabric # type: ignore import re import pydantic -from packages.core.utils import ConfigInterface, Logger -from packages.core.utils.types import UploadMetaTypes +from packages.core.utils import ConfigInterface, Logger, types from .abstract_thread_base import AbstractThreadBase logger = Logger(origin="upload") @@ -51,7 +50,7 @@ def __init__(self, date_string: str, config: dict) -> None: config["upload"]["dst_directory"] ), f"remote {config['upload']['dst_directory']} is not a directory" - self.meta_content: UploadMetaTypes.Dict = { + self.meta_content: types.UploadMetaDict = { "complete": False, "fileList": [], "createdTime": round(time.time(), 3), @@ -131,12 +130,12 @@ def __fetch_meta(self) -> None: assert os.path.isfile(self.src_meta_path) with open(self.src_meta_path, "r") as f: new_meta_content = json.load(f) - UploadMetaTypes.validate_object(new_meta_content) + types.validate_upload_meta_dict(new_meta_content) self.meta_content = new_meta_content except (AssertionError, json.JSONDecodeError, pydantic.ValidationError) as e: raise InvalidUploadState(str(e)) - def __update_meta(self, new_meta_content_partial: UploadMetaTypes.PartialDict) -> None: + def __update_meta(self, new_meta_content_partial: types.UploadMetaDictPartial) -> None: """ Update the local upload-meta.json file and overwrite the meta file on the server diff --git a/packages/core/utils/__init__.py b/packages/core/utils/__init__.py index 051138b6..9bcca9be 100644 --- a/packages/core/utils/__init__.py +++ b/packages/core/utils/__init__.py @@ -13,5 +13,4 @@ from .interfaces import PLCInterface from .interfaces import OSInterface -from .types import UploadMetaTypes -from .types import PersistentStateTypes +from . 
import types diff --git a/packages/core/utils/interfaces/state_interface.py b/packages/core/utils/interfaces/state_interface.py index 793a488d..fe094525 100644 --- a/packages/core/utils/interfaces/state_interface.py +++ b/packages/core/utils/interfaces/state_interface.py @@ -1,7 +1,7 @@ import json import os import shutil -from packages.core.utils import with_filelock, update_dict_recursively, PersistentStateTypes +from packages.core.utils import with_filelock, update_dict_recursively, types from .plc_interface import EMPTY_PLC_STATE dir = os.path.dirname @@ -17,7 +17,7 @@ PERSISTENT_STATE_FILE_PATH = os.path.join(PROJECT_DIR, "logs", "persistent-state.json") -EMPTY_STATE_OBJECT: dict = { +EMPTY_STATE_OBJECT: types.StateDict = { "helios_indicates_good_conditions": None, "measurements_should_be_running": False, "enclosure_plc_readings": EMPTY_PLC_STATE.to_dict(), @@ -29,7 +29,7 @@ }, } -EMPTY_PERSISTENT_STATE_OBJECT: PersistentStateTypes.PartialDict = { +EMPTY_PERSISTENT_STATE_OBJECT: types.PersistentStateDict = { "active_opus_macro_id": None, "current_exceptions": [], } @@ -78,25 +78,25 @@ def initialize() -> None: @staticmethod @with_filelock(STATE_LOCK_PATH) - def read() -> dict: + def read() -> types.StateDict: """Read the state file and return its content""" with open(STATE_FILE_PATH, "r") as f: - new_object = json.load(f) - # TODO: PersistentStateTypes.validate_object(new_object) + new_object: types.StateDict = json.load(f) + types.validate_state_dict(new_object) return new_object @staticmethod @with_filelock(STATE_LOCK_PATH) - def read_persistent() -> PersistentStateTypes.Dict: + def read_persistent() -> types.PersistentStateDict: """Read the persistent state file and return its content""" with open(PERSISTENT_STATE_FILE_PATH, "r") as f: - new_object: PersistentStateTypes.Dict = json.load(f) - PersistentStateTypes.validate_object(new_object) + new_object: types.PersistentStateDict = json.load(f) + types.validate_persistent_state_dict(new_object) return new_object @staticmethod @with_filelock(STATE_LOCK_PATH) - def update(update: dict) -> None: + def update(update: types.StateDictPartial) -> None: """ Update the (persistent) state file and return its content. The update object should only include the properties to be @@ -112,7 +112,7 @@ def update(update: dict) -> None: @staticmethod @with_filelock(STATE_LOCK_PATH) - def update_persistent(update: PersistentStateTypes.PartialDict) -> None: + def update_persistent(update: types.PersistentStateDictPartial) -> None: """ Update the (persistent) state file and return its content. 
The update object should only include the properties to be @@ -121,7 +121,7 @@ def update_persistent(update: PersistentStateTypes.PartialDict) -> None: with open(PERSISTENT_STATE_FILE_PATH, "r") as f: current_state = json.load(f) - PersistentStateTypes.validate_object(current_state) + types.validate_persistent_state_dict(current_state) new_state = update_dict_recursively(current_state, update) with open(PERSISTENT_STATE_FILE_PATH, "w") as f: diff --git a/packages/core/utils/types/__init__.py b/packages/core/utils/types/__init__.py index bbc98682..4cb02c7e 100644 --- a/packages/core/utils/types/__init__.py +++ b/packages/core/utils/types/__init__.py @@ -1,2 +1,8 @@ -from .upload_meta import UploadMetaTypes -from .persistent_state import PersistentStateTypes +from .upload_meta import UploadMetaDict, UploadMetaDictPartial +from .upload_meta import validate_upload_meta_dict + +from .persistent_state import PersistentStateDict, PersistentStateDictPartial +from .persistent_state import validate_persistent_state_dict + +from .state import StateDict, StateDictPartial +from .state import validate_state_dict diff --git a/packages/core/utils/types/persistent_state.py b/packages/core/utils/types/persistent_state.py index 81b7b569..10d261e5 100644 --- a/packages/core/utils/types/persistent_state.py +++ b/packages/core/utils/types/persistent_state.py @@ -2,30 +2,30 @@ import pydantic -class PersistentStateTypes: - class Dict(TypedDict): - active_opus_macro_id: Optional[int] - current_exceptions: list[str] - - class PartialDict(TypedDict, total=False): - active_opus_macro_id: Optional[int] - current_exceptions: list[str] - - @staticmethod - def validate_object(o: Any, partial: bool = False) -> None: - """ - Check, whether a given object is a correct PersistentStateDict - Raises a pydantic.ValidationError if the object is invalid. - - This should always be used when loading the object from a - JSON file! - """ - if partial: - _ValidationModel(partial=o) - else: - _ValidationModel(regular=o) +class PersistentStateDict(TypedDict): + active_opus_macro_id: Optional[int] + current_exceptions: list[str] + + +class PersistentStateDictPartial(TypedDict, total=False): + active_opus_macro_id: Optional[int] + current_exceptions: list[str] + + +def validate_persistent_state_dict(o: Any, partial: bool = False) -> None: + """ + Check, whether a given object is a correct PersistentStateDict + Raises a pydantic.ValidationError if the object is invalid. + + This should always be used when loading the object from a + JSON file! 
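Both update and update_persistent follow the same read, merge, write cycle, with update_dict_recursively doing the merge so that nested keys not mentioned in the update survive. Only a fragment of that helper appears in these patches, so the function below is a hypothetical stand-in that illustrates the behaviour the call sites rely on:

    from typing import Any


    def merge_recursively(old_object: Any, new_object: Any) -> Any:
        """Hypothetical stand-in for update_dict_recursively."""
        if isinstance(old_object, dict) and isinstance(new_object, dict):
            merged = dict(old_object)
            for key, value in new_object.items():
                merged[key] = merge_recursively(old_object.get(key), value)
            return merged
        return new_object


    current_state = {"os_state": {"cpu_usage": [0.2], "memory_usage": 0.5}}
    update = {"os_state": {"memory_usage": 0.7}}
    print(merge_recursively(current_state, update))
    # {'os_state': {'cpu_usage': [0.2], 'memory_usage': 0.7}}
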
+ """ + if partial: + _ValidationModel(partial=o) + else: + _ValidationModel(regular=o) class _ValidationModel(pydantic.BaseModel): - regular: Optional[PersistentStateTypes.Dict] - partial: Optional[PersistentStateTypes.PartialDict] + regular: Optional[PersistentStateDict] + partial: Optional[PersistentStateDictPartial] diff --git a/packages/core/utils/types/state.py b/packages/core/utils/types/state.py new file mode 100644 index 00000000..58403d07 --- /dev/null +++ b/packages/core/utils/types/state.py @@ -0,0 +1,42 @@ +from typing import Any, Optional, TypedDict +import pydantic + + +class _OSStateDict(TypedDict): + cpu_usage: Optional[list[float]] + memory_usage: Optional[float] + last_boot_time: Optional[str] + filled_disk_space_fraction: Optional[float] + + +class StateDict(TypedDict): + helios_indicates_good_conditions: Optional[int] + measurements_should_be_running: bool + enclosure_plc_readings: Any + os_state: _OSStateDict + + +class StateDictPartial(TypedDict, total=False): + helios_indicates_good_conditions: Optional[int] + measurements_should_be_running: bool + enclosure_plc_readings: Any + os_state: _OSStateDict + + +def validate_state_dict(o: Any, partial: bool = False) -> None: + """ + Check, whether a given object is a correct StateDict + Raises a pydantic.ValidationError if the object is invalid. + + This should always be used when loading the object from a + JSON file! + """ + if partial: + _ValidationModel(partial=o) + else: + _ValidationModel(regular=o) + + +class _ValidationModel(pydantic.BaseModel): + regular: Optional[StateDict] + partial: Optional[StateDictPartial] diff --git a/packages/core/utils/types/upload_meta.py b/packages/core/utils/types/upload_meta.py index 8536ede7..2212119d 100644 --- a/packages/core/utils/types/upload_meta.py +++ b/packages/core/utils/types/upload_meta.py @@ -3,36 +3,34 @@ import pydantic -class UploadMetaTypes: - @staticmethod - class Dict(TypedDict): - complete: bool - fileList: list[str] - createdTime: float - lastModifiedTime: float - - @staticmethod - class PartialDict(TypedDict, total=False): - complete: bool - fileList: list[str] - createdTime: float - lastModifiedTime: float - - @staticmethod - def validate_object(o: Any, partial: bool = False) -> None: - """ - Check, whether a given object is a correct UploadMetaDict - Raises a pydantic.ValidationError if the object is invalid. - - This should always be used when loading the object from a - JSON file! - """ - if partial: - _ValidationModel(partial=o) - else: - _ValidationModel(regular=o) +class UploadMetaDict(TypedDict): + complete: bool + fileList: list[str] + createdTime: float + lastModifiedTime: float + + +class UploadMetaDictPartial(TypedDict, total=False): + complete: bool + fileList: list[str] + createdTime: float + lastModifiedTime: float + + +def validate_upload_meta_dict(o: Any, partial: bool = False) -> None: + """ + Check, whether a given object is a correct UploadMetaDict + Raises a pydantic.ValidationError if the object is invalid. + + This should always be used when loading the object from a + JSON file! 
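The new state.py module gives the non-persistent state the same treatment: a StateDict/StateDictPartial pair plus a runtime check that StateInterface.read now applies. A usage sketch of the partial variant; the field values are invented, and the project dependencies need to be installed for the import to work:

    from packages.core.utils import types

    state_update: types.StateDictPartial = {
        "helios_indicates_good_conditions": True,
        "os_state": {
            "cpu_usage": [0.2, 0.3],
            "memory_usage": 0.41,
            "last_boot_time": "2022-08-17 06:30:00",
            "filled_disk_space_fraction": 0.73,
        },
    }

    # raises pydantic.ValidationError if a key or a value type is off
    types.validate_state_dict(state_update, partial=True)

    # StateInterface.update(state_update) would then merge it into the state file
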
+ """ + if partial: + _ValidationModel(partial=o) + else: + _ValidationModel(regular=o) class _ValidationModel(pydantic.BaseModel): - regular: Optional[UploadMetaTypes.Dict] - partial: Optional[UploadMetaTypes.PartialDict] + regular: Optional[UploadMetaDict] + partial: Optional[UploadMetaDictPartial] From e1d5d1cd64f29528b66e9cb2479bc17d7a15cc16 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Thu, 18 Aug 2022 20:34:51 +0200 Subject: [PATCH 044/132] #96 (12) - fix strict typing issues --- packages/core/modules/measurement_conditions.py | 6 ++++-- packages/core/utils/types/state.py | 6 +++--- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/packages/core/modules/measurement_conditions.py b/packages/core/modules/measurement_conditions.py index e8febdab..e07f3e2f 100644 --- a/packages/core/modules/measurement_conditions.py +++ b/packages/core/modules/measurement_conditions.py @@ -5,7 +5,9 @@ # TODO: add type annotation -def get_times_from_tuples(triggers) -> tuple[datetime.time, datetime.time, datetime.time]: +def get_times_from_tuples( + triggers: dict, +) -> tuple[datetime.time, datetime.time, datetime.time]: now = datetime.datetime.now() current_time = datetime.time(now.hour, now.minute, now.second) start_time = datetime.time(**triggers["start_time"]) @@ -14,7 +16,7 @@ def get_times_from_tuples(triggers) -> tuple[datetime.time, datetime.time, datet class MeasurementConditions: - def __init__(self, initial_config: dict): + def __init__(self, initial_config: dict) -> None: self._CONFIG = initial_config def run(self, new_config: dict) -> None: diff --git a/packages/core/utils/types/state.py b/packages/core/utils/types/state.py index 58403d07..2537be3d 100644 --- a/packages/core/utils/types/state.py +++ b/packages/core/utils/types/state.py @@ -10,16 +10,16 @@ class _OSStateDict(TypedDict): class StateDict(TypedDict): - helios_indicates_good_conditions: Optional[int] + helios_indicates_good_conditions: Optional[bool] measurements_should_be_running: bool - enclosure_plc_readings: Any + enclosure_plc_readings: dict os_state: _OSStateDict class StateDictPartial(TypedDict, total=False): helios_indicates_good_conditions: Optional[int] measurements_should_be_running: bool - enclosure_plc_readings: Any + enclosure_plc_readings: dict os_state: _OSStateDict From 73ab1f2cb252fa49344b972aec0901c3aa31eb5a Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Thu, 18 Aug 2022 20:42:21 +0200 Subject: [PATCH 045/132] #96 (13) - add config type with first level keys --- packages/core/utils/types/__init__.py | 7 +++-- packages/core/utils/types/config.py | 45 +++++++++++++++++++++++++++ 2 files changed, 50 insertions(+), 2 deletions(-) create mode 100644 packages/core/utils/types/config.py diff --git a/packages/core/utils/types/__init__.py b/packages/core/utils/types/__init__.py index 4cb02c7e..9a4204f8 100644 --- a/packages/core/utils/types/__init__.py +++ b/packages/core/utils/types/__init__.py @@ -1,8 +1,11 @@ -from .upload_meta import UploadMetaDict, UploadMetaDictPartial -from .upload_meta import validate_upload_meta_dict +from .config import ConfigDict, ConfigDictPartial +from .config import validate_config_dict from .persistent_state import PersistentStateDict, PersistentStateDictPartial from .persistent_state import validate_persistent_state_dict from .state import StateDict, StateDictPartial from .state import validate_state_dict + +from .upload_meta import UploadMetaDict, UploadMetaDictPartial +from .upload_meta import validate_upload_meta_dict diff --git 
a/packages/core/utils/types/config.py b/packages/core/utils/types/config.py new file mode 100644 index 00000000..4f2b9961 --- /dev/null +++ b/packages/core/utils/types/config.py @@ -0,0 +1,45 @@ +from typing import Any, Optional, TypedDict +import pydantic + + +class ConfigDict(TypedDict): + general: dict + opus: dict + camtracker: dict + error_email: dict + measurement_decision: dict + measurement_triggers: dict + tum_plc: Optional[dict] + helios: Optional[dict] + upload: Optional[dict] + + +class ConfigDictPartial(TypedDict, total=False): + general: dict + opus: dict + camtracker: dict + error_email: dict + measurement_decision: dict + measurement_triggers: dict + tum_plc: Optional[dict] + helios: Optional[dict] + upload: Optional[dict] + + +def validate_config_dict(o: Any, partial: bool = False) -> None: + """ + Check, whether a given object is a correct ConfigDict + Raises a pydantic.ValidationError if the object is invalid. + + This should always be used when loading the object from a + JSON file! + """ + if partial: + _ValidationModel(partial=o) + else: + _ValidationModel(regular=o) + + +class _ValidationModel(pydantic.BaseModel): + regular: Optional[ConfigDict] + partial: Optional[ConfigDictPartial] From 11b785c13adfd9fe6925a632e2c8de3c16f16027 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Thu, 18 Aug 2022 20:51:55 +0200 Subject: [PATCH 046/132] #96 (14) - use config type everywhere --- packages/core/main.py | 3 ++- packages/core/modules/enclosure_control.py | 6 +++--- .../core/modules/measurement_conditions.py | 21 +++++++++---------- packages/core/modules/opus_measurement.py | 6 +++--- packages/core/modules/sun_tracking.py | 6 +++--- packages/core/modules/system_checks.py | 6 +++--- packages/core/threads/abstract_thread_base.py | 8 +++---- packages/core/threads/helios_thread.py | 3 ++- packages/core/threads/upload_thread.py | 4 ++-- packages/core/utils/functions/astronomy.py | 3 ++- .../utils/functions/exception_email_client.py | 7 ++++--- .../core/utils/interfaces/config_interface.py | 4 ++-- .../core/utils/interfaces/plc_interface.py | 4 ++-- 13 files changed, 42 insertions(+), 39 deletions(-) diff --git a/packages/core/main.py b/packages/core/main.py index d287cf33..856acfcd 100644 --- a/packages/core/main.py +++ b/packages/core/main.py @@ -7,13 +7,14 @@ StateInterface, Logger, ExceptionEmailClient, + types, ) logger = Logger(origin="main") def update_exception_state( - config: dict, current_exceptions: list[str], new_exception: Optional[Exception] + config: types.ConfigDict, current_exceptions: list[str], new_exception: Optional[Exception] ) -> list[str]: """ Take a list of current_exceptions (all exceptions that are diff --git a/packages/core/modules/enclosure_control.py b/packages/core/modules/enclosure_control.py index 52c3f0eb..03b36b05 100644 --- a/packages/core/modules/enclosure_control.py +++ b/packages/core/modules/enclosure_control.py @@ -1,6 +1,6 @@ import time from snap7.exceptions import Snap7Exception # type: ignore -from packages.core.utils import StateInterface, Logger, Astronomy, PLCInterface +from packages.core.utils import StateInterface, Logger, Astronomy, PLCInterface, types logger = Logger(origin="enclosure-control") @@ -18,7 +18,7 @@ class CoverError(Exception): class MotorFailedError(Exception): pass - def __init__(self, initial_config: dict): + def __init__(self, initial_config: types.ConfigDict): self.config = initial_config self.initialized = False self.last_plc_connection_time = time.time() @@ -38,7 +38,7 @@ def __initialize(self) -> None: 
self.last_cycle_automation_status = 0 self.initialized = True - def run(self, new_config: dict) -> None: + def run(self, new_config: types.ConfigDict) -> None: self.config = new_config if self.config["tum_plc"] is None: diff --git a/packages/core/modules/measurement_conditions.py b/packages/core/modules/measurement_conditions.py index e07f3e2f..e37d4c69 100644 --- a/packages/core/modules/measurement_conditions.py +++ b/packages/core/modules/measurement_conditions.py @@ -1,25 +1,25 @@ import datetime -from packages.core.utils import Astronomy, StateInterface, Logger +from packages.core.utils import Astronomy, StateInterface, Logger, types logger = Logger(origin="measurement-conditions") # TODO: add type annotation -def get_times_from_tuples( - triggers: dict, -) -> tuple[datetime.time, datetime.time, datetime.time]: +def is_time_trigger_active( + config: types.ConfigDict, +) -> bool: now = datetime.datetime.now() current_time = datetime.time(now.hour, now.minute, now.second) - start_time = datetime.time(**triggers["start_time"]) - end_time = datetime.time(**triggers["stop_time"]) - return current_time, start_time, end_time + start_time = datetime.time(**config["measurement_triggers"]["start_time"]) + end_time = datetime.time(**config["measurement_triggers"]["stop_time"]) + return (current_time > start_time) and (current_time < end_time) class MeasurementConditions: - def __init__(self, initial_config: dict) -> None: + def __init__(self, initial_config: types.ConfigDict) -> None: self._CONFIG = initial_config - def run(self, new_config: dict) -> None: + def run(self, new_config: types.ConfigDict) -> None: self._CONFIG = new_config if self._CONFIG["general"]["test_mode"]: logger.debug("Skipping MeasurementConditions in test mode") @@ -82,8 +82,7 @@ def _get_automatic_decision(self) -> bool: if triggers["consider_time"]: logger.info("Time as a trigger is considered.") - current_time, start_time, end_time = get_times_from_tuples(triggers) - time_is_valid = (current_time > start_time) and (current_time < end_time) + time_is_valid = is_time_trigger_active(self._CONFIG) logger.debug(f"Time conditions are {'' if time_is_valid else 'not '}fulfilled.") if not time_is_valid: return False diff --git a/packages/core/modules/opus_measurement.py b/packages/core/modules/opus_measurement.py index a832ad05..bb146d92 100644 --- a/packages/core/modules/opus_measurement.py +++ b/packages/core/modules/opus_measurement.py @@ -2,7 +2,7 @@ import sys import time from typing import Any -from packages.core.utils import Logger, StateInterface, Astronomy +from packages.core.utils import Logger, StateInterface, Astronomy, types # these imports are provided by pywin32 @@ -22,7 +22,7 @@ class OpusMeasurement: established DDE connection. 
""" - def __init__(self, initial_config: dict): + def __init__(self, initial_config: types.ConfigDict): self._CONFIG = initial_config self.initialized = False self.current_experiment = self._CONFIG["opus"]["experiment_path"] @@ -41,7 +41,7 @@ def __initialize(self) -> None: self.last_cycle_automation_status = 0 self.initialized = True - def run(self, new_config: dict) -> None: + def run(self, new_config: types.ConfigDict) -> None: self._CONFIG = new_config if self._CONFIG["general"]["test_mode"] or (sys.platform != "win32"): logger.debug("Skipping OpusMeasurement in test mode and on non-windows systems") diff --git a/packages/core/modules/sun_tracking.py b/packages/core/modules/sun_tracking.py index d9b9d732..354a8340 100644 --- a/packages/core/modules/sun_tracking.py +++ b/packages/core/modules/sun_tracking.py @@ -8,20 +8,20 @@ from typing import Any import jdcal # type: ignore import datetime -from packages.core.utils import StateInterface, Logger, OSInterface +from packages.core.utils import StateInterface, Logger, OSInterface, types logger = Logger(origin="sun-tracking") class SunTracking: - def __init__(self, initial_config: dict): + def __init__(self, initial_config: types.ConfigDict): self._CONFIG = initial_config self.last_start_time = time.time() if self._CONFIG["general"]["test_mode"]: return - def run(self, new_config: dict) -> None: + def run(self, new_config: types.ConfigDict) -> None: self._CONFIG = new_config if self._CONFIG["general"]["test_mode"]: logger.debug("Skipping SunTracking in test mode") diff --git a/packages/core/modules/system_checks.py b/packages/core/modules/system_checks.py index 7b2a61ac..8e721db2 100644 --- a/packages/core/modules/system_checks.py +++ b/packages/core/modules/system_checks.py @@ -1,13 +1,13 @@ -from packages.core.utils import Logger, OSInterface, StateInterface +from packages.core.utils import Logger, OSInterface, StateInterface, types logger = Logger(origin="system-checks") class SystemChecks: - def __init__(self, initial_config: dict): + def __init__(self, initial_config: types.ConfigDict): self._CONFIG = initial_config - def run(self, new_config: dict) -> None: + def run(self, new_config: types.ConfigDict) -> None: self._CONFIG = new_config logger.info("Running SystemChecks") diff --git a/packages/core/threads/abstract_thread_base.py b/packages/core/threads/abstract_thread_base.py index 99a7cc91..aa264188 100644 --- a/packages/core/threads/abstract_thread_base.py +++ b/packages/core/threads/abstract_thread_base.py @@ -1,7 +1,7 @@ import abc import threading from typing import Optional -from packages.core.utils.functions.logger import Logger +from packages.core.utils import Logger, types class AbstractThreadBase(abc.ABC): @@ -9,12 +9,12 @@ class AbstractThreadBase(abc.ABC): An abstract base class for thread classes used in PYRA """ - def __init__(self, config: dict, logger_origin: str) -> None: + def __init__(self, config: types.ConfigDict, logger_origin: str) -> None: self.__thread: Optional[threading.Thread] = None self.__logger: Logger = Logger(origin=logger_origin) - self.config: dict = config + self.config: types.ConfigDict = config - def update_thread_state(self, new_config: dict) -> None: + def update_thread_state(self, new_config: types.ConfigDict) -> None: """ Make sure that the thread loop is (not) running, based on config.upload diff --git a/packages/core/threads/helios_thread.py b/packages/core/threads/helios_thread.py index f1c7006d..b434f8ee 100644 --- a/packages/core/threads/helios_thread.py +++ 
b/packages/core/threads/helios_thread.py @@ -11,6 +11,7 @@ RingList, Astronomy, ImageProcessing, + types, ) from .abstract_thread_base import AbstractThreadBase @@ -285,7 +286,7 @@ class HeliosThread(AbstractThreadBase): to the StateInterface. """ - def __init__(self, config: dict) -> None: + def __init__(self, config: types.ConfigDict) -> None: super().__init__(config, "helios") def should_be_running(self) -> bool: diff --git a/packages/core/threads/upload_thread.py b/packages/core/threads/upload_thread.py index b0f17616..d5285119 100644 --- a/packages/core/threads/upload_thread.py +++ b/packages/core/threads/upload_thread.py @@ -29,7 +29,7 @@ class DirectoryUploadClient: the actual upload process. """ - def __init__(self, date_string: str, config: dict) -> None: + def __init__(self, date_string: str, config: types.ConfigDict) -> None: self.connection = fabric.connection.Connection( f"{config['upload']['user']}@{config['upload']['host']}", connect_kwargs={"password": config["upload"]["password"]}, @@ -280,7 +280,7 @@ class UploadThread(AbstractThreadBase): 📁 ... """ - def __init__(self, config: dict) -> None: + def __init__(self, config: types.ConfigDict) -> None: super().__init__(config, "upload") def should_be_running(self) -> bool: diff --git a/packages/core/utils/functions/astronomy.py b/packages/core/utils/functions/astronomy.py index 08b71b78..5fab6748 100644 --- a/packages/core/utils/functions/astronomy.py +++ b/packages/core/utils/functions/astronomy.py @@ -2,6 +2,7 @@ import astropy.coordinates as astropy_coordinates # type: ignore import astropy.time as astropy_time # type: ignore import astropy.units as astropy_units # type: ignore +from .. import types # TODO: pass config via functions instea of indirectly # more code but way simpler @@ -10,7 +11,7 @@ class Astronomy: - CONFIG: Optional[dict] = None + CONFIG: Optional[types.ConfigDict] = None units = astropy_units @staticmethod diff --git a/packages/core/utils/functions/exception_email_client.py b/packages/core/utils/functions/exception_email_client.py index 07fbeb3d..4b5f62fc 100644 --- a/packages/core/utils/functions/exception_email_client.py +++ b/packages/core/utils/functions/exception_email_client.py @@ -5,6 +5,7 @@ from email.mime.multipart import MIMEMultipart import subprocess import traceback +from .. 
import types dir = os.path.dirname PROJECT_DIR = dir(dir(dir(dir(dir(os.path.abspath(__file__)))))) @@ -46,7 +47,7 @@ def get_current_log_lines() -> list[str]: class ExceptionEmailClient: @staticmethod - def _send_email(config: dict, text: str, html: str, subject: str) -> None: + def _send_email(config: types.ConfigDict, text: str, html: str, subject: str) -> None: sender_email = config["error_email"]["sender_address"] sender_password = config["error_email"]["sender_password"] recipients = config["error_email"]["recipients"].replace(" ", "").split(",") @@ -69,7 +70,7 @@ def _send_email(config: dict, text: str, html: str, subject: str) -> None: ) @staticmethod - def handle_resolved_exception(config: dict) -> None: + def handle_resolved_exception(config: types.ConfigDict) -> None: if not config["error_email"]["notify_recipients"]: return @@ -104,7 +105,7 @@ def handle_resolved_exception(config: dict) -> None: ExceptionEmailClient._send_email(config, text, html, subject) @staticmethod - def handle_occured_exception(config: dict, exception: Exception) -> None: + def handle_occured_exception(config: types.ConfigDict, exception: Exception) -> None: if not config["error_email"]["notify_recipients"]: return diff --git a/packages/core/utils/interfaces/config_interface.py b/packages/core/utils/interfaces/config_interface.py index 4384bc72..34980cc8 100644 --- a/packages/core/utils/interfaces/config_interface.py +++ b/packages/core/utils/interfaces/config_interface.py @@ -1,6 +1,6 @@ import json import os -from packages.core.utils import Astronomy, with_filelock +from packages.core.utils import Astronomy, with_filelock, types from .config_validation import ConfigValidation dir = os.path.dirname @@ -13,7 +13,7 @@ class ConfigInterface: @staticmethod @with_filelock(CONFIG_LOCK_PATH) - def read() -> dict: + def read() -> types.ConfigDict: """ Read the contents of the current config.json file. The function will validate its integrity and raises diff --git a/packages/core/utils/interfaces/plc_interface.py b/packages/core/utils/interfaces/plc_interface.py index 17a4454c..3cf7b74b 100644 --- a/packages/core/utils/interfaces/plc_interface.py +++ b/packages/core/utils/interfaces/plc_interface.py @@ -105,13 +105,13 @@ class PLCError(Exception): * set_auto_temperature/_manual_temperature """ - def __init__(self, config: dict) -> None: + def __init__(self, config: types.ConfigDict) -> None: self.config = config self.specification = PLC_SPECIFICATION_VERSIONS[config["tum_plc"]["version"]] # CONNECTION/CLASS MANAGEMENT - def update_config(self, new_config: dict) -> None: + def update_config(self, new_config: types.ConfigDict) -> None: """ Update the internally used config (executed at the) beginning of enclosure-control's run-function. 
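For context on how the new ConfigDict type is meant to be consumed: the pattern used throughout this series (and adopted by ConfigInterface.read in a later patch) is to json.load a file and pass the result through validate_config_dict before treating it as a ConfigDict. The sketch below is illustrative only and is not part of any patch in this series; the config/config.json path is an assumption, since only config/config.default.json appears above.

    import json

    from packages.core.utils import types

    # read the raw JSON object (path assumed for illustration)
    with open("config/config.json", "r") as f:
        raw_config = json.load(f)

    # raises a pydantic.ValidationError if the object does not match the ConfigDict schema
    types.validate_config_dict(raw_config)

    # only after validation is it safe to treat the object as a ConfigDict
    config: types.ConfigDict = raw_config
    print(config["general"]["seconds_per_core_interval"])
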
From 3d58de03a4bd568ee391a968ffadddc4d1d517c7 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Thu, 18 Aug 2022 22:01:54 +0200 Subject: [PATCH 047/132] #96 (14) - add all attributes to config type --- .../core/utils/interfaces/plc_interface.py | 2 +- packages/core/utils/types/config.py | 186 ++++++++++++++++-- 2 files changed, 168 insertions(+), 20 deletions(-) diff --git a/packages/core/utils/interfaces/plc_interface.py b/packages/core/utils/interfaces/plc_interface.py index 3cf7b74b..9cacc6e6 100644 --- a/packages/core/utils/interfaces/plc_interface.py +++ b/packages/core/utils/interfaces/plc_interface.py @@ -4,7 +4,7 @@ import time import os from snap7.exceptions import Snap7Exception # type: ignore -from packages.core.utils import Logger, StateInterface +from packages.core.utils import Logger, StateInterface, types from .plc_specification import PLC_SPECIFICATION_VERSIONS logger = Logger(origin="plc-interface") diff --git a/packages/core/utils/types/config.py b/packages/core/utils/types/config.py index 4f2b9961..93acb0dd 100644 --- a/packages/core/utils/types/config.py +++ b/packages/core/utils/types/config.py @@ -1,29 +1,177 @@ -from typing import Any, Optional, TypedDict +from typing import Any, Literal, Optional, TypedDict import pydantic +_TimeDict = TypedDict("start_time", {"hour": int, "minute": int, "second": int}) +_TimeDictPartial = TypedDict( + "start_time", {"hour": int, "minute": int, "second": int}, total=False +) + + +class _ConfigSubDicts: + @staticmethod + class General(TypedDict): + seconds_per_core_interval: float + test_mode: bool + station_id: str + min_sun_elevation: float + + @staticmethod + class GeneralPartial(TypedDict, total=False): + seconds_per_core_interval: float + test_mode: bool + station_id: str + min_sun_elevation: float + + @staticmethod + class Opus(TypedDict): + em27_ip: str + executable_path: str + experiment_path: str + macro_path: str + username: str + password: str + + @staticmethod + class OpusPartial(TypedDict, total=False): + em27_ip: str + executable_path: str + experiment_path: str + macro_path: str + username: str + password: str + + @staticmethod + class Camtracker(TypedDict): + config_path: str + executable_path: str + learn_az_elev_path: str + sun_intensity_path: str + motor_offset_threshold: str + + @staticmethod + class CamtrackerPartial(TypedDict, total=False): + config_path: str + executable_path: str + learn_az_elev_path: str + sun_intensity_path: str + motor_offset_threshold: str + + @staticmethod + class ErrorEmail(TypedDict): + sender_address: str + sender_password: str + notify_recipients: bool + recipients: str + + @staticmethod + class ErrorEmailPartial(TypedDict, total=False): + sender_address: str + sender_password: str + notify_recipients: bool + recipients: str + + @staticmethod + class MeasurementDecision(TypedDict): + mode: Literal["automatic", "manual", "cli"] + manual_decision_result: bool + cli_decision_result: bool + + @staticmethod + class MeasurementDecisionPartial(TypedDict, total=False): + mode: Literal["automatic", "manual", "cli"] + manual_decision_result: bool + cli_decision_result: bool + + @staticmethod + class MeasurementTriggers(TypedDict): + consider_time: bool + consider_sun_elevation: bool + consider_helios: bool + start_time: _TimeDict + stop_time: _TimeDict + min_sun_elevation: float + + @staticmethod + class MeasurementTriggersPartial(TypedDict, total=False): + consider_time: bool + consider_sun_elevation: bool + consider_helios: bool + start_time: _TimeDict + stop_time: _TimeDict + 
min_sun_elevation: float + + @staticmethod + class TumPlc(TypedDict): + ip: str + version: Literal[0, 1] + controlled_by_user: bool + + @staticmethod + class TumPlcPartial(TypedDict, total=False): + ip: str + version: Literal[0, 1] + controlled_by_user: bool + + @staticmethod + class Helios(TypedDict): + camera_id: int + evaluation_size: int + seconds_per_interval: float + measurement_threshold: float + save_images: bool + + @staticmethod + class HeliosPartial(TypedDict, total=False): + camera_id: int + evaluation_size: int + seconds_per_interval: float + measurement_threshold: float + save_images: bool + + @staticmethod + class Upload(TypedDict): + is_active: bool + host: str + user: str + password: str + src_directory: str + dst_directory: str + remove_src_after_upload: bool + + @staticmethod + class UploadPartial(TypedDict, total=False): + is_active: bool + host: str + user: str + password: str + src_directory: str + dst_directory: str + remove_src_after_upload: bool + + class ConfigDict(TypedDict): - general: dict - opus: dict - camtracker: dict - error_email: dict - measurement_decision: dict - measurement_triggers: dict - tum_plc: Optional[dict] - helios: Optional[dict] - upload: Optional[dict] + general: _ConfigSubDicts.General + opus: _ConfigSubDicts.Opus + camtracker: _ConfigSubDicts.Camtracker + error_email: _ConfigSubDicts.ErrorEmail + measurement_decision: _ConfigSubDicts.MeasurementDecision + measurement_triggers: _ConfigSubDicts.MeasurementTriggers + tum_plc: Optional[_ConfigSubDicts.TumPlc] + helios: Optional[_ConfigSubDicts.Helios] + upload: Optional[_ConfigSubDicts.Upload] class ConfigDictPartial(TypedDict, total=False): - general: dict - opus: dict - camtracker: dict - error_email: dict - measurement_decision: dict - measurement_triggers: dict - tum_plc: Optional[dict] - helios: Optional[dict] - upload: Optional[dict] + general: _ConfigSubDicts.GeneralPartial + opus: _ConfigSubDicts.OpusPartial + camtracker: _ConfigSubDicts.CamtrackerPartial + error_email: _ConfigSubDicts.ErrorEmailPartial + measurement_decision: _ConfigSubDicts.MeasurementDecisionPartial + measurement_triggers: _ConfigSubDicts.MeasurementTriggersPartial + tum_plc: Optional[_ConfigSubDicts.TumPlcPartial] + helios: Optional[_ConfigSubDicts.HeliosPartial] + upload: Optional[_ConfigSubDicts.UploadPartial] def validate_config_dict(o: Any, partial: bool = False) -> None: From db539b1b5af37482d9e6f36aa698d2e608890560 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Thu, 18 Aug 2022 22:25:17 +0200 Subject: [PATCH 048/132] #96 (15) - fix strict typing issues --- packages/core/modules/enclosure_control.py | 10 +++++-- packages/core/modules/opus_measurement.py | 2 +- packages/core/modules/sun_tracking.py | 4 +-- packages/core/threads/helios_thread.py | 11 +++++--- packages/core/threads/upload_thread.py | 16 ++++++----- .../core/utils/functions/image_processing.py | 5 ++-- .../core/utils/interfaces/config_interface.py | 9 ++++--- .../core/utils/interfaces/plc_interface.py | 27 +++++++++---------- .../utils/interfaces/plc_specification.py | 4 +-- packages/core/utils/types/config.py | 22 +++++++-------- 10 files changed, 64 insertions(+), 46 deletions(-) diff --git a/packages/core/modules/enclosure_control.py b/packages/core/modules/enclosure_control.py index 03b36b05..4470a896 100644 --- a/packages/core/modules/enclosure_control.py +++ b/packages/core/modules/enclosure_control.py @@ -30,7 +30,11 @@ def __init__(self, initial_config: types.ConfigDict): return def __initialize(self) -> None: - self.plc_interface 
= PLCInterface(self.config) + assert self.config["tum_plc"] is not None + + self.plc_interface = PLCInterface( + self.config["tum_plc"]["version"], self.config["tum_plc"]["ip"] + ) self.plc_interface.connect() self.plc_interface.set_auto_temperature(True) self.plc_state = self.plc_interface.read() @@ -55,7 +59,9 @@ def run(self, new_config: types.ConfigDict) -> None: if not self.initialized: self.__initialize() else: - self.plc_interface.update_config(self.config) + self.plc_interface.update_config( + self.config["tum_plc"]["version"], self.config["tum_plc"]["ip"] + ) self.plc_interface.connect() # TODO: possibly end function if plc is not connected diff --git a/packages/core/modules/opus_measurement.py b/packages/core/modules/opus_measurement.py index bb146d92..898dfff2 100644 --- a/packages/core/modules/opus_measurement.py +++ b/packages/core/modules/opus_measurement.py @@ -223,7 +223,7 @@ def __is_em27_responsive(self) -> bool: False -> Not Connected""" assert sys.platform == "win32" - response = os.system("ping -n 1 " + self._CONFIG["em27"]["ip"]) + response = os.system("ping -n 1 " + self._CONFIG["opus"]["em27_ip"]) return response == 0 def start_opus(self) -> None: diff --git a/packages/core/modules/sun_tracking.py b/packages/core/modules/sun_tracking.py index 354a8340..361a5df6 100644 --- a/packages/core/modules/sun_tracking.py +++ b/packages/core/modules/sun_tracking.py @@ -180,8 +180,8 @@ def validate_tracker_position(self) -> bool: # fails if file integrity is broken tracker_status = self.read_ct_log_learn_az_elev() - elev_offset = tracker_status[3] - az_offeset = tracker_status[4] + elev_offset: float = tracker_status[3] + az_offeset: float = tracker_status[4] threshold: float = self._CONFIG["camtracker"]["motor_offset_threshold"] return (abs(elev_offset) <= threshold) and (abs(az_offeset) <= threshold) diff --git a/packages/core/threads/helios_thread.py b/packages/core/threads/helios_thread.py index b434f8ee..28cfc67a 100644 --- a/packages/core/threads/helios_thread.py +++ b/packages/core/threads/helios_thread.py @@ -309,19 +309,24 @@ def main(self, infinite_loop: bool = True, headless: bool = False) -> None: _CONFIG = ConfigInterface.read() self.config = _CONFIG + # Check for termination + if (_CONFIG["helios"] is None) or (not self.should_be_running()): + return + status_history = RingList(_CONFIG["helios"]["evaluation_size"]) current_state = None repeated_camera_error_count = 0 while True: + start_time = time.time() + _CONFIG = ConfigInterface.read() + # Check for termination - if not self.should_be_running(): + if (_CONFIG["helios"] is None) or (not self.should_be_running()): return try: - start_time = time.time() - _CONFIG = ConfigInterface.read() # init camera connection if _Helios.cam is None: diff --git a/packages/core/threads/upload_thread.py b/packages/core/threads/upload_thread.py index d5285119..df745a9e 100644 --- a/packages/core/threads/upload_thread.py +++ b/packages/core/threads/upload_thread.py @@ -30,6 +30,8 @@ class DirectoryUploadClient: """ def __init__(self, date_string: str, config: types.ConfigDict) -> None: + assert config["upload"] is not None + self.connection = fabric.connection.Connection( f"{config['upload']['user']}@{config['upload']['host']}", connect_kwargs={"password": config["upload"]["password"]}, @@ -37,7 +39,7 @@ def __init__(self, date_string: str, config: types.ConfigDict) -> None: ) self.transfer_process = fabric.transfer.Transfer(self.connection) - self.config = config + self.upload_config = config["upload"] self.date_string = 
date_string self.src_dir_path = os.path.join(config["upload"]["src_directory"], date_string) @@ -83,7 +85,7 @@ def __get_remote_directory_checksum(self) -> str: """ local_script_path = os.path.join(PROJECT_DIR, "scripts", "get_upload_dir_checksum.py") remote_script_path = ( - self.config["upload"]["src_directory"] + "/get_upload_dir_checksum.py" + self.upload_config["src_directory"] + "/get_upload_dir_checksum.py" ) self.transfer_process.put(local_script_path, remote_script_path) @@ -294,11 +296,13 @@ def should_be_running(self) -> bool: def main(self) -> None: """Main entrypoint of the thread""" while True: - config = ConfigInterface.read() - self.config = config + self.config = ConfigInterface.read() + + if self.config["upload"] is None: + return src_dates_strings = DirectoryUploadClient.get_directories_to_be_uploaded( - config["upload"]["src_directory"] + self.config["upload"]["src_directory"] ) for src_date_string in src_dates_strings: @@ -307,7 +311,7 @@ def main(self) -> None: return try: - client = DirectoryUploadClient(src_date_string, config) + client = DirectoryUploadClient(src_date_string, self.config) client.run() except TimeoutError as e: logger.error(f"could not reach host (uploading {src_date_string}): {e}") diff --git a/packages/core/utils/functions/image_processing.py b/packages/core/utils/functions/image_processing.py index bdfce44f..16f3aff8 100644 --- a/packages/core/utils/functions/image_processing.py +++ b/packages/core/utils/functions/image_processing.py @@ -1,3 +1,4 @@ +from typing import Iterable import cv2 as cv # type: ignore import numpy as np @@ -28,10 +29,10 @@ def get_circle_mask( return (np.abs(np.hypot(center_x - x, center_y - y)) < radius).astype(np.uint8) @staticmethod - def moving_average(xs: list[float], n: int = 3) -> np.ndarray: + def moving_average(xs: list[float], n: int = 3) -> list[float]: ret = np.cumsum(xs) ret[n:] = ret[n:] - ret[:-n] - return ret[n - 1 :] / n + return list(ret[n - 1 :] / n) @staticmethod def get_binary_mask(frame: cv.Mat) -> cv.Mat: diff --git a/packages/core/utils/interfaces/config_interface.py b/packages/core/utils/interfaces/config_interface.py index 34980cc8..0030dc41 100644 --- a/packages/core/utils/interfaces/config_interface.py +++ b/packages/core/utils/interfaces/config_interface.py @@ -1,5 +1,6 @@ import json import os +from typing import Any from packages.core.utils import Astronomy, with_filelock, types from .config_validation import ConfigValidation @@ -22,7 +23,9 @@ def read() -> types.ConfigDict: file_is_valid, validation_exception = ConfigValidation.check_current_config_file() assert file_is_valid, str(validation_exception) with open(CONFIG_FILE_PATH, "r") as f: - _CONFIG = json.load(f) + new_object: Any = json.load(f) + types.validate_config_dict(new_object) + config: types.ConfigDict = new_object - Astronomy.CONFIG = _CONFIG - return _CONFIG + Astronomy.CONFIG = config + return config diff --git a/packages/core/utils/interfaces/plc_interface.py b/packages/core/utils/interfaces/plc_interface.py index 9cacc6e6..69a3e70c 100644 --- a/packages/core/utils/interfaces/plc_interface.py +++ b/packages/core/utils/interfaces/plc_interface.py @@ -1,5 +1,5 @@ import dataclasses -from typing import Any, Optional +from typing import Any, Literal, Optional import snap7 # type: ignore import time import os @@ -105,24 +105,26 @@ class PLCError(Exception): * set_auto_temperature/_manual_temperature """ - def __init__(self, config: types.ConfigDict) -> None: - self.config = config - self.specification = 
PLC_SPECIFICATION_VERSIONS[config["tum_plc"]["version"]] + def __init__(self, plc_version: Literal[1, 2], plc_ip: str) -> None: + self.plc_version = plc_version + self.plc_ip = plc_ip + self.specification = PLC_SPECIFICATION_VERSIONS[plc_version] # CONNECTION/CLASS MANAGEMENT - def update_config(self, new_config: types.ConfigDict) -> None: + def update_config(self, new_plc_version: Literal[1, 2], new_plc_ip: str) -> None: """ Update the internally used config (executed at the) beginning of enclosure-control's run-function. Reconnecting to PLC, when IP has changed. """ - if self.config["tum_plc"]["ip"] != new_config["tum_plc"]["ip"]: + if (self.plc_version != new_plc_version) or (self.plc_ip != new_plc_ip): logger.debug("PLC ip has changed, reconnecting now") self.disconnect() + self.plc_version = new_plc_version + self.plc_ip = new_plc_ip self.connect() - self.config = new_config def connect(self) -> None: """ @@ -136,7 +138,7 @@ def connect(self) -> None: if (time.time() - start_time) > 30: raise Snap7Exception("Connect to PLC timed out.") - self.plc.connect(self.config["tum_plc"]["ip"], 0, 1) + self.plc.connect(self.plc_ip, 0, 1) time.sleep(0.2) if self.plc.get_connected(): @@ -164,7 +166,7 @@ def disconnect(self) -> None: def is_responsive(self) -> bool: """Pings the PLC""" - return os.system("ping -n 1 " + self.config["tum_plc"]["ip"]) == 0 + return os.system("ping -n 1 " + self.plc_ip) == 0 # DIRECT READ FUNCTIONS @@ -199,10 +201,7 @@ def read(self) -> PLCState: # TODO: self.plc.read_multi_vars() plc_db_content: dict[int, int] = {} - if self.config["tum_plc"]["version"] == 1: - plc_db_size = {3: 6, 8: 26, 25: 10} - else: - plc_db_size = {3: 5, 6: 17, 8: 25} + plc_db_size = {1: {3: 6, 8: 26, 25: 10}, 2: {3: 5, 6: 17, 8: 25}}[self.plc_version] for db_index, db_size in plc_db_size.items(): plc_db_content[db_index] = self.plc.db_read(db_index, 0, db_size) @@ -415,7 +414,7 @@ def set_manual_temperature(self, new_state: bool) -> None: def reset(self) -> None: """Does not check, whether the value has been changed""" - if self.config["tum_plc"]["version"] == 1: + if self.plc_version == 1: self.__write_bool(self.specification.control.reset, False) else: self.__write_bool(self.specification.control.reset, True) diff --git a/packages/core/utils/interfaces/plc_specification.py b/packages/core/utils/interfaces/plc_specification.py index 936c4370..e5ed631f 100644 --- a/packages/core/utils/interfaces/plc_specification.py +++ b/packages/core/utils/interfaces/plc_specification.py @@ -1,5 +1,5 @@ from dataclasses import dataclass -from typing import Optional +from typing import Literal, Optional # TODO: use typeddict @@ -67,7 +67,7 @@ class PLCSpecification: # these are the pins used on the TUM-PLC for all functionality -PLC_SPECIFICATION_VERSIONS: dict[int, PLCSpecification] = { +PLC_SPECIFICATION_VERSIONS: dict[Literal[1, 2], PLCSpecification] = { 1: PLCSpecification( actors=PLCActorsSpecification( current_angle=[25, 6, 2], diff --git a/packages/core/utils/types/config.py b/packages/core/utils/types/config.py index 93acb0dd..0ac8525a 100644 --- a/packages/core/utils/types/config.py +++ b/packages/core/utils/types/config.py @@ -2,9 +2,9 @@ import pydantic -_TimeDict = TypedDict("start_time", {"hour": int, "minute": int, "second": int}) -_TimeDictPartial = TypedDict( - "start_time", {"hour": int, "minute": int, "second": int}, total=False +TimeDict = TypedDict("TimeDict", {"hour": int, "minute": int, "second": int}) +TimeDictPartial = TypedDict( + "TimeDictPartial", {"hour": int, "minute": int, 
"second": int}, total=False ) @@ -47,7 +47,7 @@ class Camtracker(TypedDict): executable_path: str learn_az_elev_path: str sun_intensity_path: str - motor_offset_threshold: str + motor_offset_threshold: float @staticmethod class CamtrackerPartial(TypedDict, total=False): @@ -55,7 +55,7 @@ class CamtrackerPartial(TypedDict, total=False): executable_path: str learn_az_elev_path: str sun_intensity_path: str - motor_offset_threshold: str + motor_offset_threshold: float @staticmethod class ErrorEmail(TypedDict): @@ -88,8 +88,8 @@ class MeasurementTriggers(TypedDict): consider_time: bool consider_sun_elevation: bool consider_helios: bool - start_time: _TimeDict - stop_time: _TimeDict + start_time: TimeDict + stop_time: TimeDict min_sun_elevation: float @staticmethod @@ -97,20 +97,20 @@ class MeasurementTriggersPartial(TypedDict, total=False): consider_time: bool consider_sun_elevation: bool consider_helios: bool - start_time: _TimeDict - stop_time: _TimeDict + start_time: TimeDictPartial + stop_time: TimeDictPartial min_sun_elevation: float @staticmethod class TumPlc(TypedDict): ip: str - version: Literal[0, 1] + version: Literal[1, 2] controlled_by_user: bool @staticmethod class TumPlcPartial(TypedDict, total=False): ip: str - version: Literal[0, 1] + version: Literal[1, 2] controlled_by_user: bool @staticmethod From f70cc550c077b4e464d94988240bbb7d91d17d65 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Thu, 18 Aug 2022 22:49:58 +0200 Subject: [PATCH 049/132] #96 (16) - fix all strict typing issues wohoo! --- packages/core/modules/enclosure_control.py | 28 +-- .../core/utils/interfaces/plc_interface.py | 195 +++++++----------- .../core/utils/interfaces/state_interface.py | 2 +- packages/core/utils/types/__init__.py | 2 + packages/core/utils/types/plc_state.py | 105 ++++++++++ packages/core/utils/types/state.py | 8 +- 6 files changed, 205 insertions(+), 135 deletions(-) create mode 100644 packages/core/utils/types/plc_state.py diff --git a/packages/core/modules/enclosure_control.py b/packages/core/modules/enclosure_control.py index 4470a896..aa8285be 100644 --- a/packages/core/modules/enclosure_control.py +++ b/packages/core/modules/enclosure_control.py @@ -74,7 +74,7 @@ def run(self, new_config: types.ConfigDict) -> None: # read current state of actors and sensors in enclosure logger.info("New continuous readings.") - StateInterface.update({"enclosure_plc_readings": self.plc_state.to_dict()}) + StateInterface.update({"enclosure_plc_readings": self.plc_state}) if self.config["tum_plc"]["controlled_by_user"]: logger.debug( @@ -85,7 +85,7 @@ def run(self, new_config: types.ConfigDict) -> None: # dawn/dusk detection: powerup/down spectrometer self.auto_set_power_spectrometer() - if self.plc_state.state.motor_failed: + if self.plc_state["state"]["motor_failed"]: raise EnclosureControl.MotorFailedError( "URGENT: stop all actions, check cover in person" ) @@ -133,7 +133,7 @@ def move_cover(self, value: int) -> None: logger.debug(f"Received request to move cover to position {value} degrees.") # rain check before moving cover. 
PLC will deny cover requests during rain anyway - if self.plc_state.state.rain: + if self.plc_state["state"]["rain"]: logger.debug("Denied to move cover due to rain detected.") else: self.plc_interface.set_manual_control(True) @@ -144,7 +144,7 @@ def force_cover_close(self) -> None: if not self.initialized: self.__initialize() - if self.plc_state.state.reset_needed: + if self.plc_state["state"]["reset_needed"]: self.plc_interface.reset() self.plc_interface.set_sync_to_tracker(False) @@ -185,10 +185,10 @@ def auto_set_power_spectrometer(self) -> None: if current_sun_elevation is not None: sun_is_above_minimum = current_sun_elevation >= min_power_elevation - if sun_is_above_minimum and (not self.plc_state.power.spectrometer): + if sun_is_above_minimum and (not self.plc_state["power"]["spectrometer"]): self.plc_interface.set_power_spectrometer(True) logger.info("Powering up the spectrometer.") - if (not sun_is_above_minimum) and self.plc_state.power.spectrometer: + if (not sun_is_above_minimum) and self.plc_state["power"]["spectrometer"]: self.plc_interface.set_power_spectrometer(False) logger.info("Powering down the spectrometer.") @@ -196,10 +196,10 @@ def sync_cover_to_measurement_status(self) -> None: if self.last_cycle_automation_status != self.measurements_should_be_running: if self.measurements_should_be_running: # flank change 0 -> 1: set cover mode: sync to tracker - if self.plc_state.state.reset_needed: + if self.plc_state["state"]["reset_needed"]: self.plc_interface.reset() time.sleep(10) - if not self.plc_state.state.rain: + if not self.plc_state["state"]["rain"]: self.plc_interface.set_sync_to_tracker(True) logger.info("Syncing Cover to Tracker.") else: @@ -210,20 +210,20 @@ def sync_cover_to_measurement_status(self) -> None: self.wait_for_cover_closing(throw_error=False) def verify_cover_position(self) -> None: - if (not self.measurements_should_be_running) & (not self.plc_state.state.rain): - if not self.plc_state.state.cover_closed: + if (not self.measurements_should_be_running) & (not self.plc_state["state"]["rain"]): + if not self.plc_state["state"]["cover_closed"]: logger.info("Cover is still open. 
Trying to close again.") self.force_cover_close() self.wait_for_cover_closing() def verify_cover_sync(self) -> None: if self.measurements_should_be_running and ( - not self.plc_state.control.sync_to_tracker + not self.plc_state["control"]["sync_to_tracker"] ): logger.debug("Set sync to tracker to True to match measurement status.") self.plc_interface.set_sync_to_tracker(True) - if ( - not self.measurements_should_be_running - ) and self.plc_state.control.sync_to_tracker: + if (not self.measurements_should_be_running) and self.plc_state["control"][ + "sync_to_tracker" + ]: logger.debug("Set sync to tracker to False to match measurement status.") self.plc_interface.set_sync_to_tracker(False) diff --git a/packages/core/utils/interfaces/plc_interface.py b/packages/core/utils/interfaces/plc_interface.py index 69a3e70c..dd490649 100644 --- a/packages/core/utils/interfaces/plc_interface.py +++ b/packages/core/utils/interfaces/plc_interface.py @@ -11,85 +11,44 @@ dir = os.path.dirname PROJECT_DIR = dir(dir(dir(dir(dir(os.path.abspath(__file__)))))) - -# TODO: possibly rewrite this using typeddict - - -@dataclasses.dataclass -class PLCActorsState: - current_angle: Optional[int] = None - fan_speed: Optional[int] = None - - -@dataclasses.dataclass -class PLCControlState: - auto_temp_mode: Optional[bool] = None - manual_control: Optional[bool] = None - manual_temp_mode: Optional[bool] = None - sync_to_tracker: Optional[bool] = None - - -@dataclasses.dataclass -class PLCSensorsState: - humidity: Optional[int] = None - temperature: Optional[int] = None - - -@dataclasses.dataclass -class PLCStateState: - cover_closed: Optional[bool] = None - motor_failed: Optional[bool] = None - rain: Optional[bool] = None - reset_needed: Optional[bool] = None - ups_alert: Optional[bool] = None - - -@dataclasses.dataclass -class PLCPowerState: - camera: Optional[bool] = None - computer: Optional[bool] = None - heater: Optional[bool] = None - router: Optional[bool] = None - spectrometer: Optional[bool] = None - - -@dataclasses.dataclass -class PLCConnectionsState: - camera: Optional[bool] = None - computer: Optional[bool] = None - heater: Optional[bool] = None - router: Optional[bool] = None - spectrometer: Optional[bool] = None - - -@dataclasses.dataclass -class PLCState: - actors: PLCActorsState - control: PLCControlState - sensors: PLCSensorsState - state: PLCStateState - power: PLCPowerState - connections: PLCConnectionsState - - def to_dict(self) -> dict[str, Any]: - out = {} - for field in dataclasses.fields(self): - field_value = getattr(self, field.name) - if field_value is not None: - field_value = getattr(self, field.name).__dict__ - out[field.name] = field_value - return out - - # used when initializing the state.json file -EMPTY_PLC_STATE = PLCState( - actors=PLCActorsState(), - control=PLCControlState(), - sensors=PLCSensorsState(), - state=PLCStateState(), - power=PLCPowerState(), - connections=PLCConnectionsState(), -) +EMPTY_PLC_STATE: types.PlcStateDict = { + "actors": { + "fan_speed": None, + "current_angle": None, + }, + "control": { + "auto_temp_mode": None, + "manual_control": None, + "manual_temp_mode": None, + "sync_to_tracker": None, + }, + "sensors": { + "humidity": None, + "temperature": None, + }, + "state": { + "cover_closed": None, + "motor_failed": None, + "rain": None, + "reset_needed": None, + "ups_alert": None, + }, + "power": { + "camera": None, + "computer": None, + "heater": None, + "router": None, + "spectrometer": None, + }, + "connections": { + "camera": None, + "computer": None, + 
"heater": None, + "router": None, + "spectrometer": None, + }, +} class PLCInterface: @@ -193,7 +152,7 @@ def get_cover_angle(self) -> int: # BULK READ - def read(self) -> PLCState: + def read(self) -> types.PlcStateDict: """ Read the whole state of the PLC """ @@ -221,43 +180,43 @@ def _get_bool(spec: Optional[list[int]]) -> Optional[bool]: s = self.specification - return PLCState( - actors=PLCActorsState( - fan_speed=_get_int(s.actors.fan_speed), - current_angle=_get_int(s.actors.current_angle), - ), - control=PLCControlState( - auto_temp_mode=_get_bool(s.control.auto_temp_mode), - manual_control=_get_bool(s.control.manual_control), - manual_temp_mode=_get_bool(s.control.manual_temp_mode), - sync_to_tracker=_get_bool(s.control.sync_to_tracker), - ), - sensors=PLCSensorsState( - humidity=_get_int(s.sensors.humidity), - temperature=_get_int(s.sensors.temperature), - ), - state=PLCStateState( - cover_closed=_get_bool(s.state.cover_closed), - motor_failed=_get_bool(s.state.motor_failed), - rain=_get_bool(s.state.rain), - reset_needed=_get_bool(s.state.reset_needed), - ups_alert=_get_bool(s.state.ups_alert), - ), - power=PLCPowerState( - camera=_get_bool(s.power.camera), - computer=_get_bool(s.power.computer), - heater=_get_bool(s.power.heater), - router=_get_bool(s.power.router), - spectrometer=_get_bool(s.power.spectrometer), - ), - connections=PLCConnectionsState( - camera=_get_bool(s.connections.camera), - computer=_get_bool(s.connections.computer), - heater=_get_bool(s.connections.heater), - router=_get_bool(s.connections.router), - spectrometer=_get_bool(s.connections.spectrometer), - ), - ) + return { + "actors": { + "fan_speed": _get_int(s.actors.fan_speed), + "current_angle": _get_int(s.actors.current_angle), + }, + "control": { + "auto_temp_mode": _get_bool(s.control.auto_temp_mode), + "manual_control": _get_bool(s.control.manual_control), + "manual_temp_mode": _get_bool(s.control.manual_temp_mode), + "sync_to_tracker": _get_bool(s.control.sync_to_tracker), + }, + "sensors": { + "humidity": _get_int(s.sensors.humidity), + "temperature": _get_int(s.sensors.temperature), + }, + "state": { + "cover_closed": _get_bool(s.state.cover_closed), + "motor_failed": _get_bool(s.state.motor_failed), + "rain": _get_bool(s.state.rain), + "reset_needed": _get_bool(s.state.reset_needed), + "ups_alert": _get_bool(s.state.ups_alert), + }, + "power": { + "camera": _get_bool(s.power.camera), + "computer": _get_bool(s.power.computer), + "heater": _get_bool(s.power.heater), + "router": _get_bool(s.power.router), + "spectrometer": _get_bool(s.power.spectrometer), + }, + "connections": { + "camera": _get_bool(s.connections.camera), + "computer": _get_bool(s.connections.computer), + "heater": _get_bool(s.connections.heater), + "router": _get_bool(s.connections.router), + "spectrometer": _get_bool(s.connections.spectrometer), + }, + } # LOW LEVEL READ FUNCTIONS @@ -322,7 +281,9 @@ def __write_bool(self, action: list[int], value: bool) -> None: # PLC.POWER SETTERS - def __update_bool(self, new_state: bool, spec: list[int], partial_plc_state: dict) -> None: + def __update_bool( + self, new_state: bool, spec: list[int], partial_plc_state: types.PlcStateDictPartial + ) -> None: """ 1. low-level direct-write new_state to PLC according to spec 2. 
low-level direct-read of plc's value according to spec diff --git a/packages/core/utils/interfaces/state_interface.py b/packages/core/utils/interfaces/state_interface.py index fe094525..a615a068 100644 --- a/packages/core/utils/interfaces/state_interface.py +++ b/packages/core/utils/interfaces/state_interface.py @@ -20,7 +20,7 @@ EMPTY_STATE_OBJECT: types.StateDict = { "helios_indicates_good_conditions": None, "measurements_should_be_running": False, - "enclosure_plc_readings": EMPTY_PLC_STATE.to_dict(), + "enclosure_plc_readings": EMPTY_PLC_STATE, "os_state": { "cpu_usage": None, "memory_usage": None, diff --git a/packages/core/utils/types/__init__.py b/packages/core/utils/types/__init__.py index 9a4204f8..4fd0ce69 100644 --- a/packages/core/utils/types/__init__.py +++ b/packages/core/utils/types/__init__.py @@ -4,6 +4,8 @@ from .persistent_state import PersistentStateDict, PersistentStateDictPartial from .persistent_state import validate_persistent_state_dict +from .plc_state import PlcStateDict, PlcStateDictPartial + from .state import StateDict, StateDictPartial from .state import validate_state_dict diff --git a/packages/core/utils/types/plc_state.py b/packages/core/utils/types/plc_state.py new file mode 100644 index 00000000..f215d7f5 --- /dev/null +++ b/packages/core/utils/types/plc_state.py @@ -0,0 +1,105 @@ +from typing import Optional, TypedDict + + +class _PlcStateDictActors(TypedDict): + fan_speed: Optional[int] + current_angle: Optional[int] + + +class _PlcStateDictActorsPartial(TypedDict, total=False): + camera: Optional[bool] + fan_speed: Optional[int] + current_angle: Optional[int] + + +class _PlcStateDictControl(TypedDict): + auto_temp_mode: Optional[bool] + manual_control: Optional[bool] + manual_temp_mode: Optional[bool] + sync_to_tracker: Optional[bool] + + +class _PlcStateDictControlPartial(TypedDict, total=False): + camera: Optional[bool] + auto_temp_mode: Optional[bool] + manual_control: Optional[bool] + manual_temp_mode: Optional[bool] + sync_to_tracker: Optional[bool] + + +class _PlcStateDictSensors(TypedDict): + humidity: Optional[int] + temperature: Optional[int] + + +class _PlcStateDictSensorsPartial(TypedDict, total=False): + camera: Optional[bool] + humidity: Optional[int] + temperature: Optional[int] + + +class _PlcStateDictState(TypedDict): + cover_closed: Optional[bool] + motor_failed: Optional[bool] + rain: Optional[bool] + reset_needed: Optional[bool] + ups_alert: Optional[bool] + + +class _PlcStateDictStatePartial(TypedDict, total=False): + camera: Optional[bool] + cover_closed: Optional[bool] + motor_failed: Optional[bool] + rain: Optional[bool] + reset_needed: Optional[bool] + ups_alert: Optional[bool] + + +class _PlcStateDictPower(TypedDict): + camera: Optional[bool] + computer: Optional[bool] + heater: Optional[bool] + router: Optional[bool] + spectrometer: Optional[bool] + + +class _PlcStateDictPowerPartial(TypedDict, total=False): + camera: Optional[bool] + computer: Optional[bool] + heater: Optional[bool] + router: Optional[bool] + spectrometer: Optional[bool] + + +class _PlcStateDictConnections(TypedDict): + camera: Optional[bool] + computer: Optional[bool] + heater: Optional[bool] + router: Optional[bool] + spectrometer: Optional[bool] + + +class _PlcStateDictConnectionsPartial(TypedDict, total=False): + camera: Optional[bool] + computer: Optional[bool] + heater: Optional[bool] + router: Optional[bool] + spectrometer: Optional[bool] + + +class PlcStateDict(TypedDict): + actors: _PlcStateDictActors + control: _PlcStateDictControl + sensors: 
_PlcStateDictSensors + state: _PlcStateDictState + power: _PlcStateDictPower + connections: _PlcStateDictConnections + + +class PlcStateDictPartial(TypedDict, total=False): + actors: _PlcStateDictActorsPartial + control: _PlcStateDictControlPartial + sensors: _PlcStateDictSensorsPartial + state: _PlcStateDictStatePartial + power: _PlcStateDictPowerPartial + connections: _PlcStateDictConnectionsPartial diff --git a/packages/core/utils/types/state.py b/packages/core/utils/types/state.py index 2537be3d..5576b9ba 100644 --- a/packages/core/utils/types/state.py +++ b/packages/core/utils/types/state.py @@ -1,6 +1,8 @@ -from typing import Any, Optional, TypedDict +from typing import Any, Union, Optional, TypedDict import pydantic +from . import PlcStateDict, PlcStateDictPartial + class _OSStateDict(TypedDict): cpu_usage: Optional[list[float]] @@ -12,14 +14,14 @@ class _OSStateDict(TypedDict): class StateDict(TypedDict): helios_indicates_good_conditions: Optional[bool] measurements_should_be_running: bool - enclosure_plc_readings: dict + enclosure_plc_readings: PlcStateDict os_state: _OSStateDict class StateDictPartial(TypedDict, total=False): helios_indicates_good_conditions: Optional[int] measurements_should_be_running: bool - enclosure_plc_readings: dict + enclosure_plc_readings: Union[PlcStateDict, PlcStateDictPartial] os_state: _OSStateDict From 4cb1724c873ccac9784b7322082800c8de8e2889 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Fri, 19 Aug 2022 15:17:13 +0200 Subject: [PATCH 050/132] #96 (17) - implement cerberus checks in new config validation --- .../core/utils/interfaces/config_interface.py | 5 +- .../core/utils/interfaces/os_interface.py | 3 - packages/core/utils/types/config.py | 79 ++++++++++++++++++- 3 files changed, 79 insertions(+), 8 deletions(-) diff --git a/packages/core/utils/interfaces/config_interface.py b/packages/core/utils/interfaces/config_interface.py index 0030dc41..66a721a1 100644 --- a/packages/core/utils/interfaces/config_interface.py +++ b/packages/core/utils/interfaces/config_interface.py @@ -2,7 +2,6 @@ import os from typing import Any from packages.core.utils import Astronomy, with_filelock, types -from .config_validation import ConfigValidation dir = os.path.dirname PROJECT_DIR = dir(dir(dir(dir(dir(os.path.abspath(__file__)))))) @@ -18,10 +17,8 @@ def read() -> types.ConfigDict: """ Read the contents of the current config.json file. The function will validate its integrity and raises - an AssertionError if the file is not valid. + an Exception if the file is not valid. 
""" - file_is_valid, validation_exception = ConfigValidation.check_current_config_file() - assert file_is_valid, str(validation_exception) with open(CONFIG_FILE_PATH, "r") as f: new_object: Any = json.load(f) types.validate_config_dict(new_object) diff --git a/packages/core/utils/interfaces/os_interface.py b/packages/core/utils/interfaces/os_interface.py index 7017122f..7302b18c 100644 --- a/packages/core/utils/interfaces/os_interface.py +++ b/packages/core/utils/interfaces/os_interface.py @@ -120,6 +120,3 @@ def get_process_status( return p.status() return "not-found" - - -OSInterface.StorageError diff --git a/packages/core/utils/types/config.py b/packages/core/utils/types/config.py index 0ac8525a..0b40312e 100644 --- a/packages/core/utils/types/config.py +++ b/packages/core/utils/types/config.py @@ -1,4 +1,6 @@ -from typing import Any, Literal, Optional, TypedDict +from ast import Call +import os +from typing import Any, Callable, Literal, Optional, TypedDict, Union import pydantic @@ -174,6 +176,13 @@ class ConfigDictPartial(TypedDict, total=False): upload: Optional[_ConfigSubDicts.UploadPartial] +class ValidationError(Exception): + """ + Will be raised in any custom checks on config dicts + have failed: file-existence, ip-format, min/max-range + """ + + def validate_config_dict(o: Any, partial: bool = False) -> None: """ Check, whether a given object is a correct ConfigDict @@ -187,6 +196,74 @@ def validate_config_dict(o: Any, partial: bool = False) -> None: else: _ValidationModel(regular=o) + new_object: ConfigDict = o + + def get_nested_dict_property(property_path: str) -> Any: + prop = new_object + for key in property_path.split("."): + prop = prop[key] # type: ignore + return prop + + def assert_min_max(property_path: str, min_value: float, max_value: float) -> None: + prop: float = get_nested_dict_property(property_path) + error_message = f"config.{property_path} must be in range [{min_value}, {max_value}]" + assert prop >= min_value, error_message + assert prop >= max_value, error_message + + def assert_file_path(property_path: str) -> None: + prop: str = get_nested_dict_property(property_path) + assert os.path.isfile(prop), f"config.{property_path} is not a file" + + def assert_ip_address(property_path: str) -> None: + prop: str = get_nested_dict_property(property_path) + error_message = f"config.{property_path} is not a valid ip address" + values: list[str] = prop.split(".") + assert len(values) == 4, error_message + assert all([x.isnumeric() for x in values]), error_message + assert all([0 <= int(x) <= 255 for x in values]), error_message + + assertions: list[Callable[[], None]] = [ + lambda: assert_min_max("general.seconds_per_core_interval", 5, 600), + lambda: assert_min_max("general.min_sun_elevation", 0, 90), + lambda: assert_ip_address("opus.em27_ip"), + lambda: assert_file_path("opus.executable_path"), + lambda: assert_file_path("opus.experiment_path"), + lambda: assert_file_path("opus.macro_path"), + lambda: assert_file_path("camtracker.config_path"), + lambda: assert_file_path("camtracker.executable_path"), + lambda: assert_file_path("camtracker.learn_az_elev_path"), + lambda: assert_file_path("camtracker.sun_intensity_path"), + lambda: assert_min_max("camtracker.motor_offset_threshold", -360, 360), + lambda: assert_min_max("measurement_triggers.min_sun_elevation", 0, 90), + lambda: assert_min_max("measurement_triggers.start_time.hour", 0, 23), + lambda: assert_min_max("measurement_triggers.stop_time.hour", 0, 23), + lambda: 
assert_min_max("measurement_triggers.start_time.minute", 0, 59), + lambda: assert_min_max("measurement_triggers.stop_time.minute", 0, 59), + lambda: assert_min_max("measurement_triggers.start_time.second", 0, 59), + lambda: assert_min_max("measurement_triggers.stop_time.second", 0, 59), + lambda: assert_ip_address("tum_plc.ip"), + lambda: assert_min_max("helios.camera_id", 0, 999999), + lambda: assert_min_max("helios.evaluation_size", 1, 100), + lambda: assert_min_max("helios.seconds_per_interval", 5, 600), + lambda: assert_min_max("helios.measurement_threshold", 0.1, 1), + lambda: assert_ip_address("upload.host"), + lambda: assert_file_path("upload.src_directory"), + ] + + failed_checks = [] + + for assertion in assertions: + # KeyErrors will be ignored (for partial objects) + try: + assertion() + except AssertionError as a: + failed_checks.append(a) + + if len(failed_checks) > 0: + raise ValidationError( + ("ConfigDictPartial" if partial else "ConfigDict") + f": {failed_checks}" + ) + class _ValidationModel(pydantic.BaseModel): regular: Optional[ConfigDict] From 01afb9b4493c4ed3cfb5ab3a79bba783133d4bc2 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Fri, 19 Aug 2022 15:43:03 +0200 Subject: [PATCH 051/132] #96 (18) - move plc specification from dataclass to typed dict --- .../core/utils/interfaces/plc_interface.py | 86 +++---- .../utils/interfaces/plc_specification.py | 216 +++++++----------- packages/core/utils/types/__init__.py | 2 + .../core/utils/types/plc_specification.py | 56 +++++ 4 files changed, 178 insertions(+), 182 deletions(-) create mode 100644 packages/core/utils/types/plc_specification.py diff --git a/packages/core/utils/interfaces/plc_interface.py b/packages/core/utils/interfaces/plc_interface.py index dd490649..b41710e2 100644 --- a/packages/core/utils/interfaces/plc_interface.py +++ b/packages/core/utils/interfaces/plc_interface.py @@ -1,5 +1,4 @@ -import dataclasses -from typing import Any, Literal, Optional +from typing import Literal, Optional import snap7 # type: ignore import time import os @@ -83,6 +82,7 @@ def update_config(self, new_plc_version: Literal[1, 2], new_plc_ip: str) -> None self.disconnect() self.plc_version = new_plc_version self.plc_ip = new_plc_ip + self.specification = PLC_SPECIFICATION_VERSIONS[self.plc_version] self.connect() def connect(self) -> None: @@ -130,25 +130,25 @@ def is_responsive(self) -> bool: # DIRECT READ FUNCTIONS def rain_is_detected(self) -> bool: - return self.__read_bool(self.specification.state.rain) + return self.__read_bool(self.specification["state"]["rain"]) def cover_is_closed(self) -> bool: """ Reads the single value "state.cover_closed" """ - return self.__read_bool(self.specification.state.cover_closed) + return self.__read_bool(self.specification["state"]["cover_closed"]) def reset_is_needed(self) -> bool: """ Reads the single value "state.reset_needed" """ - return self.__read_bool(self.specification.state.reset_needed) + return self.__read_bool(self.specification["state"]["reset_needed"]) def get_cover_angle(self) -> int: """ Reads the single value "actors.current_angle" """ - return self.__read_int(self.specification.actors.current_angle) + return self.__read_int(self.specification["actors"]["current_angle"]) # BULK READ @@ -182,39 +182,39 @@ def _get_bool(spec: Optional[list[int]]) -> Optional[bool]: return { "actors": { - "fan_speed": _get_int(s.actors.fan_speed), - "current_angle": _get_int(s.actors.current_angle), + "fan_speed": _get_int(s["actors"]["fan_speed"]), + "current_angle": 
_get_int(s["actors"]["current_angle"]), }, "control": { - "auto_temp_mode": _get_bool(s.control.auto_temp_mode), - "manual_control": _get_bool(s.control.manual_control), - "manual_temp_mode": _get_bool(s.control.manual_temp_mode), - "sync_to_tracker": _get_bool(s.control.sync_to_tracker), + "auto_temp_mode": _get_bool(s["control"]["auto_temp_mode"]), + "manual_control": _get_bool(s["control"]["manual_control"]), + "manual_temp_mode": _get_bool(s["control"]["manual_temp_mode"]), + "sync_to_tracker": _get_bool(s["control"]["sync_to_tracker"]), }, "sensors": { - "humidity": _get_int(s.sensors.humidity), - "temperature": _get_int(s.sensors.temperature), + "humidity": _get_int(s["sensors"]["humidity"]), + "temperature": _get_int(s["sensors"]["temperature"]), }, "state": { - "cover_closed": _get_bool(s.state.cover_closed), - "motor_failed": _get_bool(s.state.motor_failed), - "rain": _get_bool(s.state.rain), - "reset_needed": _get_bool(s.state.reset_needed), - "ups_alert": _get_bool(s.state.ups_alert), + "cover_closed": _get_bool(s["state"]["cover_closed"]), + "motor_failed": _get_bool(s["state"]["motor_failed"]), + "rain": _get_bool(s["state"]["rain"]), + "reset_needed": _get_bool(s["state"]["reset_needed"]), + "ups_alert": _get_bool(s["state"]["ups_alert"]), }, "power": { - "camera": _get_bool(s.power.camera), - "computer": _get_bool(s.power.computer), - "heater": _get_bool(s.power.heater), - "router": _get_bool(s.power.router), - "spectrometer": _get_bool(s.power.spectrometer), + "camera": _get_bool(s["power"]["camera"]), + "computer": _get_bool(s["power"]["computer"]), + "heater": _get_bool(s["power"]["heater"]), + "router": _get_bool(s["power"]["router"]), + "spectrometer": _get_bool(s["power"]["spectrometer"]), }, "connections": { - "camera": _get_bool(s.connections.camera), - "computer": _get_bool(s.connections.computer), - "heater": _get_bool(s.connections.heater), - "router": _get_bool(s.connections.router), - "spectrometer": _get_bool(s.connections.spectrometer), + "camera": _get_bool(s["connections"]["camera"]), + "computer": _get_bool(s["connections"]["computer"]), + "heater": _get_bool(s["connections"]["heater"]), + "router": _get_bool(s["connections"]["router"]), + "spectrometer": _get_bool(s["connections"]["spectrometer"]), }, } @@ -301,16 +301,16 @@ def set_power_camera(self, new_state: bool) -> None: """Raises PLCInterface.PLCError, if value hasn't been changed""" self.__update_bool( new_state, - self.specification.power.camera, + self.specification["power"]["camera"], {"power": {"camera": new_state}}, ) def set_power_computer(self, new_state: bool) -> None: """Raises PLCInterface.PLCError, if value hasn't been changed""" - assert self.specification.power.computer is not None + assert self.specification["power"]["computer"] is not None self.__update_bool( new_state, - self.specification.power.computer, + self.specification["power"]["computer"], {"power": {"computer": new_state}}, ) @@ -318,16 +318,16 @@ def set_power_heater(self, new_state: bool) -> None: """Raises PLCInterface.PLCError, if value hasn't been changed""" self.__update_bool( new_state, - self.specification.power.heater, + self.specification["power"]["heater"], {"power": {"heater": new_state}}, ) def set_power_router(self, new_state: bool) -> None: """Raises PLCInterface.PLCError, if value hasn't been changed""" - assert self.specification.power.router is not None + assert self.specification["power"]["router"] is not None self.__update_bool( new_state, - self.specification.power.router, + 
self.specification["power"]["router"], {"power": {"router": new_state}}, ) @@ -335,7 +335,7 @@ def set_power_spectrometer(self, new_state: bool) -> None: """Raises PLCInterface.PLCError, if value hasn't been changed""" self.__update_bool( new_state, - self.specification.power.spectrometer, + self.specification["power"]["spectrometer"], {"power": {"spectrometer": new_state}}, ) @@ -345,7 +345,7 @@ def set_sync_to_tracker(self, new_state: bool) -> None: """Raises PLCInterface.PLCError, if value hasn't been changed""" self.__update_bool( new_state, - self.specification.control.sync_to_tracker, + self.specification["control"]["sync_to_tracker"], {"control": {"sync_to_tracker": new_state}}, ) @@ -353,7 +353,7 @@ def set_manual_control(self, new_state: bool) -> None: """Raises PLCInterface.PLCError, if value hasn't been changed""" self.__update_bool( new_state, - self.specification.control.manual_control, + self.specification["control"]["manual_control"], {"control": {"manual_control": new_state}}, ) @@ -361,7 +361,7 @@ def set_auto_temperature(self, new_state: bool) -> None: """Raises PLCInterface.PLCError, if value hasn't been changed""" self.__update_bool( new_state, - self.specification.control.auto_temp_mode, + self.specification["control"]["auto_temp_mode"], {"control": {"auto_temp_mode": new_state}}, ) @@ -369,19 +369,19 @@ def set_manual_temperature(self, new_state: bool) -> None: """Raises PLCInterface.PLCError, if value hasn't been changed""" self.__update_bool( new_state, - self.specification.control.manual_temp_mode, + self.specification["control"]["manual_temp_mode"], {"control": {"manual_temp_mode": new_state}}, ) def reset(self) -> None: """Does not check, whether the value has been changed""" if self.plc_version == 1: - self.__write_bool(self.specification.control.reset, False) + self.__write_bool(self.specification["control"]["reset"], False) else: - self.__write_bool(self.specification.control.reset, True) + self.__write_bool(self.specification["control"]["reset"], True) # PLC.ACTORS SETTERS def set_cover_angle(self, value: int) -> None: """Does not check, whether the value has been changed""" - self.__write_int(self.specification.actors.move_cover, value) + self.__write_int(self.specification["actors"]["move_cover"], value) diff --git a/packages/core/utils/interfaces/plc_specification.py b/packages/core/utils/interfaces/plc_specification.py index e5ed631f..26e26cb7 100644 --- a/packages/core/utils/interfaces/plc_specification.py +++ b/packages/core/utils/interfaces/plc_specification.py @@ -1,145 +1,83 @@ -from dataclasses import dataclass -from typing import Literal, Optional +from typing import Literal +from .. 
import types -# TODO: use typeddict # TODO: use tuples (3 ints vs 4 ints) - -@dataclass -class PLCActorsSpecification: - current_angle: list[int] - fan_speed: list[int] - move_cover: list[int] - nominal_angle: list[int] - - -@dataclass -class PLCControlSpecification: - auto_temp_mode: list[int] - manual_control: list[int] - manual_temp_mode: list[int] - reset: list[int] - sync_to_tracker: list[int] - - -@dataclass -class PLCSensorsSpecification: - humidity: list[int] - temperature: list[int] - - -@dataclass -class PLCStateSpecification: - cover_closed: list[int] - motor_failed: Optional[list[int]] - rain: list[int] - reset_needed: list[int] - ups_alert: list[int] - - -@dataclass -class PLCPowerSpecification: - camera: list[int] - computer: Optional[list[int]] - heater: list[int] - router: Optional[list[int]] - spectrometer: list[int] - - -@dataclass -class PLCConnectionsSpecification: - camera: Optional[list[int]] - computer: list[int] - heater: list[int] - router: list[int] - spectrometer: Optional[list[int]] - - -@dataclass -class PLCSpecification: - actors: PLCActorsSpecification - control: PLCControlSpecification - sensors: PLCSensorsSpecification - state: PLCStateSpecification - power: PLCPowerSpecification - connections: PLCConnectionsSpecification - - # these are the pins used on the TUM-PLC for all functionality -PLC_SPECIFICATION_VERSIONS: dict[Literal[1, 2], PLCSpecification] = { - 1: PLCSpecification( - actors=PLCActorsSpecification( - current_angle=[25, 6, 2], - fan_speed=[8, 18, 2], - move_cover=[25, 8, 2], - nominal_angle=[25, 8, 2], - ), - control=PLCControlSpecification( - auto_temp_mode=[8, 24, 1, 2], - manual_control=[8, 24, 1, 5], - manual_temp_mode=[8, 24, 1, 3], - reset=[3, 4, 1, 5], - sync_to_tracker=[8, 16, 1, 0], - ), - sensors=PLCSensorsSpecification(humidity=[8, 22, 2], temperature=[8, 20, 2]), - state=PLCStateSpecification( - cover_closed=[25, 2, 1, 2], - motor_failed=[8, 12, 1, 3], - rain=[8, 6, 1, 0], - reset_needed=[3, 2, 1, 2], - ups_alert=[8, 0, 1, 1], - ), - power=PLCPowerSpecification( - camera=[8, 16, 1, 2], - computer=[8, 16, 1, 6], - heater=[8, 16, 1, 5], - router=[8, 16, 1, 3], - spectrometer=[8, 16, 1, 1], - ), - connections=PLCConnectionsSpecification( - camera=[8, 14, 1, 6], - computer=[8, 14, 1, 3], - heater=[8, 14, 1, 1], - router=[8, 14, 1, 2], - spectrometer=[8, 14, 1, 0], - ), - ), - 2: PLCSpecification( - actors=PLCActorsSpecification( - current_angle=[6, 6, 2], - fan_speed=[8, 4, 2], - move_cover=[6, 8, 2], - nominal_angle=[6, 8, 2], - ), - control=PLCControlSpecification( - auto_temp_mode=[8, 24, 1, 5], - manual_control=[8, 12, 1, 7], - manual_temp_mode=[8, 24, 1, 4], - reset=[3, 4, 1, 5], - sync_to_tracker=[8, 8, 1, 1], - ), - sensors=PLCSensorsSpecification(humidity=[8, 22, 2], temperature=[8, 16, 2]), - state=PLCStateSpecification( - cover_closed=[6, 16, 1, 1], - motor_failed=None, - rain=[3, 0, 1, 0], - reset_needed=[3, 2, 1, 2], - ups_alert=[8, 13, 1, 6], - ), - power=PLCPowerSpecification( - camera=[8, 8, 1, 4], # K5 Relay - computer=None, - heater=[8, 12, 1, 7], # K3 Relay - router=None, # not allowed - spectrometer=[8, 8, 1, 2], # K4 Relay - ), - connections=PLCConnectionsSpecification( - camera=None, - computer=[8, 13, 1, 2], - heater=[8, 6, 1, 1], - router=[8, 12, 1, 4], - spectrometer=None, - ), - ), +PLC_SPECIFICATION_VERSIONS: dict[Literal[1, 2], types.PlcSpecificationDict] = { + 1: { + "actors": { + "current_angle": [25, 6, 2], + "fan_speed": [8, 18, 2], + "move_cover": [25, 8, 2], + "nominal_angle": [25, 8, 2], + }, + 
"control": { + "auto_temp_mode": [8, 24, 1, 2], + "manual_control": [8, 24, 1, 5], + "manual_temp_mode": [8, 24, 1, 3], + "reset": [3, 4, 1, 5], + "sync_to_tracker": [8, 16, 1, 0], + }, + "sensors": {"humidity": [8, 22, 2], "temperature": [8, 20, 2]}, + "state": { + "cover_closed": [25, 2, 1, 2], + "motor_failed": [8, 12, 1, 3], + "rain": [8, 6, 1, 0], + "reset_needed": [3, 2, 1, 2], + "ups_alert": [8, 0, 1, 1], + }, + "power": { + "camera": [8, 16, 1, 2], + "computer": [8, 16, 1, 6], + "heater": [8, 16, 1, 5], + "router": [8, 16, 1, 3], + "spectrometer": [8, 16, 1, 1], + }, + "connections": { + "camera": [8, 14, 1, 6], + "computer": [8, 14, 1, 3], + "heater": [8, 14, 1, 1], + "router": [8, 14, 1, 2], + "spectrometer": [8, 14, 1, 0], + }, + }, + 2: { + "actors": { + "current_angle": [6, 6, 2], + "fan_speed": [8, 4, 2], + "move_cover": [6, 8, 2], + "nominal_angle": [6, 8, 2], + }, + "control": { + "auto_temp_mode": [8, 24, 1, 5], + "manual_control": [8, 12, 1, 7], + "manual_temp_mode": [8, 24, 1, 4], + "reset": [3, 4, 1, 5], + "sync_to_tracker": [8, 8, 1, 1], + }, + "sensors": {"humidity": [8, 22, 2], "temperature": [8, 16, 2]}, + "state": { + "cover_closed": [6, 16, 1, 1], + "motor_failed": None, + "rain": [3, 0, 1, 0], + "reset_needed": [3, 2, 1, 2], + "ups_alert": [8, 13, 1, 6], + }, + "power": { + "camera": [8, 8, 1, 4], # K5 Relay + "computer": None, + "heater": [8, 12, 1, 7], # K3 Relay + "router": None, # not allowed + "spectrometer": [8, 8, 1, 2], # K4 Relay + }, + "connections": { + "camera": None, + "computer": [8, 13, 1, 2], + "heater": [8, 6, 1, 1], + "router": [8, 12, 1, 4], + "spectrometer": None, + }, + }, } diff --git a/packages/core/utils/types/__init__.py b/packages/core/utils/types/__init__.py index 4fd0ce69..78577381 100644 --- a/packages/core/utils/types/__init__.py +++ b/packages/core/utils/types/__init__.py @@ -4,6 +4,8 @@ from .persistent_state import PersistentStateDict, PersistentStateDictPartial from .persistent_state import validate_persistent_state_dict +from .plc_specification import PlcSpecificationDict + from .plc_state import PlcStateDict, PlcStateDictPartial from .state import StateDict, StateDictPartial diff --git a/packages/core/utils/types/plc_specification.py b/packages/core/utils/types/plc_specification.py new file mode 100644 index 00000000..d05ac58a --- /dev/null +++ b/packages/core/utils/types/plc_specification.py @@ -0,0 +1,56 @@ +from typing import Optional, TypedDict + +# TODO: use tuples (3 ints vs 4 ints) + + +class _PlcSpecificationDictActors(TypedDict): + current_angle: list[int] + fan_speed: list[int] + move_cover: list[int] + nominal_angle: list[int] + + +class _PlcSpecificationDictControl(TypedDict): + auto_temp_mode: list[int] + manual_control: list[int] + manual_temp_mode: list[int] + reset: list[int] + sync_to_tracker: list[int] + + +class _PlcSpecificationDictSensors(TypedDict): + humidity: list[int] + temperature: list[int] + + +class _PlcSpecificationDictState(TypedDict): + cover_closed: list[int] + motor_failed: Optional[list[int]] + rain: list[int] + reset_needed: list[int] + ups_alert: list[int] + + +class _PlcSpecificationDictPower(TypedDict): + camera: list[int] + computer: Optional[list[int]] + heater: list[int] + router: Optional[list[int]] + spectrometer: list[int] + + +class _PlcSpecificationDictConnections(TypedDict): + camera: Optional[list[int]] + computer: list[int] + heater: list[int] + router: list[int] + spectrometer: Optional[list[int]] + + +class PlcSpecificationDict(TypedDict): + actors: _PlcSpecificationDictActors 
+ control: _PlcSpecificationDictControl + sensors: _PlcSpecificationDictSensors + state: _PlcSpecificationDictState + power: _PlcSpecificationDictPower + connections: _PlcSpecificationDictConnections From f5528c942dd82a227af53a96a6aff2f931bd2e4d Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Fri, 19 Aug 2022 16:23:57 +0200 Subject: [PATCH 052/132] #96 (19) - enforce strict typing in CLI --- packages/cli/commands/__init__.py | 6 + packages/cli/commands/config.py | 70 +++-- packages/cli/commands/core.py | 57 ++-- packages/cli/commands/logs.py | 19 +- packages/cli/commands/plc.py | 81 +++--- packages/cli/commands/remove_filelocks.py | 16 +- packages/cli/commands/state.py | 7 +- packages/cli/main.py | 20 +- packages/core/__init__.py | 3 +- packages/core/main.py | 4 +- .../utils/interfaces/config_validation.py | 248 ------------------ packages/core/utils/types/config.py | 5 +- scripts/run_type_analysis.sh | 6 +- 13 files changed, 179 insertions(+), 363 deletions(-) create mode 100644 packages/cli/commands/__init__.py delete mode 100644 packages/core/utils/interfaces/config_validation.py diff --git a/packages/cli/commands/__init__.py b/packages/cli/commands/__init__.py new file mode 100644 index 00000000..cd815ff6 --- /dev/null +++ b/packages/cli/commands/__init__.py @@ -0,0 +1,6 @@ +from .config import config_command_group +from .core import core_command_group +from .logs import logs_command_group +from .plc import plc_command_group +from .remove_filelocks import remove_filelocks +from .state import state_command_group diff --git a/packages/cli/commands/config.py b/packages/cli/commands/config.py index 4b7c58f7..d45e5d62 100644 --- a/packages/cli/commands/config.py +++ b/packages/cli/commands/config.py @@ -3,7 +3,7 @@ import click import os import sys -from packages.core.utils import with_filelock, update_dict_recursively +from packages.core.utils import types, with_filelock, update_dict_recursively dir = os.path.dirname PROJECT_DIR = dir(dir(dir(dir(os.path.abspath(__file__))))) @@ -13,16 +13,19 @@ sys.path.append(PROJECT_DIR) -from packages.core.utils import ConfigValidation -error_handler = lambda text: click.echo(click.style(text, fg="red")) -success_handler = lambda text: click.echo(click.style(text, fg="green")) -ConfigValidation.logging_handler = error_handler + +def print_green(text: str) -> None: + click.echo(click.style(text, fg="green")) + + +def print_red(text: str) -> None: + click.echo(click.style(text, fg="red")) @click.command(help="Read the current config.json file.") @with_filelock(CONFIG_LOCK_PATH) -def _get_config(): +def _get_config() -> None: if not os.path.isfile(CONFIG_FILE_PATH): shutil.copyfile(DEFAULT_CONFIG_FILE_PATH, CONFIG_FILE_PATH) with open(CONFIG_FILE_PATH, "r") as f: @@ -31,7 +34,7 @@ def _get_config(): except: raise AssertionError("file not in a valid json format") - ConfigValidation.check_structure(content) + types.validate_config_dict(content, partial=False, skip_filepaths=True) click.echo(json.dumps(content)) @@ -41,35 +44,62 @@ def _get_config(): ) @click.argument("content", default="{}") @with_filelock(CONFIG_LOCK_PATH) -def _update_config(content: str): - # The validation itself might print stuff using the error_handler - if not ConfigValidation.check_partial_config_string(content): +def _update_config(content: str) -> None: + # try to load the dict + try: + new_partial_json = json.loads(content) + except: + print_red("content argument is not a valid JSON string") return - new_partial_json = json.loads(content) - with open(CONFIG_FILE_PATH, 
"r") as f: - current_json: dict = json.load(f) + # validate the dict's integrity + try: + types.validate_config_dict(new_partial_json, partial=True) + except Exception as e: + print_red(str(e)) + return + # load the current json file + try: + with open(CONFIG_FILE_PATH, "r") as f: + current_json = json.load(f) + except: + print_red("Could not load the current config.json file") + return + + # merge current config and new partial config merged_json = update_dict_recursively(current_json, new_partial_json) with open(CONFIG_FILE_PATH, "w") as f: json.dump(merged_json, f, indent=4) - success_handler("Updated config file") + print_green("Updated config file") @click.command( help=f"Validate the current config.json file.\n\nThe required schema can be found in the documentation." ) @with_filelock(CONFIG_LOCK_PATH) -def _validate_current_config(): - # The validation itself might print stuff using the error_handler - file_is_valid, _ = ConfigValidation.check_current_config_file() - if file_is_valid: - success_handler(f"Current config file is valid") +def _validate_current_config() -> None: + # load the current json file + try: + with open(CONFIG_FILE_PATH, "r") as f: + current_json = json.load(f) + except: + print_red("Could not load the current config.json file") + return + + # validate its integrity + try: + types.validate_config_dict(current_json, partial=False) + except Exception as e: + print_red(str(e)) + return + + print_green(f"Current config file is valid") @click.group() -def config_command_group(): +def config_command_group() -> None: pass diff --git a/packages/cli/commands/core.py b/packages/cli/commands/core.py index 765607bb..3e170f0c 100644 --- a/packages/cli/commands/core.py +++ b/packages/cli/commands/core.py @@ -1,5 +1,5 @@ import subprocess -import time +from typing import Optional import click import os import psutil @@ -16,11 +16,16 @@ CORE_SCRIPT_PATH = os.path.join(PROJECT_DIR, "run-pyra-core.py") SERVER_SCRIPT_PATH = os.path.join(PROJECT_DIR, "packages", "server", "main.py") -error_handler = lambda text: click.echo(click.style(text, fg="red")) -success_handler = lambda text: click.echo(click.style(text, fg="green")) +def print_green(text: str) -> None: + click.echo(click.style(text, fg="green")) -def process_is_running(): + +def print_red(text: str) -> None: + click.echo(click.style(text, fg="red")) + + +def process_is_running() -> Optional[int]: for p in psutil.process_iter(): try: arguments = p.cmdline() @@ -31,8 +36,8 @@ def process_is_running(): return None -def terminate_processes(): - termination_pids = [] +def terminate_processes() -> list[int]: + termination_pids: list[int] = [] for p in psutil.process_iter(): try: arguments = p.cmdline() @@ -48,10 +53,10 @@ def terminate_processes(): @click.command( help="Start pyra-core as a background process. 
" + "Prevents spawning multiple processes" ) -def _start_pyra_core(): +def _start_pyra_core() -> None: existing_pid = process_is_running() if existing_pid is not None: - error_handler(f"Background process already exists with PID {existing_pid}") + print_red(f"Background process already exists with PID {existing_pid}") else: p = subprocess.Popen( [INTERPRETER_PATH, CORE_SCRIPT_PATH], @@ -59,16 +64,16 @@ def _start_pyra_core(): stderr=subprocess.PIPE, ) Logger.log_activity_event("start-core") - success_handler(f"Started background process with PID {p.pid}") + print_green(f"Started background process with PID {p.pid}") @click.command(help="Stop the pyra-core background process") -def _stop_pyra_core(): +def _stop_pyra_core() -> None: termination_pids = terminate_processes() if len(termination_pids) == 0: - error_handler("No active process to be terminated") + print_red("No active process to be terminated") else: - success_handler( + print_green( f"Terminated {len(termination_pids)} pyra-core background " + f"processe(s) with PID(s) {termination_pids}" ) @@ -84,42 +89,42 @@ def _stop_pyra_core(): enclosure = EnclosureControl(config) enclosure.force_cover_close() enclosure.plc_interface.disconnect() - success_handler("Successfully closed cover") + print_green("Successfully closed cover") except Exception as e: - error_handler(f"Failed to close cover: {e}") + print_red(f"Failed to close cover: {e}") try: tracking = SunTracking(config) if tracking.ct_application_running(): tracking.stop_sun_tracking_automation() - success_handler("Successfully closed CamTracker") + print_green("Successfully closed CamTracker") except Exception as e: - error_handler(f"Failed to close CamTracker: {e}") + print_red(f"Failed to close CamTracker: {e}") try: processes = [p.name() for p in psutil.process_iter()] - for e in ["opus.exe", "OpusCore.exe"]: - if e in processes: - exit_code = os.system(f"taskkill /f /im {e}") + for executable in ["opus.exe", "OpusCore.exe"]: + if executable in processes: + exit_code = os.system(f"taskkill /f /im {executable}") assert ( exit_code == 0 - ), f'taskkill of "{e}" ended with an exit_code of {exit_code}' - success_handler("Successfully closed OPUS") + ), f'taskkill of "{executable}" ended with an exit_code of {exit_code}' + print_green("Successfully closed OPUS") except Exception as e: - error_handler(f"Failed to close OPUS: {e}") + print_red(f"Failed to close OPUS: {e}") @click.command(help="Checks whether the pyra-core background process is running") -def _pyra_core_is_running(): +def _pyra_core_is_running() -> None: existing_pid = process_is_running() if existing_pid is not None: - success_handler(f"pyra-core is running with PID {existing_pid}") + print_green(f"pyra-core is running with PID {existing_pid}") else: - error_handler("pyra-core is not running") + print_red("pyra-core is not running") @click.group() -def core_command_group(): +def core_command_group() -> None: pass diff --git a/packages/cli/commands/logs.py b/packages/cli/commands/logs.py index c20dcff6..6ecc53ca 100644 --- a/packages/cli/commands/logs.py +++ b/packages/cli/commands/logs.py @@ -8,29 +8,34 @@ DEBUG_LOG_FILE = os.path.join(PROJECT_DIR, "logs", "debug.log") LOG_FILES_LOCK = os.path.join(PROJECT_DIR, "logs", ".logs.lock") -error_handler = lambda text: click.echo(click.style(text, fg="red")) -success_handler = lambda text: click.echo(click.style(text, fg="green")) + +def print_green(text: str) -> None: + click.echo(click.style(text, fg="green")) + + +def print_red(text: str) -> None: + 
click.echo(click.style(text, fg="red")) @click.command(help="Read the current info.log or debug.log file.") @click.option("--level", default="INFO", help="Log level INFO or DEBUG") @with_filelock(LOG_FILES_LOCK) -def _read_logs(level: str): +def _read_logs(level: str) -> None: if level in ["INFO", "DEBUG"]: with open(INFO_LOG_FILE if level == "INFO" else DEBUG_LOG_FILE, "r") as f: click.echo("".join(f.readlines())) else: - error_handler("Level has to be either INFO or DEBUG.") + print_red("Level has to be either INFO or DEBUG.") @click.command(help="Archive the current log files.") -def _archive_logs(): +def _archive_logs() -> None: Logger.archive() - success_handler("done!") + print_green("done!") @click.group() -def logs_command_group(): +def logs_command_group() -> None: pass diff --git a/packages/cli/commands/plc.py b/packages/cli/commands/plc.py index 72b2c9dc..2f5a8cc8 100644 --- a/packages/cli/commands/plc.py +++ b/packages/cli/commands/plc.py @@ -1,10 +1,10 @@ import json import time -from typing import Callable +from typing import Callable, Optional import click import os -from packages.core.modules.enclosure_control import CoverError -from packages.core.utils import StateInterface, ConfigInterface, PLCInterface, PLCError +from packages.core.modules.enclosure_control import EnclosureControl +from packages.core.utils import StateInterface, ConfigInterface, PLCInterface, types from packages.core.utils import with_filelock dir = os.path.dirname @@ -13,37 +13,42 @@ CONFIG_FILE_PATH = os.path.join(PROJECT_DIR, "config", "config.json") CONFIG_LOCK_PATH = os.path.join(PROJECT_DIR, "config", ".config.lock") -error_handler = lambda text: click.echo(click.style(text, fg="red")) -success_handler = lambda text: click.echo(click.style(text, fg="green")) +def print_green(text: str) -> None: + click.echo(click.style(text, fg="green")) -def get_plc_interface(): + +def print_red(text: str) -> None: + click.echo(click.style(text, fg="red")) + + +def get_plc_interface() -> Optional[PLCInterface]: config = ConfigInterface.read() plc_interface = None try: assert config["tum_plc"] is not None, "PLC not configured" assert config["tum_plc"]["controlled_by_user"], "PLC is controlled by automation" - plc_interface = PLCInterface(config) + plc_interface = PLCInterface(config["tum_plc"]["version"], config["tum_plc"]["ip"]) plc_interface.connect() - except (PLCError, AssertionError) as e: - error_handler(f"{e}") + except Exception as e: + print_red(f"{e}") return plc_interface @click.command(help="Read current state from plc.") @click.option("--no-indent", is_flag=True, help="Do not print the JSON in an indented manner") -def _read(no_indent): +def _read(no_indent: bool) -> None: plc_interface = get_plc_interface() if plc_interface is not None: plc_readings = plc_interface.read() - success_handler(json.dumps(plc_readings.to_dict(), indent=(None if no_indent else 2))) + print_green(json.dumps(plc_readings, indent=(None if no_indent else 2))) plc_interface.disconnect() @click.command(help="Run plc function 'reset()'") -def _reset(): +def _reset() -> None: plc_interface = get_plc_interface() if plc_interface is not None: plc_interface.reset() @@ -59,11 +64,13 @@ def _reset(): ) break assert running_time <= 20, "plc took to long to set reset_needed to false" - success_handler("Ok") + print_green("Ok") plc_interface.disconnect() -def wait_until_cover_is_at_angle(plc_interface: PLCInterface, new_cover_angle, timeout=15): +def wait_until_cover_is_at_angle( + plc_interface: PLCInterface, new_cover_angle: int, 
timeout: float = 15 +) -> None: # waiting until cover is at this angle running_time = 0 while True: @@ -82,14 +89,14 @@ def wait_until_cover_is_at_angle(plc_interface: PLCInterface, new_cover_angle, t break if running_time > timeout: - raise CoverError( + raise EnclosureControl.CoverError( f"Cover took too long to move, latest cover angle: {current_cover_angle}" ) @click.command(help="Run plc function 'move_cover()'") @click.argument("angle") -def _set_cover_angle(angle): +def _set_cover_angle(angle: str) -> None: plc_interface = get_plc_interface() if plc_interface is not None: new_cover_angle = int("".join([c for c in str(angle) if c.isnumeric() or c == "."])) @@ -102,21 +109,25 @@ def _set_cover_angle(angle): plc_interface.set_manual_control(False) wait_until_cover_is_at_angle(plc_interface, new_cover_angle) - success_handler("Ok") + print_green("Ok") plc_interface.disconnect() @with_filelock(CONFIG_LOCK_PATH) -def enable_user_control_in_config(): +def enable_user_control_in_config() -> None: with open(CONFIG_FILE_PATH, "r") as f: - config: dict = json.load(f) - config["tum_plc"]["controlled_by_user"] = True - with open(CONFIG_FILE_PATH, "w") as f: - json.dump(config, f, indent=4) + config = json.load(f) + types.validate_config_dict(config) + + verified_config: types.ConfigDict = config + if verified_config["tum_plc"] is not None: + verified_config["tum_plc"]["controlled_by_user"] = True + with open(CONFIG_FILE_PATH, "w") as f: + json.dump(verified_config, f, indent=4) @click.command(help="Run plc function 'force_cover_close()'") -def _close_cover(): +def _close_cover() -> None: enable_user_control_in_config() plc_interface = get_plc_interface() @@ -127,65 +138,65 @@ def _close_cover(): plc_interface.set_manual_control(False) wait_until_cover_is_at_angle(plc_interface, 0) - success_handler("Ok") + print_green("Ok") plc_interface.disconnect() def set_boolean_plc_state( - state, get_setter_function: Callable[[PLCInterface], Callable[[bool], None]] -): + state: str, get_setter_function: Callable[[PLCInterface], Callable[[bool], None]] +) -> None: plc_interface = get_plc_interface() if plc_interface is not None: assert state in ["true", "false"], 'state has to be either "true" or "false"' get_setter_function(plc_interface)(state == "true") - success_handler("Ok") + print_green("Ok") plc_interface.disconnect() @click.command(help="Run plc function 'set_sync_to_tracker()'") @click.argument("state") -def _set_sync_to_tracker(state): +def _set_sync_to_tracker(state: str) -> None: set_boolean_plc_state(state, lambda p: p.set_sync_to_tracker) @click.command(help="Run plc function 'set_auto_temperature()'") @click.argument("state") -def _set_auto_temperature(state): +def _set_auto_temperature(state: str) -> None: set_boolean_plc_state(state, lambda p: p.set_auto_temperature) @click.command(help="Run plc function 'set_power_heater()'") @click.argument("state") -def _set_heater_power(state): +def _set_heater_power(state: str) -> None: set_boolean_plc_state(state, lambda p: p.set_power_heater) @click.command(help="Run plc function 'set_power_heater()'") @click.argument("state") -def _set_camera_power(state): +def _set_camera_power(state: str) -> None: set_boolean_plc_state(state, lambda p: p.set_power_camera) @click.command(help="Run plc function 'set_power_router()'") @click.argument("state") -def _set_router_power(state): +def _set_router_power(state: str) -> None: set_boolean_plc_state(state, lambda p: p.set_power_router) @click.command(help="Run plc function 'set_power_spectrometer()'") 
@click.argument("state") -def _set_spectrometer_power(state): +def _set_spectrometer_power(state: str) -> None: set_boolean_plc_state(state, lambda p: p.set_power_spectrometer) @click.command(help="Run plc function 'set_power_computer()'") @click.argument("state") -def _set_computer_power(state): +def _set_computer_power(state: str) -> None: set_boolean_plc_state(state, lambda p: p.set_power_computer) @click.group() -def plc_command_group(): +def plc_command_group() -> None: pass diff --git a/packages/cli/commands/remove_filelocks.py b/packages/cli/commands/remove_filelocks.py index 4a7b642f..77c9dc4b 100644 --- a/packages/cli/commands/remove_filelocks.py +++ b/packages/cli/commands/remove_filelocks.py @@ -5,15 +5,19 @@ PROJECT_DIR = dir(dir(dir(dir(os.path.abspath(__file__))))) -error_handler = lambda text: click.echo(click.style(text, fg="red")) -success_handler = lambda text: click.echo(click.style(text, fg="green")) +def print_green(text: str) -> None: + click.echo(click.style(text, fg="green")) + + +def print_red(text: str) -> None: + click.echo(click.style(text, fg="red")) @click.command( help="Remove all filelocks. Helpful when any of the programs crashed " + "during writing to a file. Should not be necessary normally." ) -def remove_filelocks(): +def remove_filelocks() -> None: lock_files = [ os.path.join(PROJECT_DIR, "config", ".config.lock"), os.path.join(PROJECT_DIR, "logs", ".logs.lock"), @@ -23,7 +27,7 @@ def remove_filelocks(): for f in lock_files: if os.path.isfile(f): os.remove(f) - success_handler(f"Removing {f}") - success_handler("Done!") + print_green(f"Removing {f}") + print_green("Done!") else: - error_handler("Aborting") + print_red("Aborting") diff --git a/packages/cli/commands/state.py b/packages/cli/commands/state.py index 6773bdb1..875ccd0e 100644 --- a/packages/cli/commands/state.py +++ b/packages/cli/commands/state.py @@ -8,14 +8,11 @@ STATE_FILE_PATH = os.path.join(PROJECT_DIR, "runtime-data", "state.json") STATE_LOCK_PATH = os.path.join(PROJECT_DIR, "config", ".state.lock") -error_handler = lambda text: click.echo(click.style(text, fg="red")) -success_handler = lambda text: click.echo(click.style(text, fg="green")) - @click.command(help="Read the current state.json file.") @click.option("--no-indent", is_flag=True, help="Do not print the JSON in an indented manner") @with_filelock(STATE_LOCK_PATH) -def _get_state(no_indent): +def _get_state(no_indent: bool) -> None: if not os.path.isfile(STATE_FILE_PATH): StateInterface.initialize() @@ -33,7 +30,7 @@ def _get_state(no_indent): @click.group() -def state_command_group(): +def state_command_group() -> None: pass diff --git a/packages/cli/main.py b/packages/cli/main.py index b7d1f0df..889b4d28 100644 --- a/packages/cli/main.py +++ b/packages/cli/main.py @@ -6,25 +6,27 @@ PROJECT_DIR = dir(dir(dir(os.path.abspath(__file__)))) sys.path.append(PROJECT_DIR) -from packages.cli.commands.config import config_command_group -from packages.cli.commands.state import state_command_group -from packages.cli.commands.logs import logs_command_group -from packages.cli.commands.core import core_command_group -from packages.cli.commands.plc import plc_command_group -from packages.cli.commands.remove_filelocks import remove_filelocks +from .commands import ( + config_command_group, + core_command_group, + logs_command_group, + plc_command_group, + remove_filelocks, + state_command_group, +) @click.group() -def cli(): +def cli() -> None: pass cli.add_command(config_command_group, name="config") -cli.add_command(state_command_group, 
name="state") -cli.add_command(logs_command_group, name="logs") cli.add_command(core_command_group, name="core") +cli.add_command(logs_command_group, name="logs") cli.add_command(plc_command_group, name="plc") cli.add_command(remove_filelocks, name="remove-filelocks") +cli.add_command(state_command_group, name="state") if __name__ == "__main__": diff --git a/packages/core/__init__.py b/packages/core/__init__.py index 8ba720f3..161a3895 100644 --- a/packages/core/__init__.py +++ b/packages/core/__init__.py @@ -1,3 +1,2 @@ -from .utils.types import upload_meta -from .utils.interfaces import config_validation +from .utils import types from . import modules, main, threads diff --git a/packages/core/main.py b/packages/core/main.py index 856acfcd..c37d7d0c 100644 --- a/packages/core/main.py +++ b/packages/core/main.py @@ -69,7 +69,7 @@ def run() -> None: try: config = ConfigInterface.read() break - except AssertionError as e: + except Exception as e: logger.error(f"{e}") logger.error(f"Invalid config, waiting 10 seconds") time.sleep(10) @@ -100,7 +100,7 @@ def run() -> None: # load config at the beginning of each mainloop iteration try: config = ConfigInterface.read() - except AssertionError as e: + except Exception as e: logger.error(f"Invalid config, waiting 10 seconds") time.sleep(10) continue diff --git a/packages/core/utils/interfaces/config_validation.py b/packages/core/utils/interfaces/config_validation.py deleted file mode 100644 index cd672fe0..00000000 --- a/packages/core/utils/interfaces/config_validation.py +++ /dev/null @@ -1,248 +0,0 @@ -import json -import os -from typing import Tuple -from xmlrpc.client import boolean -import cerberus # type: ignore -from packages.core.utils import Logger - -dir = os.path.dirname -PROJECT_DIR = dir(dir(dir(dir(dir(os.path.abspath(__file__)))))) -CONFIG_FILE_PATH = os.path.join(PROJECT_DIR, "config", "config.json") - - -def _directory_path_exists(field, value, error): # type: ignore - if not os.path.isfile(value): - error(field, "Path has to be an existing file") - - -def _file_path_exists(field, value, error): # type: ignore - if not os.path.isfile(value): - error(field, "Path has to be an existing file") - - -def _is_valid_ip_adress(field, value, error): # type: ignore - try: - assert len(value.split(".")) == 4 - assert all([n.isnumeric() for n in value.split(".")]) - assert all([(int(n) >= 0) and (int(n) <= 255) for n in value.split(".")]) - except AssertionError: - error(field, "String has to be a valid IPv4 address") - - -def get_dict_schema(s: dict) -> dict: # type: ignore - return {"type": "dict", "schema": s} - - -def get_nullable_dict_schema(s: dict) -> dict: # type: ignore - return {"type": "dict", "schema": s, "nullable": True} - - -def get_config_file_schema(strict: bool) -> dict: - """ - Returns a cerberus schema for the config. With strict=false, - the checks whether file paths or directories exist will be - skipped. Strict-mode is used by the core, Loose-mode is used - by the CLI (which has to work even with invalid paths). 
- """ - - specs = { - "ip": {"type": "string", "check_with": _is_valid_ip_adress}, - "file": {"type": "string"}, - "directory": {"type": "string"}, - "time": { - "type": "dict", - "schema": { - "hour": {"type": "integer", "min": 0, "max": 23}, - "minute": {"type": "integer", "min": 0, "max": 59}, - "second": {"type": "integer", "min": 0, "max": 59}, - }, - }, - } - - if strict: - specs["file"]["check_with"] = _file_path_exists - specs["directory"]["check_with"] = _directory_path_exists - - return { - "general": get_dict_schema( - { - "seconds_per_core_interval": { - "type": "number", - "min": 5, - "max": 600, - }, - "test_mode": {"type": "boolean"}, - "station_id": {"type": "string"}, - "min_sun_elevation": {"type": "number", "min": 0, "max": 90}, - } - ), - "opus": get_dict_schema( - { - "em27_ip": specs["ip"], - "executable_path": specs["file"], - "experiment_path": specs["file"], - "macro_path": specs["file"], - "username": {"type": "string"}, - "password": {"type": "string"}, - } - ), - "camtracker": get_dict_schema( - { - "config_path": specs["file"], - "executable_path": specs["file"], - "learn_az_elev_path": specs["file"], - "sun_intensity_path": specs["file"], - "motor_offset_threshold": { - "type": "number", - "min": -360, - "max": 360, - }, - } - ), - "error_email": get_dict_schema( - { - "sender_address": {"type": "string"}, - "sender_password": {"type": "string"}, - "notify_recipients": {"type": "boolean"}, - "recipients": {"type": "string"}, - } - ), - "measurement_decision": get_dict_schema( - { - "mode": { - "type": "string", - "allowed": ["automatic", "manual", "cli"], - }, - "manual_decision_result": {"type": "boolean"}, - "cli_decision_result": {"type": "boolean"}, - } - ), - "measurement_triggers": get_dict_schema( - { - "consider_time": {"type": "boolean"}, - "consider_sun_elevation": {"type": "boolean"}, - "consider_helios": {"type": "boolean"}, - "start_time": specs["time"], - "stop_time": specs["time"], - "min_sun_elevation": {"type": "number", "min": 0, "max": 90}, - } - ), - "tum_plc": get_nullable_dict_schema( - { - "ip": specs["ip"], - "version": {"type": "integer", "allowed": [1, 2]}, - "controlled_by_user": {"type": "boolean"}, - } - ), - "helios": get_nullable_dict_schema( - { - "camera_id": {"type": "integer", "min": 0, "max": 999999}, - "evaluation_size": {"type": "integer", "min": 1, "max": 100}, - "seconds_per_interval": { - "type": "number", - "min": 5, - "max": 600, - }, - "measurement_threshold": { - "type": "number", - "min": 0.1, - "max": 1, - }, - "save_images": {"type": "boolean"}, - } - ), - "upload": get_nullable_dict_schema( - { - "is_active": {"type": "boolean"}, - "host": specs["ip"], - "user": {"type": "string"}, - "password": {"type": "string"}, - "src_directory": specs["file"], - "dst_directory": {"type": "string"}, - "remove_src_after_upload": {"type": "boolean"}, - } - ), - } - - -class CerberusException(Exception): - pass - - -logger = Logger(origin="config-validation") - - -class ConfigValidation: - """ - Functions used to validate config objects/files. - - All functions in here do not used filelocks because - higher level functions should do that. - """ - - logging_handler = logger.error - - @staticmethod - def check_dict( - content_object: dict, partial_validation: bool = False, validate_paths: bool = True - ) -> None: - """ - For a given config object, check its integrity. - - "partial_validation" means that keys can be missing. 
- This is used when updating the config via CLI, since - the errors given when updating should only concern - the passed properties). - - "validate_paths" means that paths (files and directories) - contained in the config object should be checked too - - whether they exist. This path-validation is skipped when - reading the config via CLI because the UI can and should - deal with invalid paths but not with an invalid structure. - - Does not return anything, only raises AssertionErrors. - """ - validator = cerberus.Validator( - get_config_file_schema(strict=validate_paths), - require_all=(not partial_validation), - ) - assert validator.validate(content_object), validator.errors - # Add assertions that cannot be done with cerberus here - - @staticmethod - def check_current_config_file() -> Tuple[bool, Exception]: - """ - Load the contents of the current config file and - validate its full integrity (with filepaths). - """ - try: - assert os.path.isfile(CONFIG_FILE_PATH), "file does not exist" - with open(CONFIG_FILE_PATH, "r") as f: - try: - content_object = json.load(f) - except: - raise AssertionError("file not in a valid json format") - - ConfigValidation.check_dict(content_object, partial_validation=False) - return True, Exception("") - except Exception as e: - ConfigValidation.logging_handler(f"Error in current config file: {e}") - return False, e - - @staticmethod - def check_partial_config_string(content: str) -> bool: - """ - For a given string, check whether its is a valid - partial config object. Used in CLI. - """ - try: - try: - content_dict = json.loads(content) - except: - raise AssertionError("content not in a valid json format") - - ConfigValidation.check_dict(content_dict, partial_validation=True) - return True - except Exception as e: - ConfigValidation.logging_handler(f"Error in new config string: {e}") - return False diff --git a/packages/core/utils/types/config.py b/packages/core/utils/types/config.py index 0b40312e..91ab37b4 100644 --- a/packages/core/utils/types/config.py +++ b/packages/core/utils/types/config.py @@ -183,7 +183,7 @@ class ValidationError(Exception): """ -def validate_config_dict(o: Any, partial: bool = False) -> None: +def validate_config_dict(o: Any, partial: bool = False, skip_filepaths: bool = False) -> None: """ Check, whether a given object is a correct ConfigDict Raises a pydantic.ValidationError if the object is invalid. 
@@ -212,7 +212,8 @@ def assert_min_max(property_path: str, min_value: float, max_value: float) -> No def assert_file_path(property_path: str) -> None: prop: str = get_nested_dict_property(property_path) - assert os.path.isfile(prop), f"config.{property_path} is not a file" + if not skip_filepaths: + assert os.path.isfile(prop), f"config.{property_path} is not a file" def assert_ip_address(property_path: str) -> None: prop: str = get_nested_dict_property(property_path) diff --git a/scripts/run_type_analysis.sh b/scripts/run_type_analysis.sh index a82147ab..0094ac4b 100644 --- a/scripts/run_type_analysis.sh +++ b/scripts/run_type_analysis.sh @@ -1 +1,5 @@ -python -m mypy run-pyra-core.py --strict --implicit-reexport --no-warn-unused-ignores \ No newline at end of file +echo "running static type analysis for PYRA Core" +python -m mypy run-pyra-core.py --strict --implicit-reexport --no-warn-unused-ignores + +echo "running static type analysis for PYRA CLI" +python -m mypy packages/cli/main.py --strict --implicit-reexport --no-warn-unused-ignores \ No newline at end of file From dfe1d1b26fed281d2565ba2d1f514d63db53af80 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Fri, 19 Aug 2022 17:14:54 +0200 Subject: [PATCH 053/132] #91 - document functions --- packages/core/modules/enclosure_control.py | 8 ++-- .../utils/functions/exception_email_client.py | 31 +++++++++++--- .../core/utils/functions/image_processing.py | 2 + packages/core/utils/functions/logger.py | 18 ++++++++ packages/core/utils/functions/ring_list.py | 3 -- .../functions/update_dict_recursively.py | 42 +++++++++++++------ 6 files changed, 79 insertions(+), 25 deletions(-) diff --git a/packages/core/modules/enclosure_control.py b/packages/core/modules/enclosure_control.py index 8183aa2c..6fdb6c62 100644 --- a/packages/core/modules/enclosure_control.py +++ b/packages/core/modules/enclosure_control.py @@ -114,8 +114,10 @@ def run(self, new_config: types.ConfigDict) -> None: # Check for critial error: Motor Failed Flag in PLC # In case of present motor failed flag the cover might not be closed in bad weather conditions. # Potentially putting the measurement instrument at risk. - if self.plc_state.state.motor_failed: - raise MotorFailedError("URGENT: stop all actions, check cover in person") + if self.plc_state["state"]["motor_failed"]: + raise EnclosureControl.MotorFailedError( + "URGENT: stop all actions, check cover in person" + ) # Check PLC ip connection (single ping). if self.plc_interface.is_responsive(): @@ -247,7 +249,7 @@ def sync_cover_to_measurement_status(self) -> None: logger.info("Syncing Cover to Tracker.") else: # flank change 1 -> 0: remove cover mode: sync to tracker, close cover - if self.plc_state.state.reset_needed: + if self.plc_state["state"]["reset_needed"]: self.plc_interface.reset() time.sleep(10) self.plc_interface.set_sync_to_tracker(False) diff --git a/packages/core/utils/functions/exception_email_client.py b/packages/core/utils/functions/exception_email_client.py index 4b5f62fc..c55cbb2a 100644 --- a/packages/core/utils/functions/exception_email_client.py +++ b/packages/core/utils/functions/exception_email_client.py @@ -5,6 +5,7 @@ from email.mime.multipart import MIMEMultipart import subprocess import traceback +from typing import Optional from .. 
import types dir = os.path.dirname @@ -12,25 +13,37 @@ def get_pyra_version() -> str: + """Get the current PYRA version from the UI's package.json file""" with open(os.path.join(PROJECT_DIR, "packages", "ui", "package.json")) as f: pyra_version: str = json.load(f)["version"] assert pyra_version.startswith("4.") return pyra_version -def get_commit_sha() -> str: +def get_commit_sha() -> Optional[str]: + """Get the current commit sha of the PYRA codebase + Returns None if git is not installed or directory is + a git repository.""" commit_sha_process = subprocess.run( ["git", "rev-parse", "--verify", "HEAD", "--short"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, ) - stdout = commit_sha_process.stdout.decode() - commit_sha = stdout.replace("\n", "").replace(" ", "") - assert len(commit_sha) > 0 - return commit_sha + if commit_sha_process.returncode == 0: + stdout = commit_sha_process.stdout.decode() + commit_sha = stdout.replace("\n", "").replace(" ", "") + assert len(commit_sha) > 0 + if "fatal: not a git repository" in stdout: + return None + return commit_sha + else: + return None def get_current_log_lines() -> list[str]: + """Get the log line from the current info.log file. Only + returns the log lines from the latest two iterations. + """ with open(f"{PROJECT_DIR}/logs/info.log") as f: latest_log_lines = f.readlines() @@ -71,6 +84,8 @@ def _send_email(config: types.ConfigDict, text: str, html: str, subject: str) -> @staticmethod def handle_resolved_exception(config: types.ConfigDict) -> None: + """Send out an email that all exceptions have been resolved + on this station.""" if not config["error_email"]["notify_recipients"]: return @@ -106,6 +121,8 @@ def handle_resolved_exception(config: types.ConfigDict) -> None: @staticmethod def handle_occured_exception(config: types.ConfigDict, exception: Exception) -> None: + """Send out an email that a new exception has occured + on this station.""" if not config["error_email"]["notify_recipients"]: return @@ -119,7 +136,9 @@ def handle_occured_exception(config: types.ConfigDict, exception: Exception) -> text = ( f"{type(exception).__name__} has occured. Details:\n" + f"{tb}\nLast 2 iteration's log lines:{logs}\n" - + f"This email has been generated by Pyra {pyra_version} (commit {commit_sha}) automatically." + + f"This email has been generated by Pyra {pyra_version} " + + (f"(commit {commit_sha}) " if commit_sha is not None else "") + + "automatically." ) pre_tag = '
'
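
The exception e-mail helpers above compose a plain-text body and an HTML body and hand both to `_send_email`. Below is a minimal, self-contained sketch of that two-part message pattern using only the standard library; the SMTP host, port, helper name, and argument names are illustrative assumptions, not the values used by `ExceptionEmailClient`.

```python
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText


def send_plain_and_html_email(
    sender: str, password: str, recipients: list[str], subject: str, text: str, html: str
) -> None:
    """Assemble a two-part (plain text + HTML) message and send it via SMTP."""
    message = MIMEMultipart("alternative")
    message["Subject"] = subject
    message["From"] = sender
    message["To"] = ", ".join(recipients)

    # mail clients prefer the last attached part that they can render
    message.attach(MIMEText(text, "plain"))
    message.attach(MIMEText(html, "html"))

    # placeholder host/port; the actual server depends on the sender address
    with smtplib.SMTP("smtp.example.com", 587) as server:
        server.starttls()
        server.login(sender, password)
        server.sendmail(sender, recipients, message.as_string())
```
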
diff --git a/packages/core/utils/functions/image_processing.py b/packages/core/utils/functions/image_processing.py
index 16f3aff8..a216b676 100644
--- a/packages/core/utils/functions/image_processing.py
+++ b/packages/core/utils/functions/image_processing.py
@@ -98,6 +98,7 @@ def get_circle_location(binary_mask: cv.Mat) -> tuple[int, int, int]:
     def add_markings_to_image(
         img: cv.Mat, edge_fraction: int, circle_cx: int, circle_cy: int, circle_r: int
     ) -> cv.Mat:
+        """Put text for edge fraction and mark circles in image"""
         img = cv.circle(img, (circle_cx, circle_cy), circle_r, (100, 0, 0), 2)
         img = cv.circle(img, (circle_cx, circle_cy), round(circle_r * 0.9), (100, 0, 0), 2)
         img = ImageProcessing.add_text_to_image(img, f"{round(edge_fraction * 100, 2)}%")
@@ -107,6 +108,7 @@ def add_markings_to_image(
     def add_text_to_image(
         img: cv.Mat, text: str, color: tuple[int, int, int] = (200, 0, 0)
     ) -> cv.Mat:
+        """Put some text on the bottom left of an image"""
         cv.putText(
             img,
             text=text,
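
The two drawing helpers documented above wrap plain OpenCV primitives. The following standalone sketch reproduces the same calls (`cv.circle` for the two marker rings, `cv.putText` for the bottom-left label) on a synthetic frame; the exact text position, font, and scale are assumptions, since the hunk does not show the remaining arguments of `add_text_to_image`.

```python
import cv2 as cv
import numpy as np

# synthetic black frame to draw on
frame = np.zeros((480, 640, 3), dtype=np.uint8)

# mark a hypothetical detected circle: outer ring plus a 90% inner ring,
# mirroring the two cv.circle calls in add_markings_to_image
cx, cy, r = 320, 240, 100
frame = cv.circle(frame, (cx, cy), r, (100, 0, 0), 2)
frame = cv.circle(frame, (cx, cy), round(r * 0.9), (100, 0, 0), 2)

# put a label in the bottom left corner, similar to add_text_to_image
cv.putText(
    frame,
    text="edge fraction: 2.34%",
    org=(10, 470),
    fontFace=cv.FONT_HERSHEY_SIMPLEX,
    fontScale=0.6,
    color=(200, 0, 0),
    thickness=1,
)

cv.imwrite("marked-frame.jpg", frame)
```
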
diff --git a/packages/core/utils/functions/logger.py b/packages/core/utils/functions/logger.py
index 8c281d1c..6878336c 100644
--- a/packages/core/utils/functions/logger.py
+++ b/packages/core/utils/functions/logger.py
@@ -16,6 +16,10 @@
 
 
 def log_line_has_time(log_line: str) -> bool:
+    """Returns true when a give log line (string) starts
+    with a valid date. This is not true for exception
+    tracebacks. This log line time is used to determine
+    which file to archive log lines into."""
     try:
         assert len(log_line) >= 10
         datetime.strptime(log_line[:10], "%Y-%m-%d")
@@ -32,22 +36,29 @@ def __init__(self, origin: str = "pyra.core", just_print: bool = False) -> None:
         self.just_print: bool = just_print
 
     def debug(self, message: str) -> None:
+        """Write a debug log (to debug only). Used for verbose output"""
         self._write_log_line("DEBUG", message)
 
     def info(self, message: str) -> None:
+        """Write an info log (to debug and info)"""
         self._write_log_line("INFO", message)
 
     def warning(self, message: str) -> None:
+        """Write a warning log (to debug and info)"""
         self._write_log_line("WARNING", message)
 
     def error(self, message: str) -> None:
+        """Write an error log (to debug and info)"""
         self._write_log_line("ERROR", message)
 
     def exception(self, e: Exception) -> None:
+        """Log the traceback of an exception"""
         tb = "\n".join(traceback.format_exception(e))
         self._write_log_line("EXCEPTION", f"{type(e).__name__} occured: {tb}")
 
     def _write_log_line(self, level: str, message: str) -> None:
+        """Format the log line string and write it to "logs/debug.log"
+        and possibly "logs/info.log"."""
         now = datetime.now()
         utc_offset = round((datetime.now() - datetime.utcnow()).total_seconds() / 3600, 1)
         if round(utc_offset) == utc_offset:
@@ -74,6 +85,13 @@ def _write_log_line(self, level: str, message: str) -> None:
 
     @staticmethod
     def archive(keep_last_hour: bool = False) -> None:
+        """
+        Move all log lines in "logs/info.log" and "logs/debug.log" into
+        an archive file ("logs/archive/YYYYMMDD-debug.log", "...info.log").
+
+        With keep_last_hour = True, log lines less than an hour old will
+        remain.
+        """
         with filelock.FileLock(LOG_FILES_LOCK):
             with open(DEBUG_LOG_FILE, "r") as f:
                 log_lines_in_file = f.readlines()
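
`Logger.archive` moves log lines into per-day archive files, and, as the `log_line_has_time` docstring notes, the leading date of a line decides which file it belongs to. A hedged sketch of that grouping step is shown below; it only assumes that dated lines start with `YYYY-MM-DD` (which is exactly what `log_line_has_time` checks), and the helper name is hypothetical.

```python
from datetime import datetime


def group_log_lines_by_date(log_lines: list[str]) -> dict[str, list[str]]:
    """Group log lines by their leading date so each group can be appended
    to an archive file named like "logs/archive/YYYYMMDD-*.log".

    Lines without a leading date (e.g. traceback lines) stay with the most
    recent dated line."""
    groups: dict[str, list[str]] = {}
    current_date = "unknown-date"
    for line in log_lines:
        try:
            datetime.strptime(line[:10], "%Y-%m-%d")
            current_date = line[:10].replace("-", "")
        except ValueError:
            pass  # keep the date of the previous dated line
        groups.setdefault(current_date, []).append(line)
    return groups
```
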
diff --git a/packages/core/utils/functions/ring_list.py b/packages/core/utils/functions/ring_list.py
index d092cec0..8bc15ec2 100644
--- a/packages/core/utils/functions/ring_list.py
+++ b/packages/core/utils/functions/ring_list.py
@@ -1,6 +1,3 @@
-# TODO add static types
-
-
 class RingList:
     """
     Base code created by Flavio Catalani on Tue, 5 Jul 2005 (PSF).
diff --git a/packages/core/utils/functions/update_dict_recursively.py b/packages/core/utils/functions/update_dict_recursively.py
index f25a3677..6f7c291c 100644
--- a/packages/core/utils/functions/update_dict_recursively.py
+++ b/packages/core/utils/functions/update_dict_recursively.py
@@ -2,9 +2,38 @@
 
 
 def update_dict_recursively(old_object: Any, new_object: Any) -> Any:
+    """
+    For a given dict, update it recursively from a new dict.
+    It will not add any properties, and it asserts that the types
+    remain the same (or null). null->int or int->null is possible
+    but not int->dict or list->int.
+
+    example:
+    old_object = {
+        a: 3,
+        b: {
+            c: 50,
+            e: null
+        }
+    }
+    new_object = {
+        b: {
+            e: 80
+        }
+    }
+    returned_object = {
+        a: 3,
+        b: {
+            c: 50,
+            e: 80
+        }
+    }
+    """
+
     if old_object is None or new_object is None:
         return new_object
 
+    # if the old_object is a dict, loop through
     if type(old_object) == dict:
         assert type(new_object) == dict
         updated_dict = {}
@@ -20,16 +49,3 @@ def update_dict_recursively(old_object: Any, new_object: Any) -> Any:
         else:
             assert type(old_object) == type(new_object)
         return new_object
-
-
-"""
-TODO: Documentation
-{
-    "a": 3,
-    "b": {
-        "d": 40
-    }
-}
-
-{"a": 20}
-"""

From d104fcf473c404de3935fb16f96bb3f1053c1f24 Mon Sep 17 00:00:00 2001
From: dostuffthatmatters 
Date: Fri, 19 Aug 2022 17:17:57 +0200
Subject: [PATCH 054/132] #91 - document decorators

---
 .../core/utils/decorators/with_filelock.py    | 26 +++++++------------
 1 file changed, 10 insertions(+), 16 deletions(-)

diff --git a/packages/core/utils/decorators/with_filelock.py b/packages/core/utils/decorators/with_filelock.py
index f5166c8c..36e649d4 100644
--- a/packages/core/utils/decorators/with_filelock.py
+++ b/packages/core/utils/decorators/with_filelock.py
@@ -2,29 +2,23 @@
 from typing import Any, Callable, TypeVar, cast
 from functools import wraps
 
-# FileLock = Mark, that a file is being used and other programs
-# should not interfere. A file "*.lock" will be created and the
-# existence of this file will make the wrapped function wait until
-# it no longer exists.
-
-# A timeout of -1 means that the code waits forever
-
-# def with_filelock(file_lock_path: str, timeout: float = -1):
-#     def with_fixed_filelock(f):
-#         def locked_function(*args, **kwargs):
-#             with filelock.FileLock(file_lock_path, timeout=timeout):
-#                 return function(*args, **kwargs)
-#         return locked_function
-#     return with_fixed_filelock
-#
 # typing of higher level decorators:
 # https://github.com/python/mypy/issues/1551#issuecomment-253978622
-
 F = TypeVar("F", bound=Callable[..., Any])
 
 
 class with_filelock:
+    """
+    FileLock = a mark that a file is being used and other programs
+    should not interfere. A file "*.lock" will be created and the
+    existence of this file will possibly make the wrapped function
+    wait until other programs are done using the file.
+
+    See https://en.wikipedia.org/wiki/Semaphore_(programming)
+    """
+
     def __init__(self, file_lock_path: str, timeout: float = -1) -> None:
+        """A timeout of -1 means that the code waits forever"""
         self.file_lock_path: str = file_lock_path
         self.timeout: float = timeout
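
The removed comment block and the new class docstring describe the same idea: the decorated function only runs while holding a `*.lock` file. Below is a function-based sketch of that pattern using the `filelock` package already imported by this module; the decorator name and the example paths are illustrative, not part of PYRA.

```python
from functools import wraps
from typing import Any, Callable, TypeVar, cast

import filelock

F = TypeVar("F", bound=Callable[..., Any])


def with_file_lock(file_lock_path: str, timeout: float = -1) -> Callable[[F], F]:
    """The wrapped function only runs while holding file_lock_path;
    timeout=-1 waits forever for other processes to release the lock."""

    def decorator(function: F) -> F:
        @wraps(function)
        def wrapper(*args: Any, **kwargs: Any) -> Any:
            with filelock.FileLock(file_lock_path, timeout=timeout):
                return function(*args, **kwargs)

        return cast(F, wrapper)

    return decorator


@with_file_lock("config/.config.lock", timeout=10)
def read_config() -> str:
    with open("config/config.json", "r") as f:
        return f.read()
```
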
 

From 1cc51fc6ff7b2f5a78b9a7b6b82b69b9bf8d9352 Mon Sep 17 00:00:00 2001
From: dostuffthatmatters 
Date: Tue, 23 Aug 2022 14:19:28 +0200
Subject: [PATCH 055/132] Link setup tool in README

---
 README.md | 59 ++-----------------------------------------------------
 1 file changed, 2 insertions(+), 57 deletions(-)

diff --git a/README.md b/README.md
index a82b4b0c..fbfb9112 100644
--- a/README.md
+++ b/README.md
@@ -1,61 +1,6 @@
-**Work in progress! Do not use it yet.**
+# PYRA
 
-
-
-# Pyra Version 4
-
-## Set up with
-
-Dependency management using https://python-poetry.org/.
-
-```bash
-# create a virtual environment (copy of the python interpreter)
-python3.10 -m venv .venv
-
-# activate the virtual environment
-source .venv/bin/activate # unix
-.venv\Scripts\activate.bat # windows
-
-# when your venv is activated your command line has a (.venv) prefix
-# install dependencies using poetry
-poetry install
-```
-
-
- -## Configuration Files - -Two types of config files: - -1. **`setup.json`** contains all information about the static setup: Which parts does the enclosure consist of? This should be written once and only change when the hardware changes. -2. **`parameters.json`** contains all dynamic parameters that can be set when operating pyra. This should be manipulated either via the CLI (coming soon) or the graphical user interface (coming soon, similar to Pyra version <= 3). - -For each file, there is a `*.default.json` file present in the repository. A full reference can be found here soon. - -
- -## CLI - -_documentation coming soon_ - -Make `pyra-cli` command available: - -```bash -alias pyra-cli=".../.venv/bin/python .../packages/cli/main.py" -``` - -TODO: Find a way to set up autocompletion on the `pyra-cli` command. - -
- -## Graphical User Interface - -_documentation coming soon_ - -Less Secure Apps have been deactivated. -https://support.google.com/accounts/answer/6010255?hl=de&visit_id=637914296292859831-802637670&p=less-secure-apps&rd=1 - -Solution: Use "App passwords", which require 2FA +**For installation, see https://github.com/tum-esm/pyra-setup-tool**
From da75428a6455b4c52407d5f8b71337571c3ec870 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Tue, 23 Aug 2022 23:02:58 +0200 Subject: [PATCH 056/132] #99 - extend upload config --- config/upload.config.default.json | 11 +++++++---- packages/core/threads/upload_thread.py | 16 ++++++++-------- packages/core/utils/types/config.py | 22 ++++++++++++++-------- 3 files changed, 29 insertions(+), 20 deletions(-) diff --git a/config/upload.config.default.json b/config/upload.config.default.json index 1f9e4cc6..6ed79785 100644 --- a/config/upload.config.default.json +++ b/config/upload.config.default.json @@ -1,9 +1,12 @@ { - "is_active": false, "host": "1.2.3.4", "user": "...", "password": "...", - "src_directory": "...", - "dst_directory": "...", - "remove_src_after_upload": false + "upload_ifgs": false, + "src_directory_ifgs": "...", + "dst_directory_ifgs": "...", + "remove_src_ifgs_after_upload": false, + "upload_helios": false, + "dst_directory_helios": "...", + "remove_src_helios_after_upload": true } diff --git a/packages/core/threads/upload_thread.py b/packages/core/threads/upload_thread.py index df745a9e..569aba8a 100644 --- a/packages/core/threads/upload_thread.py +++ b/packages/core/threads/upload_thread.py @@ -42,15 +42,15 @@ def __init__(self, date_string: str, config: types.ConfigDict) -> None: self.upload_config = config["upload"] self.date_string = date_string - self.src_dir_path = os.path.join(config["upload"]["src_directory"], date_string) + self.src_dir_path = os.path.join(config["upload"]["src_directory_ifgs"], date_string) self.src_meta_path = os.path.join(self.src_dir_path, "upload-meta.json") assert os.path.isdir(self.src_dir_path), f"{self.src_dir_path} is not a directory" - self.dst_dir_path = f"{config['upload']['dst_directory']}/{date_string}" + self.dst_dir_path = f"{config['upload']['dst_directory_ifgs']}/{date_string}" self.dst_meta_path = f"{self.dst_dir_path}/upload-meta.json" assert self.transfer_process.is_remote_dir( - config["upload"]["dst_directory"] - ), f"remote {config['upload']['dst_directory']} is not a directory" + config["upload"]["dst_directory_ifgs"] + ), f"remote {config['upload']['dst_directory_ifgs']} is not a directory" self.meta_content: types.UploadMetaDict = { "complete": False, @@ -58,7 +58,7 @@ def __init__(self, date_string: str, config: types.ConfigDict) -> None: "createdTime": round(time.time(), 3), "lastModifiedTime": round(time.time(), 3), } - self.remove_src_after_upload: bool = config["upload"]["remove_src_after_upload"] + self.remove_src_after_upload: bool = config["upload"]["remove_src_ifgs_after_upload"] def __initialize_remote_dir(self) -> None: """ @@ -85,7 +85,7 @@ def __get_remote_directory_checksum(self) -> str: """ local_script_path = os.path.join(PROJECT_DIR, "scripts", "get_upload_dir_checksum.py") remote_script_path = ( - self.upload_config["src_directory"] + "/get_upload_dir_checksum.py" + self.upload_config["src_directory_ifgs"] + "/get_upload_dir_checksum.py" ) self.transfer_process.put(local_script_path, remote_script_path) @@ -290,7 +290,7 @@ def should_be_running(self) -> bool: return ( (not self.config["general"]["test_mode"]) and (self.config["upload"] is not None) - and (self.config["upload"]["is_active"]) + and (self.config["upload"]["upload_ifgs"]) ) def main(self) -> None: @@ -302,7 +302,7 @@ def main(self) -> None: return src_dates_strings = DirectoryUploadClient.get_directories_to_be_uploaded( - self.config["upload"]["src_directory"] + self.config["upload"]["src_directory_ifgs"] ) for src_date_string in 
src_dates_strings: diff --git a/packages/core/utils/types/config.py b/packages/core/utils/types/config.py index 91ab37b4..d76d1b75 100644 --- a/packages/core/utils/types/config.py +++ b/packages/core/utils/types/config.py @@ -133,23 +133,29 @@ class HeliosPartial(TypedDict, total=False): @staticmethod class Upload(TypedDict): - is_active: bool host: str user: str password: str - src_directory: str - dst_directory: str - remove_src_after_upload: bool + upload_ifgs: bool + src_directory_ifgs: str + dst_directory_ifgs: str + remove_src_ifgs_after_upload: bool + upload_helios: bool + dst_directory_helios: str + remove_src_helios_after_upload: bool @staticmethod class UploadPartial(TypedDict, total=False): - is_active: bool host: str user: str password: str - src_directory: str - dst_directory: str - remove_src_after_upload: bool + upload_ifgs: bool + src_directory_ifgs: str + dst_directory_ifgs: str + remove_src_ifgs_after_upload: bool + upload_helios: bool + dst_directory_helios: str + remove_src_helios_after_upload: bool class ConfigDict(TypedDict): From 2a94b8b59efc18bdb3d8684c41e2258ba52286f6 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Wed, 24 Aug 2022 00:07:10 +0200 Subject: [PATCH 057/132] Improve pyra-core codebase structure * less prone to circular imports * import less names (import utils instead of from utils import a, b) * split interfaces and types from utils * merge decorators and functions into utils --- packages/cli/alias/pyra-cli.example.bat | 1 + packages/cli/commands/config.py | 3 +- packages/cli/commands/core.py | 1 - packages/cli/commands/plc.py | 4 +- packages/core/__init__.py | 8 +- .../core/{utils => }/interfaces/__init__.py | 0 .../interfaces/config_interface.py | 8 +- .../{utils => }/interfaces/os_interface.py | 1 - .../{utils => }/interfaces/plc_interface.py | 120 ++++++++++++------ .../{utils => }/interfaces/state_interface.py | 60 +++++++-- packages/core/main.py | 35 +++-- packages/core/modules/enclosure_control.py | 18 +-- .../core/modules/measurement_conditions.py | 18 +-- packages/core/modules/opus_measurement.py | 12 +- packages/core/modules/sun_tracking.py | 10 +- packages/core/modules/system_checks.py | 18 +-- packages/core/threads/abstract_thread_base.py | 4 +- packages/core/threads/helios_thread.py | 46 ++++--- packages/core/threads/upload_thread.py | 6 +- packages/core/{utils => }/types/__init__.py | 0 packages/core/{utils => }/types/config.py | 10 +- .../{utils => }/types/persistent_state.py | 2 +- .../{utils => }/types/plc_specification.py | 0 packages/core/{utils => }/types/plc_state.py | 0 packages/core/{utils => }/types/state.py | 5 +- .../core/{utils => }/types/upload_meta.py | 3 +- packages/core/utils/__init__.py | 23 +--- .../core/utils/{functions => }/astronomy.py | 5 +- packages/core/utils/decorators/__init__.py | 1 - .../{functions => }/exception_email_client.py | 4 +- packages/core/utils/functions/__init__.py | 6 - .../utils/{functions => }/image_processing.py | 1 - .../utils/interfaces/plc_specification.py | 83 ------------ packages/core/utils/{functions => }/logger.py | 2 +- .../core/utils/{functions => }/ring_list.py | 4 +- .../update_dict_recursively.py | 0 .../utils/{decorators => }/with_filelock.py | 2 +- .../utils/{decorators => }/with_timeout.py | 0 38 files changed, 247 insertions(+), 277 deletions(-) rename packages/core/{utils => }/interfaces/__init__.py (100%) rename packages/core/{utils => }/interfaces/config_interface.py (77%) rename packages/core/{utils => }/interfaces/os_interface.py (99%) rename packages/core/{utils 
=> }/interfaces/plc_interface.py (81%) rename packages/core/{utils => }/interfaces/state_interface.py (72%) rename packages/core/{utils => }/types/__init__.py (100%) rename packages/core/{utils => }/types/config.py (96%) rename packages/core/{utils => }/types/persistent_state.py (100%) rename packages/core/{utils => }/types/plc_specification.py (100%) rename packages/core/{utils => }/types/plc_state.py (100%) rename packages/core/{utils => }/types/state.py (95%) rename packages/core/{utils => }/types/upload_meta.py (99%) rename packages/core/utils/{functions => }/astronomy.py (96%) delete mode 100644 packages/core/utils/decorators/__init__.py rename packages/core/utils/{functions => }/exception_email_client.py (98%) delete mode 100644 packages/core/utils/functions/__init__.py rename packages/core/utils/{functions => }/image_processing.py (99%) delete mode 100644 packages/core/utils/interfaces/plc_specification.py rename packages/core/utils/{functions => }/logger.py (99%) rename packages/core/utils/{functions => }/ring_list.py (91%) rename packages/core/utils/{functions => }/update_dict_recursively.py (100%) rename packages/core/utils/{decorators => }/with_filelock.py (95%) rename packages/core/utils/{decorators => }/with_timeout.py (100%) diff --git a/packages/cli/alias/pyra-cli.example.bat b/packages/cli/alias/pyra-cli.example.bat index c53ff007..fa3a7169 100644 --- a/packages/cli/alias/pyra-cli.example.bat +++ b/packages/cli/alias/pyra-cli.example.bat @@ -2,4 +2,5 @@ echo. python C:\Users\...\Documents\pyra-4\packages\cli\main.py % +TODO: remove diff --git a/packages/cli/commands/config.py b/packages/cli/commands/config.py index d45e5d62..febb9ad0 100644 --- a/packages/cli/commands/config.py +++ b/packages/cli/commands/config.py @@ -3,7 +3,8 @@ import click import os import sys -from packages.core.utils import types, with_filelock, update_dict_recursively +from packages.core import types +from packages.core.utils import with_filelock, update_dict_recursively dir = os.path.dirname PROJECT_DIR = dir(dir(dir(dir(os.path.abspath(__file__))))) diff --git a/packages/cli/commands/core.py b/packages/cli/commands/core.py index 3e170f0c..11291053 100644 --- a/packages/cli/commands/core.py +++ b/packages/cli/commands/core.py @@ -4,7 +4,6 @@ import os import psutil from packages.core.modules.enclosure_control import EnclosureControl -from packages.core.modules.opus_measurement import OpusMeasurement from packages.core.modules.sun_tracking import SunTracking from packages.core.utils import ConfigInterface, Logger diff --git a/packages/cli/commands/plc.py b/packages/cli/commands/plc.py index 2f5a8cc8..0f4a1eb6 100644 --- a/packages/cli/commands/plc.py +++ b/packages/cli/commands/plc.py @@ -3,9 +3,9 @@ from typing import Callable, Optional import click import os +from packages.core import types from packages.core.modules.enclosure_control import EnclosureControl -from packages.core.utils import StateInterface, ConfigInterface, PLCInterface, types -from packages.core.utils import with_filelock +from packages.core.utils import StateInterface, ConfigInterface, PLCInterface, with_filelock dir = os.path.dirname PROJECT_DIR = dir(dir(dir(dir(os.path.abspath(__file__))))) diff --git a/packages/core/__init__.py b/packages/core/__init__.py index 161a3895..f45e623e 100644 --- a/packages/core/__init__.py +++ b/packages/core/__init__.py @@ -1,2 +1,6 @@ -from .utils import types -from . import modules, main, threads +from . import types +from . import utils +from . import interfaces +from . import modules +from . 
import threads +from . import main diff --git a/packages/core/utils/interfaces/__init__.py b/packages/core/interfaces/__init__.py similarity index 100% rename from packages/core/utils/interfaces/__init__.py rename to packages/core/interfaces/__init__.py diff --git a/packages/core/utils/interfaces/config_interface.py b/packages/core/interfaces/config_interface.py similarity index 77% rename from packages/core/utils/interfaces/config_interface.py rename to packages/core/interfaces/config_interface.py index 66a721a1..e83c4fa1 100644 --- a/packages/core/utils/interfaces/config_interface.py +++ b/packages/core/interfaces/config_interface.py @@ -1,10 +1,10 @@ import json import os from typing import Any -from packages.core.utils import Astronomy, with_filelock, types +from packages.core import types, utils dir = os.path.dirname -PROJECT_DIR = dir(dir(dir(dir(dir(os.path.abspath(__file__)))))) +PROJECT_DIR = dir(dir(dir(dir(os.path.abspath(__file__))))) CONFIG_FILE_PATH = os.path.join(PROJECT_DIR, "config", "config.json") CONFIG_LOCK_PATH = os.path.join(PROJECT_DIR, "config", ".config.lock") @@ -12,7 +12,7 @@ class ConfigInterface: @staticmethod - @with_filelock(CONFIG_LOCK_PATH) + @utils.with_filelock(CONFIG_LOCK_PATH) def read() -> types.ConfigDict: """ Read the contents of the current config.json file. @@ -24,5 +24,5 @@ def read() -> types.ConfigDict: types.validate_config_dict(new_object) config: types.ConfigDict = new_object - Astronomy.CONFIG = config + utils.Astronomy.CONFIG = config return config diff --git a/packages/core/utils/interfaces/os_interface.py b/packages/core/interfaces/os_interface.py similarity index 99% rename from packages/core/utils/interfaces/os_interface.py rename to packages/core/interfaces/os_interface.py index 7302b18c..95466d2c 100644 --- a/packages/core/utils/interfaces/os_interface.py +++ b/packages/core/interfaces/os_interface.py @@ -1,4 +1,3 @@ -from ctypes import Union from typing import Literal import psutil import datetime diff --git a/packages/core/utils/interfaces/plc_interface.py b/packages/core/interfaces/plc_interface.py similarity index 81% rename from packages/core/utils/interfaces/plc_interface.py rename to packages/core/interfaces/plc_interface.py index 905ef3db..186f9d4b 100644 --- a/packages/core/utils/interfaces/plc_interface.py +++ b/packages/core/interfaces/plc_interface.py @@ -3,49 +3,87 @@ import time import os from snap7.exceptions import Snap7Exception # type: ignore -from packages.core.utils import Logger, StateInterface, types -from .plc_specification import PLC_SPECIFICATION_VERSIONS +from packages.core import types, utils, interfaces -logger = Logger(origin="plc-interface") +logger = utils.Logger(origin="plc-interface") dir = os.path.dirname -PROJECT_DIR = dir(dir(dir(dir(dir(os.path.abspath(__file__)))))) - -# used when initializing the state.json file -EMPTY_PLC_STATE: types.PlcStateDict = { - "actors": { - "fan_speed": None, - "current_angle": None, - }, - "control": { - "auto_temp_mode": None, - "manual_control": None, - "manual_temp_mode": None, - "sync_to_tracker": None, - }, - "sensors": { - "humidity": None, - "temperature": None, - }, - "state": { - "cover_closed": None, - "motor_failed": None, - "rain": None, - "reset_needed": None, - "ups_alert": None, - }, - "power": { - "camera": None, - "computer": None, - "heater": None, - "router": None, - "spectrometer": None, +PROJECT_DIR = dir(dir(dir(dir(os.path.abspath(__file__))))) + +# TODO: use tuples (3 ints vs 4 ints) +PLC_SPECIFICATION_VERSIONS: dict[Literal[1, 2], 
types.PlcSpecificationDict] = { + 1: { + "actors": { + "current_angle": [25, 6, 2], + "fan_speed": [8, 18, 2], + "move_cover": [25, 8, 2], + "nominal_angle": [25, 8, 2], + }, + "control": { + "auto_temp_mode": [8, 24, 1, 2], + "manual_control": [8, 24, 1, 5], + "manual_temp_mode": [8, 24, 1, 3], + "reset": [3, 4, 1, 5], + "sync_to_tracker": [8, 16, 1, 0], + }, + "sensors": {"humidity": [8, 22, 2], "temperature": [8, 20, 2]}, + "state": { + "cover_closed": [25, 2, 1, 2], + "motor_failed": [8, 12, 1, 3], + "rain": [8, 6, 1, 0], + "reset_needed": [3, 2, 1, 2], + "ups_alert": [8, 0, 1, 1], + }, + "power": { + "camera": [8, 16, 1, 2], + "computer": [8, 16, 1, 6], + "heater": [8, 16, 1, 5], + "router": [8, 16, 1, 3], + "spectrometer": [8, 16, 1, 1], + }, + "connections": { + "camera": [8, 14, 1, 6], + "computer": [8, 14, 1, 3], + "heater": [8, 14, 1, 1], + "router": [8, 14, 1, 2], + "spectrometer": [8, 14, 1, 0], + }, }, - "connections": { - "camera": None, - "computer": None, - "heater": None, - "router": None, - "spectrometer": None, + 2: { + "actors": { + "current_angle": [6, 6, 2], + "fan_speed": [8, 4, 2], + "move_cover": [6, 8, 2], + "nominal_angle": [6, 8, 2], + }, + "control": { + "auto_temp_mode": [8, 24, 1, 5], + "manual_control": [8, 12, 1, 7], + "manual_temp_mode": [8, 24, 1, 4], + "reset": [3, 4, 1, 5], + "sync_to_tracker": [8, 8, 1, 1], + }, + "sensors": {"humidity": [8, 22, 2], "temperature": [8, 16, 2]}, + "state": { + "cover_closed": [6, 16, 1, 1], + "motor_failed": None, + "rain": [3, 0, 1, 0], + "reset_needed": [3, 2, 1, 2], + "ups_alert": [8, 13, 1, 6], + }, + "power": { + "camera": [8, 8, 1, 4], # K5 Relay + "computer": None, + "heater": [8, 12, 1, 7], # K3 Relay + "router": None, # not allowed + "spectrometer": [8, 8, 1, 2], # K4 Relay + }, + "connections": { + "camera": None, + "computer": [8, 13, 1, 2], + "heater": [8, 6, 1, 1], + "router": [8, 12, 1, 4], + "spectrometer": None, + }, }, } @@ -297,7 +335,7 @@ def __update_bool( raise PLCInterface.PLCError("PLC state did not change") # TODO: check whether this results in a circular import - StateInterface.update({"enclosure_plc_readings": partial_plc_state}) + interfaces.StateInterface.update({"enclosure_plc_readings": partial_plc_state}) def set_power_camera(self, new_state: bool) -> None: """Raises PLCInterface.PLCError, if value hasn't been changed""" diff --git a/packages/core/utils/interfaces/state_interface.py b/packages/core/interfaces/state_interface.py similarity index 72% rename from packages/core/utils/interfaces/state_interface.py rename to packages/core/interfaces/state_interface.py index a615a068..4572dc31 100644 --- a/packages/core/utils/interfaces/state_interface.py +++ b/packages/core/interfaces/state_interface.py @@ -1,11 +1,10 @@ import json import os import shutil -from packages.core.utils import with_filelock, update_dict_recursively, types -from .plc_interface import EMPTY_PLC_STATE +from packages.core import types, utils dir = os.path.dirname -PROJECT_DIR = dir(dir(dir(dir(dir(os.path.abspath(__file__)))))) +PROJECT_DIR = dir(dir(dir(dir(os.path.abspath(__file__))))) CONFIG_FILE_PATH = os.path.join(PROJECT_DIR, "config", "config.json") CONFIG_LOCK_PATH = os.path.join(PROJECT_DIR, "config", ".config.lock") @@ -20,7 +19,43 @@ EMPTY_STATE_OBJECT: types.StateDict = { "helios_indicates_good_conditions": None, "measurements_should_be_running": False, - "enclosure_plc_readings": EMPTY_PLC_STATE, + "enclosure_plc_readings": { + "actors": { + "fan_speed": None, + "current_angle": None, + }, + "control": { + 
"auto_temp_mode": None, + "manual_control": None, + "manual_temp_mode": None, + "sync_to_tracker": None, + }, + "sensors": { + "humidity": None, + "temperature": None, + }, + "state": { + "cover_closed": None, + "motor_failed": None, + "rain": None, + "reset_needed": None, + "ups_alert": None, + }, + "power": { + "camera": None, + "computer": None, + "heater": None, + "router": None, + "spectrometer": None, + }, + "connections": { + "camera": None, + "computer": None, + "heater": None, + "router": None, + "spectrometer": None, + }, + }, "os_state": { "cpu_usage": None, "memory_usage": None, @@ -34,13 +69,10 @@ "current_exceptions": [], } -# TODO: Validate structure with cerberus (assertion) -# we could possibly use pydantic for that - class StateInterface: @staticmethod - @with_filelock(STATE_LOCK_PATH) + @utils.with_filelock(STATE_LOCK_PATH) def initialize() -> None: """ This will create two files: @@ -77,7 +109,7 @@ def initialize() -> None: json.dump(EMPTY_PERSISTENT_STATE_OBJECT, f, indent=4) @staticmethod - @with_filelock(STATE_LOCK_PATH) + @utils.with_filelock(STATE_LOCK_PATH) def read() -> types.StateDict: """Read the state file and return its content""" with open(STATE_FILE_PATH, "r") as f: @@ -86,7 +118,7 @@ def read() -> types.StateDict: return new_object @staticmethod - @with_filelock(STATE_LOCK_PATH) + @utils.with_filelock(STATE_LOCK_PATH) def read_persistent() -> types.PersistentStateDict: """Read the persistent state file and return its content""" with open(PERSISTENT_STATE_FILE_PATH, "r") as f: @@ -95,7 +127,7 @@ def read_persistent() -> types.PersistentStateDict: return new_object @staticmethod - @with_filelock(STATE_LOCK_PATH) + @utils.with_filelock(STATE_LOCK_PATH) def update(update: types.StateDictPartial) -> None: """ Update the (persistent) state file and return its content. @@ -106,12 +138,12 @@ def update(update: types.StateDictPartial) -> None: with open(STATE_FILE_PATH, "r") as f: current_state = json.load(f) - new_state = update_dict_recursively(current_state, update) + new_state = utils.update_dict_recursively(current_state, update) with open(STATE_FILE_PATH, "w") as f: json.dump(new_state, f, indent=4) @staticmethod - @with_filelock(STATE_LOCK_PATH) + @utils.with_filelock(STATE_LOCK_PATH) def update_persistent(update: types.PersistentStateDictPartial) -> None: """ Update the (persistent) state file and return its content. 
@@ -123,6 +155,6 @@ def update_persistent(update: types.PersistentStateDictPartial) -> None: current_state = json.load(f) types.validate_persistent_state_dict(current_state) - new_state = update_dict_recursively(current_state, update) + new_state = utils.update_dict_recursively(current_state, update) with open(PERSISTENT_STATE_FILE_PATH, "w") as f: json.dump(new_state, f, indent=4) diff --git a/packages/core/main.py b/packages/core/main.py index c37d7d0c..382a0143 100644 --- a/packages/core/main.py +++ b/packages/core/main.py @@ -1,16 +1,11 @@ import os import time from typing import Any, Optional -from packages.core import modules, threads -from packages.core.utils import ( - ConfigInterface, - StateInterface, - Logger, - ExceptionEmailClient, - types, -) +from packages.core import types, utils, interfaces, modules, threads -logger = Logger(origin="main") +logger = utils.Logger(origin="main") + +# TODO: Move library ignores to pyproject.toml def update_exception_state( @@ -35,18 +30,18 @@ def update_exception_state( if new_exception is not None: if type(new_exception).__name__ not in current_exceptions: updated_current_exceptions.append(type(new_exception).__name__) - ExceptionEmailClient.handle_occured_exception(config, new_exception) + utils.ExceptionEmailClient.handle_occured_exception(config, new_exception) if len(current_exceptions) == 0: - Logger.log_activity_event("error-occured") + utils.Logger.log_activity_event("error-occured") else: if len(current_exceptions) > 0: updated_current_exceptions = [] - ExceptionEmailClient.handle_resolved_exception(config) + utils.ExceptionEmailClient.handle_resolved_exception(config) logger.info(f"All exceptions have been resolved.") - Logger.log_activity_event("errors-resolved") + utils.Logger.log_activity_event("errors-resolved") # if no errors until now - StateInterface.update_persistent({"current_exceptions": current_exceptions}) + interfaces.StateInterface.update_persistent({"current_exceptions": current_exceptions}) return updated_current_exceptions except Exception as e: @@ -60,18 +55,18 @@ def run() -> None: It loads the config file, validates it runs every module one by one, and possibly restarts the upload- and helios-thread. """ - StateInterface.initialize() + interfaces.StateInterface.initialize() logger.info(f"Starting mainloop inside process with PID {os.getpid()}") # Loop until a valid config has been found. 
Without # an invalid config, the mainloop cannot initialize while True: try: - config = ConfigInterface.read() + config = interfaces.ConfigInterface.read() break except Exception as e: - logger.error(f"{e}") - logger.error(f"Invalid config, waiting 10 seconds") + logger.exception(e) + logger.error(f"Could not read config, waiting 10 seconds") time.sleep(10) # these modules will be executed one by one in each @@ -91,7 +86,7 @@ def run() -> None: helios_thread_instance = threads.helios_thread.HeliosThread(config) upload_thread_instance = threads.upload_thread.UploadThread(config) - current_exceptions = StateInterface.read_persistent()["current_exceptions"] + current_exceptions = interfaces.StateInterface.read_persistent()["current_exceptions"] while True: start_time = time.time() @@ -99,7 +94,7 @@ def run() -> None: # load config at the beginning of each mainloop iteration try: - config = ConfigInterface.read() + config = interfaces.ConfigInterface.read() except Exception as e: logger.error(f"Invalid config, waiting 10 seconds") time.sleep(10) diff --git a/packages/core/modules/enclosure_control.py b/packages/core/modules/enclosure_control.py index 6fdb6c62..6917f2b3 100644 --- a/packages/core/modules/enclosure_control.py +++ b/packages/core/modules/enclosure_control.py @@ -1,8 +1,8 @@ import time from snap7.exceptions import Snap7Exception # type: ignore -from packages.core.utils import StateInterface, Logger, Astronomy, PLCInterface, types +from packages.core import types, utils, interfaces -logger = Logger(origin="enclosure-control") +logger = utils.Logger(origin="enclosure-control") class EnclosureControl: @@ -44,7 +44,7 @@ def __initialize(self) -> None: """Initializes the default PLC settings at startup or activation in config.""" assert self.config["tum_plc"] is not None - self.plc_interface = PLCInterface( + self.plc_interface = interfaces.PLCInterface( self.config["tum_plc"]["version"], self.config["tum_plc"]["ip"] ) self.plc_interface.connect() @@ -73,7 +73,7 @@ def run(self, new_config: types.ConfigDict) -> None: logger.info("Running EnclosureControl") # Check for current measurement status - self.measurements_should_be_running = StateInterface.read()[ + self.measurements_should_be_running = interfaces.StateInterface.read()[ "measurements_should_be_running" ] @@ -99,7 +99,7 @@ def run(self, new_config: types.ConfigDict) -> None: # Push the latest readout of the PLC state to the StateInterface logger.info("New continuous readings.") - StateInterface.update({"enclosure_plc_readings": self.plc_state}) + interfaces.StateInterface.update({"enclosure_plc_readings": self.plc_state}) # Skip writing to the PLC as the user took over control from the automation if self.config["tum_plc"]["controlled_by_user"]: @@ -149,7 +149,9 @@ def run(self, new_config: types.ConfigDict) -> None: now = time.time() seconds_since_error_occured = now - self.last_plc_connection_time if seconds_since_error_occured > 600: - raise PLCInterface.PLCError("Snap7Exception persisting for 10+ minutes") + raise interfaces.PLCInterface.PLCError( + "Snap7Exception persisting for 10+ minutes" + ) else: logger.info( f"Snap7Exception persisting for {round(seconds_since_error_occured/60, 2)}" @@ -219,10 +221,10 @@ def auto_set_power_spectrometer(self) -> None: spectrometer in the morning when minimum angle is satisfied. 
""" - current_sun_elevation = Astronomy.get_current_sun_elevation() + current_sun_elevation = utils.Astronomy.get_current_sun_elevation() min_power_elevation = ( self.config["general"]["min_sun_elevation"] - 1 - ) * Astronomy.units.deg + ) * utils.Astronomy.units.deg if current_sun_elevation is not None: sun_is_above_minimum = current_sun_elevation >= min_power_elevation diff --git a/packages/core/modules/measurement_conditions.py b/packages/core/modules/measurement_conditions.py index 3036141b..71a68150 100644 --- a/packages/core/modules/measurement_conditions.py +++ b/packages/core/modules/measurement_conditions.py @@ -1,7 +1,7 @@ import datetime -from packages.core.utils import Astronomy, StateInterface, Logger, types +from packages.core import types, utils, interfaces -logger = Logger(origin="measurement-conditions") +logger = utils.Logger(origin="measurement-conditions") def is_time_trigger_active( @@ -61,10 +61,10 @@ def run(self, new_config: types.ConfigDict) -> None: measurements_should_be_running = self._get_automatic_decision() if ( - StateInterface.read()["measurements_should_be_running"] + interfaces.StateInterface.read()["measurements_should_be_running"] != measurements_should_be_running ): - Logger.log_activity_event( + utils.Logger.log_activity_event( "start-measurements" if measurements_should_be_running else "stop-measurements" ) @@ -72,7 +72,7 @@ def run(self, new_config: types.ConfigDict) -> None: f"Measurements should be running is set to: {measurements_should_be_running}." ) # Update of the StateInterface with the latest measurement decision - StateInterface.update( + interfaces.StateInterface.update( {"measurements_should_be_running": measurements_should_be_running} ) @@ -98,12 +98,12 @@ def _get_automatic_decision(self) -> bool: # Evaluate sun elevation if trigger is active if triggers["consider_sun_elevation"]: logger.info("Sun elevation as a trigger is considered.") - current_sun_elevation = Astronomy.get_current_sun_elevation() + current_sun_elevation = utils.Astronomy.get_current_sun_elevation() min_sun_elevation = max( self._CONFIG["general"]["min_sun_elevation"], triggers["min_sun_elevation"] ) sun_above_threshold = current_sun_elevation > ( - min_sun_elevation * Astronomy.units.deg + min_sun_elevation * utils.Astronomy.units.deg ) if sun_above_threshold: logger.debug("Sun angle is above threshold.") @@ -123,7 +123,9 @@ def _get_automatic_decision(self) -> bool: # Helios runs in a thread and evaluates the sun conditions consistanly during day. 
if triggers["consider_helios"]: logger.info("Helios as a trigger is considered.") - helios_result = StateInterface.read()["helios_indicates_good_conditions"] + helios_result = interfaces.StateInterface.read()[ + "helios_indicates_good_conditions" + ] if helios_result is None: logger.debug(f"Helios does not nave enough images yet.") diff --git a/packages/core/modules/opus_measurement.py b/packages/core/modules/opus_measurement.py index 898dfff2..71e4d1af 100644 --- a/packages/core/modules/opus_measurement.py +++ b/packages/core/modules/opus_measurement.py @@ -2,7 +2,7 @@ import sys import time from typing import Any -from packages.core.utils import Logger, StateInterface, Astronomy, types +from packages.core import types, utils, interfaces # these imports are provided by pywin32 @@ -13,7 +13,7 @@ import dde # type: ignore -logger = Logger(origin="opus-measurement") +logger = utils.Logger(origin="opus-measurement") class OpusMeasurement: @@ -71,7 +71,7 @@ def run(self, new_config: types.ConfigDict) -> None: logger.info("EM27 seems to be disconnected.") # check for automation state flank changes - measurements_should_be_running = StateInterface.read()[ + measurements_should_be_running = interfaces.StateInterface.read()[ "measurements_should_be_running" ] if self.last_cycle_automation_status != measurements_should_be_running: @@ -301,8 +301,8 @@ def low_sun_angle_present(self) -> bool: """OPUS closes at the end of the day to start up fresh the next day.""" assert sys.platform == "win32" - sun_angle_is_low: bool = Astronomy.get_current_sun_elevation().is_within_bounds( - None, self._CONFIG["general"]["min_sun_elevation"] * Astronomy.units.deg + sun_angle_is_low: bool = utils.Astronomy.get_current_sun_elevation().is_within_bounds( + None, self._CONFIG["general"]["min_sun_elevation"] * utils.Astronomy.units.deg ) return sun_angle_is_low @@ -352,7 +352,7 @@ def check_for_experiment_change(self) -> None: assert sys.platform == "win32" if self._CONFIG["opus"]["experiment_path"] != self.current_experiment: - if StateInterface.read_persistent()["active_opus_macro_id"] == None: + if interfaces.StateInterface.read_persistent()["active_opus_macro_id"] == None: self.load_experiment() else: self.stop_macro() diff --git a/packages/core/modules/sun_tracking.py b/packages/core/modules/sun_tracking.py index 361a5df6..fb1d2908 100644 --- a/packages/core/modules/sun_tracking.py +++ b/packages/core/modules/sun_tracking.py @@ -3,15 +3,13 @@ # interface to be implemented for any software like "Camtracker" import os -import sys import time -from typing import Any import jdcal # type: ignore import datetime -from packages.core.utils import StateInterface, Logger, OSInterface, types +from packages.core import types, utils, interfaces -logger = Logger(origin="sun-tracking") +logger = utils.Logger(origin="sun-tracking") class SunTracking: @@ -29,7 +27,7 @@ def run(self, new_config: types.ConfigDict) -> None: logger.info("Running SunTracking") - measurements_should_be_running = StateInterface.read()[ + measurements_should_be_running = interfaces.StateInterface.read()[ "measurements_should_be_running" ] @@ -66,7 +64,7 @@ def ct_application_running(self) -> bool: ct_path = self._CONFIG["camtracker"]["executable_path"] process_name = os.path.basename(ct_path) - return OSInterface.get_process_status(process_name) in [ + return interfaces.OSInterface.get_process_status(process_name) in [ "running", "start_pending", "continue_pending", diff --git a/packages/core/modules/system_checks.py b/packages/core/modules/system_checks.py 
index 5f9aed88..f498e5e1 100644 --- a/packages/core/modules/system_checks.py +++ b/packages/core/modules/system_checks.py @@ -1,6 +1,6 @@ -from packages.core.utils import Logger, OSInterface, StateInterface, types +from packages.core import types, utils, interfaces -logger = Logger(origin="system-checks") +logger = utils.Logger(origin="system-checks") class SystemChecks: @@ -17,25 +17,25 @@ def run(self, new_config: types.ConfigDict) -> None: logger.info("Running SystemChecks") # check os system stability - cpu_usage = OSInterface.get_cpu_usage() + cpu_usage = interfaces.OSInterface.get_cpu_usage() logger.debug(f"Current CPU usage for all cores is {cpu_usage}%.") - memory_usage = OSInterface.get_memory_usage() + memory_usage = interfaces.OSInterface.get_memory_usage() logger.debug(f"Current v_memory usage for the system is {memory_usage}.") - last_boot_time = OSInterface.get_last_boot_time() + last_boot_time = interfaces.OSInterface.get_last_boot_time() logger.debug(f"The system is running since {last_boot_time}.") - disk_space = OSInterface.get_disk_space() + disk_space = interfaces.OSInterface.get_disk_space() logger.debug(f"The disk is currently filled with {disk_space}%.") # raises error if disk_space is below 10% - OSInterface.validate_disk_space() + interfaces.OSInterface.validate_disk_space() # raises error if system battery is below 20% - OSInterface.validate_system_battery() + interfaces.OSInterface.validate_system_battery() - StateInterface.update( + interfaces.StateInterface.update( { "os_state": { "cpu_usage": cpu_usage, diff --git a/packages/core/threads/abstract_thread_base.py b/packages/core/threads/abstract_thread_base.py index aa264188..9338df19 100644 --- a/packages/core/threads/abstract_thread_base.py +++ b/packages/core/threads/abstract_thread_base.py @@ -1,7 +1,7 @@ import abc import threading from typing import Optional -from packages.core.utils import Logger, types +from packages.core import types, utils class AbstractThreadBase(abc.ABC): @@ -11,7 +11,7 @@ class AbstractThreadBase(abc.ABC): def __init__(self, config: types.ConfigDict, logger_origin: str) -> None: self.__thread: Optional[threading.Thread] = None - self.__logger: Logger = Logger(origin=logger_origin) + self.__logger: utils.Logger = utils.Logger(origin=logger_origin) self.config: types.ConfigDict = config def update_thread_state(self, new_config: types.ConfigDict) -> None: diff --git a/packages/core/threads/helios_thread.py b/packages/core/threads/helios_thread.py index 28cfc67a..f6934b1f 100644 --- a/packages/core/threads/helios_thread.py +++ b/packages/core/threads/helios_thread.py @@ -1,21 +1,13 @@ from datetime import datetime import os import time -from typing import Any, Literal, Optional import cv2 as cv # type: ignore import numpy as np -from packages.core.utils import ( - ConfigInterface, - StateInterface, - Logger, - RingList, - Astronomy, - ImageProcessing, - types, -) +from typing import Any, Literal, Optional +from packages.core import types, utils, interfaces from .abstract_thread_base import AbstractThreadBase -logger = Logger(origin="helios") +logger = utils.Logger(origin="helios") dir = os.path.dirname PROJECT_DIR = dir(dir(dir(dir(os.path.abspath(__file__))))) @@ -178,7 +170,7 @@ def adjust_exposure() -> None: img = _Helios.take_image(trow_away_white_images=False) mean_color = round(np.mean(img), 3) exposure_results.append({"exposure": e, "mean": mean_color}) - img = ImageProcessing.add_text_to_image( + img = utils.ImageProcessing.add_text_to_image( img, f"mean={mean_color}", color=(0, 
0, 255) ) cv.imwrite(os.path.join(AUTOEXPOSURE_IMG_DIR, f"exposure-{e}.jpg"), img) @@ -214,8 +206,8 @@ def determine_frame_status(frame: cv.Mat, save_image: bool) -> Literal[0, 1]: single_valued_pixels = cv.cvtColor(downscaled_image, cv.COLOR_BGR2GRAY) # determine lense position and size from binary mask - binary_mask = ImageProcessing.get_binary_mask(single_valued_pixels) - circle_cx, circle_cy, circle_r = ImageProcessing.get_circle_location(binary_mask) + binary_mask = utils.ImageProcessing.get_binary_mask(single_valued_pixels) + circle_cx, circle_cy, circle_r = utils.ImageProcessing.get_circle_location(binary_mask) # only consider edges and make them bold edges_only: cv.Mat = np.array(cv.Canny(single_valued_pixels, 40, 40), dtype=np.float32) @@ -224,7 +216,7 @@ def determine_frame_status(frame: cv.Mat, save_image: bool) -> Literal[0, 1]: ) # blacken the outer 10% of the circle radius - edges_only_dilated *= ImageProcessing.get_circle_mask( + edges_only_dilated *= utils.ImageProcessing.get_circle_mask( edges_only_dilated.shape, round(circle_r * 0.9), circle_cx, circle_cy ) @@ -241,7 +233,7 @@ def determine_frame_status(frame: cv.Mat, save_image: bool) -> Literal[0, 1]: image_timestamp = datetime.now().strftime("%Y%m%d-%H%M%S") raw_image_name = f"{image_timestamp}-{status}-raw.jpg" processed_image_name = f"{image_timestamp}-{status}-processed.jpg" - processed_frame = ImageProcessing.add_markings_to_image( + processed_frame = utils.ImageProcessing.add_markings_to_image( edges_only_dilated, edge_fraction, circle_cx, circle_cy, circle_r ) cv.imwrite(os.path.join(IMG_DIR, raw_image_name), frame) @@ -305,22 +297,22 @@ def main(self, infinite_loop: bool = True, headless: bool = False) -> None: # headless mode = don't use logger, just print messages to console, always save images if headless: - logger = Logger(origin="helios", just_print=True) - _CONFIG = ConfigInterface.read() + logger = utils.Logger(origin="helios", just_print=True) + _CONFIG = interfaces.ConfigInterface.read() self.config = _CONFIG # Check for termination if (_CONFIG["helios"] is None) or (not self.should_be_running()): return - status_history = RingList(_CONFIG["helios"]["evaluation_size"]) + status_history = utils.RingList(_CONFIG["helios"]["evaluation_size"]) current_state = None repeated_camera_error_count = 0 while True: start_time = time.time() - _CONFIG = ConfigInterface.read() + _CONFIG = interfaces.ConfigInterface.read() # Check for termination if (_CONFIG["helios"] is None) or (not self.should_be_running()): @@ -343,12 +335,16 @@ def main(self, infinite_loop: bool = True, headless: bool = False) -> None: status_history.reinitialize(new_size) # sleep while sun angle is too low - if (not headless) and Astronomy.get_current_sun_elevation().is_within_bounds( - None, _CONFIG["general"]["min_sun_elevation"] * Astronomy.units.deg + if ( + not headless + ) and utils.Astronomy.get_current_sun_elevation().is_within_bounds( + None, _CONFIG["general"]["min_sun_elevation"] * utils.Astronomy.units.deg ): logger.debug("Current sun elevation below minimum: Waiting 5 minutes") if current_state != None: - StateInterface.update({"helios_indicates_good_conditions": False}) + interfaces.StateInterface.update( + {"helios_indicates_good_conditions": False} + ) current_state = None # reinit for next day _Helios.deinit() @@ -391,7 +387,9 @@ def main(self, infinite_loop: bool = True, headless: bool = False) -> None: logger.info( f"State change: {'BAD -> GOOD' if (new_state == True) else 'GOOD -> BAD'}" ) - 
StateInterface.update({"helios_indicates_good_conditions": new_state}) + interfaces.StateInterface.update( + {"helios_indicates_good_conditions": new_state} + ) current_state = new_state # wait rest of loop time diff --git a/packages/core/threads/upload_thread.py b/packages/core/threads/upload_thread.py index 569aba8a..e193fba9 100644 --- a/packages/core/threads/upload_thread.py +++ b/packages/core/threads/upload_thread.py @@ -9,10 +9,10 @@ import fabric # type: ignore import re import pydantic -from packages.core.utils import ConfigInterface, Logger, types +from packages.core import types, utils, interfaces from .abstract_thread_base import AbstractThreadBase -logger = Logger(origin="upload") +logger = utils.Logger(origin="upload") dir = os.path.dirname PROJECT_DIR = dir(dir(dir(dir(os.path.abspath(__file__))))) @@ -296,7 +296,7 @@ def should_be_running(self) -> bool: def main(self) -> None: """Main entrypoint of the thread""" while True: - self.config = ConfigInterface.read() + self.config = interfaces.ConfigInterface.read() if self.config["upload"] is None: return diff --git a/packages/core/utils/types/__init__.py b/packages/core/types/__init__.py similarity index 100% rename from packages/core/utils/types/__init__.py rename to packages/core/types/__init__.py diff --git a/packages/core/utils/types/config.py b/packages/core/types/config.py similarity index 96% rename from packages/core/utils/types/config.py rename to packages/core/types/config.py index d76d1b75..11c4fa94 100644 --- a/packages/core/utils/types/config.py +++ b/packages/core/types/config.py @@ -1,7 +1,6 @@ -from ast import Call import os -from typing import Any, Callable, Literal, Optional, TypedDict, Union import pydantic +from typing import Any, Callable, Literal, Optional, TypedDict TimeDict = TypedDict("TimeDict", {"hour": int, "minute": int, "second": int}) @@ -214,7 +213,7 @@ def assert_min_max(property_path: str, min_value: float, max_value: float) -> No prop: float = get_nested_dict_property(property_path) error_message = f"config.{property_path} must be in range [{min_value}, {max_value}]" assert prop >= min_value, error_message - assert prop >= max_value, error_message + assert prop <= max_value, error_message def assert_file_path(property_path: str) -> None: prop: str = get_nested_dict_property(property_path) @@ -260,11 +259,14 @@ def assert_ip_address(property_path: str) -> None: failed_checks = [] for assertion in assertions: - # KeyErrors will be ignored (for partial objects) try: assertion() except AssertionError as a: failed_checks.append(a) + except (TypeError, KeyError): + # Will be ignored because the structure is already + # validated. 
Occurs when property is missing + pass if len(failed_checks) > 0: raise ValidationError( diff --git a/packages/core/utils/types/persistent_state.py b/packages/core/types/persistent_state.py similarity index 100% rename from packages/core/utils/types/persistent_state.py rename to packages/core/types/persistent_state.py index 10d261e5..476fdca4 100644 --- a/packages/core/utils/types/persistent_state.py +++ b/packages/core/types/persistent_state.py @@ -1,5 +1,5 @@ -from typing import Any, Optional, TypedDict import pydantic +from typing import Any, Optional, TypedDict class PersistentStateDict(TypedDict): diff --git a/packages/core/utils/types/plc_specification.py b/packages/core/types/plc_specification.py similarity index 100% rename from packages/core/utils/types/plc_specification.py rename to packages/core/types/plc_specification.py diff --git a/packages/core/utils/types/plc_state.py b/packages/core/types/plc_state.py similarity index 100% rename from packages/core/utils/types/plc_state.py rename to packages/core/types/plc_state.py diff --git a/packages/core/utils/types/state.py b/packages/core/types/state.py similarity index 95% rename from packages/core/utils/types/state.py rename to packages/core/types/state.py index 5576b9ba..ce43b2d3 100644 --- a/packages/core/utils/types/state.py +++ b/packages/core/types/state.py @@ -1,7 +1,6 @@ -from typing import Any, Union, Optional, TypedDict import pydantic - -from . import PlcStateDict, PlcStateDictPartial +from typing import Any, Union, Optional, TypedDict +from .plc_state import PlcStateDict, PlcStateDictPartial class _OSStateDict(TypedDict): diff --git a/packages/core/utils/types/upload_meta.py b/packages/core/types/upload_meta.py similarity index 99% rename from packages/core/utils/types/upload_meta.py rename to packages/core/types/upload_meta.py index 2212119d..e7f79790 100644 --- a/packages/core/utils/types/upload_meta.py +++ b/packages/core/types/upload_meta.py @@ -1,6 +1,5 @@ -from typing import Any, Optional, TypedDict - import pydantic +from typing import Any, Optional, TypedDict class UploadMetaDict(TypedDict): diff --git a/packages/core/utils/__init__.py b/packages/core/utils/__init__.py index 9bcca9be..2b0e2270 100644 --- a/packages/core/utils/__init__.py +++ b/packages/core/utils/__init__.py @@ -1,16 +1,7 @@ -from .functions import Logger -from .functions import RingList -from .functions import Astronomy -from .functions import ExceptionEmailClient -from .functions import ExceptionEmailClient -from .functions import update_dict_recursively -from .functions import ImageProcessing - -from .decorators import with_filelock - -from .interfaces import ConfigInterface -from .interfaces import StateInterface -from .interfaces import PLCInterface -from .interfaces import OSInterface - -from . 
import types +from .logger import Logger +from .ring_list import RingList +from .astronomy import Astronomy +from .exception_email_client import ExceptionEmailClient +from .update_dict_recursively import update_dict_recursively +from .image_processing import ImageProcessing +from .with_filelock import with_filelock diff --git a/packages/core/utils/functions/astronomy.py b/packages/core/utils/astronomy.py similarity index 96% rename from packages/core/utils/functions/astronomy.py rename to packages/core/utils/astronomy.py index 5fab6748..7d7c1429 100644 --- a/packages/core/utils/functions/astronomy.py +++ b/packages/core/utils/astronomy.py @@ -2,10 +2,9 @@ import astropy.coordinates as astropy_coordinates # type: ignore import astropy.time as astropy_time # type: ignore import astropy.units as astropy_units # type: ignore -from .. import types +from packages.core import types -# TODO: pass config via functions instea of indirectly -# more code but way simpler +# TODO: pass config via functions instea of indirectly more code but way simpler # TODO: add static typic (simplify code while doing that (less astropy stuff)) diff --git a/packages/core/utils/decorators/__init__.py b/packages/core/utils/decorators/__init__.py deleted file mode 100644 index 0b5afbf8..00000000 --- a/packages/core/utils/decorators/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .with_filelock import with_filelock diff --git a/packages/core/utils/functions/exception_email_client.py b/packages/core/utils/exception_email_client.py similarity index 98% rename from packages/core/utils/functions/exception_email_client.py rename to packages/core/utils/exception_email_client.py index c55cbb2a..8139dc6b 100644 --- a/packages/core/utils/functions/exception_email_client.py +++ b/packages/core/utils/exception_email_client.py @@ -6,10 +6,10 @@ import subprocess import traceback from typing import Optional -from .. import types +from packages.core import types dir = os.path.dirname -PROJECT_DIR = dir(dir(dir(dir(dir(os.path.abspath(__file__)))))) +PROJECT_DIR = dir(dir(dir(dir(os.path.abspath(__file__))))) def get_pyra_version() -> str: diff --git a/packages/core/utils/functions/__init__.py b/packages/core/utils/functions/__init__.py deleted file mode 100644 index 175730a1..00000000 --- a/packages/core/utils/functions/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -from .logger import Logger -from .ring_list import RingList -from .astronomy import Astronomy -from .exception_email_client import ExceptionEmailClient -from .update_dict_recursively import update_dict_recursively -from .image_processing import ImageProcessing diff --git a/packages/core/utils/functions/image_processing.py b/packages/core/utils/image_processing.py similarity index 99% rename from packages/core/utils/functions/image_processing.py rename to packages/core/utils/image_processing.py index a216b676..2de84800 100644 --- a/packages/core/utils/functions/image_processing.py +++ b/packages/core/utils/image_processing.py @@ -1,4 +1,3 @@ -from typing import Iterable import cv2 as cv # type: ignore import numpy as np diff --git a/packages/core/utils/interfaces/plc_specification.py b/packages/core/utils/interfaces/plc_specification.py deleted file mode 100644 index 26e26cb7..00000000 --- a/packages/core/utils/interfaces/plc_specification.py +++ /dev/null @@ -1,83 +0,0 @@ -from typing import Literal -from .. 
import types - - -# TODO: use tuples (3 ints vs 4 ints) - -# these are the pins used on the TUM-PLC for all functionality -PLC_SPECIFICATION_VERSIONS: dict[Literal[1, 2], types.PlcSpecificationDict] = { - 1: { - "actors": { - "current_angle": [25, 6, 2], - "fan_speed": [8, 18, 2], - "move_cover": [25, 8, 2], - "nominal_angle": [25, 8, 2], - }, - "control": { - "auto_temp_mode": [8, 24, 1, 2], - "manual_control": [8, 24, 1, 5], - "manual_temp_mode": [8, 24, 1, 3], - "reset": [3, 4, 1, 5], - "sync_to_tracker": [8, 16, 1, 0], - }, - "sensors": {"humidity": [8, 22, 2], "temperature": [8, 20, 2]}, - "state": { - "cover_closed": [25, 2, 1, 2], - "motor_failed": [8, 12, 1, 3], - "rain": [8, 6, 1, 0], - "reset_needed": [3, 2, 1, 2], - "ups_alert": [8, 0, 1, 1], - }, - "power": { - "camera": [8, 16, 1, 2], - "computer": [8, 16, 1, 6], - "heater": [8, 16, 1, 5], - "router": [8, 16, 1, 3], - "spectrometer": [8, 16, 1, 1], - }, - "connections": { - "camera": [8, 14, 1, 6], - "computer": [8, 14, 1, 3], - "heater": [8, 14, 1, 1], - "router": [8, 14, 1, 2], - "spectrometer": [8, 14, 1, 0], - }, - }, - 2: { - "actors": { - "current_angle": [6, 6, 2], - "fan_speed": [8, 4, 2], - "move_cover": [6, 8, 2], - "nominal_angle": [6, 8, 2], - }, - "control": { - "auto_temp_mode": [8, 24, 1, 5], - "manual_control": [8, 12, 1, 7], - "manual_temp_mode": [8, 24, 1, 4], - "reset": [3, 4, 1, 5], - "sync_to_tracker": [8, 8, 1, 1], - }, - "sensors": {"humidity": [8, 22, 2], "temperature": [8, 16, 2]}, - "state": { - "cover_closed": [6, 16, 1, 1], - "motor_failed": None, - "rain": [3, 0, 1, 0], - "reset_needed": [3, 2, 1, 2], - "ups_alert": [8, 13, 1, 6], - }, - "power": { - "camera": [8, 8, 1, 4], # K5 Relay - "computer": None, - "heater": [8, 12, 1, 7], # K3 Relay - "router": None, # not allowed - "spectrometer": [8, 8, 1, 2], # K4 Relay - }, - "connections": { - "camera": None, - "computer": [8, 13, 1, 2], - "heater": [8, 6, 1, 1], - "router": [8, 12, 1, 4], - "spectrometer": None, - }, - }, -} diff --git a/packages/core/utils/functions/logger.py b/packages/core/utils/logger.py similarity index 99% rename from packages/core/utils/functions/logger.py rename to packages/core/utils/logger.py index 6878336c..40aa09b2 100644 --- a/packages/core/utils/functions/logger.py +++ b/packages/core/utils/logger.py @@ -5,7 +5,7 @@ import filelock dir = os.path.dirname -PROJECT_DIR = dir(dir(dir(dir(dir(os.path.abspath(__file__)))))) +PROJECT_DIR = dir(dir(dir(dir(os.path.abspath(__file__))))) INFO_LOG_FILE = os.path.join(PROJECT_DIR, "logs", "info.log") DEBUG_LOG_FILE = os.path.join(PROJECT_DIR, "logs", "debug.log") LOG_FILES_LOCK = os.path.join(PROJECT_DIR, "logs", ".logs.lock") diff --git a/packages/core/utils/functions/ring_list.py b/packages/core/utils/ring_list.py similarity index 91% rename from packages/core/utils/functions/ring_list.py rename to packages/core/utils/ring_list.py index 8bc15ec2..da36f828 100644 --- a/packages/core/utils/functions/ring_list.py +++ b/packages/core/utils/ring_list.py @@ -1,7 +1,9 @@ class RingList: """ Base code created by Flavio Catalani on Tue, 5 Jul 2005 (PSF). - Added sum() and reinitialize() functions. + Added empty(), sum(), and reinitialize() functions. 
+ + https://code.activestate.com/recipes/435902-ring-list-a-fixed-size-circular-list/ """ def __init__(self, length: int): diff --git a/packages/core/utils/functions/update_dict_recursively.py b/packages/core/utils/update_dict_recursively.py similarity index 100% rename from packages/core/utils/functions/update_dict_recursively.py rename to packages/core/utils/update_dict_recursively.py diff --git a/packages/core/utils/decorators/with_filelock.py b/packages/core/utils/with_filelock.py similarity index 95% rename from packages/core/utils/decorators/with_filelock.py rename to packages/core/utils/with_filelock.py index 36e649d4..d0d315be 100644 --- a/packages/core/utils/decorators/with_filelock.py +++ b/packages/core/utils/with_filelock.py @@ -26,6 +26,6 @@ def __call__(self, f: F) -> F: @wraps(f) def wrapper(*args: tuple[Any], **kwargs: dict[str, Any]) -> Any: with filelock.FileLock(self.file_lock_path, timeout=self.timeout): - return function(*args, **kwargs) + return f(*args, **kwargs) return cast(F, wrapper) diff --git a/packages/core/utils/decorators/with_timeout.py b/packages/core/utils/with_timeout.py similarity index 100% rename from packages/core/utils/decorators/with_timeout.py rename to packages/core/utils/with_timeout.py From 2d3e7540833b7dea58ab6a22b269aa370e12206f Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Wed, 24 Aug 2022 00:32:54 +0200 Subject: [PATCH 058/132] Extend README.md in pyra-core --- packages/core/README.md | 45 ++++++++++++++++++++++++++++++++++------- 1 file changed, 38 insertions(+), 7 deletions(-) diff --git a/packages/core/README.md b/packages/core/README.md index eb931bb9..6edaf6ab 100644 --- a/packages/core/README.md +++ b/packages/core/README.md @@ -1,8 +1,39 @@ # `pyra-core` -`pyra-core` is the program that is constantly running on the enclosure and operates it. +## Codebase structure + +### Responsibilities + +`types` contains all types used in the codebase. The whole codebase contains static type hints. A static type analysis can be done using `mypy` (see `scripts/`). + +`utils` contains all supporting functionality used in one or more places. + +`interfaces` includes the "low-level" code to interact with the PLC, the operating system, and the config- and state-files. + +`modules` contains the different steps that PYRA Core runs sequentially on the main thread. + +`threads` contains the logic that PYRA Core runs in parallel to the main thread. + +### Import hierarchy + +- `types` doesn't import any other code from PYRA Core +- `utils` can import `types` +- `interfaces` can import `types` and `utils` +- `modules` and `threads` can import `interfaces`, `types`, and `utils` +- `main.py` can import all of the above + +```mermaid + graph LR; + A["types"] --> B; + B["utils"] --> C; + C["interfaces"] --> D; + C --> E; + D["modules"] --> F; + E["threads"] --> F["main.py"]; +``` + +_\* the graph is transient_ -

## Logging @@ -10,9 +41,9 @@ All scripts that output messages at runtime should use the `Logger` class: ```python -from packages.core.utils.logger import Logger +from packages.core import utils -logger = Logger() +logger = utils.Logger() logger.debug("...") logger.info("...") @@ -22,10 +53,10 @@ logger.error("...") # By default, it will log from a "pyra.core" origin -logger = Logger() +logger = utils.Logger() -# Here, it will log from a "pyra.core.camtracker" origin -logger = Logger(origin="pyra.core.camtracker") +# Here, it will log from a "camtracker" origin +logger = utils.Logger(origin="camtracker") ``` Messages from all log levels can be found in `logs/debug.log`, messages from levels INFO/WARNING/CRITICAL/ERROR can be found in `logs/info.log`. From 5eeed9f4b7ca80ea265ee386d587d00fc7fe7bbe Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Wed, 24 Aug 2022 00:47:01 +0200 Subject: [PATCH 059/132] #96 - move mypy config into pyproject.toml file --- packages/core/interfaces/plc_interface.py | 4 ++-- packages/core/modules/enclosure_control.py | 2 +- packages/core/modules/sun_tracking.py | 2 +- packages/core/threads/helios_thread.py | 2 +- packages/core/threads/upload_thread.py | 2 +- packages/core/utils/astronomy.py | 6 +++--- packages/core/utils/image_processing.py | 2 +- pyproject.toml | 16 ++++++++++++++++ scripts/run_type_analysis.sh | 4 ++-- 9 files changed, 28 insertions(+), 12 deletions(-) diff --git a/packages/core/interfaces/plc_interface.py b/packages/core/interfaces/plc_interface.py index 186f9d4b..dc5e9ac4 100644 --- a/packages/core/interfaces/plc_interface.py +++ b/packages/core/interfaces/plc_interface.py @@ -1,8 +1,8 @@ from typing import Literal, Optional -import snap7 # type: ignore +import snap7 import time import os -from snap7.exceptions import Snap7Exception # type: ignore +from snap7.exceptions import Snap7Exception from packages.core import types, utils, interfaces logger = utils.Logger(origin="plc-interface") diff --git a/packages/core/modules/enclosure_control.py b/packages/core/modules/enclosure_control.py index 6917f2b3..0c1beabe 100644 --- a/packages/core/modules/enclosure_control.py +++ b/packages/core/modules/enclosure_control.py @@ -1,5 +1,5 @@ import time -from snap7.exceptions import Snap7Exception # type: ignore +from snap7.exceptions import Snap7Exception from packages.core import types, utils, interfaces logger = utils.Logger(origin="enclosure-control") diff --git a/packages/core/modules/sun_tracking.py b/packages/core/modules/sun_tracking.py index fb1d2908..19df1079 100644 --- a/packages/core/modules/sun_tracking.py +++ b/packages/core/modules/sun_tracking.py @@ -4,7 +4,7 @@ import os import time -import jdcal # type: ignore +import jdcal import datetime from packages.core import types, utils, interfaces diff --git a/packages/core/threads/helios_thread.py b/packages/core/threads/helios_thread.py index f6934b1f..63d5eed9 100644 --- a/packages/core/threads/helios_thread.py +++ b/packages/core/threads/helios_thread.py @@ -1,7 +1,7 @@ from datetime import datetime import os import time -import cv2 as cv # type: ignore +import cv2 as cv import numpy as np from typing import Any, Literal, Optional from packages.core import types, utils, interfaces diff --git a/packages/core/threads/upload_thread.py b/packages/core/threads/upload_thread.py index e193fba9..13a376cc 100644 --- a/packages/core/threads/upload_thread.py +++ b/packages/core/threads/upload_thread.py @@ -6,7 +6,7 @@ import invoke import paramiko import time -import fabric # type: ignore +import 
fabric import re import pydantic from packages.core import types, utils, interfaces diff --git a/packages/core/utils/astronomy.py b/packages/core/utils/astronomy.py index 7d7c1429..4a5450ed 100644 --- a/packages/core/utils/astronomy.py +++ b/packages/core/utils/astronomy.py @@ -1,7 +1,7 @@ from typing import Any, Optional -import astropy.coordinates as astropy_coordinates # type: ignore -import astropy.time as astropy_time # type: ignore -import astropy.units as astropy_units # type: ignore +import astropy.coordinates as astropy_coordinates +import astropy.time as astropy_time +import astropy.units as astropy_units from packages.core import types # TODO: pass config via functions instea of indirectly more code but way simpler diff --git a/packages/core/utils/image_processing.py b/packages/core/utils/image_processing.py index 2de84800..12c681fd 100644 --- a/packages/core/utils/image_processing.py +++ b/packages/core/utils/image_processing.py @@ -1,4 +1,4 @@ -import cv2 as cv # type: ignore +import cv2 as cv import numpy as np diff --git a/pyproject.toml b/pyproject.toml index b78bf958..0d244b3f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,3 +34,19 @@ build-backend = "poetry.core.masonry.api" [tool.black] line-length = 95 + +[tool.mypy] +strict = true +implicit_reexport = true +no_warn_unused_ignores = true + +[[tool.mypy.overrides]] +module = [ + "astropy.*", + "cv2", + "fabric", + "jdcal", + "snap7", + "snap7.*", +] +ignore_missing_imports = true diff --git a/scripts/run_type_analysis.sh b/scripts/run_type_analysis.sh index 0094ac4b..85d507e1 100644 --- a/scripts/run_type_analysis.sh +++ b/scripts/run_type_analysis.sh @@ -1,5 +1,5 @@ echo "running static type analysis for PYRA Core" -python -m mypy run-pyra-core.py --strict --implicit-reexport --no-warn-unused-ignores +python -m mypy run-pyra-core.py echo "running static type analysis for PYRA CLI" -python -m mypy packages/cli/main.py --strict --implicit-reexport --no-warn-unused-ignores \ No newline at end of file +python -m mypy packages/cli/main.py \ No newline at end of file From a2e60e8e23d0549b3f9af5e96ba741ab398e6c80 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Wed, 24 Aug 2022 02:06:29 +0200 Subject: [PATCH 060/132] Save helios images in date specific subdirectories --- packages/core/threads/helios_thread.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/packages/core/threads/helios_thread.py b/packages/core/threads/helios_thread.py index 63d5eed9..37318d6f 100644 --- a/packages/core/threads/helios_thread.py +++ b/packages/core/threads/helios_thread.py @@ -230,14 +230,18 @@ def determine_frame_status(frame: cv.Mat, save_image: bool) -> Literal[0, 1]: logger.debug(f"exposure = {_Helios.current_exposure}, edge_fraction = {edge_fraction}") if save_image: - image_timestamp = datetime.now().strftime("%Y%m%d-%H%M%S") - raw_image_name = f"{image_timestamp}-{status}-raw.jpg" - processed_image_name = f"{image_timestamp}-{status}-processed.jpg" + now = datetime.now() + img_timestamp = now.strftime("%Y%m%d-%H%M%S") + raw_img_name = f"{img_timestamp}-{status}-raw.jpg" + processed_img_name = f"{img_timestamp}-{status}-processed.jpg" processed_frame = utils.ImageProcessing.add_markings_to_image( edges_only_dilated, edge_fraction, circle_cx, circle_cy, circle_r ) - cv.imwrite(os.path.join(IMG_DIR, raw_image_name), frame) - cv.imwrite(os.path.join(IMG_DIR, processed_image_name), processed_frame) + img_directory_path = os.path.join(IMG_DIR, now.strftime("%Y%m%d")) + if not 
os.path.exists(img_directory_path): + os.mkdir(img_directory_path) + cv.imwrite(os.path.join(img_directory_path, raw_img_name), frame) + cv.imwrite(os.path.join(img_directory_path, processed_img_name), processed_frame) return status From 61e5f5ceb8af4daa150d72831a3728645fcd3d78 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Wed, 24 Aug 2022 02:07:28 +0200 Subject: [PATCH 061/132] #99 - make upload config type importable --- packages/core/types/__init__.py | 2 +- packages/core/types/config.py | 42 +++++++++++++++++---------------- 2 files changed, 23 insertions(+), 21 deletions(-) diff --git a/packages/core/types/__init__.py b/packages/core/types/__init__.py index 78577381..d43233ba 100644 --- a/packages/core/types/__init__.py +++ b/packages/core/types/__init__.py @@ -1,4 +1,4 @@ -from .config import ConfigDict, ConfigDictPartial +from .config import ConfigDict, ConfigDictPartial, ConfigSubDicts from .config import validate_config_dict from .persistent_state import PersistentStateDict, PersistentStateDictPartial diff --git a/packages/core/types/config.py b/packages/core/types/config.py index 11c4fa94..32969120 100644 --- a/packages/core/types/config.py +++ b/packages/core/types/config.py @@ -9,7 +9,7 @@ ) -class _ConfigSubDicts: +class ConfigSubDicts: @staticmethod class General(TypedDict): seconds_per_core_interval: float @@ -158,27 +158,27 @@ class UploadPartial(TypedDict, total=False): class ConfigDict(TypedDict): - general: _ConfigSubDicts.General - opus: _ConfigSubDicts.Opus - camtracker: _ConfigSubDicts.Camtracker - error_email: _ConfigSubDicts.ErrorEmail - measurement_decision: _ConfigSubDicts.MeasurementDecision - measurement_triggers: _ConfigSubDicts.MeasurementTriggers - tum_plc: Optional[_ConfigSubDicts.TumPlc] - helios: Optional[_ConfigSubDicts.Helios] - upload: Optional[_ConfigSubDicts.Upload] + general: ConfigSubDicts.General + opus: ConfigSubDicts.Opus + camtracker: ConfigSubDicts.Camtracker + error_email: ConfigSubDicts.ErrorEmail + measurement_decision: ConfigSubDicts.MeasurementDecision + measurement_triggers: ConfigSubDicts.MeasurementTriggers + tum_plc: Optional[ConfigSubDicts.TumPlc] + helios: Optional[ConfigSubDicts.Helios] + upload: Optional[ConfigSubDicts.Upload] class ConfigDictPartial(TypedDict, total=False): - general: _ConfigSubDicts.GeneralPartial - opus: _ConfigSubDicts.OpusPartial - camtracker: _ConfigSubDicts.CamtrackerPartial - error_email: _ConfigSubDicts.ErrorEmailPartial - measurement_decision: _ConfigSubDicts.MeasurementDecisionPartial - measurement_triggers: _ConfigSubDicts.MeasurementTriggersPartial - tum_plc: Optional[_ConfigSubDicts.TumPlcPartial] - helios: Optional[_ConfigSubDicts.HeliosPartial] - upload: Optional[_ConfigSubDicts.UploadPartial] + general: ConfigSubDicts.GeneralPartial + opus: ConfigSubDicts.OpusPartial + camtracker: ConfigSubDicts.CamtrackerPartial + error_email: ConfigSubDicts.ErrorEmailPartial + measurement_decision: ConfigSubDicts.MeasurementDecisionPartial + measurement_triggers: ConfigSubDicts.MeasurementTriggersPartial + tum_plc: Optional[ConfigSubDicts.TumPlcPartial] + helios: Optional[ConfigSubDicts.HeliosPartial] + upload: Optional[ConfigSubDicts.UploadPartial] class ValidationError(Exception): @@ -253,9 +253,11 @@ def assert_ip_address(property_path: str) -> None: lambda: assert_min_max("helios.seconds_per_interval", 5, 600), lambda: assert_min_max("helios.measurement_threshold", 0.1, 1), lambda: assert_ip_address("upload.host"), - lambda: assert_file_path("upload.src_directory"), ] + # this does not check for a 
valid upload.src_directory_ifgs path + # since the thread itself will check for this + failed_checks = [] for assertion in assertions: From 46366b996fc25223ce979a6f9770db451c0f7fab Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Wed, 24 Aug 2022 02:07:48 +0200 Subject: [PATCH 062/132] #99 - implement dual use logic in upload thread --- packages/core/threads/upload_thread.py | 271 +++++++++++++++---------- 1 file changed, 167 insertions(+), 104 deletions(-) diff --git a/packages/core/threads/upload_thread.py b/packages/core/threads/upload_thread.py index 13a376cc..81c3addc 100644 --- a/packages/core/threads/upload_thread.py +++ b/packages/core/threads/upload_thread.py @@ -3,6 +3,7 @@ import json import os import shutil +from typing import Optional import invoke import paramiko import time @@ -24,33 +25,34 @@ class InvalidUploadState(Exception): class DirectoryUploadClient: """ - This is the client that is concerned with uploading one - specific directory (YYYYMMDD). "self.run()" will perform - the actual upload process. + This is the client that is concerned with uploading one specific + directory. run() will perform the actual upload process. """ - def __init__(self, date_string: str, config: types.ConfigDict) -> None: - assert config["upload"] is not None - - self.connection = fabric.connection.Connection( - f"{config['upload']['user']}@{config['upload']['host']}", - connect_kwargs={"password": config["upload"]["password"]}, - connect_timeout=5, - ) - self.transfer_process = fabric.transfer.Transfer(self.connection) - - self.upload_config = config["upload"] + def __init__( + self, + date_string: str, + src_path: str, + dst_path: str, + remove_files_after_upload: bool, + connection: fabric.connection.Connection, + transfer_process: fabric.transfer.Transfer, + ) -> None: self.date_string = date_string + self.src_path = src_path + self.dst_path = dst_path + self.remove_files_after_upload = remove_files_after_upload + self.connection = connection + self.transfer_process = transfer_process + + self.src_meta_path = os.path.join(self.src_path, self.date_string, "upload-meta.json") + self.dst_meta_path = os.path.join( + f"{self.src_path}/{self.date_string}/upload-meta.json" + ) - self.src_dir_path = os.path.join(config["upload"]["src_directory_ifgs"], date_string) - self.src_meta_path = os.path.join(self.src_dir_path, "upload-meta.json") - assert os.path.isdir(self.src_dir_path), f"{self.src_dir_path} is not a directory" - - self.dst_dir_path = f"{config['upload']['dst_directory_ifgs']}/{date_string}" - self.dst_meta_path = f"{self.dst_dir_path}/upload-meta.json" assert self.transfer_process.is_remote_dir( - config["upload"]["dst_directory_ifgs"] - ), f"remote {config['upload']['dst_directory_ifgs']} is not a directory" + self.dst_path + ), f"remote {self.dst_path} is not a directory" self.meta_content: types.UploadMetaDict = { "complete": False, @@ -58,7 +60,6 @@ def __init__(self, date_string: str, config: types.ConfigDict) -> None: "createdTime": round(time.time(), 3), "lastModifiedTime": round(time.time(), 3), } - self.remove_src_after_upload: bool = config["upload"]["remove_src_ifgs_after_upload"] def __initialize_remote_dir(self) -> None: """ @@ -66,8 +67,9 @@ def __initialize_remote_dir(self) -> None: create the directory and add a fresh upload-meta.json file to it. 
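For reference, the fresh `upload-meta.json` created by this initialization step carries exactly the four fields of `meta_content` shown above. The following standalone sketch (paths and file names are placeholders, not taken from the project) writes such a file and appends one entry to `fileList`, roughly mirroring the local half of a later `__update_meta` call:

```python
import json
import os
import time

# placeholder location, not a path used by the project
meta_path = os.path.join("example-ifg-upload", "20220824", "upload-meta.json")
os.makedirs(os.path.dirname(meta_path), exist_ok=True)

# a fresh meta file, matching the fields of `meta_content` above
meta = {
    "complete": False,
    "fileList": [],
    "createdTime": round(time.time(), 3),
    "lastModifiedTime": round(time.time(), 3),
}
with open(meta_path, "w") as f:
    json.dump(meta, f, indent=4)

# after one (made-up) file has been uploaded, the local update looks roughly like this
meta["fileList"].append("20220824-example-file.0001")
meta["lastModifiedTime"] = round(time.time(), 3)
with open(meta_path, "w") as f:
    json.dump(meta, f, indent=4)
```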
""" - if not self.transfer_process.is_remote_dir(self.dst_dir_path): - self.connection.run(f"mkdir {self.dst_dir_path}") + dst_dir_path = f"{self.dst_path}/{self.date_string}" + if not self.transfer_process.is_remote_dir(dst_dir_path): + self.connection.run(f"mkdir {dst_dir_path}") with open(self.src_meta_path, "w") as f: json.dump(self.meta_content, f, indent=4) self.transfer_process.put(self.src_meta_path, self.dst_meta_path) @@ -84,9 +86,7 @@ def __get_remote_directory_checksum(self) -> str: not present. """ local_script_path = os.path.join(PROJECT_DIR, "scripts", "get_upload_dir_checksum.py") - remote_script_path = ( - self.upload_config["src_directory_ifgs"] + "/get_upload_dir_checksum.py" - ) + remote_script_path = f"{self.dst_path}/{self.date_string}/get_upload_dir_checksum.py" self.transfer_process.put(local_script_path, remote_script_path) try: @@ -113,7 +113,7 @@ def __get_local_directory_checksum(self) -> str: """ hasher = hashlib.md5() for filename in sorted(self.meta_content["fileList"]): - filepath = os.path.join(self.src_dir_path, filename) + filepath = os.path.join(self.src_path, self.date_string, filename) with open(filepath, "rb") as f: hasher.update(f.read()) @@ -137,18 +137,23 @@ def __fetch_meta(self) -> None: except (AssertionError, json.JSONDecodeError, pydantic.ValidationError) as e: raise InvalidUploadState(str(e)) - def __update_meta(self, new_meta_content_partial: types.UploadMetaDictPartial) -> None: + def __update_meta( + self, + update: Optional[types.UploadMetaDictPartial] = None, + sync_remote_meta: bool = True, + ) -> None: """ Update the local upload-meta.json file and overwrite - the meta file on the server + the meta file on the server when sync==True """ - assert self.meta_content is not None - self.meta_content.update(new_meta_content_partial) - self.meta_content.update({"lastModifiedTime": round(time.time(), 3)}) + if update is not None: + self.meta_content.update(update) + self.meta_content.update({"lastModifiedTime": round(time.time(), 3)}) + with open(self.src_meta_path, "w") as f: + json.dump(self.meta_content, f, indent=4) - with open(self.src_meta_path, "w") as f: - json.dump(self.meta_content, f, indent=4) - self.transfer_process.put(self.src_meta_path, self.dst_meta_path) + if sync_remote_meta: + self.transfer_process.put(self.src_meta_path, self.dst_meta_path) def run(self) -> None: """ @@ -168,20 +173,19 @@ def run(self) -> None: self.__initialize_remote_dir() self.__fetch_meta() - assert self.meta_content is not None # determine files present in src and dst directory - # ifg files should be named like "YYYYMMDD." 
- ifg_file_pattern = re.compile("^.*" + self.date_string + ".*\.\d{2,6}$") - src_file_set = set( - [f for f in os.listdir(self.src_dir_path) if ifg_file_pattern.match(f)] - ) + # files should be named like "YYYYMMDD" + ifg_file_pattern = re.compile("^.*" + self.date_string + ".*$") + raw_src_files = os.listdir(os.path.join(self.src_path, self.date_string)) + src_file_set = set([f for f in raw_src_files if ifg_file_pattern.match(f)]) dst_file_set = set(self.meta_content["fileList"]) # determine file differences between src and dst files_missing_in_dst = src_file_set.difference(dst_file_set) files_missing_in_src = dst_file_set.difference(src_file_set) if len(files_missing_in_src) > 0: + # this happens, when the process fails during the src removal raise InvalidUploadState( f"files present in dst are missing in src: {files_missing_in_src}" ) @@ -195,22 +199,26 @@ def run(self) -> None: ) # upload every file that is missing in the remote - # meta but present in the local directory. Every 25 - # files, upload the remote meta file on which files - # have been uploaded - upload_is_finished = False - while not upload_is_finished: + # meta but present in the local directory + while True: try: - f = files_missing_in_dst.pop() + f = files_missing_in_dst.pop() # raises a KeyError when empty self.transfer_process.put( - os.path.join(self.src_dir_path, f), f"{self.dst_dir_path}/{f}" + os.path.join(self.src_path, self.date_string, f), + f"{self.dst_path}/{self.date_string}/{f}", + ) + # update the local meta in every loop, but only + # sync the remote meta every 25 iterations + new_file_list = [*self.meta_content["fileList"], f] + self.__update_meta( + update={"fileList": new_file_list}, + sync_remote_meta=(len(new_file_list) % 25 == 0), ) - self.meta_content["fileList"].append(f) except KeyError: - upload_is_finished = True + break - if (len(self.meta_content["fileList"]) % 25 == 0) or upload_is_finished: - self.__update_meta({"fileList": self.meta_content["fileList"]}) + # make sure that the remote meta is synced + self.__update_meta() # raise an exception if the checksums do not match remote_checksum = self.__get_remote_directory_checksum() @@ -221,21 +229,17 @@ def run(self) -> None: + f"remote={remote_checksum}" ) - # only set meta.complet to True, when the checksums match - self.__update_meta({"complete": True}) - logger.debug(f"successfully uploaded {self.date_string}") + # only set meta.complete to True, when the checksums match + self.__update_meta(update={"complete": True}) + logger.debug(f"successfully uploaded {self.directory_name}") # only remove src if configured and checksums match - if self.remove_src_after_upload: - shutil.rmtree(self.src_dir_path) + if self.remove_files_after_upload: + shutil.rmtree(os.path.join(self.src_path, self.date_string)) logger.debug("successfully removed source") else: logger.debug("skipping removal of source") - def teardown(self) -> None: - """close ssh and scp connection""" - self.connection.close() - @staticmethod def __is_valid_date(date_string: str) -> bool: try: @@ -247,15 +251,15 @@ def __is_valid_date(date_string: str) -> bool: return False @staticmethod - def get_directories_to_be_uploaded(ifg_src_path: str) -> list[str]: - if not os.path.isdir(ifg_src_path): + def get_directories_to_be_uploaded(data_path: str) -> list[str]: + if not os.path.isdir(data_path): return [] return list( filter( - lambda f: os.path.isdir(os.path.join(ifg_src_path, f)) + lambda f: os.path.isdir(os.path.join(data_path, f)) and DirectoryUploadClient.__is_valid_date(f), - 
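The consistency check above relies on both sides producing the same MD5 digest over the directory's files in a deterministic (sorted) order. A self-contained sketch of that idea, using throwaway file names and contents instead of real interferograms; the project's actual checksum script may differ in detail:

```python
import hashlib
import os
import tempfile


def directory_checksum(dir_path: str, filenames: list[str]) -> str:
    """Feed all file contents into a single MD5 hash in sorted order."""
    hasher = hashlib.md5()
    for filename in sorted(filenames):
        with open(os.path.join(dir_path, filename), "rb") as f:
            hasher.update(f.read())
    return hasher.hexdigest()


# throwaway demo data instead of real interferograms
with tempfile.TemporaryDirectory() as tmp_dir:
    for name, content in [("a.ifg", b"123"), ("b.ifg", b"456")]:
        with open(os.path.join(tmp_dir, name), "wb") as f:
            f.write(content)
    local_checksum = directory_checksum(tmp_dir, ["b.ifg", "a.ifg"])
    remote_checksum = local_checksum  # would normally come from the server-side script
    assert local_checksum == remote_checksum
```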
os.listdir(ifg_src_path), + os.listdir(data_path), ) ) @@ -267,19 +271,32 @@ class UploadThread(AbstractThreadBase): be removed (optional) if the files on the server generate the same MD5 checksum as the local files. - The source directory (where OPUS puts the interferograms) - can be configured with config.upload.src_directory. OPUS's - dst directory should be configured inside the macro file. + The source directory where OPUS puts the interferograms + can be configured with config.upload.src_directory_ifgs. + OPUS's output directory should be configured inside the + macro file. The expected file structure looks like this: - 📁 + 📁 and 📁 - 📄 - 📄 + 📄 + 📄 📁 - 📄 - 📄 + 📄 + 📄 📁 ... + + Each YYYYMMDD folder will be uploaded independently. During + its upload the process will store its progress inside a file + "YYYYMMDD/upload-meta.json" (locally and remotely). + + The upload-meta.json file looks like this: + { + "complete": bool, + "fileList": [, , ...], + "createdTime": float, + "lastModifiedTime": float + } """ def __init__(self, config: types.ConfigDict) -> None: @@ -287,40 +304,86 @@ def __init__(self, config: types.ConfigDict) -> None: def should_be_running(self) -> bool: """Should the thread be running? (based on config.upload)""" - return ( - (not self.config["general"]["test_mode"]) - and (self.config["upload"] is not None) - and (self.config["upload"]["upload_ifgs"]) + return (not self.config["general"]["test_mode"]) and ( + self.config["upload"] is not None ) def main(self) -> None: """Main entrypoint of the thread""" while True: self.config = interfaces.ConfigInterface.read() + if not self.should_be_running(): + break + upload_config: types.ConfigSubDicts.Upload = self.config["upload"] - if self.config["upload"] is None: - return - - src_dates_strings = DirectoryUploadClient.get_directories_to_be_uploaded( - self.config["upload"]["src_directory_ifgs"] - ) - for src_date_string in src_dates_strings: - - # check for termination before processing each directory - if not self.should_be_running(): - return - - try: - client = DirectoryUploadClient(src_date_string, self.config) - client.run() - except TimeoutError as e: - logger.error(f"could not reach host (uploading {src_date_string}): {e}") - except paramiko.ssh_exception.AuthenticationException as e: - logger.error(f"failed to authenticate (uploading {src_date_string}): {e}") - except InvalidUploadState as e: - logger.error(f"stuck in invalid state (uploading {src_date_string}): {e}") - - client.teardown() - - # Wait 10 minutes until checking all directories again + try: + connection = fabric.connection.Connection( + f"{upload_config['user']}@{upload_config['host']}", + connect_kwargs={"password": upload_config["password"]}, + connect_timeout=5, + ) + transfer_process = fabric.transfer.Transfer(connection) + except TimeoutError as e: + logger.error( + f"could not reach host (uploading {date_string})," + + f" waiting 5 minutes: {e}" + ) + time.sleep(300) + continue + except paramiko.ssh_exception.AuthenticationException as e: + logger.error( + f"failed to authenticate (uploading {date_string})," + + f" waiting 2 minutes: {e}" + ) + time.sleep(120) + continue + + for category in ["helios", "ifgs"]: + if category == "helios": + if upload_config["upload_helios"]: + src_path = os.path.join(PROJECT_DIR, "logs", "helios") + dst_path = upload_config["dst_directory_helios"] + remove_files_after_upload = upload_config[ + "remove_src_helios_after_upload" + ] + else: + continue + else: + if upload_config["upload_ifgs"]: + src_path = 
upload_config["src_directory_ifgs"] + dst_path = upload_config["dst_directory_ifgs"] + remove_files_after_upload = upload_config[ + "remove_src_ifgs_after_upload" + ] + if not os.path.isdir(src_path): + logger.error( + f'config.upload.src_directory_ifgs ("{src_path}") is not a directory' + ) + continue + else: + continue + + src_date_strings = DirectoryUploadClient.get_directories_to_be_uploaded( + src_path + ) + for date_string in src_date_strings: + # check for termination before processing each directory + if not self.should_be_running(): + break + + try: + DirectoryUploadClient( + date_string, + src_path, + dst_path, + remove_files_after_upload, + connection, + transfer_process, + ).run() + except InvalidUploadState as e: + logger.error(f"uploading {date_string} is stuck in invalid state: {e}") + + # Close SSH connections and wait 10 minutes + # before checking all directories again + connection.close() time.sleep(600) From 5c494f9f5987e08def80c164657086f83d17370f Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Wed, 24 Aug 2022 02:14:27 +0200 Subject: [PATCH 063/132] #99 - debug upload thread code --- packages/core/threads/upload_thread.py | 24 ++++++++++-------------- 1 file changed, 10 insertions(+), 14 deletions(-) diff --git a/packages/core/threads/upload_thread.py b/packages/core/threads/upload_thread.py index 81c3addc..ecd85f48 100644 --- a/packages/core/threads/upload_thread.py +++ b/packages/core/threads/upload_thread.py @@ -40,14 +40,14 @@ def __init__( ) -> None: self.date_string = date_string self.src_path = src_path - self.dst_path = dst_path + self.dst_path = dst_path[:-1] if dst_path.endswith("/") else dst_path self.remove_files_after_upload = remove_files_after_upload self.connection = connection self.transfer_process = transfer_process self.src_meta_path = os.path.join(self.src_path, self.date_string, "upload-meta.json") self.dst_meta_path = os.path.join( - f"{self.src_path}/{self.date_string}/upload-meta.json" + f"{self.dst_path}/{self.date_string}/upload-meta.json" ) assert self.transfer_process.is_remote_dir( @@ -95,7 +95,9 @@ def __get_remote_directory_checksum(self) -> str: raise InvalidUploadState("python3.10 is not installed on the server") try: - remote_command = f"python3.10 {remote_script_path} {self.src_dir_path}" + remote_command = ( + f"python3.10 {remote_script_path} {self.src_path}/{self.date_string}" + ) a: invoke.runners.Result = self.connection.run(remote_command, hide=True) assert a.exited == 0 return a.stdout.strip() @@ -231,7 +233,7 @@ def run(self) -> None: # only set meta.complete to True, when the checksums match self.__update_meta(update={"complete": True}) - logger.debug(f"successfully uploaded {self.directory_name}") + logger.debug(f"successfully uploaded {self.date_string}") # only remove src if configured and checksums match if self.remove_files_after_upload: @@ -312,9 +314,9 @@ def main(self) -> None: """Main entrypoint of the thread""" while True: self.config = interfaces.ConfigInterface.read() - if not self.should_be_running(): + upload_config = self.config["upload"] + if not self.should_be_running() or upload_config is None: break - upload_config: types.ConfigSubDicts.Upload = self.config["upload"] try: connection = fabric.connection.Connection( @@ -324,17 +326,11 @@ def main(self) -> None: ) transfer_process = fabric.transfer.Transfer(connection) except TimeoutError as e: - logger.error( - f"could not reach host (uploading {date_string})," - + f" waiting 5 minutes: {e}" - ) + logger.error(f"could not reach host, waiting 5 
minutes: {e}") time.sleep(300) continue except paramiko.ssh_exception.AuthenticationException as e: - logger.error( - f"failed to authenticate (uploading {date_string})," - + f" waiting 2 minutes: {e}" - ) + logger.error(f"failed to authenticate, waiting 2 minutes: {e}") time.sleep(120) continue From bf086de29759958bd66a7e2ac1cbb6bc0b62e12c Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Wed, 24 Aug 2022 02:22:42 +0200 Subject: [PATCH 064/132] #99 - Update scripts to run upload/helios headless --- packages/core/main.py | 6 ++---- packages/core/threads/__init__.py | 6 ++---- packages/core/threads/abstract_thread_base.py | 2 +- packages/core/threads/helios_thread.py | 14 ++++++-------- packages/core/threads/upload_thread.py | 12 ++++++++++-- run_headless_helios_thread.py | 4 ++++ run_headless_upload_thread.py | 4 ++++ run_headless_vbdsd_thread.py | 6 ------ 8 files changed, 29 insertions(+), 25 deletions(-) create mode 100644 run_headless_helios_thread.py create mode 100644 run_headless_upload_thread.py delete mode 100644 run_headless_vbdsd_thread.py diff --git a/packages/core/main.py b/packages/core/main.py index 382a0143..f9741d90 100644 --- a/packages/core/main.py +++ b/packages/core/main.py @@ -5,8 +5,6 @@ logger = utils.Logger(origin="main") -# TODO: Move library ignores to pyproject.toml - def update_exception_state( config: types.ConfigDict, current_exceptions: list[str], new_exception: Optional[Exception] @@ -83,8 +81,8 @@ def run() -> None: # dedicated mainloop in a parallel thread if the # respective service is configured. The threads itself # load the config periodically and stop themselves - helios_thread_instance = threads.helios_thread.HeliosThread(config) - upload_thread_instance = threads.upload_thread.UploadThread(config) + helios_thread_instance = threads.HeliosThread(config) + upload_thread_instance = threads.UploadThread(config) current_exceptions = interfaces.StateInterface.read_persistent()["current_exceptions"] diff --git a/packages/core/threads/__init__.py b/packages/core/threads/__init__.py index 45db511a..838f37de 100644 --- a/packages/core/threads/__init__.py +++ b/packages/core/threads/__init__.py @@ -1,4 +1,2 @@ -from . import ( - helios_thread, - upload_thread, -) +from .helios_thread import HeliosThread +from .upload_thread import UploadThread diff --git a/packages/core/threads/abstract_thread_base.py b/packages/core/threads/abstract_thread_base.py index 9338df19..cab10717 100644 --- a/packages/core/threads/abstract_thread_base.py +++ b/packages/core/threads/abstract_thread_base.py @@ -40,6 +40,6 @@ def should_be_running(self) -> bool: pass @abc.abstractmethod - def main(self) -> None: + def main(self, headless: bool = False) -> None: """Main entrypoint of the thread""" pass diff --git a/packages/core/threads/helios_thread.py b/packages/core/threads/helios_thread.py index 37318d6f..bd457d10 100644 --- a/packages/core/threads/helios_thread.py +++ b/packages/core/threads/helios_thread.py @@ -293,13 +293,15 @@ def should_be_running(self) -> bool: and (self.config["measurement_triggers"]["consider_helios"]) ) - # TODO: Update tests/headless mode to comply with new class structure - def main(self, infinite_loop: bool = True, headless: bool = False) -> None: - """Main entrypoint of the thread""" + def main(self, headless: bool = False) -> None: + """ + Main entrypoint of the thread. 
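Both background threads follow the shape enforced by `AbstractThreadBase`: a `should_be_running()` predicate plus a `main(headless=...)` loop that re-checks its condition and stops itself. A reduced, hypothetical illustration of that pattern; the real base class presumably also provides helpers for starting and stopping the underlying thread, which are not shown in this diff and are omitted here:

```python
import abc
import time


class AbstractThreadBase(abc.ABC):
    """Reduced stand-in showing only the two abstract methods from the diff."""

    @abc.abstractmethod
    def should_be_running(self) -> bool:
        ...

    @abc.abstractmethod
    def main(self, headless: bool = False) -> None:
        ...


class DemoThread(AbstractThreadBase):
    """Hypothetical thread, used only to illustrate the loop shape."""

    def __init__(self) -> None:
        self.remaining_iterations = 3  # stand-in for "the config says keep running"

    def should_be_running(self) -> bool:
        return self.remaining_iterations > 0

    def main(self, headless: bool = False) -> None:
        while True:
            if not self.should_be_running():
                break
            # a real thread would do one unit of work and log it here
            print("working", "(headless)" if headless else "(logging to file)")
            self.remaining_iterations -= 1
            time.sleep(0.1)


DemoThread().main(headless=True)
```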
+ + headless mode = don't write to log files, print to console, save all images + """ global logger global _CONFIG - # headless mode = don't use logger, just print messages to console, always save images if headless: logger = utils.Logger(origin="helios", just_print=True) _CONFIG = interfaces.ConfigInterface.read() @@ -405,10 +407,6 @@ def main(self, infinite_loop: bool = True, headless: bool = False) -> None: ) time.sleep(time_to_wait) - if not infinite_loop: - break - # return status_history - except Exception as e: status_history.empty() _Helios.deinit() diff --git a/packages/core/threads/upload_thread.py b/packages/core/threads/upload_thread.py index ecd85f48..3584fedc 100644 --- a/packages/core/threads/upload_thread.py +++ b/packages/core/threads/upload_thread.py @@ -310,8 +310,16 @@ def should_be_running(self) -> bool: self.config["upload"] is not None ) - def main(self) -> None: - """Main entrypoint of the thread""" + def main(self, headless: bool = False) -> None: + """ + Main entrypoint of the thread + + headless mode = don't write to log files, print to console + """ + + if headless: + logger = utils.Logger(origin="upload", just_print=True) + while True: self.config = interfaces.ConfigInterface.read() upload_config = self.config["upload"] diff --git a/run_headless_helios_thread.py b/run_headless_helios_thread.py new file mode 100644 index 00000000..ab6c7b66 --- /dev/null +++ b/run_headless_helios_thread.py @@ -0,0 +1,4 @@ +from packages.core.threads import HeliosThread + +if __name__ == "__main__": + HeliosThread.main(headless=True) diff --git a/run_headless_upload_thread.py b/run_headless_upload_thread.py new file mode 100644 index 00000000..3182162b --- /dev/null +++ b/run_headless_upload_thread.py @@ -0,0 +1,4 @@ +from packages.core.threads import UploadThread + +if __name__ == "__main__": + UploadThread.main(headless=True) diff --git a/run_headless_vbdsd_thread.py b/run_headless_vbdsd_thread.py deleted file mode 100644 index fd0ee7e6..00000000 --- a/run_headless_vbdsd_thread.py +++ /dev/null @@ -1,6 +0,0 @@ -import queue -from packages.core.threads.helios_thread import HeliosThread - -if __name__ == "__main__": - shared_queue = queue.Queue() - HeliosThread.main(shared_queue, headless=True) From 01b8e1da14240263b4303ea37f15d59af771278d Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Wed, 24 Aug 2022 02:26:14 +0200 Subject: [PATCH 065/132] Refactor CLI code for new module structure --- packages/cli/commands/core.py | 16 +++++++--------- packages/cli/commands/plc.py | 25 +++++++++++++------------ packages/cli/commands/state.py | 6 +++--- 3 files changed, 23 insertions(+), 24 deletions(-) diff --git a/packages/cli/commands/core.py b/packages/cli/commands/core.py index 11291053..abac2692 100644 --- a/packages/cli/commands/core.py +++ b/packages/cli/commands/core.py @@ -3,9 +3,7 @@ import click import os import psutil -from packages.core.modules.enclosure_control import EnclosureControl -from packages.core.modules.sun_tracking import SunTracking -from packages.core.utils import ConfigInterface, Logger +from packages.core import utils, interfaces, modules dir = os.path.dirname PROJECT_DIR = dir(dir(dir(dir(os.path.abspath(__file__))))) @@ -62,7 +60,7 @@ def _start_pyra_core() -> None: stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) - Logger.log_activity_event("start-core") + utils.Logger.log_activity_event("start-core") print_green(f"Started background process with PID {p.pid}") @@ -76,16 +74,16 @@ def _stop_pyra_core() -> None: f"Terminated {len(termination_pids)} 
pyra-core background " + f"processe(s) with PID(s) {termination_pids}" ) - Logger.log_activity_event("stop-core") + utils.Logger.log_activity_event("stop-core") - config = ConfigInterface.read() + config = interfaces.ConfigInterface.read() if config["general"]["test_mode"] or (config["tum_plc"] is None): return - config = ConfigInterface().read() + config = interfaces.ConfigInterface().read() try: - enclosure = EnclosureControl(config) + enclosure = modules.enclosure_control.EnclosureControl(config) enclosure.force_cover_close() enclosure.plc_interface.disconnect() print_green("Successfully closed cover") @@ -93,7 +91,7 @@ def _stop_pyra_core() -> None: print_red(f"Failed to close cover: {e}") try: - tracking = SunTracking(config) + tracking = modules.sun_tracking.SunTracking(config) if tracking.ct_application_running(): tracking.stop_sun_tracking_automation() print_green("Successfully closed CamTracker") diff --git a/packages/cli/commands/plc.py b/packages/cli/commands/plc.py index 0f4a1eb6..5de45865 100644 --- a/packages/cli/commands/plc.py +++ b/packages/cli/commands/plc.py @@ -3,9 +3,7 @@ from typing import Callable, Optional import click import os -from packages.core import types -from packages.core.modules.enclosure_control import EnclosureControl -from packages.core.utils import StateInterface, ConfigInterface, PLCInterface, with_filelock +from packages.core import types, utils, interfaces, modules dir = os.path.dirname PROJECT_DIR = dir(dir(dir(dir(os.path.abspath(__file__))))) @@ -22,14 +20,16 @@ def print_red(text: str) -> None: click.echo(click.style(text, fg="red")) -def get_plc_interface() -> Optional[PLCInterface]: - config = ConfigInterface.read() +def get_plc_interface() -> Optional[interfaces.PLCInterface]: + config = interfaces.ConfigInterface.read() plc_interface = None try: assert config["tum_plc"] is not None, "PLC not configured" assert config["tum_plc"]["controlled_by_user"], "PLC is controlled by automation" - plc_interface = PLCInterface(config["tum_plc"]["version"], config["tum_plc"]["ip"]) + plc_interface = interfaces.PLCInterface( + config["tum_plc"]["version"], config["tum_plc"]["ip"] + ) plc_interface.connect() except Exception as e: print_red(f"{e}") @@ -59,7 +59,7 @@ def _reset() -> None: time.sleep(2) running_time += 2 if not plc_interface.reset_is_needed(): - StateInterface.update( + interfaces.StateInterface.update( {"enclosure_plc_readings": {"state": {"reset_needed": False}}} ) break @@ -69,7 +69,7 @@ def _reset() -> None: def wait_until_cover_is_at_angle( - plc_interface: PLCInterface, new_cover_angle: int, timeout: float = 15 + plc_interface: interfaces.PLCInterface, new_cover_angle: int, timeout: float = 15 ) -> None: # waiting until cover is at this angle running_time = 0 @@ -78,7 +78,7 @@ def wait_until_cover_is_at_angle( running_time += 2 current_cover_angle = plc_interface.get_cover_angle() if abs(new_cover_angle - current_cover_angle) <= 3: - StateInterface.update( + interfaces.StateInterface.update( { "enclosure_plc_readings": { "actors": {"current_angle": new_cover_angle}, @@ -89,7 +89,7 @@ def wait_until_cover_is_at_angle( break if running_time > timeout: - raise EnclosureControl.CoverError( + raise modules.enclosure_control.EnclosureControl.CoverError( f"Cover took too long to move, latest cover angle: {current_cover_angle}" ) @@ -113,7 +113,7 @@ def _set_cover_angle(angle: str) -> None: plc_interface.disconnect() -@with_filelock(CONFIG_LOCK_PATH) +@utils.with_filelock(CONFIG_LOCK_PATH) def enable_user_control_in_config() -> None: with 
open(CONFIG_FILE_PATH, "r") as f: config = json.load(f) @@ -143,7 +143,8 @@ def _close_cover() -> None: def set_boolean_plc_state( - state: str, get_setter_function: Callable[[PLCInterface], Callable[[bool], None]] + state: str, + get_setter_function: Callable[[interfaces.PLCInterface], Callable[[bool], None]], ) -> None: plc_interface = get_plc_interface() if plc_interface is not None: diff --git a/packages/cli/commands/state.py b/packages/cli/commands/state.py index 875ccd0e..93faa34c 100644 --- a/packages/cli/commands/state.py +++ b/packages/cli/commands/state.py @@ -1,7 +1,7 @@ import json import click import os -from packages.core.utils import StateInterface, with_filelock +from packages.core import utils, interfaces dir = os.path.dirname PROJECT_DIR = dir(dir(dir(dir(os.path.abspath(__file__))))) @@ -11,10 +11,10 @@ @click.command(help="Read the current state.json file.") @click.option("--no-indent", is_flag=True, help="Do not print the JSON in an indented manner") -@with_filelock(STATE_LOCK_PATH) +@utils.with_filelock(STATE_LOCK_PATH) def _get_state(no_indent: bool) -> None: if not os.path.isfile(STATE_FILE_PATH): - StateInterface.initialize() + interfaces.StateInterface.initialize() with open(STATE_FILE_PATH, "r") as f: try: From f8a153fd74cbf9ef39889c612be722584180742d Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Wed, 24 Aug 2022 03:19:54 +0200 Subject: [PATCH 066/132] #100 - test default config --- pyproject.toml | 5 ++++ tests/integrity/test_default_config.py | 37 ------------------------- tests/repository/README.md | 7 +++++ tests/repository/__init__.py | 0 tests/repository/test_default_config.py | 31 +++++++++++++++++++++ 5 files changed, 43 insertions(+), 37 deletions(-) delete mode 100644 tests/integrity/test_default_config.py create mode 100644 tests/repository/README.md create mode 100644 tests/repository/__init__.py create mode 100644 tests/repository/test_default_config.py diff --git a/pyproject.toml b/pyproject.toml index 0d244b3f..452d3071 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -50,3 +50,8 @@ module = [ "snap7.*", ] ignore_missing_imports = true + +[tool.pytest.ini_options] +filterwarnings = [ + 'ignore:the imp module is deprecated in favour of importlib:DeprecationWarning', +] \ No newline at end of file diff --git a/tests/integrity/test_default_config.py b/tests/integrity/test_default_config.py deleted file mode 100644 index b5c7300f..00000000 --- a/tests/integrity/test_default_config.py +++ /dev/null @@ -1,37 +0,0 @@ -import json -import os -from packages.core.utils.interfaces.config_validation import Validation - -dir = os.path.dirname -PROJECT_DIR = dir(dir(dir(os.path.abspath(__file__)))) - -DEFAULT_CONFIG_PATH = os.path.join(PROJECT_DIR, "config", "config.default.json") -DEFAULT_CONFIG_PATH_TUM_PLC = os.path.join( - PROJECT_DIR, "config", "tum_plc.config.default.json" -) -DEFAULT_CONFIG_PATH_HELIOS = os.path.join(PROJECT_DIR, "config", "helios.config.default.json") - - -def test_default_config(): - with open(DEFAULT_CONFIG_PATH, "r") as f: - DEFAULT_CONFIG: dict[str, dict[str]] = json.load(f) - - with open(DEFAULT_CONFIG_PATH_TUM_PLC, "r") as f: - DEFAULT_CONFIG_TUM_PLC: dict = json.load(f) - - with open(DEFAULT_CONFIG_PATH_HELIOS, "r") as f: - DEFAULT_CONFIG_HELIOS: dict = json.load(f) - - for k1 in DEFAULT_CONFIG.keys(): - if DEFAULT_CONFIG[k1] is not None: - for k2 in DEFAULT_CONFIG[k1].keys(): - if k2.endswith("_path"): - DEFAULT_CONFIG[k1][k2] = os.path.join(PROJECT_DIR, ".gitignore") - - Validation.check(DEFAULT_CONFIG) - - 
DEFAULT_CONFIG["tum_plc"] = DEFAULT_CONFIG_TUM_PLC - Validation.check(DEFAULT_CONFIG) - - DEFAULT_CONFIG["helios"] = DEFAULT_CONFIG_HELIOS - Validation.check(DEFAULT_CONFIG) diff --git a/tests/repository/README.md b/tests/repository/README.md new file mode 100644 index 00000000..d6a84aee --- /dev/null +++ b/tests/repository/README.md @@ -0,0 +1,7 @@ +Tests for the repositories state. + +1. Are the default configs in a valid state (all validation rules except paths) +2. Is the static type analysis running through? +3. ... + +These tests can be run in a CI environment diff --git a/tests/repository/__init__.py b/tests/repository/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/repository/test_default_config.py b/tests/repository/test_default_config.py new file mode 100644 index 00000000..2db6ae3d --- /dev/null +++ b/tests/repository/test_default_config.py @@ -0,0 +1,31 @@ +import json +import os +import sys + +dir = os.path.dirname +PROJECT_DIR = dir(dir(dir(os.path.abspath(__file__)))) +CONFIG_DIR = os.path.join(PROJECT_DIR, "config") + +sys.path.append(PROJECT_DIR) +from packages.core import types + + +def test_default_config() -> None: + with open(os.path.join(CONFIG_DIR, "config.default.json"), "r") as f: + config = json.load(f) + types.validate_config_dict(config, skip_filepaths=True) + + with open(os.path.join(CONFIG_DIR, "tum_plc.config.default.json"), "r") as f: + config_tum_plc = json.load(f) + config["tum_plc"] = config_tum_plc + types.validate_config_dict(config, skip_filepaths=True) + + with open(os.path.join(CONFIG_DIR, "helios.config.default.json"), "r") as f: + config_helios = json.load(f) + config["helios"] = config_helios + types.validate_config_dict(config, skip_filepaths=True) + + with open(os.path.join(CONFIG_DIR, "upload.config.default.json"), "r") as f: + config_upload = json.load(f) + config["upload"] = config_upload + types.validate_config_dict(config, skip_filepaths=True) From cb776a5a8c193986f7922b2dc594eb5163318ec6 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Wed, 24 Aug 2022 03:21:33 +0200 Subject: [PATCH 067/132] #96 - add pydantic-workaround for runtime-json validation --- packages/core/types/__init__.py | 37 +++++++++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) diff --git a/packages/core/types/__init__.py b/packages/core/types/__init__.py index d43233ba..7bcef022 100644 --- a/packages/core/types/__init__.py +++ b/packages/core/types/__init__.py @@ -1,3 +1,40 @@ +from typing import Any +import pydantic.errors +import pydantic.validators + + +# this workaround is necessary because pydantic currently does +# not support strict validation on a whole Model. It converts +# the input to the datatype, i.e. "23" will not raise an error +# on int or float data types because it can be converted. 
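To see why this workaround is needed: with pydantic v1's default validators, string inputs that can be converted are silently coerced rather than rejected. A minimal, made-up model demonstrating the default behaviour that the validator swap above disables (run standalone, without importing the patched module):

```python
import pydantic


class _Demo(pydantic.BaseModel):  # made-up model, not part of the project
    count: int
    threshold: float
    active: bool


# With pydantic v1's default validators this does NOT raise, although every
# value is passed as a string - exactly the coercion the workaround disables:
demo = _Demo(count="23", threshold="0.5", active="true")
print(demo.count, demo.threshold, demo.active)  # 23 0.5 True
```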
+ +# Read https://github.com/pydantic/pydantic/issues/578 on the reason for this decision +# Watch https://github.com/pydantic/pydantic/issues/1098 for a possible fix + + +def _strict_bool_validator(v: Any) -> bool: + if isinstance(v, bool): + return v + raise pydantic.errors.BoolError() + + +def _strict_float_validator(v: Any) -> float: + if isinstance(v, float) or isinstance(v, int): + return v + raise pydantic.errors.FloatError() + + +for i, (type_, _) in enumerate(pydantic.validators._VALIDATORS): + if type_ == int: + pydantic.validators._VALIDATORS[i] = (int, [pydantic.validators.strict_int_validator]) + if type_ == float: + pydantic.validators._VALIDATORS[i] = (float, [_strict_float_validator]) + if type_ == str: + pydantic.validators._VALIDATORS[i] = (str, [pydantic.validators.strict_str_validator]) + if type_ == bool: + pydantic.validators._VALIDATORS[i] = (bool, [_strict_bool_validator]) + + from .config import ConfigDict, ConfigDictPartial, ConfigSubDicts from .config import validate_config_dict From c9778f1323e347adef9e1c99a0f731def82ef5c8 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Wed, 24 Aug 2022 03:32:18 +0200 Subject: [PATCH 068/132] #99 - add new config elements to UI --- .../sections/config-section-upload.tsx | 72 +++++++++++++------ packages/ui/src/custom-types.ts | 22 +++--- 2 files changed, 64 insertions(+), 30 deletions(-) diff --git a/packages/ui/src/components/configuration/sections/config-section-upload.tsx b/packages/ui/src/components/configuration/sections/config-section-upload.tsx index a60af770..7c28f8dd 100644 --- a/packages/ui/src/components/configuration/sections/config-section-upload.tsx +++ b/packages/ui/src/components/configuration/sections/config-section-upload.tsx @@ -1,6 +1,7 @@ import { customTypes } from '../../../custom-types'; import { configurationComponents, essentialComponents } from '../..'; import { reduxUtils } from '../../../utils'; +import { ICONS } from '../../../assets'; export default function ConfigSectionUpload() { const centralSectionConfig = reduxUtils.useTypedSelector((s) => s.config.central?.upload); @@ -13,13 +14,16 @@ export default function ConfigSectionUpload() { function addDefault() { update({ upload: { - is_active: false, host: '1.2.3.4', user: '...', password: '...', - src_directory: '...', - dst_directory: '...', - remove_src_after_upload: false, + upload_ifgs: false, + src_directory_ifgs: '...', + dst_directory_ifgs: '...', + remove_src_ifgs_after_upload: false, + upload_helios: false, + dst_directory_helios: '...', + remove_src_helios_after_upload: true, }, }); } @@ -65,12 +69,6 @@ export default function ConfigSectionUpload() { remove configuration
- update({ upload: { is_active: v } })} - oldValue={centralSectionConfig?.is_active === true} - /> update({ upload: { password: v } })} oldValue={centralSectionConfig !== null ? centralSectionConfig.password : 'null'} /> +
+ update({ upload: { upload_ifgs: v } })} + oldValue={centralSectionConfig?.upload_ifgs === true} + /> + update({ upload: { src_directory_ifgs: v } })} + oldValue={ + centralSectionConfig !== null ? centralSectionConfig.src_directory_ifgs : 'null' + } + /> update({ upload: { src_directory: v } })} + title="IFG Destination Directory Path (Server Side)" + value={localSectionConfig.dst_directory_ifgs} + setValue={(v: any) => update({ upload: { dst_directory_ifgs: v } })} oldValue={ - centralSectionConfig !== null ? centralSectionConfig.src_directory : 'null' + centralSectionConfig !== null ? centralSectionConfig.dst_directory_ifgs : 'null' } /> + update({ upload: { remove_src_ifgs_after_upload: v } })} + oldValue={centralSectionConfig?.remove_src_ifgs_after_upload === true} + /> +
+ update({ upload: { upload_helios: v } })} + oldValue={centralSectionConfig?.upload_helios === true} + /> update({ upload: { dst_directory: v } })} + title="Helios Destination Directory Path (Server Side)" + value={localSectionConfig.dst_directory_helios} + setValue={(v: any) => update({ upload: { dst_directory_helios: v } })} oldValue={ - centralSectionConfig !== null ? centralSectionConfig.dst_directory : 'null' + centralSectionConfig !== null + ? centralSectionConfig.dst_directory_helios + : 'null' } /> update({ upload: { remove_src_after_upload: v } })} - oldValue={centralSectionConfig?.remove_src_after_upload === true} + title="Remove Source Helios Images After Upload" + value={localSectionConfig.remove_src_helios_after_upload} + setValue={(v: boolean) => update({ upload: { remove_src_helios_after_upload: v } })} + oldValue={centralSectionConfig?.remove_src_helios_after_upload === true} /> ); diff --git a/packages/ui/src/custom-types.ts b/packages/ui/src/custom-types.ts index 4dab2dbf..1a54c322 100644 --- a/packages/ui/src/custom-types.ts +++ b/packages/ui/src/custom-types.ts @@ -91,13 +91,16 @@ export namespace customTypes { save_images: boolean; }; upload: null | { - is_active: boolean; host: string; user: string; password: string; - src_directory: string; - dst_directory: string; - remove_src_after_upload: boolean; + upload_ifgs: boolean; + src_directory_ifgs: string; + dst_directory_ifgs: string; + remove_src_ifgs_after_upload: boolean; + upload_helios: boolean; + dst_directory_helios: string; + remove_src_helios_after_upload: boolean; }; }; @@ -156,13 +159,16 @@ export namespace customTypes { save_images?: boolean; }; upload?: null | { - is_active?: boolean; host?: string; user?: string; password?: string; - src_directory?: string; - dst_directory?: string; - remove_src_after_upload?: boolean; + upload_ifgs?: boolean; + src_directory_ifgs?: string; + dst_directory_ifgs?: string; + remove_src_ifgs_after_upload?: boolean; + upload_helios?: boolean; + dst_directory_helios?: string; + remove_src_helios_after_upload?: boolean; }; }; From 40a9ee1cd4432bf3e81ad66bb65358d109dad467 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Wed, 24 Aug 2022 03:38:44 +0200 Subject: [PATCH 069/132] #85 - add parameter to config and core --- config/helios.config.default.json | 1 + packages/core/threads/helios_thread.py | 10 ++++++++-- packages/core/types/config.py | 2 ++ 3 files changed, 11 insertions(+), 2 deletions(-) diff --git a/config/helios.config.default.json b/config/helios.config.default.json index 15c13fd3..ab085e81 100644 --- a/config/helios.config.default.json +++ b/config/helios.config.default.json @@ -3,5 +3,6 @@ "evaluation_size": 15, "seconds_per_interval": 6, "measurement_threshold": 0.6, + "edge_detection_threshold": 0.02, "save_images": false } diff --git a/packages/core/threads/helios_thread.py b/packages/core/threads/helios_thread.py index bd457d10..6e96f1f5 100644 --- a/packages/core/threads/helios_thread.py +++ b/packages/core/threads/helios_thread.py @@ -13,7 +13,7 @@ PROJECT_DIR = dir(dir(dir(dir(os.path.abspath(__file__))))) IMG_DIR = os.path.join(PROJECT_DIR, "logs", "helios") AUTOEXPOSURE_IMG_DIR = os.path.join(PROJECT_DIR, "logs", "helios-autoexposure") -_CONFIG = None +_CONFIG: Optional[types.ConfigDict] = None class CameraError(Exception): @@ -199,6 +199,9 @@ def determine_frame_status(frame: cv.Mat, save_image: bool) -> Literal[0, 1]: 6. 
If number of edge-pixels is > x: return 1; else: return 0; """ + assert _CONFIG is not None + assert _CONFIG["helios"] is not None + # transform image from 1280x720 to 640x360 downscaled_image = cv.resize(frame, None, fx=0.5, fy=0.5) @@ -225,7 +228,10 @@ def determine_frame_status(frame: cv.Mat, save_image: bool) -> Literal[0, 1]: status: Literal[1, 0] = 0 if pixels_inside_circle != 0: edge_fraction = round((np.sum(edges_only_dilated) / 255) / pixels_inside_circle, 6) - status = 1 if (edge_fraction > 0.02) else 0 + sufficient_edge_fraction = ( + edge_fraction >= _CONFIG["helios"]["edge_detection_threshold"] + ) + status = 1 if sufficient_edge_fraction else 0 logger.debug(f"exposure = {_Helios.current_exposure}, edge_fraction = {edge_fraction}") diff --git a/packages/core/types/config.py b/packages/core/types/config.py index 32969120..0b796f12 100644 --- a/packages/core/types/config.py +++ b/packages/core/types/config.py @@ -120,6 +120,7 @@ class Helios(TypedDict): evaluation_size: int seconds_per_interval: float measurement_threshold: float + edge_detection_threshold: float save_images: bool @staticmethod @@ -128,6 +129,7 @@ class HeliosPartial(TypedDict, total=False): evaluation_size: int seconds_per_interval: float measurement_threshold: float + edge_detection_threshold: float save_images: bool @staticmethod From 17401918b358e5d814d3cdf30272b3d8a932579b Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Wed, 24 Aug 2022 03:42:10 +0200 Subject: [PATCH 070/132] #85 - add parameter to UI --- .../configuration/sections/config-section-helios.tsx | 12 ++++++++++++ packages/ui/src/custom-types.ts | 2 ++ .../src/utils/functional-utils/parse-number-types.ts | 3 +++ 3 files changed, 17 insertions(+) diff --git a/packages/ui/src/components/configuration/sections/config-section-helios.tsx b/packages/ui/src/components/configuration/sections/config-section-helios.tsx index 332e4e26..bb4e4124 100644 --- a/packages/ui/src/components/configuration/sections/config-section-helios.tsx +++ b/packages/ui/src/components/configuration/sections/config-section-helios.tsx @@ -17,6 +17,7 @@ export default function ConfigSectionHelios() { evaluation_size: 15, seconds_per_interval: 6, measurement_threshold: 0.6, + edge_detection_threshold: 0.02, save_images: false, }, }); @@ -103,6 +104,17 @@ export default function ConfigSectionHelios() { } numeric /> + update({ helios: { edge_detection_threshold: v } })} + oldValue={ + centralSectionConfig !== null + ? 
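The new config parameter replaces the hard-coded `0.02` in the edge-fraction decision above. A self-contained numerical sketch of that decision; the mask and the pixel count below are fabricated stand-ins for what the real image pipeline produces:

```python
import numpy as np

# fabricated stand-ins for the pipeline's outputs: a binary edge mask
# (values 0/255) and the number of pixels inside the detected lens circle
edges_only_dilated = np.zeros((360, 640), dtype=np.uint8)
edges_only_dilated[100:110, 100:200] = 255
pixels_inside_circle = 90_000

edge_detection_threshold = 0.02  # would come from config["helios"]["edge_detection_threshold"]

edge_fraction = round((np.sum(edges_only_dilated) / 255) / pixels_inside_circle, 6)
status = 1 if edge_fraction >= edge_detection_threshold else 0
print(edge_fraction, status)  # 0.011111 0
```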
centralSectionConfig.edge_detection_threshold + : 'null' + } + numeric + /> Date: Wed, 24 Aug 2022 13:04:02 +0200 Subject: [PATCH 071/132] #100 - remove currently unused tests --- tests/core/__init__.py | 0 tests/core/test_dde_interface.py | 13 ------ tests/core/test_emailing.py | 11 ----- tests/core/test_ip_connections.py | 14 ------- tests/core/test_opus_measurement.py | 13 ------ tests/core/test_process_status.py | 19 --------- tests/core/test_read_camtracker_logs.py | 17 -------- tests/core/test_start_measurements.py | 28 ------------- tests/core/test_stop_measurements.py | 24 ----------- tests/core/test_sun_tracking.py | 13 ------ tests/helios/__init__.py | 0 tests/helios/test_helios.py | 10 ----- tests/helios/test_take_helios_picture.py | 47 ---------------------- tests/integrity/__init__.py | 0 tests/plc/__init__.py | 0 tests/plc/basic_connection_tests.py | 51 ------------------------ tests/plc/test_camera_power_switch.py | 11 ----- tests/plc/test_cover.py | 21 ---------- tests/plc/test_enclosure_control.py | 13 ------ tests/plc/test_enclosure_reset_check.py | 22 ---------- 20 files changed, 327 deletions(-) delete mode 100644 tests/core/__init__.py delete mode 100644 tests/core/test_dde_interface.py delete mode 100644 tests/core/test_emailing.py delete mode 100644 tests/core/test_ip_connections.py delete mode 100644 tests/core/test_opus_measurement.py delete mode 100644 tests/core/test_process_status.py delete mode 100644 tests/core/test_read_camtracker_logs.py delete mode 100644 tests/core/test_start_measurements.py delete mode 100644 tests/core/test_stop_measurements.py delete mode 100644 tests/core/test_sun_tracking.py delete mode 100644 tests/helios/__init__.py delete mode 100644 tests/helios/test_helios.py delete mode 100644 tests/helios/test_take_helios_picture.py delete mode 100644 tests/integrity/__init__.py delete mode 100644 tests/plc/__init__.py delete mode 100644 tests/plc/basic_connection_tests.py delete mode 100644 tests/plc/test_camera_power_switch.py delete mode 100644 tests/plc/test_cover.py delete mode 100644 tests/plc/test_enclosure_control.py delete mode 100644 tests/plc/test_enclosure_reset_check.py diff --git a/tests/core/__init__.py b/tests/core/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/core/test_dde_interface.py b/tests/core/test_dde_interface.py deleted file mode 100644 index b6e610fd..00000000 --- a/tests/core/test_dde_interface.py +++ /dev/null @@ -1,13 +0,0 @@ -# these imports are provided by pywin32 -if sys.platform == "win32": - import win32con # type: ignore - import win32process # type: ignore - import win32ui # type: ignore - import dde # type: ignore - - -server = dde.CreateServer() -server.Create("Client") -conversation = dde.CreateConversation(server) - - diff --git a/tests/core/test_emailing.py b/tests/core/test_emailing.py deleted file mode 100644 index bd3d3f80..00000000 --- a/tests/core/test_emailing.py +++ /dev/null @@ -1,11 +0,0 @@ -from packages.core.utils import ConfigInterface, ExceptionEmailClient - - -def test_emailing(): - _CONFIG = ConfigInterface().read() - - try: - raise Exception("some exception name") - except Exception as e: - ExceptionEmailClient.handle_occured_exception(_CONFIG, e) - ExceptionEmailClient.handle_resolved_exception(_CONFIG) diff --git a/tests/core/test_ip_connections.py b/tests/core/test_ip_connections.py deleted file mode 100644 index 7ad0fd46..00000000 --- a/tests/core/test_ip_connections.py +++ /dev/null @@ -1,14 +0,0 @@ -from packages.core.utils import ConfigInterface, 
OSInfo - - -def test_ip_connections(): - _CONFIG = ConfigInterface().read() - - plc_status = OSInfo.check_connection_status(_CONFIG["tum_plc"]["ip"]) - print(plc_status) - assert plc_status != 'NO_INFO' - em27_status = OSInfo.check_connection_status(_CONFIG["opus"]["em27_ip"]) - print(em27_status) - assert em27_status != 'NO_INFO' - - diff --git a/tests/core/test_opus_measurement.py b/tests/core/test_opus_measurement.py deleted file mode 100644 index da0aea05..00000000 --- a/tests/core/test_opus_measurement.py +++ /dev/null @@ -1,13 +0,0 @@ -from packages.core.utils import ConfigInterface -from packages.core.modules.opus_measurement import OpusMeasurement - - -def test_opus_measurement(): - _CONFIG = ConfigInterface().read() - instance = OpusMeasurement(_CONFIG) - # print(control.continuous_readings()) - # assert(False) - - instance.test_setup() - - assert False diff --git a/tests/core/test_process_status.py b/tests/core/test_process_status.py deleted file mode 100644 index c2666d83..00000000 --- a/tests/core/test_process_status.py +++ /dev/null @@ -1,19 +0,0 @@ -import os -from packages.core.utils import ConfigInterface, OSInfo -from packages.core.modules.sun_tracking import SunTracking - - -def test_ct_measurement(): - _CONFIG = ConfigInterface().read() - instance = SunTracking(_CONFIG) - # print(control.continuous_readings()) - # assert(False) - - ct_path = _CONFIG["camtracker"]["executable_path"] - process_name = os.path.basename(ct_path) - - status = OSInfo.check_process_status(process_name) - - print(status) - - assert False diff --git a/tests/core/test_read_camtracker_logs.py b/tests/core/test_read_camtracker_logs.py deleted file mode 100644 index 01f07fcd..00000000 --- a/tests/core/test_read_camtracker_logs.py +++ /dev/null @@ -1,17 +0,0 @@ -from packages.core.utils import ConfigInterface -from packages.core.modules.sun_tracking import SunTracking - - -def test_ct_measurement(): - _CONFIG = ConfigInterface().read() - instance = SunTracking(_CONFIG) - # print(control.continuous_readings()) - # assert(False) - - log_line = instance.read_ct_log_learn_az_elev() - - print(log_line) - - instance.valdiate_tracker_position() - - assert False diff --git a/tests/core/test_start_measurements.py b/tests/core/test_start_measurements.py deleted file mode 100644 index 8c32a8d3..00000000 --- a/tests/core/test_start_measurements.py +++ /dev/null @@ -1,28 +0,0 @@ -from packages.core.utils import ConfigInterface -from packages.core.modules.enclosure_control import EnclosureControl -from packages.core.modules.sun_tracking import SunTracking -from packages.core.modules.opus_measurement import OpusMeasurement - -import time - - -def test_start_measurements(): - _CONFIG = ConfigInterface().read() - enclosure = EnclosureControl(_CONFIG) - opus = OpusMeasurement(_CONFIG) - tracking = SunTracking(_CONFIG) - - if enclosure.plc_state.state.reset_needed: - enclosure.plc_interface.reset() - - enclosure.plc_interface.set_sync_to_tracker(True) - time.sleep(2) - if not tracking.ct_application_running(): - tracking.start_sun_tracking_automation() - time.sleep(2) - if not opus.opus_application_running(): - opus.start_opus() - time.sleep(2) - opus.load_experiment() - time.sleep(2) - opus.start_macro() diff --git a/tests/core/test_stop_measurements.py b/tests/core/test_stop_measurements.py deleted file mode 100644 index e99f1110..00000000 --- a/tests/core/test_stop_measurements.py +++ /dev/null @@ -1,24 +0,0 @@ -from packages.core.utils import ConfigInterface -from packages.core.modules.enclosure_control import 
EnclosureControl -from packages.core.modules.sun_tracking import SunTracking -from packages.core.modules.opus_measurement import OpusMeasurement - -import time - - -def test_stop_measurements(): - _CONFIG = ConfigInterface().read() - enclosure = EnclosureControl(_CONFIG) - opus = OpusMeasurement(_CONFIG) - tracking = SunTracking(_CONFIG) - - enclosure.set_sync_to_tracker(False) - enclosure.move_cover(0) - enclosure.wait_for_cover_closing() - - time.sleep(2) - if tracking.ct_application_running(): - tracking.stop_sun_tracking_automation() - time.sleep(2) - if opus.opus_application_running(): - opus.stop_macro() diff --git a/tests/core/test_sun_tracking.py b/tests/core/test_sun_tracking.py deleted file mode 100644 index 620a9eb6..00000000 --- a/tests/core/test_sun_tracking.py +++ /dev/null @@ -1,13 +0,0 @@ -from packages.core.utils import ConfigInterface -from packages.core.modules.sun_tracking import SunTracking - - -def test_ct_measurement(): - _CONFIG = ConfigInterface().read() - instance = SunTracking(_CONFIG) - # print(control.continuous_readings()) - # assert(False) - - assert(instance.test_setup) - - assert False diff --git a/tests/helios/__init__.py b/tests/helios/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/helios/test_helios.py b/tests/helios/test_helios.py deleted file mode 100644 index 92f3b83a..00000000 --- a/tests/helios/test_helios.py +++ /dev/null @@ -1,10 +0,0 @@ -from packages.core.threads.helios_thread import HeliosThread -import time - - -def test_helios(): - """Pictures are saved in C:\pyra-4\runtime-data\helios""" - helios = HeliosThread() - helios.start() - time.sleep(30) - helios.stop() diff --git a/tests/helios/test_take_helios_picture.py b/tests/helios/test_take_helios_picture.py deleted file mode 100644 index e6874808..00000000 --- a/tests/helios/test_take_helios_picture.py +++ /dev/null @@ -1,47 +0,0 @@ -import cv2 as cv -import time -import datetime -import astropy.units as astropy_units -from packages.core.utils import ( - ConfigInterface, - Astronomy, -) - - -def test_picture(): - _CONFIG = ConfigInterface().read() - - cam = cv.VideoCapture(_CONFIG["helios"]["camera_id"]) # - - cam.set(3, 1280) # width - cam.set(4, 720) # height - cam.set(15, -12) # exposure - cam.set(10, 64) # brightness - cam.set(11, 64) # contrast - cam.set(12, 0) # saturation - cam.set(14, 0) # gain - - current_sun_angle = Astronomy.get_current_sun_elevation() - diff = 0 - if current_sun_angle < 4 * astropy_units.deg: - exp = -9 + diff - elif current_sun_angle < 6 * astropy_units.deg: - exp = -10 + diff - elif current_sun_angle < 10 * astropy_units.deg: - exp = -11 + diff - else: - exp = -12 + diff - - cam.set(15, exp) - - for i in range(5): - ret, frame = cam.read() - - path = "C:\\pyra-4\\runtime-data\\helios\\test_{}.jpg".format( - str(datetime.datetime.now().strftime("%H-%M-%S")) - ) - - cv.imwrite(path, frame) - time.sleep(5) - - cam.release() diff --git a/tests/integrity/__init__.py b/tests/integrity/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/plc/__init__.py b/tests/plc/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/plc/basic_connection_tests.py b/tests/plc/basic_connection_tests.py deleted file mode 100644 index b740250a..00000000 --- a/tests/plc/basic_connection_tests.py +++ /dev/null @@ -1,51 +0,0 @@ -import snap7 -from snap7.exceptions import Snap7Exception -import time - -plc = snap7.client.Client() - -def connect(plc): - - start_time = time.time() - while(True): - try: - 
plc.connect("10.10.0.4", 0, 1) - time.sleep(0.2) - - if time.time()-start_time > 10: - print("Could not connect to PLC") - return False - - if plc.get_connected(): - print("Connected") - return True - - print("Connection try failed. Retrying.") - plc.destroy() - plc = snap7.client.Client() - - except Snap7Exception: - print("PLC connect raied an error.") - continue - - -if connect(plc): - - time.sleep(2) - - if plc.get_connected(): - result = plc.db_read(8,0,25) - print(result) - time.sleep(0.2) - result = plc.db_read(25,0,9) - print(result) - time.sleep(0.2) - result = plc.db_read(3,0,5) - print(result) - - - -plc.disconnect() -plc.destroy() - - diff --git a/tests/plc/test_camera_power_switch.py b/tests/plc/test_camera_power_switch.py deleted file mode 100644 index fe138ba5..00000000 --- a/tests/plc/test_camera_power_switch.py +++ /dev/null @@ -1,11 +0,0 @@ -from packages.core.utils import ConfigInterface, OSInfo, STANDARD_PLC_INTERFACES, PLCInterface -from packages.core.modules.enclosure_control import EnclosureControl -import time - - -def test_enclosure_control(): - _CONFIG = ConfigInterface().read() - control = EnclosureControl(_CONFIG) - control.set_power_camera(False) - time.sleep(45) - control.set_power_camera(True) diff --git a/tests/plc/test_cover.py b/tests/plc/test_cover.py deleted file mode 100644 index c6b64d80..00000000 --- a/tests/plc/test_cover.py +++ /dev/null @@ -1,21 +0,0 @@ -from packages.core.utils import ConfigInterface -from packages.core.modules.enclosure_control import EnclosureControl - -import time - - -def test_cover_movement(): - _CONFIG = ConfigInterface().read() - enclosure = EnclosureControl(_CONFIG) - - if enclosure.plc_state.state.reset_needed: - enclosure.plc_interface.reset() - - # assert not enclosure.check_for_reset_needed() - - # enclosure.set_sync_to_tracker(False) - - # enclosure.move_cover(120) - # time.sleep(10) - enclosure.move_cover(0) - enclosure.wait_for_cover_closing() diff --git a/tests/plc/test_enclosure_control.py b/tests/plc/test_enclosure_control.py deleted file mode 100644 index cd4ddcd2..00000000 --- a/tests/plc/test_enclosure_control.py +++ /dev/null @@ -1,13 +0,0 @@ -from packages.core.utils import ConfigInterface -from packages.core.modules.enclosure_control import EnclosureControl - - -def test_enclosure_control(): - - _CONFIG = ConfigInterface().read() - control = EnclosureControl(_CONFIG) - print(control.read_states_from_plc()) - - # control.auto_set_power_spectrometer() - - assert False diff --git a/tests/plc/test_enclosure_reset_check.py b/tests/plc/test_enclosure_reset_check.py deleted file mode 100644 index b67f0daf..00000000 --- a/tests/plc/test_enclosure_reset_check.py +++ /dev/null @@ -1,22 +0,0 @@ -from packages.core.utils import ConfigInterface, STANDARD_PLC_INTERFACES, PLCInterface -from packages.core.modules.enclosure_control import EnclosureControl - -import time - - -def test_cover_movement(): - _CONFIG = ConfigInterface().read() - _PLC_INTERFACE: PLCInterface = STANDARD_PLC_INTERFACES[ - _CONFIG["tum_plc"]["version"] - ] - enclosure = EnclosureControl(_CONFIG) - - state = enclosure.check_for_rest_needed() - print(state) - - enclosure.reset() - - state = enclosure.check_for_rest_needed() - print(state) - - assert False From d2f181e13095e0806d724493b63d4895449c0d1a Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Wed, 24 Aug 2022 13:04:43 +0200 Subject: [PATCH 072/132] #100 - make CLI tests work --- packages/cli/main.py | 2 +- packages/core/types/config.py | 28 ++++++++++++++++++---------- 
tests/cli/test_cli_config.py | 2 +- tests/cli/test_cli_core.py | 14 +++++++++----- 4 files changed, 29 insertions(+), 17 deletions(-) diff --git a/packages/cli/main.py b/packages/cli/main.py index 889b4d28..fac67de8 100644 --- a/packages/cli/main.py +++ b/packages/cli/main.py @@ -6,7 +6,7 @@ PROJECT_DIR = dir(dir(dir(os.path.abspath(__file__)))) sys.path.append(PROJECT_DIR) -from .commands import ( +from commands import ( config_command_group, core_command_group, logs_command_group, diff --git a/packages/core/types/config.py b/packages/core/types/config.py index 0b796f12..f811adbd 100644 --- a/packages/core/types/config.py +++ b/packages/core/types/config.py @@ -198,10 +198,17 @@ def validate_config_dict(o: Any, partial: bool = False, skip_filepaths: bool = F This should always be used when loading the object from a JSON file! """ - if partial: - _ValidationModel(partial=o) - else: - _ValidationModel(regular=o) + try: + if partial: + _ValidationModel(partial=o) + else: + _ValidationModel(regular=o) + except pydantic.ValidationError as e: + pretty_error_messages = [] + for error in e.errors(): + fields = [f for f in error["loc"][1:] if f not in ["__root__"]] + pretty_error_messages.append(f"{'.'.join(fields)} -> {error['msg']}") + raise ValidationError(f"config is invalid: {', '.join(pretty_error_messages)}") new_object: ConfigDict = o @@ -260,24 +267,25 @@ def assert_ip_address(property_path: str) -> None: # this does not check for a valid upload.src_directory_ifgs path # since the thread itself will check for this - failed_checks = [] + pretty_error_messages = [] for assertion in assertions: try: assertion() except AssertionError as a: - failed_checks.append(a) + pretty_error_messages.append(a.args[0]) except (TypeError, KeyError): # Will be ignored because the structure is already # validated. 
Occurs when property is missing pass - if len(failed_checks) > 0: - raise ValidationError( - ("ConfigDictPartial" if partial else "ConfigDict") + f": {failed_checks}" - ) + if len(pretty_error_messages) > 0: + raise ValidationError(f"config is invalid: {', '.join(pretty_error_messages)}") class _ValidationModel(pydantic.BaseModel): regular: Optional[ConfigDict] partial: Optional[ConfigDictPartial] + + class Config: + extra = "forbid" diff --git a/tests/cli/test_cli_config.py b/tests/cli/test_cli_config.py index b5b2ec5a..c61445a1 100644 --- a/tests/cli/test_cli_config.py +++ b/tests/cli/test_cli_config.py @@ -65,7 +65,7 @@ def test_update_config(original_config): # run "pyra-cli config update" for some invalid variables for update in updates: stdout = run_cli_command(["config", "update", json.dumps(update)]) - assert "Error in new config string" in stdout + assert "config is invalid" in stdout assert_config_file_content(original_config, "config.json should not have changed") diff --git a/tests/cli/test_cli_core.py b/tests/cli/test_cli_core.py index 0e00fbae..962595c7 100644 --- a/tests/cli/test_cli_core.py +++ b/tests/cli/test_cli_core.py @@ -58,24 +58,28 @@ def test_start_stop_procedure(original_config, original_logs): print("first three log lines:\n" + "".join(info_log_lines[:3]) + "\n") expected_lines = [ - f"main - INFO - started mainloop inside process with PID {pid}", - "main - INFO - Starting Iteration", + f"main - INFO - Starting mainloop inside process with PID {pid}", + "main - INFO - Starting iteration", "main - INFO - pyra-core in test mode", ] now = datetime.utcnow() for expected_line, actual_line in zip(expected_lines, info_log_lines[:3]): line_time = datetime.strptime(actual_line[:19], "%Y-%m-%d %H:%M:%S") - print(f"expected log line: {'.'*29}{expected_line}") + print( + f"expected log line: {'.'*(len(actual_line.strip()) - len(expected_line) - 1)} {expected_line}" + ) print(f"actual log line: {actual_line}\n") assert (now - line_time).total_seconds() < 3 - assert actual_line.endswith(expected_line + "\n") + assert actual_line.strip().endswith(expected_line) stdout_5 = run_cli_command(["core", "is-running"]) assert stdout_5.startswith(f"pyra-core is running with PID {pid}") stdout_6 = run_cli_command(["core", "stop"]) - assert stdout_6.startswith(f"Terminated 1 background processe(s) with PID(s) [{pid}]") + assert stdout_6.startswith( + f"Terminated 1 pyra-core background processe(s) with PID(s) [{pid}]" + ) stdout_7 = run_cli_command(["core", "is-running"]) assert stdout_7.startswith("pyra-core is not running") From 9cd8d6caafb30eb598d28c852ebc896276555a16 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Wed, 24 Aug 2022 13:05:25 +0200 Subject: [PATCH 073/132] #101 - add GitHub Action to run pytests --- .github/workflows/test-on-push-to-main.yml | 50 ++++++++++++++++++++++ 1 file changed, 50 insertions(+) create mode 100644 .github/workflows/test-on-push-to-main.yml diff --git a/.github/workflows/test-on-push-to-main.yml b/.github/workflows/test-on-push-to-main.yml new file mode 100644 index 00000000..1583fb0f --- /dev/null +++ b/.github/workflows/test-on-push-to-main.yml @@ -0,0 +1,50 @@ +name: 'test-on-push-to-main' +on: + push: + branches: + - development-moritz + +jobs: + test: + runs-on: ubuntu-latest + steps: + # check-out repo and set-up python + - name: Check out repository + uses: actions/checkout@v2 + - name: Set up python + id: setup-python + uses: actions/setup-python@v2 + with: + python-version: 3.10.6 + + # install & configure poetry + - name: Install 
Poetry + uses: snok/install-poetry@v1 + with: + virtualenvs-create: true + virtualenvs-in-project: true + installer-parallel: true + + # load cached venv if cache exists + - name: Load cached venv + id: cached-poetry-dependencies + uses: actions/cache@v2 + with: + path: .venv + key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }} + + # install dependencies if cache does not exist + - name: Install dependencies + if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' + run: poetry install --no-interaction --no-root + + # install your root project, if required + - name: Install library + run: poetry install --no-interaction + + # run test suite + - name: Run tests + run: | + source .venv/bin/activate + pytest tests/ + coverage report From 443cd5f9ff29491293a2135eedab1208269d25c0 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Wed, 24 Aug 2022 13:34:33 +0200 Subject: [PATCH 074/132] #100 - debug pytest fixtures --- tests/fixtures.py | 126 ++++++++++++++++++++++++++++++---------------- 1 file changed, 84 insertions(+), 42 deletions(-) diff --git a/tests/fixtures.py b/tests/fixtures.py index 6749f768..be9b3e8a 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -3,78 +3,120 @@ import shutil import pytest -dir = os.path.dirname -PROJECT_DIR = dir(dir(os.path.abspath(__file__))) +PROJECT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +EXISTING_TEST_FILE_PATH = os.path.join(PROJECT_DIR, "pyproject.toml") CONFIG_FILE_PATHS = { "actual": os.path.join(PROJECT_DIR, "config", "config.json"), "temporary": os.path.join(PROJECT_DIR, "config", "config.original.json"), + "test_content": json.dumps( + { + "general": { + "seconds_per_core_interval": 30, + "test_mode": True, + "station_id": "...", + "min_sun_elevation": 11, + }, + "opus": { + "em27_ip": "10.10.0.1", + "executable_path": EXISTING_TEST_FILE_PATH, + "experiment_path": EXISTING_TEST_FILE_PATH, + "macro_path": EXISTING_TEST_FILE_PATH, + "username": "Default", + "password": "...", + }, + "camtracker": { + "config_path": EXISTING_TEST_FILE_PATH, + "executable_path": EXISTING_TEST_FILE_PATH, + "learn_az_elev_path": EXISTING_TEST_FILE_PATH, + "sun_intensity_path": EXISTING_TEST_FILE_PATH, + "motor_offset_threshold": 10, + }, + "error_email": { + "sender_address": "pyra.technical.user@gmail.com", + "sender_password": "...", + "notify_recipients": True, + "recipients": "your@mail.com", + }, + "measurement_decision": { + "mode": "automatic", + "manual_decision_result": False, + "cli_decision_result": False, + }, + "measurement_triggers": { + "consider_time": True, + "consider_sun_elevation": True, + "consider_helios": False, + "start_time": {"hour": 7, "minute": 0, "second": 0}, + "stop_time": {"hour": 21, "minute": 0, "second": 0}, + "min_sun_elevation": 0, + }, + "tum_plc": None, + "helios": None, + "upload": None, + }, + indent=4, + ), } -INFO_LOG_PATHS = { +INFO_LOGS_PATHS = { "actual": os.path.join(PROJECT_DIR, "logs", "info.log"), "temporary": os.path.join(PROJECT_DIR, "logs", "info.original.log"), + "test_content": "", } DEBUG_LOGS_PATHS = { "actual": os.path.join(PROJECT_DIR, "logs", "debug.log"), "temporary": os.path.join(PROJECT_DIR, "logs", "debug.original.log"), + "test_content": "", } -LOG_PATHSS = [INFO_LOG_PATHS, DEBUG_LOGS_PATHS] - - -def save_file(paths: dict): - if os.path.isfile(paths["temporary"]): - os.remove(paths["temporary"]) - shutil.copy(paths["actual"], paths["temporary"]) - - -def restore_file(paths: dict): - if 
os.path.isfile(paths["actual"]): - os.remove(paths["actual"]) - assert os.path.isfile( - paths["temporary"] - ), f'{paths["temporary"]} does not exist anymore' - os.rename(paths["temporary"], paths["actual"]) @pytest.fixture() def original_config(): # save original config.json for later - assert os.path.isfile(CONFIG_FILE_PATHS["actual"]), "config.json does not exist" + assert not os.path.isfile(CONFIG_FILE_PATHS["temporary"]) try: - with open(CONFIG_FILE_PATHS["actual"], "r") as f: - config = json.load(f) - except json.JSONDecodeError: - raise Exception("config.json is invalid") - - # save original config.json for later - save_file(CONFIG_FILE_PATHS) + shutil.copyfile(CONFIG_FILE_PATHS["actual"], CONFIG_FILE_PATHS["temporary"]) + except FileNotFoundError: + pass - config["general"]["test_mode"] = True + # create temporary file with open(CONFIG_FILE_PATHS["actual"], "w") as f: - json.dump(config, f, indent=4) + f.write(CONFIG_FILE_PATHS["test_content"]) # run the respective test - yield config + yield json.loads(CONFIG_FILE_PATHS["test_content"]) # restore original config.json - restore_file(CONFIG_FILE_PATHS) + os.remove(CONFIG_FILE_PATHS["actual"]) + try: + os.rename(CONFIG_FILE_PATHS["temporary"], CONFIG_FILE_PATHS["actual"]) + except FileNotFoundError: + pass @pytest.fixture() def original_logs(): - # save original logs for later - for log_paths in LOG_PATHSS: - if not os.path.isfile(log_paths["actual"]): - with open(log_paths["actual"], "w") as f: - pass - save_file(log_paths) - # all temporary log files should be empty - with open(log_paths["actual"], "w") as f: + assert not os.path.isfile(INFO_LOGS_PATHS["temporary"]) + assert not os.path.isfile(DEBUG_LOGS_PATHS["temporary"]) + + # save original log files for later + for ps in [INFO_LOGS_PATHS, DEBUG_LOGS_PATHS]: + try: + shutil.copyfile(ps["actual"], ps["temporary"]) + except FileNotFoundError: pass + # create temporary file + with open(ps["actual"], "w") as f: + f.write(ps["test_content"]) + # run the respective test yield - # restore original config.json - for log_paths in LOG_PATHSS: - restore_file(log_paths) + # restore original log files + for ps in [INFO_LOGS_PATHS, DEBUG_LOGS_PATHS]: + os.remove(ps["actual"]) + try: + os.rename(ps["temporary"], ps["actual"]) + except FileNotFoundError: + pass From d02daee2c344d52cc0d0d5a98d5d342736004dec Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Wed, 24 Aug 2022 13:39:46 +0200 Subject: [PATCH 075/132] #100 - add coverage report --- .github/workflows/test-on-push-to-main.yml | 2 +- .gitignore | 1 + poetry.lock | 33 +++++++++++++++++++++- pyproject.toml | 1 + 4 files changed, 35 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test-on-push-to-main.yml b/.github/workflows/test-on-push-to-main.yml index 1583fb0f..212c314a 100644 --- a/.github/workflows/test-on-push-to-main.yml +++ b/.github/workflows/test-on-push-to-main.yml @@ -46,5 +46,5 @@ jobs: - name: Run tests run: | source .venv/bin/activate - pytest tests/ + pytest --cov=packages tests coverage report diff --git a/.gitignore b/.gitignore index a7eaec88..3efcff3d 100644 --- a/.gitignore +++ b/.gitignore @@ -3,6 +3,7 @@ *.pyc .pytest_cache __pycache__ +.coverage # config config/config.json diff --git a/poetry.lock b/poetry.lock index 08dd886f..c69fdc63 100644 --- a/poetry.lock +++ b/poetry.lock @@ -115,6 +115,20 @@ category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +[[package]] +name = "coverage" +version = "6.4.4" +description = "Code coverage measurement for 
Python" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli"] + [[package]] name = "cryptography" version = "37.0.4" @@ -440,6 +454,21 @@ tomli = ">=1.0.0" [package.extras] testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] +[[package]] +name = "pytest-cov" +version = "3.0.0" +description = "Pytest plugin for measuring coverage." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["virtualenv", "pytest-xdist", "six", "process-tests", "hunter", "fields"] + [[package]] name = "python-snap7" version = "1.1" @@ -530,7 +559,7 @@ python-versions = ">=3.7" [metadata] lock-version = "1.1" python-versions = "^3.10" -content-hash = "7f108e4291d261ccafe43af936d8915adde43154dd69f36d547a212f72097211" +content-hash = "6ddefb9a40773489f8863988ae37feca415be5b3a41dbcb970b71277c7162ed3" [metadata.files] astropy = [ @@ -664,6 +693,7 @@ colorama = [ {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, ] +coverage = [] cryptography = [] deepdiff = [ {file = "deepdiff-5.8.1-py3-none-any.whl", hash = "sha256:e9aea49733f34fab9a0897038d8f26f9d94a97db1790f1b814cced89e9e0d2b7"}, @@ -824,6 +854,7 @@ pytest = [ {file = "pytest-7.1.2-py3-none-any.whl", hash = "sha256:13d0e3ccfc2b6e26be000cb6568c832ba67ba32e719443bfe725814d3c42433c"}, {file = "pytest-7.1.2.tar.gz", hash = "sha256:a06a0425453864a270bc45e71f783330a7428defb4230fb5e6a731fde06ecd45"}, ] +pytest-cov = [] python-snap7 = [ {file = "python-snap7-1.1.tar.gz", hash = "sha256:0fbb25d3c6cc1328ac5916aaf0d02f96c7939804bb387079d3b84ab44494a1f9"}, {file = "python_snap7-1.1-py3-none-macosx_10_9_universal2.whl", hash = "sha256:523acbb5164ad83f2aa5140a363424d8dfb2aaaf3b47a0cffe24c5f0841cb641"}, diff --git a/pyproject.toml b/pyproject.toml index 452d3071..c92dfed6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -27,6 +27,7 @@ mypy = "^0.971" types-paramiko = "^2.11.3" types-invoke = "^1.7.3" types-psutil = "^5.9.5" +pytest-cov = "^3.0.0" [build-system] requires = ["poetry-core>=1.0.0"] From 04dad7f541f0794695f1aba5a992bc9d71414507 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Wed, 24 Aug 2022 13:44:29 +0200 Subject: [PATCH 076/132] #100 - give core more time to start up --- .github/workflows/test-on-push-to-main.yml | 4 ++-- tests/cli/test_cli_core.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/test-on-push-to-main.yml b/.github/workflows/test-on-push-to-main.yml index 212c314a..8db8df68 100644 --- a/.github/workflows/test-on-push-to-main.yml +++ b/.github/workflows/test-on-push-to-main.yml @@ -28,10 +28,10 @@ jobs: # load cached venv if cache exists - name: Load cached venv id: cached-poetry-dependencies - uses: actions/cache@v2 + uses: actions/cache@v3 with: path: .venv - key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }} + key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('poetry.lock') }} # install dependencies if cache does not exist - name: 
Install dependencies diff --git a/tests/cli/test_cli_core.py b/tests/cli/test_cli_core.py index 962595c7..5a073242 100644 --- a/tests/cli/test_cli_core.py +++ b/tests/cli/test_cli_core.py @@ -50,7 +50,7 @@ def test_start_stop_procedure(original_config, original_logs): assert pid_string.isnumeric() pid = int(pid_string) - time.sleep(1) + time.sleep(8) with open(INFO_LOG_PATH, "r") as f: info_log_lines = f.readlines() @@ -70,7 +70,7 @@ def test_start_stop_procedure(original_config, original_logs): f"expected log line: {'.'*(len(actual_line.strip()) - len(expected_line) - 1)} {expected_line}" ) print(f"actual log line: {actual_line}\n") - assert (now - line_time).total_seconds() < 3 + assert (now - line_time).total_seconds() < 10 assert actual_line.strip().endswith(expected_line) stdout_5 = run_cli_command(["core", "is-running"]) From 66b883f0b554f1e8620376d1ad19e2029e927ffc Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Wed, 24 Aug 2022 13:55:05 +0200 Subject: [PATCH 077/132] #101 - add mypy tests to CI --- .github/workflows/test-on-push-to-main.yml | 10 ++++++++-- packages/cli/main.py | 2 +- packages/core/types/config.py | 2 +- 3 files changed, 10 insertions(+), 4 deletions(-) diff --git a/.github/workflows/test-on-push-to-main.yml b/.github/workflows/test-on-push-to-main.yml index 8db8df68..ede1f182 100644 --- a/.github/workflows/test-on-push-to-main.yml +++ b/.github/workflows/test-on-push-to-main.yml @@ -5,7 +5,7 @@ on: - development-moritz jobs: - test: + test-python-codebase: runs-on: ubuntu-latest steps: # check-out repo and set-up python @@ -42,8 +42,14 @@ jobs: - name: Install library run: poetry install --no-interaction + # run test suite + - name: Run mypy static type analysis + run: | + source .venv/bin/activate + bash scripts/run_type_analysis.sh + # run test suite - - name: Run tests + - name: Run pytest tests run: | source .venv/bin/activate pytest --cov=packages tests diff --git a/packages/cli/main.py b/packages/cli/main.py index fac67de8..76544ccd 100644 --- a/packages/cli/main.py +++ b/packages/cli/main.py @@ -6,7 +6,7 @@ PROJECT_DIR = dir(dir(dir(os.path.abspath(__file__)))) sys.path.append(PROJECT_DIR) -from commands import ( +from packages.cli.commands import ( config_command_group, core_command_group, logs_command_group, diff --git a/packages/core/types/config.py b/packages/core/types/config.py index f811adbd..26c4fa73 100644 --- a/packages/core/types/config.py +++ b/packages/core/types/config.py @@ -206,7 +206,7 @@ def validate_config_dict(o: Any, partial: bool = False, skip_filepaths: bool = F except pydantic.ValidationError as e: pretty_error_messages = [] for error in e.errors(): - fields = [f for f in error["loc"][1:] if f not in ["__root__"]] + fields = [str(f) for f in error["loc"][1:] if f not in ["__root__"]] pretty_error_messages.append(f"{'.'.join(fields)} -> {error['msg']}") raise ValidationError(f"config is invalid: {', '.join(pretty_error_messages)}") From 2c18b6ef5badb9311ef1fa14aac60a8c2d30ff4c Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Wed, 24 Aug 2022 14:07:41 +0200 Subject: [PATCH 078/132] #101 - add typescript transpiling to CI --- .github/workflows/test-on-push-to-main.yml | 27 +++++++++++++++++----- 1 file changed, 21 insertions(+), 6 deletions(-) diff --git a/.github/workflows/test-on-push-to-main.yml b/.github/workflows/test-on-push-to-main.yml index ede1f182..2ffbebdc 100644 --- a/.github/workflows/test-on-push-to-main.yml +++ b/.github/workflows/test-on-push-to-main.yml @@ -5,15 +5,32 @@ on: - development-moritz jobs: + 
test-typescript-codebase: + runs-on: ubuntu-latest + steps: + # check-out repo and set-up python + - name: Check out repository + uses: actions/checkout@v3 + - name: Install yarn dependencies + uses: actions/setup-node@v3 + with: + node-version: '14' + cache: 'yarn' + cache-dependency-path: packages/ui/yarn.lock + - run: yarn install + working-directory: packages/ui + - run: yarn build + working-directory: packages/ui + test-python-codebase: runs-on: ubuntu-latest steps: # check-out repo and set-up python - name: Check out repository - uses: actions/checkout@v2 - - name: Set up python + uses: actions/checkout@v3 + - name: Set up Python 3.10.6 id: setup-python - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: 3.10.6 @@ -42,13 +59,11 @@ jobs: - name: Install library run: poetry install --no-interaction - # run test suite + # run test suite - name: Run mypy static type analysis run: | source .venv/bin/activate bash scripts/run_type_analysis.sh - - # run test suite - name: Run pytest tests run: | source .venv/bin/activate From eca15df3eabbd64d964c8596567883a49f366c56 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Wed, 24 Aug 2022 14:34:01 +0200 Subject: [PATCH 079/132] #101 - only run CI on pushes to main --- .github/workflows/test-on-push-to-main.yml | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/.github/workflows/test-on-push-to-main.yml b/.github/workflows/test-on-push-to-main.yml index 2ffbebdc..14f39f2b 100644 --- a/.github/workflows/test-on-push-to-main.yml +++ b/.github/workflows/test-on-push-to-main.yml @@ -2,7 +2,7 @@ name: 'test-on-push-to-main' on: push: branches: - - development-moritz + - main jobs: test-typescript-codebase: @@ -11,15 +11,17 @@ jobs: # check-out repo and set-up python - name: Check out repository uses: actions/checkout@v3 - - name: Install yarn dependencies + - name: Set up NodeJS with Yarn uses: actions/setup-node@v3 with: node-version: '14' cache: 'yarn' cache-dependency-path: packages/ui/yarn.lock - - run: yarn install + - name: Install dependencies + run: yarn install working-directory: packages/ui - - run: yarn build + - name: Build frontend + run: yarn build working-directory: packages/ui test-python-codebase: From 456b1e73360905f17db87ff62aefa7432be1bb07 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Wed, 24 Aug 2022 14:50:31 +0200 Subject: [PATCH 080/132] #101 - run CI on pr to main --- .github/workflows/test-on-push-to-main.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/test-on-push-to-main.yml b/.github/workflows/test-on-push-to-main.yml index 14f39f2b..80ec4e8e 100644 --- a/.github/workflows/test-on-push-to-main.yml +++ b/.github/workflows/test-on-push-to-main.yml @@ -3,6 +3,9 @@ on: push: branches: - main + pull_request: + branches: + - main jobs: test-typescript-codebase: From 03d2d39cda7db3b70abc545309f5c85d28505758 Mon Sep 17 00:00:00 2001 From: dostuffthatmatters Date: Wed, 24 Aug 2022 16:40:39 +0200 Subject: [PATCH 081/132] Fix some UI things --- .../configuration/rows/labeled-row.tsx | 4 +- .../sections/config-section-upload.tsx | 6 +- packages/ui/src/tabs/control-tab.tsx | 6 +- packages/ui/src/tabs/overview-tab.tsx | 75 +++++++++---------- 4 files changed, 45 insertions(+), 46 deletions(-) diff --git a/packages/ui/src/components/configuration/rows/labeled-row.tsx b/packages/ui/src/components/configuration/rows/labeled-row.tsx index d63942bb..aa5d39dd 100644 --- a/packages/ui/src/components/configuration/rows/labeled-row.tsx +++ 
b/packages/ui/src/components/configuration/rows/labeled-row.tsx @@ -8,8 +8,8 @@ export default function LabeledRow(props: { const { title, modified, children } = props; return ( -
-