diff --git a/README.md b/README.md index ef514a9..b76b583 100644 --- a/README.md +++ b/README.md @@ -21,16 +21,17 @@ Follow these steps to install Shake&Tune on your printer: # result_folder: ~/printer_data/config/ShakeTune_results # The folder where the results will be stored. It will be created if it doesn't exist. # number_of_results_to_keep: 3 - # The number of results to keep in the result_folder. The oldest results will + # The number of results to keep in the result folder. The oldest results will # be automatically deleted after each run. - # keep_raw_csv: False - # If True, the raw CSV files will be kept in the result_folder alongside the - # PNG graphs. If False, they will be deleted and only the graphs will be kept. + # keep_raw_data: False + # If True, the raw data files will be kept in the result folder alongside the + # PNG graphs for archive and debugging purposes. Please attach them on GitHub when + # reporting any issue to help understand and solve it. # show_macros_in_webui: True # Mainsail and Fluidd don't create buttons for "system" macros that are not in the # printer.cfg file. If you want to see the macros in the webui, set this to True. # timeout: 600 - # The maximum time in seconds to let Shake&Tune process the CSV files and generate the graphs. + # The maximum time in seconds to let Shake&Tune process the data and generate the graphs. ``` Don't forget to check out **[Shake&Tune documentation here](./docs/README.md)**. diff --git a/requirements.txt b/requirements.txt index 89acd3c..6231c87 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,3 +3,4 @@ matplotlib==3.8.2 numpy==1.26.2 scipy==1.11.4 PyWavelets==1.6.0 +zstandard==0.23.0 diff --git a/shaketune/commands/accelerometer.py b/shaketune/commands/accelerometer.py deleted file mode 100644 index 6b3d8ae..0000000 --- a/shaketune/commands/accelerometer.py +++ /dev/null @@ -1,108 +0,0 @@ -# Shake&Tune: 3D printer analysis tools -# -# Copyright (C) 2024 Félix Boisselier (Frix_x on Discord) -# Licensed under the GNU General Public License v3.0 (GPL-3.0) -# -# File: accelerometer.py -# Description: Provides a custom and internal Shake&Tune Accelerometer helper that interfaces -# with Klipper's accelerometer classes. It includes functions to start and stop -# accelerometer measurements and write the data to a file in a blocking manner. 
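For reference, a filled-in `[shaketune]` section of printer.cfg after this change would look something like the snippet below. The values simply restate the defaults documented in the README hunk above and are illustrative, not a recommendation:

```
[shaketune]
result_folder: ~/printer_data/config/ShakeTune_results
number_of_results_to_keep: 3
# Renamed option (was keep_raw_csv): raw data is now stored as .stdata archives
keep_raw_data: False
show_macros_in_webui: True
timeout: 600
```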
- - -import os -import time -from multiprocessing import Process, Queue - -FILE_WRITE_TIMEOUT = 20 # seconds max to write a whole CSV file - - -class Accelerometer: - def __init__(self, reactor, klipper_accelerometer): - self._k_accelerometer = klipper_accelerometer - self._reactor = reactor - - self._bg_client = None - self._write_queue = Queue() - self._write_processes = [] - - @staticmethod - def find_axis_accelerometer(printer, axis: str = 'xy'): - accel_chip_names = printer.lookup_object('resonance_tester').accel_chip_names - for chip_axis, chip_name in accel_chip_names: - if axis in {'x', 'y'} and chip_axis == 'xy': - return chip_name - elif chip_axis == axis: - return chip_name - return None - - def start_measurement(self): - if self._bg_client is None: - self._bg_client = self._k_accelerometer.start_internal_client() - else: - raise ValueError('measurements already started!') - - def stop_measurement(self, name: str = None, append_time: bool = True): - if self._bg_client is None: - raise ValueError('measurements need to be started first!') - - timestamp = time.strftime('%Y%m%d_%H%M%S') - if name is None: - name = timestamp - elif append_time: - name += f'_{timestamp}' - - if not name.replace('-', '').replace('_', '').isalnum(): - raise ValueError('invalid file name!') - - bg_client = self._bg_client - self._bg_client = None - bg_client.finish_measurements() - - filename = f'/tmp/shaketune-{name}.csv' - self._queue_file_write(bg_client, filename) - - def _queue_file_write(self, bg_client, filename): - self._write_queue.put(filename) - write_proc = Process(target=self._write_to_file, args=(bg_client, filename)) - write_proc.daemon = True - write_proc.start() - self._write_processes.append(write_proc) - - def _write_to_file(self, bg_client, filename): - try: - os.nice(19) - except Exception: - pass - - samples = bg_client.samples or bg_client.get_samples() - - with open(filename, 'w') as f: - f.write('#time,accel_x,accel_y,accel_z\n') - for t, accel_x, accel_y, accel_z in samples: - f.write(f'{t:.6f},{accel_x:.6f},{accel_y:.6f},{accel_z:.6f}\n') - - self._write_queue.get() - - def wait_for_file_writes(self): - while not self._write_queue.empty(): - eventtime = self._reactor.monotonic() - self._reactor.pause(eventtime + 0.1) - - for proc in self._write_processes: - if proc is None: - continue - eventtime = self._reactor.monotonic() - endtime = eventtime + FILE_WRITE_TIMEOUT - complete = False - while eventtime < endtime: - eventtime = self._reactor.pause(eventtime + 0.05) - if not proc.is_alive(): - complete = True - break - if not complete: - raise TimeoutError( - 'Shake&Tune was not able to write the accelerometer data into the CSV file. ' - 'This might be due to a slow SD card or a busy or full filesystem.' - ) - - self._write_processes = [] diff --git a/shaketune/commands/axes_map_calibration.py b/shaketune/commands/axes_map_calibration.py index a23cf33..298a3e3 100644 --- a/shaketune/commands/axes_map_calibration.py +++ b/shaketune/commands/axes_map_calibration.py @@ -9,9 +9,9 @@ # and performs post-processing to analyze the collected data. 
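The deleted helper above wrote every recording to a temporary CSV in /tmp and then blocked while waiting for the file writes; its replacement keeps the samples in memory through a MeasurementsManager. Condensed from the command hunks that follow, the new calling pattern looks roughly like this (a sketch only: the chip lookup, names and moves are placeholders, not an exact excerpt):

```python
from ..helpers.accelerometer import Accelerometer, MeasurementsManager

def record_and_process(printer, toolhead, st_process):
    # Find a suitable accelerometer chip and wrap it in the Shake&Tune helper
    accel_chip = Accelerometer.find_axis_accelerometer(printer, 'x')
    accelerometer = Accelerometer(printer.lookup_object(accel_chip))

    measurements_manager = MeasurementsManager()

    # Samples accumulate in memory under the given name (a timestamp is appended)
    accelerometer.start_recording(measurements_manager, name='axis_X', append_time=True)
    # ... perform the resonance test moves here ...
    accelerometer.stop_recording()
    toolhead.wait_moves()

    # Post-processing now receives the in-memory measurements directly
    st_process.run(measurements_manager)
    st_process.wait_for_completion()
```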
+from ..helpers.accelerometer import Accelerometer, MeasurementsManager from ..helpers.console_output import ConsoleOutput from ..shaketune_process import ShakeTuneProcess -from .accelerometer import Accelerometer SEGMENT_LENGTH = 30 # mm @@ -35,9 +35,9 @@ def axes_map_calibration(gcmd, config, st_process: ShakeTuneProcess) -> None: current_axes_map = pconfig.status_raw_config[accel_chip].get('axes_map', None) if current_axes_map is not None and current_axes_map.strip().replace(' ', '') != 'x,y,z': raise gcmd.error( - f'The parameter axes_map is already set in your {accel_chip} configuration! Please remove it (or set it to "x,y,z")!' + f'The parameter axes_map is already set in your {accel_chip} configuration! Please remove it (or set it to "x,y,z") to be able to use this macro!' ) - accelerometer = Accelerometer(printer.get_reactor(), k_accelerometer) + accelerometer = Accelerometer(k_accelerometer) toolhead_info = toolhead.get_status(systime) old_accel = toolhead_info['max_accel'] @@ -69,28 +69,27 @@ def axes_map_calibration(gcmd, config, st_process: ShakeTuneProcess) -> None: toolhead.move([mid_x - SEGMENT_LENGTH / 2, mid_y - SEGMENT_LENGTH / 2, z_height, E], feedrate_travel) toolhead.dwell(0.5) + measurements_manager = MeasurementsManager() + # Start the measurements and do the movements (+X, +Y and then +Z) - accelerometer.start_measurement() + accelerometer.start_recording(measurements_manager, name='axesmap_X', append_time=True) toolhead.dwell(0.5) toolhead.move([mid_x + SEGMENT_LENGTH / 2, mid_y - SEGMENT_LENGTH / 2, z_height, E], speed) toolhead.dwell(0.5) - accelerometer.stop_measurement('axesmap_X', append_time=True) + accelerometer.stop_recording() toolhead.dwell(0.5) - accelerometer.wait_for_file_writes() - accelerometer.start_measurement() + accelerometer.start_recording(measurements_manager, name='axesmap_Y', append_time=True) toolhead.dwell(0.5) toolhead.move([mid_x + SEGMENT_LENGTH / 2, mid_y + SEGMENT_LENGTH / 2, z_height, E], speed) toolhead.dwell(0.5) - accelerometer.stop_measurement('axesmap_Y', append_time=True) + accelerometer.stop_recording() toolhead.dwell(0.5) - accelerometer.wait_for_file_writes() - accelerometer.start_measurement() + accelerometer.start_recording(measurements_manager, name='axesmap_Z', append_time=True) toolhead.dwell(0.5) toolhead.move([mid_x + SEGMENT_LENGTH / 2, mid_y + SEGMENT_LENGTH / 2, z_height + SEGMENT_LENGTH, E], speed) toolhead.dwell(0.5) - accelerometer.stop_measurement('axesmap_Z', append_time=True) + accelerometer.stop_recording() toolhead.dwell(0.5) - accelerometer.wait_for_file_writes() # Re-enable the input shaper if it was active if input_shaper is not None: @@ -111,5 +110,5 @@ def axes_map_calibration(gcmd, config, st_process: ShakeTuneProcess) -> None: ConsoleOutput.print('This may take some time (1-3min)') creator = st_process.get_graph_creator() creator.configure(accel, SEGMENT_LENGTH) - st_process.run() + st_process.run(measurements_manager) st_process.wait_for_completion() diff --git a/shaketune/commands/axes_shaper_calibration.py b/shaketune/commands/axes_shaper_calibration.py index 97360a3..2816f28 100644 --- a/shaketune/commands/axes_shaper_calibration.py +++ b/shaketune/commands/axes_shaper_calibration.py @@ -9,11 +9,11 @@ # and generates graphs for each axis to analyze the collected data. 
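A side effect of this refactor is that the graph creators further down no longer parse the axis and timestamp out of a CSV file name but out of the measurement name itself. With append_time=True, a recording started as axesmap_X ends up named as sketched below (illustrative value), which is why the parsing indices shift from split('_')[1]/[2] to [2]/[3]:

```python
# Hypothetical name produced by start_recording(name='axesmap_X', append_time=True)
name = 'axesmap_X_20240101_103000'
parts = name.split('_')
axis = parts[1].lower()           # -> 'x' (used to sort the three axes_map measurements)
date, clock = parts[2], parts[3]  # -> '20240101', '103000' (parsed for the graph title)
```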
+from ..helpers.accelerometer import Accelerometer, MeasurementsManager from ..helpers.common_func import AXIS_CONFIG from ..helpers.console_output import ConsoleOutput from ..helpers.resonance_test import vibrate_axis from ..shaketune_process import ShakeTuneProcess -from .accelerometer import Accelerometer def axes_shaper_calibration(gcmd, config, st_process: ShakeTuneProcess) -> None: @@ -90,29 +90,32 @@ def axes_shaper_calibration(gcmd, config, st_process: ShakeTuneProcess) -> None: else: input_shaper = None + measurements_manager = MeasurementsManager() + # Filter axis configurations based on user input, assuming 'axis_input' can be 'x', 'y', 'all' (that means 'x' and 'y') filtered_config = [ a for a in AXIS_CONFIG if a['axis'] == axis_input or (axis_input == 'all' and a['axis'] in ('x', 'y')) ] for config in filtered_config: + measurements_manager.clear_measurements() # Clear the measurements in each iteration of the loop + # First we need to find the accelerometer chip suited for the axis accel_chip = Accelerometer.find_axis_accelerometer(printer, config['axis']) if accel_chip is None: raise gcmd.error('No suitable accelerometer found for measurement!') - accelerometer = Accelerometer(printer.get_reactor(), printer.lookup_object(accel_chip)) + accelerometer = Accelerometer(printer.lookup_object(accel_chip)) # Then do the actual measurements - accelerometer.start_measurement() + accelerometer.start_recording(measurements_manager, name=config['label'], append_time=True) vibrate_axis(toolhead, gcode, config['direction'], min_freq, max_freq, hz_per_sec, accel_per_hz) - accelerometer.stop_measurement(config['label'], append_time=True) + accelerometer.stop_recording() toolhead.dwell(0.5) toolhead.wait_moves() - accelerometer.wait_for_file_writes() # And finally generate the graph for each measured axis ConsoleOutput.print(f'{config["axis"].upper()} axis frequency profile generation...') ConsoleOutput.print('This may take some time (1-3min)') - st_process.run() + st_process.run(measurements_manager) st_process.wait_for_completion() toolhead.dwell(1) diff --git a/shaketune/commands/compare_belts_responses.py b/shaketune/commands/compare_belts_responses.py index 8092c13..89fddae 100644 --- a/shaketune/commands/compare_belts_responses.py +++ b/shaketune/commands/compare_belts_responses.py @@ -9,12 +9,12 @@ # for each axis to analyze the collected data. +from ..helpers.accelerometer import Accelerometer, MeasurementsManager from ..helpers.common_func import AXIS_CONFIG from ..helpers.console_output import ConsoleOutput from ..helpers.motors_config_parser import MotorsConfigParser from ..helpers.resonance_test import vibrate_axis from ..shaketune_process import ShakeTuneProcess -from .accelerometer import Accelerometer def compare_belts_responses(gcmd, config, st_process: ShakeTuneProcess) -> None: @@ -60,7 +60,7 @@ def compare_belts_responses(gcmd, config, st_process: ShakeTuneProcess) -> None: raise gcmd.error( 'No suitable accelerometer found for measurement! Multi-accelerometer configurations are not supported for this macro.' 
) - accelerometer = Accelerometer(printer.get_reactor(), printer.lookup_object(accel_chip)) + accelerometer = Accelerometer(printer.lookup_object(accel_chip)) # Move to the starting point test_points = res_tester.test.get_start_test_points() @@ -103,14 +103,15 @@ def compare_belts_responses(gcmd, config, st_process: ShakeTuneProcess) -> None: else: input_shaper = None + measurements_manager = MeasurementsManager() + # Run the test for each axis for config in filtered_config: - accelerometer.start_measurement() + accelerometer.start_recording(measurements_manager, name=config['label'], append_time=True) vibrate_axis(toolhead, gcode, config['direction'], min_freq, max_freq, hz_per_sec, accel_per_hz) - accelerometer.stop_measurement(config['label'], append_time=True) + accelerometer.stop_recording() toolhead.dwell(0.5) toolhead.wait_moves() - accelerometer.wait_for_file_writes() # Re-enable the input shaper if it was active if input_shaper is not None: @@ -125,5 +126,5 @@ def compare_belts_responses(gcmd, config, st_process: ShakeTuneProcess) -> None: # Run post-processing ConsoleOutput.print('Belts comparative frequency profile generation...') ConsoleOutput.print('This may take some time (1-3min)') - st_process.run() + st_process.run(measurements_manager) st_process.wait_for_completion() diff --git a/shaketune/commands/create_vibrations_profile.py b/shaketune/commands/create_vibrations_profile.py index bdb3408..fa2b099 100644 --- a/shaketune/commands/create_vibrations_profile.py +++ b/shaketune/commands/create_vibrations_profile.py @@ -11,10 +11,10 @@ import math +from ..helpers.accelerometer import Accelerometer, MeasurementsManager from ..helpers.console_output import ConsoleOutput from ..helpers.motors_config_parser import MotorsConfigParser from ..shaketune_process import ShakeTuneProcess -from .accelerometer import Accelerometer MIN_SPEED = 2 # mm/s @@ -81,6 +81,8 @@ def create_vibrations_profile(gcmd, config, st_process: ShakeTuneProcess) -> Non toolhead.move([mid_x - 15, mid_y - 15, z_height, E], feedrate_travel) toolhead.dwell(0.5) + measurements_manager = MeasurementsManager() + nb_speed_samples = int((max_speed - MIN_SPEED) / speed_increment + 1) for curr_angle in main_angles: ConsoleOutput.print(f'-> Measuring angle: {curr_angle} degrees...') @@ -127,16 +129,15 @@ def create_vibrations_profile(gcmd, config, st_process: ShakeTuneProcess) -> Non movements = 2 # Back and forth movements to record the vibrations at constant speed in both direction - accelerometer.start_measurement() + name = f'vib_an{curr_angle:.2f}sp{curr_speed:.2f}'.replace('.', '_') + accelerometer.start_recording(measurements_manager, name=name, append_time=True) for _ in range(movements): toolhead.move([mid_x + dX, mid_y + dY, z_height, E], curr_speed) toolhead.move([mid_x - dX, mid_y - dY, z_height, E], curr_speed) - name = f'vib_an{curr_angle:.2f}sp{curr_speed:.2f}'.replace('.', '_') - accelerometer.stop_measurement(name) + accelerometer.stop_recording() toolhead.dwell(0.3) toolhead.wait_moves() - accelerometer.wait_for_file_writes() # Restore the previous acceleration values if old_mcr is not None: # minimum_cruise_ratio found: Klipper >= v0.12.0-239 @@ -152,5 +153,5 @@ def create_vibrations_profile(gcmd, config, st_process: ShakeTuneProcess) -> Non ConsoleOutput.print('This may take some time (5-8min)') creator = st_process.get_graph_creator() creator.configure(motors_config_parser.kinematics, accel, motors_config_parser) - st_process.run() + st_process.run(measurements_manager) 
st_process.wait_for_completion() diff --git a/shaketune/commands/excitate_axis_at_freq.py b/shaketune/commands/excitate_axis_at_freq.py index d2c5d10..48afb5d 100644 --- a/shaketune/commands/excitate_axis_at_freq.py +++ b/shaketune/commands/excitate_axis_at_freq.py @@ -8,11 +8,11 @@ # and optionally creates a graph of the vibration data collected by the accelerometer. +from ..helpers.accelerometer import Accelerometer, MeasurementsManager from ..helpers.common_func import AXIS_CONFIG from ..helpers.console_output import ConsoleOutput from ..helpers.resonance_test import vibrate_axis_at_static_freq from ..shaketune_process import ShakeTuneProcess -from .accelerometer import Accelerometer def excitate_axis_at_freq(gcmd, config, st_process: ShakeTuneProcess) -> None: @@ -41,7 +41,8 @@ def excitate_axis_at_freq(gcmd, config, st_process: ShakeTuneProcess) -> None: k_accelerometer = printer.lookup_object(accel_chip, None) if k_accelerometer is None: raise gcmd.error(f'Accelerometer chip [{accel_chip}] was not found!') - accelerometer = Accelerometer(printer.get_reactor(), k_accelerometer) + accelerometer = Accelerometer(k_accelerometer) + measurements_manager = MeasurementsManager() ConsoleOutput.print(f'Excitating {axis.upper()} axis at {freq}Hz for {duration} seconds') @@ -87,7 +88,7 @@ def excitate_axis_at_freq(gcmd, config, st_process: ShakeTuneProcess) -> None: # If the user want to create a graph, we start accelerometer recording if create_graph: - accelerometer.start_measurement() + accelerometer.start_recording(measurements_manager, name=f'staticfreq_{axis.upper()}', append_time=True) toolhead.dwell(0.5) vibrate_axis_at_static_freq(toolhead, gcode, axis_config['direction'], freq, duration, accel_per_hz) @@ -99,11 +100,10 @@ def excitate_axis_at_freq(gcmd, config, st_process: ShakeTuneProcess) -> None: # If the user wanted to create a graph, we stop the recording and generate it if create_graph: - accelerometer.stop_measurement(f'staticfreq_{axis.upper()}', append_time=True) + accelerometer.stop_recording() toolhead.dwell(0.5) - accelerometer.wait_for_file_writes() creator = st_process.get_graph_creator() creator.configure(freq, duration, accel_per_hz) - st_process.run() + st_process.run(measurements_manager) st_process.wait_for_completion() diff --git a/shaketune/graph_creators/axes_map_graph_creator.py b/shaketune/graph_creators/axes_map_graph_creator.py index 9e6df9f..9bb54a8 100644 --- a/shaketune/graph_creators/axes_map_graph_creator.py +++ b/shaketune/graph_creators/axes_map_graph_creator.py @@ -23,7 +23,7 @@ matplotlib.use('Agg') -from ..helpers.common_func import parse_log +from ..helpers.accelerometer import Measurement, MeasurementsManager from ..helpers.console_output import ConsoleOutput from ..shaketune_config import ShakeTuneConfig from .graph_creator import GraphCreator @@ -48,30 +48,14 @@ def configure(self, accel: int, segment_length: float) -> None: self._accel = accel self._segment_length = segment_length - def create_graph(self) -> None: - lognames = self._move_and_prepare_files( - glob_pattern='shaketune-axesmap_*.csv', - min_files_required=3, - custom_name_func=lambda f: f.stem.split('_')[1].upper(), - ) + def create_graph(self, measurements_manager: MeasurementsManager) -> None: fig = axesmap_calibration( - lognames=[str(path) for path in lognames], + measurements=measurements_manager.get_measurements(), accel=self._accel, fixed_length=self._segment_length, st_version=self._version, ) - self._save_figure_and_cleanup(fig, lognames) - - def clean_old_files(self, 
keep_results: int = 3) -> None: - files = sorted(self._folder.glob('*.png'), key=lambda f: f.stat().st_mtime, reverse=True) - if len(files) <= keep_results: - return # No need to delete any files - for old_file in files[keep_results:]: - file_date = '_'.join(old_file.stem.split('_')[1:3]) - for suffix in {'X', 'Y', 'Z'}: - csv_file = self._folder / f'axesmap_{file_date}_{suffix}.csv' - csv_file.unlink(missing_ok=True) - old_file.unlink() + self._save_figure(fig, measurements_manager) ###################################################################### @@ -340,19 +324,18 @@ def format_direction_vector(vectors: List[np.ndarray]) -> str: def axesmap_calibration( - lognames: List[str], fixed_length: float, accel: Optional[float] = None, st_version: str = 'unknown' + measurements: List[Measurement], fixed_length: float, accel: Optional[float] = None, st_version: str = 'unknown' ) -> plt.Figure: - # Parse data from the log files while ignoring CSV in the wrong format (sorted by axis name) + if len(measurements) != 3: + raise ValueError('This tool needs 3 measurements to work with (like axesmap_X, axesmap_Y and axesmap_Z)') + raw_datas = {} - for logname in lognames: - data = parse_log(logname) + for measurement in measurements: + data = np.array(measurement['samples']) if data is not None: - _axis = logname.split('_')[-1].split('.')[0].lower() + _axis = measurement['name'].split('_')[1].lower() raw_datas[_axis] = data - if len(raw_datas) != 3: - raise ValueError('This tool needs 3 CSVs to work with (like axesmap_X.csv, axesmap_Y.csv and axesmap_Z.csv)') - fig, ((ax1, ax2)) = plt.subplots( 1, 2, @@ -379,7 +362,7 @@ def axesmap_calibration( gravities = [] for _, machine_axis in enumerate(MACHINE_AXES): if machine_axis not in raw_datas: - raise ValueError(f'Missing CSV file for axis {machine_axis}') + raise ValueError(f'Missing measurement for axis {machine_axis}') # Get the accel data according to the current axes_map time = raw_datas[machine_axis][:, 0] @@ -451,16 +434,17 @@ def axesmap_calibration( 0.060, 0.947, title_line1, ha='left', va='bottom', fontsize=20, color=KLIPPAIN_COLORS['purple'], weight='bold' ) try: - filename = lognames[0].split('/')[-1] - dt = datetime.strptime(f"{filename.split('_')[1]} {filename.split('_')[2]}", '%Y%m%d %H%M%S') + filename = measurements[0]['name'] + dt = datetime.strptime(f"{filename.split('_')[2]} {filename.split('_')[3]}", '%Y%m%d %H%M%S') title_line2 = dt.strftime('%x %X') if accel is not None: title_line2 += f' -- at {accel:0.0f} mm/s²' except Exception: ConsoleOutput.print( - f'Warning: CSV filenames look to be different than expected ({lognames[0]}, {lognames[1]}, {lognames[2]})' + f"Warning: Shake&Tune measurements names look to be different than expected ({measurements[0]['name']}, " + f"{measurements[1]['name']}, {measurements[2]['name']})" ) - title_line2 = lognames[0].split('/')[-1] + ' ...' + title_line2 = measurements[0]['name'] + ' ...' 
fig.text(0.060, 0.939, title_line2, ha='left', va='top', fontsize=16, color=KLIPPAIN_COLORS['dark_purple']) title_line3 = f'| Detected axes_map: {formatted_direction_vector}' @@ -491,9 +475,9 @@ def main(): ) options, args = opts.parse_args() if len(args) < 1: - opts.error('No CSV file(s) to analyse') + opts.error('No measurements to analyse') if options.accel is None: - opts.error('You must specify the acceleration value used when generating the CSV file (option -a)') + opts.error('You must specify the acceleration value used when recording the measurements (option -a)') try: accel_value = float(options.accel) except ValueError: @@ -507,7 +491,15 @@ def main(): if options.output is None: opts.error('You must specify an output file.png to use the script (option -o)') - fig = axesmap_calibration(args, length_value, accel_value, 'unknown') + measurements_manager = MeasurementsManager() + if args[0].endswith('.csv'): + measurements_manager.load_from_csvs(args) + elif args[0].endswith('.stdata'): + measurements_manager.load_from_stdata(args[0]) + else: + raise ValueError('Only .stdata or legacy Klipper raw accelerometer CSV files are supported!') + + fig = axesmap_calibration(measurements_manager.get_measurements(), length_value, accel_value, 'unknown') fig.savefig(options.output, dpi=150) diff --git a/shaketune/graph_creators/belts_graph_creator.py b/shaketune/graph_creators/belts_graph_creator.py index 21f9302..26b397e 100644 --- a/shaketune/graph_creators/belts_graph_creator.py +++ b/shaketune/graph_creators/belts_graph_creator.py @@ -23,7 +23,8 @@ matplotlib.use('Agg') -from ..helpers.common_func import detect_peaks, parse_log, setup_klipper_import +from ..helpers.accelerometer import Measurement, MeasurementsManager +from ..helpers.common_func import detect_peaks, setup_klipper_import from ..helpers.console_output import ConsoleOutput from ..shaketune_config import ShakeTuneConfig from .graph_creator import GraphCreator @@ -71,31 +72,15 @@ def configure(self, kinematics: Optional[str] = None, accel_per_hz: Optional[flo self._kinematics = kinematics self._accel_per_hz = accel_per_hz - def create_graph(self) -> None: - lognames = self._move_and_prepare_files( - glob_pattern='shaketune-belt_*.csv', - min_files_required=2, - custom_name_func=lambda f: f.stem.split('_')[1].upper(), - ) + def create_graph(self, measurements_manager: MeasurementsManager) -> None: fig = belts_calibration( - lognames=[str(path) for path in lognames], + measurements=measurements_manager.get_measurements(), kinematics=self._kinematics, klipperdir=str(self._config.klipper_folder), accel_per_hz=self._accel_per_hz, st_version=self._version, ) - self._save_figure_and_cleanup(fig, lognames) - - def clean_old_files(self, keep_results: int = 3) -> None: - files = sorted(self._folder.glob('*.png'), key=lambda f: f.stat().st_mtime, reverse=True) - if len(files) <= keep_results: - return # No need to delete any files - for old_file in files[keep_results:]: - file_date = '_'.join(old_file.stem.split('_')[1:3]) - for suffix in {'A', 'B'}: - csv_file = self._folder / f'beltscomparison_{file_date}_{suffix}.csv' - csv_file.unlink(missing_ok=True) - old_file.unlink() + self._save_figure(fig, measurements_manager) ###################################################################### @@ -486,25 +471,25 @@ def compute_signal_data(data: np.ndarray, common_freqs: np.ndarray, max_freq: fl def belts_calibration( - lognames: List[str], + measurements: List[Measurement], kinematics: Optional[str], klipperdir: str = '~/klipper', max_freq: 
float = 200.0, accel_per_hz: Optional[float] = None, st_version: str = 'unknown', ) -> plt.Figure: + if len(measurements) != 2: + raise ValueError('This tool needs 2 measurements to work with (one for each belt)!') + global shaper_calibrate shaper_calibrate = setup_klipper_import(klipperdir) - # Parse data from the log files while ignoring CSV in the wrong format - datas = [data for data in (parse_log(fn) for fn in lognames) if data is not None] - if len(datas) != 2: - raise ValueError('Incorrect number of .csv files used (this function needs exactly two files to compare them)!') + datas = [np.array(m['samples']) for m in measurements if m['samples'] is not None] # Get the belts name for the legend to avoid putting the full file name belt_info = {'A': ' (axis 1,-1)', 'B': ' (axis 1, 1)'} - signal1_belt = (lognames[0].split('/')[-1]).split('_')[-1][0] - signal2_belt = (lognames[1].split('/')[-1]).split('_')[-1][0] + signal1_belt = (measurements[0]['name'].split('/')[-1]).split('_')[-1][0] + signal2_belt = (measurements[1]['name'].split('/')[-1]).split('_')[-1][0] signal1_belt += belt_info.get(signal1_belt, '') signal2_belt += belt_info.get(signal2_belt, '') @@ -550,14 +535,17 @@ def belts_calibration( 0.060, 0.947, title_line1, ha='left', va='bottom', fontsize=20, color=KLIPPAIN_COLORS['purple'], weight='bold' ) try: - filename = lognames[0].split('/')[-1] - dt = datetime.strptime(f"{filename.split('_')[1]} {filename.split('_')[2]}", '%Y%m%d %H%M%S') + filename = measurements[0]['name'] + dt = datetime.strptime(f"{filename.split('_')[2]} {filename.split('_')[3]}", '%Y%m%d %H%M%S') title_line2 = dt.strftime('%x %X') if kinematics is not None: title_line2 += ' -- ' + kinematics.upper() + ' kinematics' except Exception: - ConsoleOutput.print(f'Warning: Unable to parse the date from the filename ({lognames[0]}, {lognames[1]})') - title_line2 = lognames[0].split('/')[-1] + ' / ' + lognames[1].split('/')[-1] + ConsoleOutput.print( + f'Warning: Unable to parse the date from the measurements names ' + f"({measurements[0]['name']}, {measurements[1]['name']})" + ) + title_line2 = measurements[0]['name'] + ' / ' + measurements[1]['name'] fig.text(0.060, 0.939, title_line2, ha='left', va='top', fontsize=16, color=KLIPPAIN_COLORS['dark_purple']) # We add the estimated similarity and the MHI value to the title only if the kinematics is CoreXY @@ -611,8 +599,21 @@ def main(): if options.output is None: opts.error('You must specify an output file.png to use the script (option -o)') + measurements_manager = MeasurementsManager() + if args[0].endswith('.csv'): + measurements_manager.load_from_csvs(args) + elif args[0].endswith('.stdata'): + measurements_manager.load_from_stdata(args[0]) + else: + raise ValueError('Only .stdata or legacy Klipper raw accelerometer CSV files are supported!') + fig = belts_calibration( - args, options.kinematics, options.klipperdir, options.max_freq, options.accel_per_hz, 'unknown' + measurements_manager.get_measurements(), + options.kinematics, + options.klipperdir, + options.max_freq, + options.accel_per_hz, + 'unknown', ) fig.savefig(options.output, dpi=150) diff --git a/shaketune/graph_creators/graph_creator.py b/shaketune/graph_creators/graph_creator.py index f89b9ce..41546c3 100644 --- a/shaketune/graph_creators/graph_creator.py +++ b/shaketune/graph_creators/graph_creator.py @@ -11,13 +11,12 @@ import abc -import shutil from datetime import datetime -from pathlib import Path -from typing import Callable, List, Optional +from typing import Optional from matplotlib.figure 
import Figure +from ..helpers.accelerometer import MeasurementsManager from ..shaketune_config import ShakeTuneConfig @@ -29,56 +28,28 @@ def __init__(self, config: ShakeTuneConfig, graph_type: str): self._type = graph_type self._folder = self._config.get_results_folder(graph_type) - def _move_and_prepare_files( - self, - glob_pattern: str, - min_files_required: Optional[int] = None, - custom_name_func: Optional[Callable[[Path], str]] = None, - ) -> List[Path]: - tmp_path = Path('/tmp') - globbed_files = list(tmp_path.glob(glob_pattern)) - - # If min_files_required is not set, use the number of globbed files as the minimum - min_files_required = min_files_required or len(globbed_files) - - if not globbed_files: - raise FileNotFoundError(f'no CSV files found in the /tmp folder to create the {self._type} graphs!') - if len(globbed_files) < min_files_required: - raise FileNotFoundError(f'{min_files_required} CSV files are needed to create the {self._type} graphs!') - - lognames = [] - for filename in sorted(globbed_files, key=lambda f: f.stat().st_mtime, reverse=True)[:min_files_required]: - custom_name = custom_name_func(filename) if custom_name_func else filename.name - new_file = self._folder / f"{self._type.replace(' ', '')}_{self._graph_date}_{custom_name}.csv" - # shutil.move() is needed to move the file across filesystems (mainly for BTT CB1 Pi default OS image) - shutil.move(filename, new_file) - lognames.append(new_file) - return lognames - - def _save_figure_and_cleanup(self, fig: Figure, lognames: List[Path], axis_label: Optional[str] = None) -> None: + def _save_figure( + self, fig: Figure, measurements_manager: MeasurementsManager, axis_label: Optional[str] = None + ) -> None: axis_suffix = f'_{axis_label}' if axis_label else '' - png_filename = self._folder / f"{self._type.replace(' ', '')}_{self._graph_date}{axis_suffix}.png" - fig.savefig(png_filename, dpi=self._config.dpi) + filename = self._folder / f"{self._type.replace(' ', '')}_{self._graph_date}{axis_suffix}" + fig.savefig(f'{filename}.png', dpi=self._config.dpi) - if self._config.keep_csv: - self._archive_files(lognames) - else: - self._remove_files(lognames) - - def _archive_files(self, lognames: List[Path]) -> None: - return - - def _remove_files(self, lognames: List[Path]) -> None: - for csv in lognames: - csv.unlink(missing_ok=True) + if self._config.keep_raw_data: + measurements_manager.save_stdata(f'{filename}.stdata') def get_type(self) -> str: return self._type @abc.abstractmethod - def create_graph(self) -> None: + def create_graph(self, measurements_manager: MeasurementsManager) -> None: pass - @abc.abstractmethod - def clean_old_files(self, keep_results: int) -> None: - pass + def clean_old_files(self, keep_results: int = 3) -> None: + files = sorted(self._folder.glob('*.png'), key=lambda f: f.stat().st_mtime, reverse=True) + if len(files) <= keep_results: + return # No need to delete any files + for old_png_file in files[keep_results:]: + stdata_file = old_png_file.with_suffix('.stdata') + stdata_file.unlink(missing_ok=True) + old_png_file.unlink() diff --git a/shaketune/graph_creators/shaper_graph_creator.py b/shaketune/graph_creators/shaper_graph_creator.py index fecf6eb..90c2749 100644 --- a/shaketune/graph_creators/shaper_graph_creator.py +++ b/shaketune/graph_creators/shaper_graph_creator.py @@ -33,11 +33,11 @@ matplotlib.use('Agg') +from ..helpers.accelerometer import Measurement, MeasurementsManager from ..helpers.common_func import ( compute_mechanical_parameters, compute_spectrogram, detect_peaks, - 
parse_log, setup_klipper_import, ) from ..helpers.console_output import ConsoleOutput @@ -74,33 +74,29 @@ def configure( self._max_smoothing = max_smoothing self._accel_per_hz = accel_per_hz - def create_graph(self) -> None: + def create_graph(self, measurements_manager: MeasurementsManager) -> None: if not self._scv: raise ValueError('scv must be set to create the input shaper graph!') - lognames = self._move_and_prepare_files( - glob_pattern='shaketune-axis_*.csv', - min_files_required=1, - custom_name_func=lambda f: f.stem.split('_')[1].upper(), - ) fig = shaper_calibration( - lognames=[str(path) for path in lognames], + measurements=measurements_manager.get_measurements(), klipperdir=str(self._config.klipper_folder), max_smoothing=self._max_smoothing, scv=self._scv, accel_per_hz=self._accel_per_hz, st_version=self._version, ) - self._save_figure_and_cleanup(fig, lognames, lognames[0].stem.split('_')[-1]) + axis_label = (measurements_manager.get_measurements())[0]['name'].split('_')[1] + self._save_figure(fig, measurements_manager, axis_label) def clean_old_files(self, keep_results: int = 3) -> None: files = sorted(self._folder.glob('*.png'), key=lambda f: f.stat().st_mtime, reverse=True) if len(files) <= 2 * keep_results: return # No need to delete any files - for old_file in files[2 * keep_results :]: - csv_file = old_file.with_suffix('.csv') - csv_file.unlink(missing_ok=True) - old_file.unlink() + for old_png_file in files[2 * keep_results :]: + stdata_file = old_png_file.with_suffix('.stdata') + stdata_file.unlink(missing_ok=True) + old_png_file.unlink() ###################################################################### @@ -585,7 +581,7 @@ def print_shaper_table(fig: plt.Figure, shaper_table_data: Dict[str, List[Dict[s def shaper_calibration( - lognames: List[str], + measurements: List[Measurement], klipperdir: str = '~/klipper', max_smoothing: Optional[float] = None, scv: float = 5.0, @@ -593,15 +589,15 @@ def shaper_calibration( accel_per_hz: Optional[float] = None, st_version: str = 'unknown', ) -> plt.Figure: + if len(measurements) == 0: + raise ValueError('No valid data found in the provided measurements!') + if len(measurements) > 1: + ConsoleOutput.print('Warning: incorrect number of measurements detected. Only the first one will be used!') + global shaper_calibrate shaper_calibrate = setup_klipper_import(klipperdir) - # Parse data from the log files while ignoring CSV in the wrong format - datas = [data for data in (parse_log(fn) for fn in lognames) if data is not None] - if len(datas) == 0: - raise ValueError('No valid data found in the provided CSV files!') - if len(datas) > 1: - ConsoleOutput.print('Warning: incorrect number of .csv files detected. 
Only the first one will be used!') + datas = [np.array(m['samples']) for m in measurements if m['samples'] is not None] # Compute shapers, PSD outputs and spectrogram klipper_shaper_choice, shapers, additional_shapers, calibration_data, fr, zeta, max_smoothing_computed, compat = ( @@ -656,9 +652,9 @@ def shaper_calibration( 0.065, 0.965, title_line1, ha='left', va='bottom', fontsize=20, color=KLIPPAIN_COLORS['purple'], weight='bold' ) try: - filename_parts = (lognames[0].split('/')[-1]).split('_') - dt = datetime.strptime(f'{filename_parts[1]} {filename_parts[2]}', '%Y%m%d %H%M%S') - title_line2 = dt.strftime('%x %X') + ' -- ' + filename_parts[3].upper().split('.')[0] + ' axis' + filename_parts = measurements[0]['name'].split('_') + dt = datetime.strptime(f'{filename_parts[2]} {filename_parts[3]}', '%Y%m%d %H%M%S') + title_line2 = dt.strftime('%x %X') + ' -- ' + filename_parts[1].upper() + ' axis' if compat: title_line3 = '| Older Klipper version detected, damping ratio' title_line4 = '| and SCV are not used for filter recommendations!' @@ -671,8 +667,10 @@ def shaper_calibration( title_line4 = f'| Allowed smoothing: {max_smoothing_string}' title_line5 = f'| Accel per Hz used: {accel_per_hz} mm/s²/Hz' if accel_per_hz is not None else '' except Exception: - ConsoleOutput.print(f'Warning: CSV filename look to be different than expected ({lognames[0]})') - title_line2 = lognames[0].split('/')[-1] + ConsoleOutput.print( + f'Warning: measurement names look to be different than expected ({measurements[0]["name"]})' + ) + title_line2 = measurements[0]['name'] title_line3 = '' title_line4 = '' title_line5 = '' @@ -724,8 +722,22 @@ def main(): if options.max_smoothing is not None and options.max_smoothing < 0.05: opts.error('Too small max_smoothing specified (must be at least 0.05)') + measurements_manager = MeasurementsManager() + if args[0].endswith('.csv'): + measurements_manager.load_from_csvs(args) + elif args[0].endswith('.stdata'): + measurements_manager.load_from_stdata(args[0]) + else: + raise ValueError('Only .stdata or legacy Klipper raw accelerometer CSV files are supported!') + fig = shaper_calibration( - args, options.klipperdir, options.max_smoothing, options.scv, options.max_freq, options.accel_per_hz, 'unknown' + measurements_manager.get_measurements(), + options.klipperdir, + options.max_smoothing, + options.scv, + options.max_freq, + options.accel_per_hz, + 'unknown', ) fig.savefig(options.output, dpi=150) diff --git a/shaketune/graph_creators/static_graph_creator.py b/shaketune/graph_creators/static_graph_creator.py index e0c9dd0..0fed522 100644 --- a/shaketune/graph_creators/static_graph_creator.py +++ b/shaketune/graph_creators/static_graph_creator.py @@ -21,7 +21,8 @@ matplotlib.use('Agg') -from ..helpers.common_func import compute_spectrogram, parse_log +from ..helpers.accelerometer import Measurement, MeasurementsManager +from ..helpers.common_func import compute_spectrogram from ..helpers.console_output import ConsoleOutput from ..shaketune_config import ShakeTuneConfig from .graph_creator import GraphCreator @@ -52,17 +53,12 @@ def configure(self, freq: float, duration: float, accel_per_hz: Optional[float] self._duration = duration self._accel_per_hz = accel_per_hz - def create_graph(self) -> None: + def create_graph(self, measurements_manager: MeasurementsManager) -> None: if not self._freq or not self._duration or not self._accel_per_hz: raise ValueError('freq, duration and accel_per_hz must be set to create the static frequency graph!') - lognames = 
self._move_and_prepare_files( - glob_pattern='shaketune-staticfreq_*.csv', - min_files_required=1, - custom_name_func=lambda f: f.stem.split('_')[1].upper(), - ) fig = static_frequency_tool( - lognames=[str(path) for path in lognames], + measurements=measurements_manager.get_measurements(), klipperdir=str(self._config.klipper_folder), freq=self._freq, duration=self._duration, @@ -70,16 +66,8 @@ def create_graph(self) -> None: accel_per_hz=self._accel_per_hz, st_version=self._version, ) - self._save_figure_and_cleanup(fig, lognames, lognames[0].stem.split('_')[-1]) - - def clean_old_files(self, keep_results: int = 3) -> None: - files = sorted(self._folder.glob('*.png'), key=lambda f: f.stat().st_mtime, reverse=True) - if len(files) <= keep_results: - return # No need to delete any files - for old_file in files[keep_results:]: - csv_file = old_file.with_suffix('.csv') - csv_file.unlink(missing_ok=True) - old_file.unlink() + axis_label = (measurements_manager.get_measurements())[0]['name'].split('_')[1] + self._save_figure(fig, measurements_manager, axis_label) ###################################################################### @@ -133,7 +121,7 @@ def plot_energy_accumulation(ax: plt.Axes, t: np.ndarray, bins: np.ndarray, pdat def static_frequency_tool( - lognames: List[str], + measurements: List[Measurement], klipperdir: str = '~/klipper', freq: Optional[float] = None, duration: Optional[float] = None, @@ -144,11 +132,12 @@ def static_frequency_tool( if freq is None or duration is None: raise ValueError('Error: missing frequency or duration parameters!') - datas = [data for data in (parse_log(fn) for fn in lognames) if data is not None] - if len(datas) == 0: - raise ValueError('No valid data found in the provided CSV files!') - if len(datas) > 1: - ConsoleOutput.print('Warning: incorrect number of .csv files detected. Only the first one will be used!') + if len(measurements) == 0: + raise ValueError('No valid data found in the provided measurements!') + if len(measurements) > 1: + ConsoleOutput.print('Warning: incorrect number of measurements detected. 
Only the first one will be used!') + + datas = [np.array(m['samples']) for m in measurements if m['samples'] is not None] pdata, bins, t = compute_spectrogram(datas[0]) del datas @@ -173,14 +162,16 @@ def static_frequency_tool( 0.060, 0.947, title_line1, ha='left', va='bottom', fontsize=20, color=KLIPPAIN_COLORS['purple'], weight='bold' ) try: - filename_parts = (lognames[0].split('/')[-1]).split('_') - dt = datetime.strptime(f'{filename_parts[1]} {filename_parts[2]}', '%Y%m%d %H%M%S') - title_line2 = dt.strftime('%x %X') + ' -- ' + filename_parts[3].upper().split('.')[0] + ' axis' + filename_parts = measurements[0]['name'].split('_') + dt = datetime.strptime(f'{filename_parts[2]} {filename_parts[3]}', '%Y%m%d %H%M%S') + title_line2 = dt.strftime('%x %X') + ' -- ' + filename_parts[1].upper() + ' axis' title_line3 = f'| Maintained frequency: {freq}Hz for {duration}s' title_line4 = f'| Accel per Hz used: {accel_per_hz} mm/s²/Hz' if accel_per_hz is not None else '' except Exception: - ConsoleOutput.print(f'Warning: CSV filename look to be different than expected ({lognames[0]})') - title_line2 = lognames[0].split('/')[-1] + ConsoleOutput.print( + f'Warning: measurement names look to be different than expected ({measurements[0]["name"]})' + ) + title_line2 = measurements[0]['name'] title_line3 = '' title_line4 = '' fig.text(0.060, 0.939, title_line2, ha='left', va='top', fontsize=16, color=KLIPPAIN_COLORS['dark_purple']) @@ -217,8 +208,22 @@ def main(): if options.output is None: opts.error('You must specify an output file.png to use the script (option -o)') + measurements_manager = MeasurementsManager() + if args[0].endswith('.csv'): + measurements_manager.load_from_csvs(args) + elif args[0].endswith('.stdata'): + measurements_manager.load_from_stdata(args[0]) + else: + raise ValueError('Only .stdata or legacy Klipper raw accelerometer CSV files are supported!') + fig = static_frequency_tool( - args, options.klipperdir, options.freq, options.duration, options.max_freq, options.accel_per_hz, 'unknown' + measurements_manager.get_measurements(), + options.klipperdir, + options.freq, + options.duration, + options.max_freq, + options.accel_per_hz, + 'unknown', ) fig.savefig(options.output, dpi=150) diff --git a/shaketune/graph_creators/vibrations_graph_creator.py b/shaketune/graph_creators/vibrations_graph_creator.py index 3fc0034..07f4baf 100644 --- a/shaketune/graph_creators/vibrations_graph_creator.py +++ b/shaketune/graph_creators/vibrations_graph_creator.py @@ -12,10 +12,7 @@ import optparse import os import re -import tarfile -from collections import defaultdict from datetime import datetime -from pathlib import Path from typing import List, Optional, Tuple import matplotlib @@ -27,11 +24,11 @@ matplotlib.use('Agg') +from ..helpers.accelerometer import Measurement, MeasurementsManager from ..helpers.common_func import ( compute_mechanical_parameters, detect_peaks, identify_low_energy_zones, - parse_log, setup_klipper_import, ) from ..helpers.console_output import ConsoleOutput @@ -67,40 +64,19 @@ def configure(self, kinematics: str, accel: float, motor_config_parser: MotorsCo self._accel = accel self._motors: List[Motor] = motor_config_parser.get_motors() - def _archive_files(self, lognames: List[Path]) -> None: - tar_path = self._folder / f'{self._type}_{self._graph_date}.tar.gz' - with tarfile.open(tar_path, 'w:gz') as tar: - for csv_file in lognames: - tar.add(csv_file, arcname=csv_file.name, recursive=False) - csv_file.unlink() - - def create_graph(self) -> None: + def create_graph(self, 
measurements_manager: MeasurementsManager) -> None: if not self._accel or not self._kinematics: raise ValueError('accel and kinematics must be set to create the vibrations profile graph!') - lognames = self._move_and_prepare_files( - glob_pattern='shaketune-vib_*.csv', - min_files_required=None, - custom_name_func=lambda f: re.search(r'shaketune-vib_(.*?)_\d{8}_\d{6}', f.name).group(1), - ) fig = vibrations_profile( - lognames=[str(path) for path in lognames], + measurements=measurements_manager.get_measurements(), klipperdir=str(self._config.klipper_folder), kinematics=self._kinematics, accel=self._accel, st_version=self._version, motors=self._motors, ) - self._save_figure_and_cleanup(fig, lognames) - - def clean_old_files(self, keep_results: int = 3) -> None: - files = sorted(self._folder.glob('*.png'), key=lambda f: f.stat().st_mtime, reverse=True) - if len(files) <= keep_results: - return # No need to delete any files - for old_file in files[keep_results:]: - old_file.unlink() - tar_file = old_file.with_suffix('.tar.gz') - tar_file.unlink(missing_ok=True) + self._save_figure(fig, measurements_manager) ###################################################################### @@ -720,7 +696,7 @@ def extract_angle_and_speed(logname: str) -> Tuple[float, float]: def vibrations_profile( - lognames: List[str], + measurements: List[Measurement], klipperdir: str = '~/klipper', kinematics: str = 'cartesian', accel: Optional[float] = None, @@ -738,15 +714,17 @@ def vibrations_profile( else: raise ValueError('Only Cartesian, CoreXY and CoreXZ kinematics are supported by this tool at the moment!') - psds = defaultdict(lambda: defaultdict(list)) - psds_sum = defaultdict(lambda: defaultdict(list)) + psds = {} + psds_sum = {} target_freqs_initialized = False + target_freqs = None - for logname in lognames: - data = parse_log(logname) + for measurement in measurements: + data = np.array(measurement['samples']) if data is None: - continue # File is not in the expected format, skip it - angle, speed = extract_angle_and_speed(logname) + continue # Measurement data is not in the expected format or is empty, skip it + + angle, speed = extract_angle_and_speed(measurement['name']) freq_response = calc_freq_response(data) first_freqs = freq_response.freq_bins psd_sum = freq_response.psd_sum @@ -758,6 +736,11 @@ def vibrations_profile( psd_sum = psd_sum[first_freqs <= max_freq] first_freqs = first_freqs[first_freqs <= max_freq] + # Initialize the angle dictionary if it doesn't exist + if angle not in psds: + psds[angle] = {} + psds_sum[angle] = {} + # Store the interpolated PSD and integral values psds[angle][speed] = np.interp(target_freqs, first_freqs, psd_sum) psds_sum[angle][speed] = np.trapz(psd_sum, first_freqs) @@ -849,14 +832,16 @@ def vibrations_profile( 0.060, 0.965, title_line1, ha='left', va='bottom', fontsize=20, color=KLIPPAIN_COLORS['purple'], weight='bold' ) try: - filename_parts = (lognames[0].split('/')[-1]).split('_') - dt = datetime.strptime(f"{filename_parts[1]} {filename_parts[2].split('-')[0]}", '%Y%m%d %H%M%S') + filename_parts = measurements[0]['name'].split('_') + dt = datetime.strptime(f"{filename_parts[2]} {filename_parts[3].split('-')[0]}", '%Y%m%d %H%M%S') title_line2 = dt.strftime('%x %X') if accel is not None: title_line2 += ' at ' + str(accel) + ' mm/s² -- ' + kinematics.upper() + ' kinematics' except Exception: - ConsoleOutput.print(f'Warning: CSV filenames appear to be different than expected ({lognames[0]})') - title_line2 = lognames[0].split('/')[-1] + ConsoleOutput.print( + 
f'Warning: measurement names look to be different than expected ({measurements[0]["name"]})' + ) + title_line2 = measurements[0]['name'] fig.text(0.060, 0.957, title_line2, ha='left', va='top', fontsize=16, color=KLIPPAIN_COLORS['dark_purple']) # Add the motors infos to the top of the graph @@ -920,13 +905,27 @@ def main(): ) options, args = opts.parse_args() if len(args) < 1: - opts.error('No CSV file(s) to analyse') + opts.error('No measurements to analyse') if options.output is None: opts.error('You must specify an output file.png to use the script (option -o)') if options.kinematics not in {'cartesian', 'corexy', 'corexz'}: opts.error('Only cartesian, corexy and corexz kinematics are supported by this tool at the moment!') - fig = vibrations_profile(args, options.klipperdir, options.kinematics, options.accel, options.max_freq) + measurements_manager = MeasurementsManager() + if args[0].endswith('.csv'): + measurements_manager.load_from_csvs(args) + elif args[0].endswith('.stdata'): + measurements_manager.load_from_stdata(args[0]) + else: + raise ValueError('Only .stdata or legacy Klipper raw accelerometer CSV files are supported!') + + fig = vibrations_profile( + measurements_manager.get_measurements(), + options.klipperdir, + options.kinematics, + options.accel, + options.max_freq, + ) fig.savefig(options.output, dpi=150) diff --git a/shaketune/helpers/accelerometer.py b/shaketune/helpers/accelerometer.py new file mode 100644 index 0000000..5d11d90 --- /dev/null +++ b/shaketune/helpers/accelerometer.py @@ -0,0 +1,198 @@ +# Shake&Tune: 3D printer analysis tools +# +# Copyright (C) 2024 Félix Boisselier (Frix_x on Discord) +# Licensed under the GNU General Public License v3.0 (GPL-3.0) +# +# File: accelerometer.py +# Description: Provides a custom and internal Shake&Tune Accelerometer helper that interfaces +# with Klipper's accelerometer classes. It includes functions to start and stop +# accelerometer measurements. +# It also includes functions to load and save measurements from a file in a new +# compressed format (.stdata) or from the legacy Klipper CSV files. 
+ + +import os +import pickle +import time +from multiprocessing import Process +from pathlib import Path +from typing import List, Tuple, TypedDict + +import numpy as np +import zstandard as zstd + +from ..helpers.console_output import ConsoleOutput + +Sample = Tuple[float, float, float, float] +SamplesList = List[Sample] + + +class Measurement(TypedDict): + name: str + samples: SamplesList + + +class MeasurementsManager: + def __init__(self): + self.measurements: List[Measurement] = [] + self._write_process = None + + def add_measurement(self, name: str, samples: SamplesList = None): + samples = samples if samples is not None else [] + self.measurements.append({'name': name, 'samples': samples}) + + def get_measurements(self) -> List[Measurement]: + return self.measurements + + def append_samples_to_last_measurement(self, additional_samples: SamplesList): + try: + self.measurements[-1]['samples'].extend(additional_samples) + except IndexError as err: + raise ValueError('no measurements available to append samples to.') from err + + def clear_measurements(self): + self.measurements = [] + + def save_stdata(self, filename: Path): + self._write_process = Process(target=self._save_to_file, args=(filename,)) + self._write_process.daemon = True + self._write_process.start() + + def _save_to_file(self, filename: Path): + try: + os.nice(19) + except Exception: + pass # Ignore errors as it's not critical + try: + with open(filename, 'wb') as f: + cctx = zstd.ZstdCompressor(level=3) + with cctx.stream_writer(f) as compressor: + pickle.dump(self.measurements, compressor) + except Exception as e: + ConsoleOutput.print(f'Warning: unable to save the measurements to {filename}: {e}') + + def load_from_stdata(self, filename: Path) -> List[Measurement]: + with open(filename, 'rb') as f: + dctx = zstd.ZstdDecompressor() + with dctx.stream_reader(f) as decompressor: + self.measurements = pickle.load(decompressor) + return self.measurements + + def load_from_csvs(self, klipper_CSVs: List[Path]) -> List[Measurement]: + for logname in klipper_CSVs: + try: + if logname.suffix != '.csv': + ConsoleOutput.print(f'Warning: {logname} is not a CSV file. It will be ignored by Shake&Tune!') + continue + with open(logname) as f: + header = None + for line in f: + cleaned_line = line.strip() + # Check for a PSD file generated by Klipper and raise a warning + if cleaned_line.startswith('#freq,psd_x,psd_y,psd_z,psd_xyz'): + ConsoleOutput.print( + f'Warning: {logname} does not contain raw Klipper accelerometer data. ' + 'Please use the official Klipper script to process it instead. ' + ) + continue + # Check for the expected legacy header used in Shake&Tune (raw accelerometer data from Klipper) + elif cleaned_line.startswith('#time,accel_x,accel_y,accel_z'): + header = cleaned_line + break + if not header: + ConsoleOutput.print( + f"Warning: file {logname} doesn't seem to be a Klipper raw accelerometer data file. " + f"Expected '#time,accel_x,accel_y,accel_z', but got '{header.strip()}'. " + 'This file will be ignored by Shake&Tune!' + ) + continue + # If we have the correct raw data header, proceed to load the data + data = np.loadtxt(logname, comments='#', delimiter=',', skiprows=1) + if data.ndim == 1 or data.shape[1] != 4: + ConsoleOutput.print( + f'Warning: {logname} does not have the correct data format; expected 4 columns. ' + 'It will be ignored by Shake&Tune!' 
+ ) + continue + + # Add the parsed klipper raw accelerometer data to Shake&Tune measurements object + samples = [tuple(row) for row in data] + self.add_measurement(name=logname.stem, samples=samples) + except Exception as err: + ConsoleOutput.print(f'Error while reading {logname}: {err}. It will be ignored by Shake&Tune!') + continue + + return self.measurements + + def wait_for_file_writes(self, k_reactor, timeout: int = 20): + if self._write_process is None: + return # No file write is pending + + eventtime = k_reactor.monotonic() + endtime = eventtime + timeout + complete = False + + while eventtime < endtime: + eventtime = k_reactor.pause(eventtime + 0.05) + if not self._write_process.is_alive(): + complete = True + break + + if not complete: + raise TimeoutError( + 'Shake&Tune was unable to write the accelerometer data into the archive file. ' + 'This might be due to a slow SD card or a busy or full filesystem.' + ) + + self._write_process = None + + +class Accelerometer: + def __init__(self, klipper_accelerometer): + self._k_accelerometer = klipper_accelerometer + self._bg_client = None + self._measurements_manager: MeasurementsManager = None + + @staticmethod + def find_axis_accelerometer(printer, axis: str = 'xy'): + accel_chip_names = printer.lookup_object('resonance_tester').accel_chip_names + for chip_axis, chip_name in accel_chip_names: + if axis in {'x', 'y'} and chip_axis == 'xy': + return chip_name + elif chip_axis == axis: + return chip_name + return None + + def start_recording(self, measurements_manager: MeasurementsManager, name: str = None, append_time: bool = True): + if self._bg_client is None: + self._bg_client = self._k_accelerometer.start_internal_client() + + timestamp = time.strftime('%Y%m%d_%H%M%S') + if name is None: + name = timestamp + elif append_time: + name += f'_{timestamp}' + + if not name.replace('-', '').replace('_', '').isalnum(): + raise ValueError('invalid measurement name!') + + self._measurements_manager = measurements_manager + self._measurements_manager.add_measurement(name=name) + else: + raise ValueError('Recording already started!') + + def stop_recording(self) -> MeasurementsManager: + if self._bg_client is None: + ConsoleOutput.print('Warning: no recording to stop!') + return None + + bg_client = self._bg_client + self._bg_client = None + bg_client.finish_measurements() + + samples = bg_client.samples or bg_client.get_samples() + self._measurements_manager.append_samples_to_last_measurement(samples) + m_manager = self._measurements_manager + self._measurements_manager = None + + return m_manager diff --git a/shaketune/helpers/common_func.py b/shaketune/helpers/common_func.py index 49df45a..3e69a45 100644 --- a/shaketune/helpers/common_func.py +++ b/shaketune/helpers/common_func.py @@ -17,8 +17,6 @@ import numpy as np from scipy.signal import spectrogram -from .console_output import ConsoleOutput - # Constant used to define the standard axis direction and names AXIS_CONFIG = [ {'axis': 'x', 'direction': (1, 0, 0), 'label': 'axis_X'}, @@ -30,50 +28,6 @@ ] -def parse_log(logname): - try: - with open(logname) as f: - header = None - for line in f: - cleaned_line = line.strip() - - # Check for a PSD file generated by Klipper and raise a warning - if cleaned_line.startswith('#freq,psd_x,psd_y,psd_z,psd_xyz'): - ConsoleOutput.print( - f'Warning: {logname} does not contain raw accelerometer data. ' - 'Please use the official Klipper script to process it instead. ' - 'It will be ignored by Shake&Tune!' 
- ) - return None - - # Check for the expected header for Shake&Tune (raw accelerometer data from Klipper) - elif cleaned_line.startswith('#time,accel_x,accel_y,accel_z'): - header = cleaned_line - break - - if not header: - ConsoleOutput.print( - f'Warning: file {logname} has an incorrect header and will be ignored by Shake&Tune!\n' - f"Expected '#time,accel_x,accel_y,accel_z', but got '{header.strip()}'." - ) - return None - - # If we have the correct raw data header, proceed to load the data - data = np.loadtxt(logname, comments='#', delimiter=',', skiprows=1) - if data.ndim == 1 or data.shape[1] != 4: - ConsoleOutput.print( - f'Warning: {logname} does not have the correct data format; expected 4 columns. ' - 'It will be ignored by Shake&Tune!' - ) - return None - - return data - - except Exception as err: - ConsoleOutput.print(f'Error while reading {logname}: {err}. It will be ignored by Shake&Tune!') - return None - - def setup_klipper_import(kdir): kdir = os.path.expanduser(kdir) sys.path.append(os.path.join(kdir, 'klippy')) diff --git a/shaketune/shaketune.py b/shaketune/shaketune.py index 3ac88a2..a8e60a9 100644 --- a/shaketune/shaketune.py +++ b/shaketune/shaketune.py @@ -32,7 +32,7 @@ DEFAULT_FOLDER = '~/printer_data/config/ShakeTune_results' DEFAULT_NUMBER_OF_RESULTS = 3 -DEFAULT_KEEP_RAW_CSV = False +DEFAULT_KEEP_RAW_DATA = False DEFAULT_DPI = 150 DEFAULT_TIMEOUT = 600 DEFAULT_SHOW_MACROS = True @@ -78,9 +78,9 @@ def _initialize_config(self, config) -> None: result_folder = config.get('result_folder', default=DEFAULT_FOLDER) result_folder_path = Path(result_folder).expanduser() if result_folder else None keep_n_results = config.getint('number_of_results_to_keep', default=DEFAULT_NUMBER_OF_RESULTS, minval=0) - keep_csv = config.getboolean('keep_raw_csv', default=DEFAULT_KEEP_RAW_CSV) + keep_raw_data = config.getboolean('keep_raw_data', default=DEFAULT_KEEP_RAW_DATA) dpi = config.getint('dpi', default=DEFAULT_DPI, minval=100, maxval=500) - self._st_config = ShakeTuneConfig(result_folder_path, keep_n_results, keep_csv, dpi) + self._st_config = ShakeTuneConfig(result_folder_path, keep_n_results, keep_raw_data, dpi) self.timeout = config.getfloat('timeout', DEFAULT_TIMEOUT, above=0.0) self._show_macros = config.getboolean('show_macros_in_webui', default=DEFAULT_SHOW_MACROS) diff --git a/shaketune/shaketune_config.py b/shaketune/shaketune_config.py index 57b19a8..d10cfd1 100644 --- a/shaketune/shaketune_config.py +++ b/shaketune/shaketune_config.py @@ -26,12 +26,16 @@ class ShakeTuneConfig: def __init__( - self, result_folder: Path = RESULTS_BASE_FOLDER, keep_n_results: int = 3, keep_csv: bool = False, dpi: int = 150 + self, + result_folder: Path = RESULTS_BASE_FOLDER, + keep_n_results: int = 3, + keep_raw_data: bool = False, + dpi: int = 150, ) -> None: self._result_folder = result_folder self.keep_n_results = keep_n_results - self.keep_csv = keep_csv + self.keep_raw_data = keep_raw_data self.dpi = dpi self.klipper_folder = KLIPPER_FOLDER diff --git a/shaketune/shaketune_process.py b/shaketune/shaketune_process.py index 45012e8..2fd4aa1 100644 --- a/shaketune/shaketune_process.py +++ b/shaketune/shaketune_process.py @@ -14,6 +14,7 @@ from multiprocessing import Process from typing import Optional +from .helpers.accelerometer import MeasurementsManager from .helpers.console_output import ConsoleOutput from .shaketune_config import ShakeTuneConfig @@ -29,9 +30,12 @@ def __init__(self, st_config: ShakeTuneConfig, reactor, graph_creator, timeout: def get_graph_creator(self): return 
self.graph_creator - def run(self) -> None: + def run(self, measurements_manager: MeasurementsManager) -> None: # Start the target function in a new process (a thread is known to cause issues with Klipper and CANbus due to the GIL) - self._process = Process(target=self._shaketune_process_wrapper, args=(self.graph_creator, self._timeout)) + self._process = Process( + target=self._shaketune_process_wrapper, + args=(self.graph_creator, measurements_manager, self._timeout), + ) self._process.start() def wait_for_completion(self) -> None: @@ -50,7 +54,7 @@ def wait_for_completion(self) -> None: # This function is a simple wrapper to start the Shake&Tune process. It's needed in order to get the timeout # as a Timer in a thread INSIDE the Shake&Tune child process to not interfere with the main Klipper process - def _shaketune_process_wrapper(self, graph_creator, timeout) -> None: + def _shaketune_process_wrapper(self, graph_creator, measurements_manager: MeasurementsManager, timeout) -> None: if timeout is not None: # Add 5 seconds to the timeout for safety. The goal is to avoid the Timer to finish before the # Shake&Tune process is done in case we call the wait_for_completion() function that uses Klipper's reactor. @@ -58,7 +62,7 @@ def _shaketune_process_wrapper(self, graph_creator, timeout) -> None: timer = threading.Timer(timeout, self._handle_timeout) timer.start() try: - self._shaketune_process(graph_creator) + self._shaketune_process(graph_creator, measurements_manager) finally: if timeout is not None: timer.cancel() @@ -67,7 +71,7 @@ def _handle_timeout(self) -> None: ConsoleOutput.print('Timeout: Shake&Tune computation did not finish within the specified timeout!') os._exit(1) # Forcefully exit the process - def _shaketune_process(self, graph_creator) -> None: + def _shaketune_process(self, graph_creator, m_manager: MeasurementsManager) -> None: # Reducing Shake&Tune process priority by putting the scheduler into batch mode with low priority. This in order to avoid # slowing down the main Klipper process as this can lead to random "Timer too close" or "Move queue overflow" errors # when also already running CANbus, neopixels and other consumming stuff in Klipper's main process. @@ -81,9 +85,13 @@ def _shaketune_process(self, graph_creator) -> None: for folder in self._config.get_results_subfolders(): folder.mkdir(parents=True, exist_ok=True) + if m_manager.get_measurements() is None or len(m_manager.get_measurements()) == 0: + ConsoleOutput.print('Error: no measurements available to create the graphs!') + return + # Generate the graphs try: - graph_creator.create_graph() + graph_creator.create_graph(m_manager) except FileNotFoundError as e: ConsoleOutput.print(f'FileNotFound error: {e}') return
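Finally, the new .stdata archives written by MeasurementsManager.save_stdata() are simply a zstandard-compressed pickle of the measurements list, so they can be inspected or replayed offline. A minimal sketch, assuming the zstandard package from requirements.txt is installed (the file name is hypothetical):

```python
import pickle
import zstandard as zstd

# Read back a Shake&Tune .stdata archive (mirrors MeasurementsManager.load_from_stdata)
with open('beltscomparison_20240101_103000.stdata', 'rb') as f:  # hypothetical file name
    dctx = zstd.ZstdDecompressor()
    with dctx.stream_reader(f) as decompressor:
        measurements = pickle.load(decompressor)

for m in measurements:
    # Each sample is a (time, accel_x, accel_y, accel_z) tuple
    print(m['name'], len(m['samples']), 'samples')
```

The same archive can also be fed back to the standalone graph creator scripts, whose main() entry points now accept either legacy Klipper raw accelerometer CSVs or a single .stdata file.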