Run ShakeTune as an in-process Klipper module (#100)
* feat: Run ShakeTune as an in-process Klipper module
* feat: install the shaketune dependencies into the Klipper venv
* refactor: replace print_with_c_locale with the Klipper console output (with a stdout fallback)
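Usage note (inferred from the `load_config()` hook and the `SHAKETUNE_POSTPROCESS` command registered in `shaketune/__init__.py` below; the macro side is not part of this diff): the module is enabled by adding a `[shaketune]` section to `printer.cfg`, and post-processing is then triggered from G-code with something like `SHAKETUNE_POSTPROCESS PARAMS="-t shaper --scv 5.0"`, where the `PARAMS` string is parsed with the same argparse options as the previous standalone script.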
shaketune/__init__.py (new file, 475 lines)
@@ -0,0 +1,475 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
############################################
|
||||
###### INPUT SHAPER KLIPPAIN WORKFLOW ######
|
||||
############################################
|
||||
# Written by Frix_x#0161 #
|
||||
|
||||
# This script is designed to be run from inside the Klipper console:
# use the provided Shake&Tune macros instead of calling it directly!
|
||||
|
||||
|
||||
import abc
|
||||
import argparse
|
||||
import os
|
||||
import shutil
|
||||
import tarfile
|
||||
import threading
|
||||
import traceback
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import Callable, List, Optional
|
||||
|
||||
from matplotlib.figure import Figure
|
||||
|
||||
from .graph_creators.analyze_axesmap import axesmap_calibration
|
||||
from .graph_creators.graph_belts import belts_calibration
|
||||
from .graph_creators.graph_shaper import shaper_calibration
|
||||
from .graph_creators.graph_vibrations import vibrations_profile
|
||||
from .helpers import filemanager as fm
|
||||
from .helpers.motorlogparser import MotorLogParser
|
||||
from .helpers.console_output import ConsoleOutput
|
||||
|
||||
|
||||
class Config:
|
||||
KLIPPER_FOLDER = Path.home() / 'klipper'
|
||||
KLIPPER_LOG_FOLDER = Path.home() / 'printer_data/logs'
|
||||
RESULTS_BASE_FOLDER = Path.home() / 'printer_data/config/K-ShakeTune_results'
|
||||
RESULTS_SUBFOLDERS = {'belts': 'belts', 'shaper': 'inputshaper', 'vibrations': 'vibrations'}
|
||||
|
||||
@staticmethod
|
||||
def get_results_folder(type: str) -> Path:
|
||||
return Config.RESULTS_BASE_FOLDER / Config.RESULTS_SUBFOLDERS[type]
|
||||
|
||||
@staticmethod
|
||||
def get_git_version() -> str:
|
||||
try:
|
||||
from git import GitCommandError, Repo
|
||||
|
||||
# Get the absolute path of the script, resolving any symlinks
|
||||
# Then go up one parent directory to reach the git root folder
|
||||
script_path = Path(__file__).resolve()
|
||||
repo_path = script_path.parents[1]
|
||||
repo = Repo(repo_path)
|
||||
try:
|
||||
version = repo.git.describe('--tags')
|
||||
except GitCommandError:
|
||||
version = repo.head.commit.hexsha[:7] # If no tag is found, use the simplified commit SHA instead
|
||||
return version
|
||||
except Exception as e:
|
||||
ConsoleOutput.print(f'Warning: unable to retrieve Shake&Tune version number: {e}')
|
||||
return 'unknown'
|
||||
|
||||
@staticmethod
|
||||
def parse_arguments(params: Optional[List] = None) -> argparse.Namespace:
|
||||
parser = argparse.ArgumentParser(description='Shake&Tune graphs generation script')
|
||||
parser.add_argument(
|
||||
'-t',
|
||||
'--type',
|
||||
dest='type',
|
||||
choices=['belts', 'shaper', 'vibrations', 'axesmap'],
|
||||
required=True,
|
||||
help='Type of output graph to produce',
|
||||
)
|
||||
parser.add_argument(
|
||||
'--accel',
|
||||
type=int,
|
||||
default=None,
|
||||
dest='accel_used',
|
||||
help='Acceleration used for vibrations profile creation or axes map calibration',
|
||||
)
|
||||
parser.add_argument(
|
||||
'--chip_name',
|
||||
type=str,
|
||||
default='adxl345',
|
||||
dest='chip_name',
|
||||
help='Accelerometer chip name used for vibrations profile creation or axes map calibration',
|
||||
)
|
||||
parser.add_argument(
|
||||
'--max_smoothing',
|
||||
type=float,
|
||||
default=None,
|
||||
dest='max_smoothing',
|
||||
help='Maximum smoothing to allow for input shaper filter recommendations',
|
||||
)
|
||||
parser.add_argument(
|
||||
'--scv',
|
||||
'--square_corner_velocity',
|
||||
type=float,
|
||||
default=5.0,
|
||||
dest='scv',
|
||||
help='Square corner velocity used to compute max accel for input shaper filter recommendations',
|
||||
)
|
||||
parser.add_argument(
|
||||
'-m',
|
||||
'--kinematics',
|
||||
dest='kinematics',
|
||||
default='cartesian',
|
||||
choices=['cartesian', 'corexy'],
|
||||
help='Machine kinematics configuration used for the vibrations profile creation',
|
||||
)
|
||||
parser.add_argument(
|
||||
'--metadata',
|
||||
type=str,
|
||||
default=None,
|
||||
dest='metadata',
|
||||
help='Motor configuration metadata printed on the vibrations profiles',
|
||||
)
|
||||
parser.add_argument(
|
||||
'-c',
|
||||
'--keep_csv',
|
||||
action='store_true',
|
||||
default=False,
|
||||
dest='keep_csv',
|
||||
help='Whether to keep the raw CSV files after processing in addition to the PNG graphs',
|
||||
)
|
||||
parser.add_argument(
|
||||
'-n',
|
||||
'--keep_results',
|
||||
type=int,
|
||||
default=3,
|
||||
dest='keep_results',
|
||||
help='Number of results to keep in the result folder after each run of the script',
|
||||
)
|
||||
parser.add_argument('--dpi', type=int, default=150, dest='dpi', help='DPI of the output PNG files')
|
||||
parser.add_argument('-v', '--version', action='version', version=f'Shake&Tune {Config.get_git_version()}')
|
||||
|
||||
return parser.parse_args(params)
|
||||
|
||||
|
||||
class GraphCreator(abc.ABC):
|
||||
def __init__(self, keep_csv: bool, dpi: int):
|
||||
self._keep_csv = keep_csv
|
||||
self._dpi = dpi
|
||||
|
||||
self._graph_date = datetime.now().strftime('%Y%m%d_%H%M%S')
|
||||
self._version = Config.get_git_version()
|
||||
|
||||
self._type = None
|
||||
self._folder = None
|
||||
|
||||
def _setup_folder(self, graph_type: str) -> None:
|
||||
self._type = graph_type
|
||||
self._folder = Config.get_results_folder(graph_type)
|
||||
|
||||
def _move_and_prepare_files(
|
||||
self,
|
||||
glob_pattern: str,
|
||||
min_files_required: Optional[int] = None,
|
||||
custom_name_func: Optional[Callable[[Path], str]] = None,
|
||||
) -> list[Path]:
|
||||
tmp_path = Path('/tmp')
|
||||
globbed_files = list(tmp_path.glob(glob_pattern))
|
||||
|
||||
# If min_files_required is not set, use the number of globbed files as the minimum
|
||||
min_files_required = min_files_required or len(globbed_files)
|
||||
|
||||
if not globbed_files:
|
||||
raise FileNotFoundError(f'no CSV files found in the /tmp folder to create the {self._type} graphs!')
|
||||
if len(globbed_files) < min_files_required:
|
||||
raise FileNotFoundError(f'{min_files_required} CSV files are needed to create the {self._type} graphs!')
|
||||
|
||||
lognames = []
|
||||
for filename in sorted(globbed_files, key=lambda f: f.stat().st_mtime, reverse=True)[:min_files_required]:
|
||||
fm.wait_file_ready(filename)
|
||||
custom_name = custom_name_func(filename) if custom_name_func else filename.name
|
||||
new_file = self._folder / f'{self._type}_{self._graph_date}_{custom_name}.csv'
|
||||
# shutil.move() is needed to move the file across filesystems (mainly for BTT CB1 Pi default OS image)
|
||||
shutil.move(filename, new_file)
|
||||
fm.wait_file_ready(new_file)
|
||||
lognames.append(new_file)
|
||||
return lognames
|
||||
|
||||
def _save_figure_and_cleanup(self, fig: Figure, lognames: list[Path], axis_label: Optional[str] = None) -> None:
|
||||
axis_suffix = f'_{axis_label}' if axis_label else ''
|
||||
png_filename = self._folder / f'{self._type}_{self._graph_date}{axis_suffix}.png'
|
||||
fig.savefig(png_filename, dpi=self._dpi)
|
||||
|
||||
if self._keep_csv:
|
||||
self._archive_files(lognames)
|
||||
else:
|
||||
self._remove_files(lognames)
|
||||
|
||||
def _archive_files(self, _: list[Path]) -> None:
|
||||
return
|
||||
|
||||
def _remove_files(self, lognames: list[Path]) -> None:
|
||||
for csv in lognames:
|
||||
csv.unlink(missing_ok=True)
|
||||
|
||||
@abc.abstractmethod
|
||||
def create_graph(self) -> None:
|
||||
pass
|
||||
|
||||
@abc.abstractmethod
|
||||
def clean_old_files(self, keep_results: int) -> None:
|
||||
pass
|
||||
|
||||
|
||||
class BeltsGraphCreator(GraphCreator):
|
||||
def __init__(self, keep_csv: bool = False, dpi: int = 150):
|
||||
super().__init__(keep_csv, dpi)
|
||||
|
||||
self._setup_folder('belts')
|
||||
|
||||
def create_graph(self) -> None:
|
||||
lognames = self._move_and_prepare_files(
|
||||
glob_pattern='raw_data_axis*.csv',
|
||||
min_files_required=2,
|
||||
custom_name_func=lambda f: f.stem.split('_')[3].upper(),
|
||||
)
|
||||
fig = belts_calibration(
|
||||
lognames=[str(path) for path in lognames],
|
||||
klipperdir=str(Config.KLIPPER_FOLDER),
|
||||
st_version=self._version,
|
||||
)
|
||||
self._save_figure_and_cleanup(fig, lognames)
|
||||
|
||||
def clean_old_files(self, keep_results: int = 3) -> None:
|
||||
# Get all PNG files in the directory as a list of Path objects
|
||||
files = sorted(self._folder.glob('*.png'), key=lambda f: f.stat().st_mtime, reverse=True)
|
||||
|
||||
if len(files) <= keep_results:
|
||||
return # No need to delete any files
|
||||
|
||||
# Delete the older files
|
||||
for old_file in files[keep_results:]:
|
||||
file_date = '_'.join(old_file.stem.split('_')[1:3])
|
||||
for suffix in ['A', 'B']:
|
||||
csv_file = self._folder / f'belts_{file_date}_{suffix}.csv'
|
||||
csv_file.unlink(missing_ok=True)
|
||||
old_file.unlink()
|
||||
|
||||
|
||||
class ShaperGraphCreator(GraphCreator):
|
||||
def __init__(self, keep_csv: bool = False, dpi: int = 150):
|
||||
super().__init__(keep_csv, dpi)
|
||||
|
||||
self._max_smoothing = None
|
||||
self._scv = None
|
||||
|
||||
self._setup_folder('shaper')
|
||||
|
||||
def configure(self, scv: float, max_smoothing: Optional[float] = None) -> None:
|
||||
self._scv = scv
|
||||
self._max_smoothing = max_smoothing
|
||||
|
||||
def create_graph(self) -> None:
|
||||
if not self._scv:
|
||||
raise ValueError('scv must be set to create the input shaper graph!')
|
||||
|
||||
lognames = self._move_and_prepare_files(
|
||||
glob_pattern='raw_data*.csv',
|
||||
min_files_required=1,
|
||||
custom_name_func=lambda f: f.stem.split('_')[3].upper(),
|
||||
)
|
||||
fig = shaper_calibration(
|
||||
lognames=[str(path) for path in lognames],
|
||||
klipperdir=str(Config.KLIPPER_FOLDER),
|
||||
max_smoothing=self._max_smoothing,
|
||||
scv=self._scv,
|
||||
st_version=self._version,
|
||||
)
|
||||
self._save_figure_and_cleanup(fig, lognames, lognames[0].stem.split('_')[-1])
|
||||
|
||||
def clean_old_files(self, keep_results: int = 3) -> None:
|
||||
# Get all PNG files in the directory as a list of Path objects
|
||||
files = sorted(self._folder.glob('*.png'), key=lambda f: f.stat().st_mtime, reverse=True)
|
||||
|
||||
if len(files) <= 2 * keep_results:
|
||||
return # No need to delete any files
|
||||
|
||||
# Delete the older files
|
||||
for old_file in files[2 * keep_results :]:
|
||||
csv_file = old_file.with_suffix('.csv')
|
||||
csv_file.unlink(missing_ok=True)
|
||||
old_file.unlink()
|
||||
|
||||
|
||||
class VibrationsGraphCreator(GraphCreator):
|
||||
def __init__(self, keep_csv: bool = False, dpi: int = 150):
|
||||
super().__init__(keep_csv, dpi)
|
||||
|
||||
self._kinematics = None
|
||||
self._accel = None
|
||||
self._chip_name = None
|
||||
self._motors = None
|
||||
|
||||
self._setup_folder('vibrations')
|
||||
|
||||
def configure(self, kinematics: str, accel: float, chip_name: str, metadata: str) -> None:
|
||||
self._kinematics = kinematics
|
||||
self._accel = accel
|
||||
self._chip_name = chip_name
|
||||
|
||||
parser = MotorLogParser(Config.KLIPPER_LOG_FOLDER / 'klippy.log', metadata)
|
||||
self._motors = parser.get_motors()
|
||||
|
||||
def _archive_files(self, lognames: list[Path]) -> None:
|
||||
tar_path = self._folder / f'{self._type}_{self._graph_date}.tar.gz'
|
||||
with tarfile.open(tar_path, 'w:gz') as tar:
|
||||
for csv_file in lognames:
|
||||
tar.add(csv_file, arcname=csv_file.name, recursive=False)
|
||||
|
||||
def create_graph(self) -> None:
|
||||
if not self._accel or not self._chip_name or not self._kinematics:
|
||||
raise ValueError('accel, chip_name and kinematics must be set to create the vibrations profile graph!')
|
||||
|
||||
lognames = self._move_and_prepare_files(
|
||||
glob_pattern=f'{self._chip_name}-*.csv',
|
||||
min_files_required=None,
|
||||
custom_name_func=lambda f: f.name.replace(self._chip_name, self._type),
|
||||
)
|
||||
fig = vibrations_profile(
|
||||
lognames=[str(path) for path in lognames],
|
||||
klipperdir=str(Config.KLIPPER_FOLDER),
|
||||
kinematics=self._kinematics,
|
||||
accel=self._accel,
|
||||
st_version=self._version,
|
||||
motors=self._motors,
|
||||
)
|
||||
self._save_figure_and_cleanup(fig, lognames)
|
||||
|
||||
def clean_old_files(self, keep_results: int = 3) -> None:
|
||||
# Get all PNG files in the directory as a list of Path objects
|
||||
files = sorted(self._folder.glob('*.png'), key=lambda f: f.stat().st_mtime, reverse=True)
|
||||
|
||||
if len(files) <= keep_results:
|
||||
return # No need to delete any files
|
||||
|
||||
# Delete the older files
|
||||
for old_file in files[keep_results:]:
|
||||
old_file.unlink()
|
||||
tar_file = old_file.with_suffix('.tar.gz')
|
||||
tar_file.unlink(missing_ok=True)
|
||||
|
||||
|
||||
class AxesMapFinder(GraphCreator):
|
||||
def __init__(self, keep_csv: bool = False, dpi: int = 150):
|
||||
super().__init__(keep_csv, dpi)
|
||||
|
||||
self._graph_date = datetime.now().strftime('%Y%m%d_%H%M%S')
|
||||
self._type = 'axesmap'
|
||||
self._folder = Config.RESULTS_BASE_FOLDER
|
||||
|
||||
self._accel = None
|
||||
self._chip_name = None
|
||||
|
||||
def configure(self, accel: int, chip_name: str) -> None:
|
||||
self._accel = accel
|
||||
self._chip_name = chip_name
|
||||
|
||||
def find_axesmap(self) -> None:
|
||||
tmp_folder = Path('/tmp')
|
||||
globbed_files = list(tmp_folder.glob(f'{self._chip_name}-*.csv'))
|
||||
|
||||
if not globbed_files:
|
||||
raise FileNotFoundError('no CSV files found in the /tmp folder to find the axes map!')
|
||||
|
||||
# Find the CSV file with the latest timestamp and wait for it to be released by Klipper
|
||||
logname = sorted(globbed_files, key=lambda f: f.stat().st_mtime, reverse=True)[0]
|
||||
fm.wait_file_ready(logname)
|
||||
|
||||
results = axesmap_calibration(
|
||||
lognames=[str(logname)],
|
||||
accel=self._accel,
|
||||
)
|
||||
|
||||
result_filename = self._folder / f'{self._type}_{self._graph_date}.txt'
|
||||
with result_filename.open('w') as f:
|
||||
f.write(results)
|
||||
|
||||
ConsoleOutput.print(f'Detected axes_map: {results}')
|
||||
|
||||
def create_graph(self) -> None:
|
||||
self.find_axesmap()
|
||||
|
||||
def clean_old_files(self, keep_results: int) -> None:
|
||||
pass
|
||||
|
||||
|
||||
def create_graph(options: argparse.Namespace) -> None:
|
||||
fm.ensure_folders_exist(
|
||||
folders=[Config.RESULTS_BASE_FOLDER / subfolder for subfolder in Config.RESULTS_SUBFOLDERS.values()]
|
||||
)
|
||||
|
||||
ConsoleOutput.print(f'Shake&Tune version: {Config.get_git_version()}')
|
||||
|
||||
graph_creators = {
|
||||
'belts': (BeltsGraphCreator, None),
|
||||
'shaper': (ShaperGraphCreator, lambda gc: gc.configure(options.scv, options.max_smoothing)),
|
||||
'vibrations': (
|
||||
VibrationsGraphCreator,
|
||||
lambda gc: gc.configure(options.kinematics, options.accel_used, options.chip_name, options.metadata),
|
||||
),
|
||||
'axesmap': (AxesMapFinder, lambda gc: gc.configure(options.accel_used, options.chip_name)),
|
||||
}
|
||||
|
||||
creator_info = graph_creators.get(options.type)
|
||||
if not creator_info:
|
||||
ConsoleOutput.print('Error: invalid graph type specified!')
|
||||
return
|
||||
|
||||
# Instantiate the graph creator
|
||||
graph_creator_class, configure_func = creator_info
|
||||
graph_creator = graph_creator_class(options.keep_csv, options.dpi)
|
||||
|
||||
# Configure it if needed
|
||||
if configure_func:
|
||||
configure_func(graph_creator)
|
||||
|
||||
# And then run it
|
||||
try:
|
||||
graph_creator.create_graph()
|
||||
except FileNotFoundError as e:
|
||||
ConsoleOutput.print(f'FileNotFound error: {e}')
|
||||
return
|
||||
except TimeoutError as e:
|
||||
ConsoleOutput.print(f'Timeout error: {e}')
|
||||
return
|
||||
except Exception as e:
|
||||
ConsoleOutput.print(f'Error while generating the graphs: {e}\n{traceback.format_exc()}')
|
||||
return
|
||||
|
||||
ConsoleOutput.print(f'{options.type} graphs created successfully!')
|
||||
graph_creator.clean_old_files(options.keep_results)
|
||||
ConsoleOutput.print(f'Cleaned output folder to keep only the last {options.keep_results} results!')
|
||||
|
||||
|
||||
class ShakeTune:
|
||||
def __init__(self, config) -> None:
|
||||
self._printer = config.get_printer()
|
||||
self._gcode = self._printer.lookup_object('gcode')
|
||||
self.timeout = config.getfloat('timeout', 2.0, above=0.0)
|
||||
|
||||
ConsoleOutput.register_output_callback(self._gcode.respond_info)
|
||||
|
||||
self._gcode.register_command(
|
||||
'SHAKETUNE_POSTPROCESS',
|
||||
self.cmd_SHAKETUNE_POSTPROCESS,
|
||||
desc='Post process data for ShakeTune graph creation',
|
||||
)
|
||||
|
||||
def shaketune_thread(self, options):
|
||||
try:
|
||||
os.nice(20)
|
||||
except Exception:
|
||||
ConsoleOutput.print('Failed to reduce the ShakeTune thread priority, continuing.')
|
||||
create_graph(options)
|
||||
|
||||
def cmd_SHAKETUNE_POSTPROCESS(self, gcmd) -> None:
|
||||
options = Config.parse_arguments(gcmd.get('PARAMS').split())
|
||||
t = threading.Thread(target=self.shaketune_thread, args=(options,))
|
||||
t.start()
|
||||
|
||||
reactor = self._printer.get_reactor()
|
||||
event_time = reactor.monotonic()
|
||||
end_time = event_time + self.timeout
|
||||
while event_time < end_time:
|
||||
event_time = reactor.pause(event_time + 0.05)
|
||||
if not t.is_alive():
|
||||
break
|
||||
|
||||
|
||||
def load_config(config) -> ShakeTune:
|
||||
return ShakeTune(config)
shaketune/__main__.py (new file, 10 lines)
@@ -0,0 +1,10 @@
|
||||
from . import Config, create_graph
|
||||
|
||||
|
||||
def main() -> None:
|
||||
options = Config.parse_arguments()
|
||||
create_graph(options)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
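For reference, the new `__main__.py` keeps the package usable outside of Klipper (e.g. `python3 -m shaketune -t belts`). Below is a minimal sketch of the equivalent programmatic call, assuming the package is importable and the matching raw CSV files are already present in `/tmp` (the option values are illustrative):

```python
# Hypothetical standalone usage, equivalent to `python3 -m shaketune -t belts --dpi 150`
from shaketune import Config, create_graph

# Same argparse options as the SHAKETUNE_POSTPROCESS PARAMS string
options = Config.parse_arguments(['-t', 'belts', '--dpi', '150'])
create_graph(options)
```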
shaketune/graph_creators/__init__.py (new empty file)
shaketune/graph_creators/analyze_axesmap.py (new file, 154 lines)
@@ -0,0 +1,154 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
######################################
|
||||
###### AXE_MAP DETECTION SCRIPT ######
|
||||
######################################
|
||||
# Written by Frix_x#0161 #
|
||||
|
||||
import optparse
|
||||
|
||||
import numpy as np
|
||||
from scipy.signal import butter, filtfilt
|
||||
|
||||
from ..helpers.console_output import ConsoleOutput
|
||||
|
||||
NUM_POINTS = 500
|
||||
|
||||
|
||||
######################################################################
|
||||
# Computation
|
||||
######################################################################
|
||||
|
||||
|
||||
def accel_signal_filter(data, cutoff=2, fs=100, order=5):
|
||||
nyq = 0.5 * fs
|
||||
normal_cutoff = cutoff / nyq
|
||||
b, a = butter(order, normal_cutoff, btype='low', analog=False)
|
||||
filtered_data = filtfilt(b, a, data)
|
||||
filtered_data -= np.mean(filtered_data)
|
||||
return filtered_data
|
||||
|
||||
|
||||
def find_first_spike(data):
|
||||
min_index, max_index = np.argmin(data), np.argmax(data)
|
||||
return ('-', min_index) if min_index < max_index else ('', max_index)
|
||||
|
||||
|
||||
def get_movement_vector(data, start_idx, end_idx):
|
||||
if start_idx < end_idx:
|
||||
vector = []
|
||||
for i in range(3):
|
||||
vector.append(np.mean(data[i][start_idx:end_idx], axis=0))
|
||||
return vector
|
||||
else:
|
||||
return np.zeros(3)
|
||||
|
||||
|
||||
def angle_between(v1, v2):
|
||||
v1_u = v1 / np.linalg.norm(v1)
|
||||
v2_u = v2 / np.linalg.norm(v2)
|
||||
return np.arccos(np.clip(np.dot(v1_u, v2_u), -1.0, 1.0))
|
||||
|
||||
|
||||
def compute_errors(filtered_data, spikes_sorted, accel_value, num_points):
|
||||
# Get the movement start points in the correct order from the sorted bag of spikes
|
||||
movement_starts = [spike[0][1] for spike in spikes_sorted]
|
||||
|
||||
# Theoretical unit vectors for X, Y, Z printer axes
|
||||
printer_axes = {'x': np.array([1, 0, 0]), 'y': np.array([0, 1, 0]), 'z': np.array([0, 0, 1])}
|
||||
|
||||
alignment_errors = {}
|
||||
sensitivity_errors = {}
|
||||
for i, axis in enumerate(['x', 'y', 'z']):
|
||||
movement_start = movement_starts[i]
|
||||
movement_end = movement_start + num_points
|
||||
movement_vector = get_movement_vector(filtered_data, movement_start, movement_end)
|
||||
alignment_errors[axis] = angle_between(movement_vector, printer_axes[axis])
|
||||
|
||||
measured_accel_magnitude = np.linalg.norm(movement_vector)
|
||||
if accel_value != 0:
|
||||
sensitivity_errors[axis] = abs(measured_accel_magnitude - accel_value) / accel_value * 100
|
||||
else:
|
||||
sensitivity_errors[axis] = None
|
||||
|
||||
return alignment_errors, sensitivity_errors
|
||||
|
||||
|
||||
######################################################################
|
||||
# Startup and main routines
|
||||
######################################################################
|
||||
|
||||
|
||||
def parse_log(logname):
|
||||
with open(logname) as f:
|
||||
for header in f:
|
||||
if not header.startswith('#'):
|
||||
break
|
||||
if not header.startswith('freq,psd_x,psd_y,psd_z,psd_xyz'):
|
||||
# Raw accelerometer data
|
||||
return np.loadtxt(logname, comments='#', delimiter=',')
|
||||
# Power spectral density data or shaper calibration data
|
||||
raise ValueError(
|
||||
'File %s does not contain raw accelerometer data and therefore '
|
||||
'is not supported by this script. Please use the official Klipper '
|
||||
'calibrate_shaper.py script to process it instead.' % (logname,)
|
||||
)
|
||||
|
||||
|
||||
def axesmap_calibration(lognames, accel=None):
|
||||
# Parse the raw data and get them ready for analysis
|
||||
raw_datas = [parse_log(filename) for filename in lognames]
|
||||
if len(raw_datas) > 1:
|
||||
raise ValueError('Analysis of multiple CSV files at once is not possible with this script')
|
||||
|
||||
filtered_data = [accel_signal_filter(raw_datas[0][:, i + 1]) for i in range(3)]
|
||||
spikes = [find_first_spike(filtered_data[i]) for i in range(3)]
|
||||
spikes_sorted = sorted([(spikes[0], 'x'), (spikes[1], 'y'), (spikes[2], 'z')], key=lambda x: x[0][1])
|
||||
|
||||
# Using the previous variables to get the axes_map and errors
|
||||
axes_map = ','.join([f'{spike[0][0]}{spike[1]}' for spike in spikes_sorted])
|
||||
# alignment_error, sensitivity_error = compute_errors(filtered_data, spikes_sorted, accel, NUM_POINTS)
|
||||
|
||||
results = f'Detected axes_map:\n {axes_map}\n'
|
||||
|
||||
# TODO: work on this function that is currently not giving good results...
|
||||
# results += "Accelerometer angle deviation:\n"
|
||||
# for axis, angle in alignment_error.items():
|
||||
# angle_degrees = np.degrees(angle) # Convert radians to degrees
|
||||
# results += f" {axis.upper()} axis: {angle_degrees:.2f} degrees\n"
|
||||
|
||||
# results += "Accelerometer sensitivity error:\n"
|
||||
# for axis, error in sensitivity_error.items():
|
||||
# results += f" {axis.upper()} axis: {error:.2f}%\n"
|
||||
|
||||
return results
|
||||
|
||||
|
||||
def main():
|
||||
# Parse command-line arguments
|
||||
usage = '%prog [options] <raw logs>'
|
||||
opts = optparse.OptionParser(usage)
|
||||
opts.add_option('-o', '--output', type='string', dest='output', default=None, help='filename of output graph')
|
||||
opts.add_option(
|
||||
'-a', '--accel', type='string', dest='accel', default=None, help='acceleration value used to do the movements'
|
||||
)
|
||||
options, args = opts.parse_args()
|
||||
if len(args) < 1:
|
||||
opts.error('No CSV file(s) to analyse')
|
||||
if options.accel is None:
|
||||
opts.error('You must specify the acceleration value used when generating the CSV file (option -a)')
|
||||
try:
|
||||
accel_value = float(options.accel)
|
||||
except ValueError:
|
||||
opts.error('Invalid acceleration value. It should be a numeric value.')
|
||||
|
||||
results = axesmap_calibration(args, accel_value)
|
||||
ConsoleOutput.print(results)
|
||||
|
||||
if options.output is not None:
|
||||
with open(options.output, 'w') as f:
|
||||
f.write(results)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
shaketune/graph_creators/graph_belts.py (new file, 555 lines)
@@ -0,0 +1,555 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
#################################################
|
||||
######## CoreXY BELTS CALIBRATION SCRIPT ########
|
||||
#################################################
|
||||
# Written by Frix_x#0161 #
|
||||
|
||||
import optparse
|
||||
import os
|
||||
from collections import namedtuple
|
||||
from datetime import datetime
|
||||
|
||||
import matplotlib
|
||||
import matplotlib.colors
|
||||
import matplotlib.font_manager
|
||||
import matplotlib.pyplot as plt
|
||||
import matplotlib.ticker
|
||||
import numpy as np
|
||||
from scipy.interpolate import griddata
|
||||
|
||||
matplotlib.use('Agg')
|
||||
|
||||
from ..helpers.common_func import (
|
||||
compute_curve_similarity_factor,
|
||||
compute_spectrogram,
|
||||
detect_peaks,
|
||||
parse_log,
|
||||
setup_klipper_import,
|
||||
)
|
||||
from ..helpers.console_output import ConsoleOutput
|
||||
|
||||
ALPHABET = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' # For paired peaks names
|
||||
|
||||
PEAKS_DETECTION_THRESHOLD = 0.20
|
||||
CURVE_SIMILARITY_SIGMOID_K = 0.6
|
||||
DC_GRAIN_OF_SALT_FACTOR = 0.75
|
||||
DC_THRESHOLD_METRIC = 1.5e9
|
||||
DC_MAX_UNPAIRED_PEAKS_ALLOWED = 4
|
||||
|
||||
# Define the SignalData namedtuple
|
||||
SignalData = namedtuple('CalibrationData', ['freqs', 'psd', 'peaks', 'paired_peaks', 'unpaired_peaks'])
|
||||
|
||||
KLIPPAIN_COLORS = {
|
||||
'purple': '#70088C',
|
||||
'orange': '#FF8D32',
|
||||
'dark_purple': '#150140',
|
||||
'dark_orange': '#F24130',
|
||||
'red_pink': '#F2055C',
|
||||
}
|
||||
|
||||
|
||||
######################################################################
|
||||
# Computation of the PSD graph
|
||||
######################################################################
|
||||
|
||||
|
||||
# This function creates pairs of peaks that are close in frequency on the two curves (these are known
# to be resonance points and should be similar on both belts of a CoreXY kinematic)
|
||||
def pair_peaks(peaks1, freqs1, psd1, peaks2, freqs2, psd2):
|
||||
# Compute a dynamic detection threshold to filter and pair peaks efficiently
|
||||
# even if the signal is very noisy (this gets clipped to a maximum of 10 Hz difference)
|
||||
distances = []
|
||||
for p1 in peaks1:
|
||||
for p2 in peaks2:
|
||||
distances.append(abs(freqs1[p1] - freqs2[p2]))
|
||||
distances = np.array(distances)
|
||||
|
||||
median_distance = np.median(distances)
|
||||
iqr = np.percentile(distances, 75) - np.percentile(distances, 25)
|
||||
|
||||
threshold = median_distance + 1.5 * iqr
|
||||
threshold = min(threshold, 10)
|
||||
|
||||
# Pair the peaks using the dynamic threshold
|
||||
paired_peaks = []
|
||||
unpaired_peaks1 = list(peaks1)
|
||||
unpaired_peaks2 = list(peaks2)
|
||||
|
||||
while unpaired_peaks1 and unpaired_peaks2:
|
||||
min_distance = threshold + 1
|
||||
pair = None
|
||||
|
||||
for p1 in unpaired_peaks1:
|
||||
for p2 in unpaired_peaks2:
|
||||
distance = abs(freqs1[p1] - freqs2[p2])
|
||||
if distance < min_distance:
|
||||
min_distance = distance
|
||||
pair = (p1, p2)
|
||||
|
||||
if pair is None: # No more pairs below the threshold
|
||||
break
|
||||
|
||||
p1, p2 = pair
|
||||
paired_peaks.append(((p1, freqs1[p1], psd1[p1]), (p2, freqs2[p2], psd2[p2])))
|
||||
unpaired_peaks1.remove(p1)
|
||||
unpaired_peaks2.remove(p2)
|
||||
|
||||
return paired_peaks, unpaired_peaks1, unpaired_peaks2
|
||||
|
||||
|
||||
######################################################################
|
||||
# Computation of the differential spectrogram
|
||||
######################################################################
|
||||
|
||||
|
||||
# Interpolate source_data (2D) to match target_x and target_y in order to
|
||||
# get similar time and frequency dimensions for the differential spectrogram
|
||||
def interpolate_2d(target_x, target_y, source_x, source_y, source_data):
|
||||
# Create a grid of points in the source and target space
|
||||
source_points = np.array([(x, y) for y in source_y for x in source_x])
|
||||
target_points = np.array([(x, y) for y in target_y for x in target_x])
|
||||
|
||||
# Flatten the source data to match the flattened source points
|
||||
source_values = source_data.flatten()
|
||||
|
||||
# Interpolate and reshape the interpolated data to match the target grid shape and replace NaN with zeros
|
||||
interpolated_data = griddata(source_points, source_values, target_points, method='nearest')
|
||||
interpolated_data = interpolated_data.reshape((len(target_y), len(target_x)))
|
||||
interpolated_data = np.nan_to_num(interpolated_data)
|
||||
|
||||
return interpolated_data
|
||||
|
||||
|
||||
# Main logic function to combine two similar spectrograms - ie. from both belt paths - by subtracting the signals in order to create
# a new composite spectrogram. This results in a divergent but mostly centered spectrogram (the center will be white) with some colored zones
# highlighting the differences between the belt paths. The summed spectrogram is used for the MHI calculation.
|
||||
def compute_combined_spectrogram(data1, data2):
|
||||
pdata1, bins1, t1 = compute_spectrogram(data1)
|
||||
pdata2, bins2, t2 = compute_spectrogram(data2)
|
||||
|
||||
# Interpolate the spectrograms
|
||||
pdata2_interpolated = interpolate_2d(bins1, t1, bins2, t2, pdata2)
|
||||
|
||||
# Combine them in two forms: a summed diff for the MHI computation and a diverging diff for the spectrogram colors
|
||||
combined_sum = np.abs(pdata1 - pdata2_interpolated)
|
||||
combined_divergent = pdata1 - pdata2_interpolated
|
||||
|
||||
return combined_sum, combined_divergent, bins1, t1
|
||||
|
||||
|
||||
# Compute a composite and highly subjective value indicating the "mechanical health of the printer (0 to 100%)" that represents the
# likelihood of mechanical issues on the printer. It is based on the sum of the gradient of the differential spectrogram, salted with a bit
# of the estimated similarity cross-correlation from compute_curve_similarity_factor() and with a bit of the number of unpaired peaks.
# This results in a percentage quantifying the machine behavior around the main resonances, giving a hint whether adjusting the belt tension
# alone will lead to good graphs or whether there is a chance of a mechanical issue in the background (above 50% should be considered probably problematic)
|
||||
def compute_mhi(combined_data, similarity_coefficient, num_unpaired_peaks):
|
||||
# filtered_data = combined_data[combined_data > 100]
|
||||
filtered_data = np.abs(combined_data)
|
||||
|
||||
# First compute a "total variability metric" based on the sum of the gradient magnitude, which emphasizes regions of the
# spectrogram where there are rapid changes in magnitude (like the edges of resonance peaks).
|
||||
total_variability_metric = np.sum(np.abs(np.gradient(filtered_data)))
|
||||
# Scale the metric to a percentage using the threshold (found empirically from a large amount of user data shared with me)
|
||||
base_percentage = (np.log1p(total_variability_metric) / np.log1p(DC_THRESHOLD_METRIC)) * 100
|
||||
|
||||
# Adjust the percentage based on the similarity_coefficient to add a grain of salt
|
||||
adjusted_percentage = base_percentage * (1 - DC_GRAIN_OF_SALT_FACTOR * (similarity_coefficient / 100))
|
||||
|
||||
# Adjust the percentage again based on the number of unpaired peaks to add a second grain of salt
|
||||
peak_confidence = num_unpaired_peaks / DC_MAX_UNPAIRED_PEAKS_ALLOWED
|
||||
final_percentage = (1 - peak_confidence) * adjusted_percentage + peak_confidence * 100
|
||||
|
||||
# Ensure the result lies between 0 and 100 by clipping the computed value
|
||||
final_percentage = np.clip(final_percentage, 0, 100)
|
||||
|
||||
return final_percentage, mhi_lut(final_percentage)
|
||||
|
||||
|
||||
# LUT to transform the MHI into a textual value easy to understand for the users of the script
|
||||
def mhi_lut(mhi):
|
||||
ranges = [
|
||||
(0, 30, 'Excellent mechanical health'),
|
||||
(30, 45, 'Good mechanical health'),
|
||||
(45, 55, 'Acceptable mechanical health'),
|
||||
(55, 70, 'Potential signs of a mechanical issue'),
|
||||
(70, 85, 'Likely a mechanical issue'),
|
||||
(85, 100, 'Mechanical issue detected'),
|
||||
]
|
||||
for lower, upper, message in ranges:
|
||||
if lower < mhi <= upper:
|
||||
return message
|
||||
|
||||
return 'Error computing MHI value'
|
||||
|
||||
|
||||
######################################################################
|
||||
# Graphing
|
||||
######################################################################
|
||||
|
||||
|
||||
def plot_compare_frequency(ax, lognames, signal1, signal2, similarity_factor, max_freq):
|
||||
# Get the belt name for the legend to avoid putting the full file name
|
||||
signal1_belt = (lognames[0].split('/')[-1]).split('_')[-1][0]
|
||||
signal2_belt = (lognames[1].split('/')[-1]).split('_')[-1][0]
|
||||
|
||||
if signal1_belt == 'A' and signal2_belt == 'B':
|
||||
signal1_belt += ' (axis 1,-1)'
|
||||
signal2_belt += ' (axis 1, 1)'
|
||||
elif signal1_belt == 'B' and signal2_belt == 'A':
|
||||
signal1_belt += ' (axis 1, 1)'
|
||||
signal2_belt += ' (axis 1,-1)'
|
||||
else:
|
||||
ConsoleOutput.print(
    "Warning: the belts do not seem to have the correct names A and B (extracted from the CSV filenames)"
)
|
||||
|
||||
# Plot the two belts PSD signals
|
||||
ax.plot(signal1.freqs, signal1.psd, label='Belt ' + signal1_belt, color=KLIPPAIN_COLORS['purple'])
|
||||
ax.plot(signal2.freqs, signal2.psd, label='Belt ' + signal2_belt, color=KLIPPAIN_COLORS['orange'])
|
||||
|
||||
# Trace the "relax region" (also used as a threshold to filter and detect the peaks)
|
||||
psd_lowest_max = min(signal1.psd.max(), signal2.psd.max())
|
||||
peaks_warning_threshold = PEAKS_DETECTION_THRESHOLD * psd_lowest_max
|
||||
ax.axhline(y=peaks_warning_threshold, color='black', linestyle='--', linewidth=0.5)
|
||||
ax.fill_between(signal1.freqs, 0, peaks_warning_threshold, color='green', alpha=0.15, label='Relax Region')
|
||||
|
||||
# Trace and annotate the peaks on the graph
|
||||
paired_peak_count = 0
|
||||
unpaired_peak_count = 0
|
||||
offsets_table_data = []
|
||||
|
||||
for _, (peak1, peak2) in enumerate(signal1.paired_peaks):
|
||||
label = ALPHABET[paired_peak_count]
|
||||
amplitude_offset = abs(
|
||||
((signal2.psd[peak2[0]] - signal1.psd[peak1[0]]) / max(signal1.psd[peak1[0]], signal2.psd[peak2[0]])) * 100
|
||||
)
|
||||
frequency_offset = abs(signal2.freqs[peak2[0]] - signal1.freqs[peak1[0]])
|
||||
offsets_table_data.append([f'Peaks {label}', f'{frequency_offset:.1f} Hz', f'{amplitude_offset:.1f} %'])
|
||||
|
||||
ax.plot(signal1.freqs[peak1[0]], signal1.psd[peak1[0]], 'x', color='black')
|
||||
ax.plot(signal2.freqs[peak2[0]], signal2.psd[peak2[0]], 'x', color='black')
|
||||
ax.plot(
|
||||
[signal1.freqs[peak1[0]], signal2.freqs[peak2[0]]],
|
||||
[signal1.psd[peak1[0]], signal2.psd[peak2[0]]],
|
||||
':',
|
||||
color='gray',
|
||||
)
|
||||
|
||||
ax.annotate(
|
||||
label + '1',
|
||||
(signal1.freqs[peak1[0]], signal1.psd[peak1[0]]),
|
||||
textcoords='offset points',
|
||||
xytext=(8, 5),
|
||||
ha='left',
|
||||
fontsize=13,
|
||||
color='black',
|
||||
)
|
||||
ax.annotate(
|
||||
label + '2',
|
||||
(signal2.freqs[peak2[0]], signal2.psd[peak2[0]]),
|
||||
textcoords='offset points',
|
||||
xytext=(8, 5),
|
||||
ha='left',
|
||||
fontsize=13,
|
||||
color='black',
|
||||
)
|
||||
paired_peak_count += 1
|
||||
|
||||
for peak in signal1.unpaired_peaks:
|
||||
ax.plot(signal1.freqs[peak], signal1.psd[peak], 'x', color='black')
|
||||
ax.annotate(
|
||||
str(unpaired_peak_count + 1),
|
||||
(signal1.freqs[peak], signal1.psd[peak]),
|
||||
textcoords='offset points',
|
||||
xytext=(8, 5),
|
||||
ha='left',
|
||||
fontsize=13,
|
||||
color='red',
|
||||
weight='bold',
|
||||
)
|
||||
unpaired_peak_count += 1
|
||||
|
||||
for peak in signal2.unpaired_peaks:
|
||||
ax.plot(signal2.freqs[peak], signal2.psd[peak], 'x', color='black')
|
||||
ax.annotate(
|
||||
str(unpaired_peak_count + 1),
|
||||
(signal2.freqs[peak], signal2.psd[peak]),
|
||||
textcoords='offset points',
|
||||
xytext=(8, 5),
|
||||
ha='left',
|
||||
fontsize=13,
|
||||
color='red',
|
||||
weight='bold',
|
||||
)
|
||||
unpaired_peak_count += 1
|
||||
|
||||
# Add estimated similarity to the graph
|
||||
ax2 = ax.twinx() # To split the legends in two boxes
|
||||
ax2.yaxis.set_visible(False)
|
||||
ax2.plot([], [], ' ', label=f'Estimated similarity: {similarity_factor:.1f}%')
|
||||
ax2.plot([], [], ' ', label=f'Number of unpaired peaks: {unpaired_peak_count}')
|
||||
|
||||
# Setting axis parameters, grid and graph title
|
||||
ax.set_xlabel('Frequency (Hz)')
|
||||
ax.set_xlim([0, max_freq])
|
||||
ax.set_ylabel('Power spectral density')
|
||||
psd_highest_max = max(signal1.psd.max(), signal2.psd.max())
|
||||
ax.set_ylim([0, psd_highest_max + psd_highest_max * 0.05])
|
||||
|
||||
ax.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
|
||||
ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
|
||||
ax.ticklabel_format(axis='y', style='scientific', scilimits=(0, 0))
|
||||
ax.grid(which='major', color='grey')
|
||||
ax.grid(which='minor', color='lightgrey')
|
||||
fontP = matplotlib.font_manager.FontProperties()
|
||||
fontP.set_size('small')
|
||||
ax.set_title(
|
||||
'Belts Frequency Profiles (estimated similarity: {:.1f}%)'.format(similarity_factor),
|
||||
fontsize=14,
|
||||
color=KLIPPAIN_COLORS['dark_orange'],
|
||||
weight='bold',
|
||||
)
|
||||
|
||||
# Print the table of offsets on top of the graph, below the original legend (upper right)
|
||||
if len(offsets_table_data) > 0:
|
||||
columns = [
|
||||
'',
|
||||
'Frequency delta',
|
||||
'Amplitude delta',
|
||||
]
|
||||
offset_table = ax.table(
|
||||
cellText=offsets_table_data,
|
||||
colLabels=columns,
|
||||
bbox=[0.66, 0.75, 0.33, 0.15],
|
||||
loc='upper right',
|
||||
cellLoc='center',
|
||||
)
|
||||
offset_table.auto_set_font_size(False)
|
||||
offset_table.set_fontsize(8)
|
||||
offset_table.auto_set_column_width([0, 1, 2])
|
||||
offset_table.set_zorder(100)
|
||||
cells = [key for key in offset_table.get_celld().keys()]
|
||||
for cell in cells:
|
||||
offset_table[cell].set_facecolor('white')
|
||||
offset_table[cell].set_alpha(0.6)
|
||||
|
||||
ax.legend(loc='upper left', prop=fontP)
|
||||
ax2.legend(loc='upper right', prop=fontP)
|
||||
|
||||
return
|
||||
|
||||
|
||||
def plot_difference_spectrogram(ax, signal1, signal2, t, bins, combined_divergent, textual_mhi, max_freq):
|
||||
ax.set_title('Differential Spectrogram', fontsize=14, color=KLIPPAIN_COLORS['dark_orange'], weight='bold')
|
||||
ax.plot([], [], ' ', label=f'{textual_mhi} (experimental)')
|
||||
|
||||
# Draw the differential spectrogram with a specific custom norm to get orange or purple values where there is signal and white near zero.
# imshow is better suited here than pcolormesh since its result is already rasterized and we don't need to keep vector graphics
# when saving to the final .png file. Using it also allows saving ~150-200MB of RAM during the "fig.savefig" operation.
|
||||
colors = [
|
||||
KLIPPAIN_COLORS['dark_orange'],
|
||||
KLIPPAIN_COLORS['orange'],
|
||||
'white',
|
||||
KLIPPAIN_COLORS['purple'],
|
||||
KLIPPAIN_COLORS['dark_purple'],
|
||||
]
|
||||
cm = matplotlib.colors.LinearSegmentedColormap.from_list(
|
||||
'klippain_divergent', list(zip([0, 0.25, 0.5, 0.75, 1], colors))
|
||||
)
|
||||
norm = matplotlib.colors.TwoSlopeNorm(vmin=np.min(combined_divergent), vcenter=0, vmax=np.max(combined_divergent))
|
||||
ax.imshow(
|
||||
combined_divergent.T,
|
||||
cmap=cm,
|
||||
norm=norm,
|
||||
aspect='auto',
|
||||
extent=[t[0], t[-1], bins[0], bins[-1]],
|
||||
interpolation='bilinear',
|
||||
origin='lower',
|
||||
)
|
||||
|
||||
ax.set_xlabel('Frequency (Hz)')
|
||||
ax.set_xlim([0.0, max_freq])
|
||||
ax.set_ylabel('Time (s)')
|
||||
ax.set_ylim([0, bins[-1]])
|
||||
|
||||
fontP = matplotlib.font_manager.FontProperties()
|
||||
fontP.set_size('medium')
|
||||
ax.legend(loc='best', prop=fontP)
|
||||
|
||||
# Plot vertical lines for unpaired peaks
|
||||
unpaired_peak_count = 0
|
||||
for _, peak in enumerate(signal1.unpaired_peaks):
|
||||
ax.axvline(signal1.freqs[peak], color=KLIPPAIN_COLORS['red_pink'], linestyle='dotted', linewidth=1.5)
|
||||
ax.annotate(
|
||||
f'Peak {unpaired_peak_count + 1}',
|
||||
(signal1.freqs[peak], t[-1] * 0.05),
|
||||
textcoords='data',
|
||||
color=KLIPPAIN_COLORS['red_pink'],
|
||||
rotation=90,
|
||||
fontsize=10,
|
||||
verticalalignment='bottom',
|
||||
horizontalalignment='right',
|
||||
)
|
||||
unpaired_peak_count += 1
|
||||
|
||||
for _, peak in enumerate(signal2.unpaired_peaks):
|
||||
ax.axvline(signal2.freqs[peak], color=KLIPPAIN_COLORS['red_pink'], linestyle='dotted', linewidth=1.5)
|
||||
ax.annotate(
|
||||
f'Peak {unpaired_peak_count + 1}',
|
||||
(signal2.freqs[peak], t[-1] * 0.05),
|
||||
textcoords='data',
|
||||
color=KLIPPAIN_COLORS['red_pink'],
|
||||
rotation=90,
|
||||
fontsize=10,
|
||||
verticalalignment='bottom',
|
||||
horizontalalignment='right',
|
||||
)
|
||||
unpaired_peak_count += 1
|
||||
|
||||
# Plot vertical lines and zones for paired peaks
|
||||
for idx, (peak1, peak2) in enumerate(signal1.paired_peaks):
|
||||
label = ALPHABET[idx]
|
||||
x_min = min(peak1[1], peak2[1])
|
||||
x_max = max(peak1[1], peak2[1])
|
||||
ax.axvline(x_min, color=KLIPPAIN_COLORS['dark_purple'], linestyle='dotted', linewidth=1.5)
|
||||
ax.axvline(x_max, color=KLIPPAIN_COLORS['dark_purple'], linestyle='dotted', linewidth=1.5)
|
||||
ax.fill_between([x_min, x_max], 0, np.max(combined_divergent), color=KLIPPAIN_COLORS['dark_purple'], alpha=0.3)
|
||||
ax.annotate(
|
||||
f'Peaks {label}',
|
||||
(x_min, t[-1] * 0.05),
|
||||
textcoords='data',
|
||||
color=KLIPPAIN_COLORS['dark_purple'],
|
||||
rotation=90,
|
||||
fontsize=10,
|
||||
verticalalignment='bottom',
|
||||
horizontalalignment='right',
|
||||
)
|
||||
|
||||
return
|
||||
|
||||
|
||||
######################################################################
|
||||
# Custom tools
|
||||
######################################################################
|
||||
|
||||
|
||||
# Use Klipper's original shaper_calibrate helper to get the PSD data of a raw accelerometer signal
|
||||
def compute_signal_data(data, max_freq):
|
||||
helper = shaper_calibrate.ShaperCalibrate(printer=None)
|
||||
calibration_data = helper.process_accelerometer_data(data)
|
||||
|
||||
freqs = calibration_data.freq_bins[calibration_data.freq_bins <= max_freq]
|
||||
psd = calibration_data.get_psd('all')[calibration_data.freq_bins <= max_freq]
|
||||
|
||||
_, peaks, _ = detect_peaks(psd, freqs, PEAKS_DETECTION_THRESHOLD * psd.max())
|
||||
|
||||
return SignalData(freqs=freqs, psd=psd, peaks=peaks, paired_peaks=None, unpaired_peaks=None)
|
||||
|
||||
|
||||
######################################################################
|
||||
# Startup and main routines
|
||||
######################################################################
|
||||
|
||||
|
||||
def belts_calibration(lognames, klipperdir='~/klipper', max_freq=200.0, st_version=None):
|
||||
global shaper_calibrate
|
||||
shaper_calibrate = setup_klipper_import(klipperdir)
|
||||
|
||||
# Parse data from the log files while ignoring CSV in the wrong format
|
||||
datas = [data for data in (parse_log(fn) for fn in lognames) if data is not None]
|
||||
if len(datas) != 2:
|
||||
raise ValueError('Incorrect number of .csv files used (this function needs exactly two files to compare them)!')
|
||||
|
||||
# Compute calibration data for the two datasets with automatic peaks detection
|
||||
signal1 = compute_signal_data(datas[0], max_freq)
|
||||
signal2 = compute_signal_data(datas[1], max_freq)
|
||||
combined_sum, combined_divergent, bins, t = compute_combined_spectrogram(datas[0], datas[1])
|
||||
del datas
|
||||
|
||||
# Pair the peaks across the two datasets
|
||||
paired_peaks, unpaired_peaks1, unpaired_peaks2 = pair_peaks(
|
||||
signal1.peaks, signal1.freqs, signal1.psd, signal2.peaks, signal2.freqs, signal2.psd
|
||||
)
|
||||
signal1 = signal1._replace(paired_peaks=paired_peaks, unpaired_peaks=unpaired_peaks1)
|
||||
signal2 = signal2._replace(paired_peaks=paired_peaks, unpaired_peaks=unpaired_peaks2)
|
||||
|
||||
# Compute the similarity (using cross-correlation of the PSD signals)
|
||||
similarity_factor = compute_curve_similarity_factor(
|
||||
signal1.freqs, signal1.psd, signal2.freqs, signal2.psd, CURVE_SIMILARITY_SIGMOID_K
|
||||
)
|
||||
ConsoleOutput.print(f'Belts estimated similarity: {similarity_factor:.1f}%')
|
||||
# Compute the MHI value from the sum of the gradient of the differential spectrogram, salted with the similarity factor and the number of
# unpaired peaks from the belts frequency profile. Be careful, this value is highly opinionated and pretty experimental!
|
||||
mhi, textual_mhi = compute_mhi(
|
||||
combined_sum, similarity_factor, len(signal1.unpaired_peaks) + len(signal2.unpaired_peaks)
|
||||
)
|
||||
ConsoleOutput.print(f'[experimental] Mechanical Health Indicator: {textual_mhi.lower()} ({mhi:.1f}%)')
|
||||
|
||||
# Create graph layout
|
||||
fig, (ax1, ax2) = plt.subplots(
|
||||
2,
|
||||
1,
|
||||
gridspec_kw={
|
||||
'height_ratios': [4, 3],
|
||||
'bottom': 0.050,
|
||||
'top': 0.890,
|
||||
'left': 0.085,
|
||||
'right': 0.966,
|
||||
'hspace': 0.169,
|
||||
'wspace': 0.200,
|
||||
},
|
||||
)
|
||||
fig.set_size_inches(8.3, 11.6)
|
||||
|
||||
# Add title
|
||||
title_line1 = 'RELATIVE BELTS CALIBRATION TOOL'
|
||||
fig.text(
|
||||
0.12, 0.965, title_line1, ha='left', va='bottom', fontsize=20, color=KLIPPAIN_COLORS['purple'], weight='bold'
|
||||
)
|
||||
try:
|
||||
filename = lognames[0].split('/')[-1]
|
||||
dt = datetime.strptime(f"{filename.split('_')[1]} {filename.split('_')[2]}", '%Y%m%d %H%M%S')
|
||||
title_line2 = dt.strftime('%x %X')
|
||||
except Exception:
|
||||
ConsoleOutput.print(f'Warning: the CSV filenames appear to be different than expected: {lognames}')
|
||||
title_line2 = lognames[0].split('/')[-1] + ' / ' + lognames[1].split('/')[-1]
|
||||
fig.text(0.12, 0.957, title_line2, ha='left', va='top', fontsize=16, color=KLIPPAIN_COLORS['dark_purple'])
|
||||
|
||||
# Plot the graphs
|
||||
plot_compare_frequency(ax1, lognames, signal1, signal2, similarity_factor, max_freq)
|
||||
plot_difference_spectrogram(ax2, signal1, signal2, t, bins, combined_divergent, textual_mhi, max_freq)
|
||||
|
||||
# Adding a small Klippain logo to the top left corner of the figure
|
||||
ax_logo = fig.add_axes([0.001, 0.8995, 0.1, 0.1], anchor='NW')
|
||||
ax_logo.imshow(plt.imread(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'klippain.png')))
|
||||
ax_logo.axis('off')
|
||||
|
||||
# Adding Shake&Tune version in the top right corner
|
||||
if st_version != 'unknown':
|
||||
fig.text(0.995, 0.985, st_version, ha='right', va='bottom', fontsize=8, color=KLIPPAIN_COLORS['purple'])
|
||||
|
||||
return fig
|
||||
|
||||
|
||||
def main():
|
||||
# Parse command-line arguments
|
||||
usage = '%prog [options] <raw logs>'
|
||||
opts = optparse.OptionParser(usage)
|
||||
opts.add_option('-o', '--output', type='string', dest='output', default=None, help='filename of output graph')
|
||||
opts.add_option('-f', '--max_freq', type='float', default=200.0, help='maximum frequency to graph')
|
||||
opts.add_option(
|
||||
'-k', '--klipper_dir', type='string', dest='klipperdir', default='~/klipper', help='main klipper directory'
|
||||
)
|
||||
options, args = opts.parse_args()
|
||||
if len(args) < 1:
|
||||
opts.error('Incorrect number of arguments')
|
||||
if options.output is None:
|
||||
opts.error('You must specify an output file.png to use the script (option -o)')
|
||||
|
||||
fig = belts_calibration(args, options.klipperdir, options.max_freq)
|
||||
fig.savefig(options.output, dpi=150)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
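The graph creators also remain importable on their own. Below is a hedged sketch of calling `belts_calibration()` directly, bypassing the `GraphCreator` plumbing (the file names and the Klipper directory are placeholders; the two CSVs must contain raw accelerometer data for the A and B belts):

```python
# Illustrative direct call to the belts graph generator
from shaketune.graph_creators.graph_belts import belts_calibration

fig = belts_calibration(
    lognames=['/tmp/belts_20240101_120000_A.csv', '/tmp/belts_20240101_120000_B.csv'],
    klipperdir='~/klipper',  # used to import Klipper's shaper_calibrate module
    max_freq=200.0,
)
fig.savefig('/tmp/belts_graph.png', dpi=150)
```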
shaketune/graph_creators/graph_shaper.py (new file, 420 lines)
@@ -0,0 +1,420 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
#################################################
|
||||
######## INPUT SHAPER CALIBRATION SCRIPT ########
|
||||
#################################################
|
||||
# Derived from the calibrate_shaper.py official Klipper script
|
||||
# Copyright (C) 2020 Dmitry Butyugin <dmbutyugin@google.com>
|
||||
# Copyright (C) 2020 Kevin O'Connor <kevin@koconnor.net>
|
||||
# Highly modified and improved by Frix_x#0161 #
|
||||
|
||||
import optparse
|
||||
import os
|
||||
from datetime import datetime
|
||||
|
||||
import matplotlib
|
||||
import matplotlib.font_manager
|
||||
import matplotlib.pyplot as plt
|
||||
import matplotlib.ticker
|
||||
import numpy as np
|
||||
|
||||
matplotlib.use('Agg')
|
||||
|
||||
from ..helpers.common_func import (
|
||||
compute_mechanical_parameters,
|
||||
compute_spectrogram,
|
||||
detect_peaks,
|
||||
parse_log,
|
||||
setup_klipper_import,
|
||||
)
|
||||
from ..helpers.console_output import ConsoleOutput
|
||||
|
||||
PEAKS_DETECTION_THRESHOLD = 0.05
|
||||
PEAKS_EFFECT_THRESHOLD = 0.12
|
||||
SPECTROGRAM_LOW_PERCENTILE_FILTER = 5
|
||||
MAX_SMOOTHING = 0.1
|
||||
|
||||
KLIPPAIN_COLORS = {
|
||||
'purple': '#70088C',
|
||||
'orange': '#FF8D32',
|
||||
'dark_purple': '#150140',
|
||||
'dark_orange': '#F24130',
|
||||
'red_pink': '#F2055C',
|
||||
}
|
||||
|
||||
|
||||
######################################################################
|
||||
# Computation
|
||||
######################################################################
|
||||
|
||||
|
||||
# Find the best shaper parameters using Klipper's official algorithm selection with
|
||||
# a proper precomputed damping ratio (zeta) and using the configured printer SCV value
|
||||
def calibrate_shaper(datas, max_smoothing, scv, max_freq):
|
||||
helper = shaper_calibrate.ShaperCalibrate(printer=None)
|
||||
calibration_data = helper.process_accelerometer_data(datas)
|
||||
calibration_data.normalize_to_frequencies()
|
||||
|
||||
fr, zeta, _, _ = compute_mechanical_parameters(calibration_data.psd_sum, calibration_data.freq_bins)
|
||||
|
||||
# If the damping ratio computation fails, we use Klipper's default value instead
|
||||
if zeta is None:
|
||||
zeta = 0.1
|
||||
|
||||
compat = False
|
||||
try:
|
||||
shaper, all_shapers = helper.find_best_shaper(
|
||||
calibration_data,
|
||||
shapers=None,
|
||||
damping_ratio=zeta,
|
||||
scv=scv,
|
||||
shaper_freqs=None,
|
||||
max_smoothing=max_smoothing,
|
||||
test_damping_ratios=None,
|
||||
max_freq=max_freq,
|
||||
logger=ConsoleOutput.print,
|
||||
)
|
||||
except TypeError:
|
||||
ConsoleOutput.print(
|
||||
'[WARNING] You seem to be using an older version of Klipper that is not compatible with all the latest Shake&Tune features!'
|
||||
)
|
||||
ConsoleOutput.print(
|
||||
'Shake&Tune now runs in compatibility mode: be aware that the results may be slightly off, since the real damping ratio cannot be used to create the filter recommendations'
|
||||
)
|
||||
compat = True
|
||||
shaper, all_shapers = helper.find_best_shaper(calibration_data, max_smoothing, ConsoleOutput.print)
|
||||
|
||||
ConsoleOutput.print(
|
||||
'\n-> Recommended shaper is %s @ %.1f Hz (when using a square corner velocity of %.1f and a damping ratio of %.3f)'
|
||||
% (shaper.name.upper(), shaper.freq, scv, zeta)
|
||||
)
|
||||
|
||||
return shaper.name, all_shapers, calibration_data, fr, zeta, compat
|
||||
|
||||
|
||||
######################################################################
|
||||
# Graphing
|
||||
######################################################################
|
||||
|
||||
|
||||
def plot_freq_response(
|
||||
ax, calibration_data, shapers, performance_shaper, peaks, peaks_freqs, peaks_threshold, fr, zeta, max_freq
|
||||
):
|
||||
freqs = calibration_data.freqs
|
||||
psd = calibration_data.psd_sum
|
||||
px = calibration_data.psd_x
|
||||
py = calibration_data.psd_y
|
||||
pz = calibration_data.psd_z
|
||||
|
||||
fontP = matplotlib.font_manager.FontProperties()
|
||||
fontP.set_size('x-small')
|
||||
|
||||
ax.set_xlabel('Frequency (Hz)')
|
||||
ax.set_xlim([0, max_freq])
|
||||
ax.set_ylabel('Power spectral density')
|
||||
ax.set_ylim([0, psd.max() + psd.max() * 0.05])
|
||||
|
||||
ax.plot(freqs, psd, label='X+Y+Z', color='purple', zorder=5)
|
||||
ax.plot(freqs, px, label='X', color='red')
|
||||
ax.plot(freqs, py, label='Y', color='green')
|
||||
ax.plot(freqs, pz, label='Z', color='blue')
|
||||
|
||||
ax.xaxis.set_minor_locator(matplotlib.ticker.MultipleLocator(5))
|
||||
ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
|
||||
ax.ticklabel_format(axis='y', style='scientific', scilimits=(0, 0))
|
||||
ax.grid(which='major', color='grey')
|
||||
ax.grid(which='minor', color='lightgrey')
|
||||
|
||||
ax2 = ax.twinx()
|
||||
ax2.yaxis.set_visible(False)
|
||||
|
||||
lowvib_shaper_vibrs = float('inf')
|
||||
lowvib_shaper = None
|
||||
lowvib_shaper_freq = None
|
||||
lowvib_shaper_accel = 0
|
||||
|
||||
# Draw the shaper curves and add their specific parameters to the legend
# This also adds a way to find the best shaper with a low level of vibrations (with a reasonable level of smoothing)
|
||||
for shaper in shapers:
|
||||
shaper_max_accel = round(shaper.max_accel / 100.0) * 100.0
|
||||
label = '%s (%.1f Hz, vibr=%.1f%%, sm~=%.2f, accel<=%.f)' % (
|
||||
shaper.name.upper(),
|
||||
shaper.freq,
|
||||
shaper.vibrs * 100.0,
|
||||
shaper.smoothing,
|
||||
shaper_max_accel,
|
||||
)
|
||||
ax2.plot(freqs, shaper.vals, label=label, linestyle='dotted')
|
||||
|
||||
# Get the performance shaper
|
||||
if shaper.name == performance_shaper:
|
||||
performance_shaper_freq = shaper.freq
|
||||
performance_shaper_vibr = shaper.vibrs * 100.0
|
||||
performance_shaper_vals = shaper.vals
|
||||
|
||||
# Get the low vibration shaper
|
||||
if (
|
||||
shaper.vibrs * 100 < lowvib_shaper_vibrs
|
||||
or (shaper.vibrs * 100 == lowvib_shaper_vibrs and shaper_max_accel > lowvib_shaper_accel)
|
||||
) and shaper.smoothing < MAX_SMOOTHING:
|
||||
lowvib_shaper_accel = shaper_max_accel
|
||||
lowvib_shaper = shaper.name
|
||||
lowvib_shaper_freq = shaper.freq
|
||||
lowvib_shaper_vibrs = shaper.vibrs * 100
|
||||
lowvib_shaper_vals = shaper.vals
|
||||
|
||||
# User recommendations are added to the legend: one is Klipper's original suggestion that is usually good for performance,
# and the other one is a custom "low vibration" recommendation that looks for a suitable shaper that doesn't have excessive
# smoothing and that has a lower vibration level. If both recommendations are the same shaper, or if no suitable "low
# vibration" shaper is found, then only a single "best shaper" recommendation line is added to the legend
|
||||
if (
|
||||
lowvib_shaper is not None
|
||||
and lowvib_shaper != performance_shaper
|
||||
and lowvib_shaper_vibrs <= performance_shaper_vibr
|
||||
):
|
||||
ax2.plot(
|
||||
[],
|
||||
[],
|
||||
' ',
|
||||
label='Recommended performance shaper: %s @ %.1f Hz'
|
||||
% (performance_shaper.upper(), performance_shaper_freq),
|
||||
)
|
||||
ax.plot(
|
||||
freqs, psd * performance_shaper_vals, label='With %s applied' % (performance_shaper.upper()), color='cyan'
|
||||
)
|
||||
ax2.plot(
|
||||
[],
|
||||
[],
|
||||
' ',
|
||||
label='Recommended low vibrations shaper: %s @ %.1f Hz' % (lowvib_shaper.upper(), lowvib_shaper_freq),
|
||||
)
|
||||
ax.plot(freqs, psd * lowvib_shaper_vals, label='With %s applied' % (lowvib_shaper.upper()), color='lime')
|
||||
else:
|
||||
ax2.plot(
|
||||
[],
|
||||
[],
|
||||
' ',
|
||||
label='Recommended best shaper: %s @ %.1f Hz' % (performance_shaper.upper(), performance_shaper_freq),
|
||||
)
|
||||
ax.plot(
|
||||
freqs, psd * performance_shaper_vals, label='With %s applied' % (performance_shaper.upper()), color='cyan'
|
||||
)
|
||||
|
||||
# And the estimated damping ratio is finally added at the end of the legend
|
||||
ax2.plot([], [], ' ', label='Estimated damping ratio (ζ): %.3f' % (zeta))
|
||||
|
||||
# Draw the detected peaks and name them
|
||||
# This also draws the detection threshold and the warning threshold (aka the "effect zone")
|
||||
ax.plot(peaks_freqs, psd[peaks], 'x', color='black', markersize=8)
|
||||
for idx, peak in enumerate(peaks):
|
||||
if psd[peak] > peaks_threshold[1]:
|
||||
fontcolor = 'red'
|
||||
fontweight = 'bold'
|
||||
else:
|
||||
fontcolor = 'black'
|
||||
fontweight = 'normal'
|
||||
ax.annotate(
|
||||
f'{idx+1}',
|
||||
(freqs[peak], psd[peak]),
|
||||
textcoords='offset points',
|
||||
xytext=(8, 5),
|
||||
ha='left',
|
||||
fontsize=13,
|
||||
color=fontcolor,
|
||||
weight=fontweight,
|
||||
)
|
||||
ax.axhline(y=peaks_threshold[0], color='black', linestyle='--', linewidth=0.5)
|
||||
ax.axhline(y=peaks_threshold[1], color='black', linestyle='--', linewidth=0.5)
|
||||
ax.fill_between(freqs, 0, peaks_threshold[0], color='green', alpha=0.15, label='Relax Region')
|
||||
ax.fill_between(freqs, peaks_threshold[0], peaks_threshold[1], color='orange', alpha=0.2, label='Warning Region')
|
||||
|
||||
# Add the main resonant frequency and damping ratio of the axis to the graph title
|
||||
ax.set_title(
|
||||
'Axis Frequency Profile (ω0=%.1fHz, ζ=%.3f)' % (fr, zeta),
|
||||
fontsize=14,
|
||||
color=KLIPPAIN_COLORS['dark_orange'],
|
||||
weight='bold',
|
||||
)
|
||||
ax.legend(loc='upper left', prop=fontP)
|
||||
ax2.legend(loc='upper right', prop=fontP)
|
||||
|
||||
return
|
||||
|
||||
|
||||
# Plot a time-frequency spectrogram to see how the system responds over time during the
|
||||
# resonance test. This can highlight spots hidden in the standard PSD graph that come from other harmonics
|
||||
def plot_spectrogram(ax, t, bins, pdata, peaks, max_freq):
|
||||
ax.set_title('Time-Frequency Spectrogram', fontsize=14, color=KLIPPAIN_COLORS['dark_orange'], weight='bold')
|
||||
|
||||
# We need to normalize the data to get a proper signal on the spectrogram
|
||||
# However, while using "LogNorm" alone shows too much background noise, using
|
||||
# "Normalize" make only the resonnance appearing and hide interesting elements
|
||||
# So we need to filter out the lower part of the data (i.e. find the proper vmin for LogNorm)
|
||||
vmin_value = np.percentile(pdata, SPECTROGRAM_LOW_PERCENTILE_FILTER)
|
||||
|
||||
# Draw the spectrogram using imshow, which is better suited here than pcolormesh since its result is already rasterized and
|
||||
# we don't need to keep vector graphics when saving to the final .png file. Using it also allows us to
|
||||
# save ~150-200MB of RAM during the "fig.savefig" operation.
|
||||
cm = 'inferno'
|
||||
norm = matplotlib.colors.LogNorm(vmin=vmin_value)
|
||||
ax.imshow(
|
||||
pdata.T,
|
||||
norm=norm,
|
||||
cmap=cm,
|
||||
aspect='auto',
|
||||
extent=[t[0], t[-1], bins[0], bins[-1]],
|
||||
origin='lower',
|
||||
interpolation='antialiased',
|
||||
)
|
||||
|
||||
ax.set_xlim([0.0, max_freq])
|
||||
ax.set_ylabel('Time (s)')
|
||||
ax.set_xlabel('Frequency (Hz)')
|
||||
|
||||
# Add peak lines to the spectrogram to hint at the peaks found in the first graph
|
||||
if peaks is not None:
|
||||
for idx, peak in enumerate(peaks):
|
||||
ax.axvline(peak, color='cyan', linestyle='dotted', linewidth=1)
|
||||
ax.annotate(
|
||||
f'Peak {idx+1}',
|
||||
(peak, bins[-1] * 0.9),
|
||||
textcoords='data',
|
||||
color='cyan',
|
||||
rotation=90,
|
||||
fontsize=10,
|
||||
verticalalignment='top',
|
||||
horizontalalignment='right',
|
||||
)
|
||||
|
||||
return
|
||||
|
||||
|
||||
######################################################################
|
||||
# Startup and main routines
|
||||
######################################################################
|
||||
|
||||
|
||||
def shaper_calibration(lognames, klipperdir='~/klipper', max_smoothing=None, scv=5.0, max_freq=200.0, st_version=None):
|
||||
global shaper_calibrate
|
||||
shaper_calibrate = setup_klipper_import(klipperdir)
|
||||
|
||||
# Parse data from the log files while ignoring CSVs in the wrong format
|
||||
datas = [data for data in (parse_log(fn) for fn in lognames) if data is not None]
|
||||
if len(datas) > 1:
|
||||
ConsoleOutput.print('Warning: incorrect number of .csv files detected. Only the first one will be used!')
|
||||
|
||||
# Compute shapers, PSD outputs and spectrogram
|
||||
performance_shaper, shapers, calibration_data, fr, zeta, compat = calibrate_shaper(
|
||||
datas[0], max_smoothing, scv, max_freq
|
||||
)
|
||||
pdata, bins, t = compute_spectrogram(datas[0])
|
||||
del datas
|
||||
|
||||
# Select only the relevant part of the PSD data
|
||||
freqs = calibration_data.freq_bins
|
||||
calibration_data.psd_sum = calibration_data.psd_sum[freqs <= max_freq]
|
||||
calibration_data.psd_x = calibration_data.psd_x[freqs <= max_freq]
|
||||
calibration_data.psd_y = calibration_data.psd_y[freqs <= max_freq]
|
||||
calibration_data.psd_z = calibration_data.psd_z[freqs <= max_freq]
|
||||
calibration_data.freqs = freqs[freqs <= max_freq]
|
||||
|
||||
# Peak detection algorithm
|
||||
peaks_threshold = [
|
||||
PEAKS_DETECTION_THRESHOLD * calibration_data.psd_sum.max(),
|
||||
PEAKS_EFFECT_THRESHOLD * calibration_data.psd_sum.max(),
|
||||
]
|
||||
num_peaks, peaks, peaks_freqs = detect_peaks(calibration_data.psd_sum, calibration_data.freqs, peaks_threshold[0])
|
||||
|
||||
# Print the peaks info in the console
|
||||
peak_freqs_formated = ['{:.1f}'.format(f) for f in peaks_freqs]
|
||||
num_peaks_above_effect_threshold = np.sum(calibration_data.psd_sum[peaks] > peaks_threshold[1])
|
||||
ConsoleOutput.print(
|
||||
'\nPeaks detected on the graph: %d @ %s Hz (%d above effect threshold)'
|
||||
% (num_peaks, ', '.join(map(str, peak_freqs_formated)), num_peaks_above_effect_threshold)
|
||||
)
|
||||
|
||||
# Create graph layout
|
||||
fig, (ax1, ax2) = plt.subplots(
|
||||
2,
|
||||
1,
|
||||
gridspec_kw={
|
||||
'height_ratios': [4, 3],
|
||||
'bottom': 0.050,
|
||||
'top': 0.890,
|
||||
'left': 0.085,
|
||||
'right': 0.966,
|
||||
'hspace': 0.169,
|
||||
'wspace': 0.200,
|
||||
},
|
||||
)
|
||||
fig.set_size_inches(8.3, 11.6)
|
||||
|
||||
# Add a title with some test info
|
||||
title_line1 = 'INPUT SHAPER CALIBRATION TOOL'
|
||||
fig.text(
|
||||
0.12, 0.965, title_line1, ha='left', va='bottom', fontsize=20, color=KLIPPAIN_COLORS['purple'], weight='bold'
|
||||
)
|
||||
try:
|
||||
filename_parts = (lognames[0].split('/')[-1]).split('_')
|
||||
dt = datetime.strptime(f'{filename_parts[1]} {filename_parts[2]}', '%Y%m%d %H%M%S')
|
||||
title_line2 = dt.strftime('%x %X') + ' -- ' + filename_parts[3].upper().split('.')[0] + ' axis'
|
||||
if compat:
|
||||
title_line3 = '| Compatibility mode with older Klipper,'
|
||||
title_line4 = '| and no custom S&T parameters are used!'
|
||||
else:
|
||||
title_line3 = '| Square corner velocity: ' + str(scv) + 'mm/s'
|
||||
title_line4 = '| Max allowed smoothing: ' + str(max_smoothing)
|
||||
except Exception:
|
||||
ConsoleOutput.print('Warning: CSV filename looks different than expected (%s)' % (lognames[0]))
|
||||
title_line2 = lognames[0].split('/')[-1]
|
||||
title_line3 = ''
|
||||
title_line4 = ''
|
||||
fig.text(0.12, 0.957, title_line2, ha='left', va='top', fontsize=16, color=KLIPPAIN_COLORS['dark_purple'])
|
||||
fig.text(0.58, 0.960, title_line3, ha='left', va='top', fontsize=10, color=KLIPPAIN_COLORS['dark_purple'])
|
||||
fig.text(0.58, 0.946, title_line4, ha='left', va='top', fontsize=10, color=KLIPPAIN_COLORS['dark_purple'])
|
||||
|
||||
# Plot the graphs
|
||||
plot_freq_response(
|
||||
ax1, calibration_data, shapers, performance_shaper, peaks, peaks_freqs, peaks_threshold, fr, zeta, max_freq
|
||||
)
|
||||
plot_spectrogram(ax2, t, bins, pdata, peaks_freqs, max_freq)
|
||||
|
||||
# Adding a small Klippain logo to the top left corner of the figure
|
||||
ax_logo = fig.add_axes([0.001, 0.8995, 0.1, 0.1], anchor='NW')
|
||||
ax_logo.imshow(plt.imread(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'klippain.png')))
|
||||
ax_logo.axis('off')
|
||||
|
||||
# Adding Shake&Tune version in the top right corner
|
||||
if st_version != 'unknown':
|
||||
fig.text(0.995, 0.985, st_version, ha='right', va='bottom', fontsize=8, color=KLIPPAIN_COLORS['purple'])
|
||||
|
||||
return fig
|
||||
|
||||
|
||||
def main():
|
||||
# Parse command-line arguments
|
||||
usage = '%prog [options] <logs>'
|
||||
opts = optparse.OptionParser(usage)
|
||||
opts.add_option('-o', '--output', type='string', dest='output', default=None, help='filename of output graph')
|
||||
opts.add_option('-f', '--max_freq', type='float', default=200.0, help='maximum frequency to graph')
|
||||
opts.add_option('-s', '--max_smoothing', type='float', default=None, help='maximum shaper smoothing to allow')
|
||||
opts.add_option(
|
||||
'--scv', '--square_corner_velocity', type='float', dest='scv', default=5.0, help='square corner velocity'
|
||||
)
|
||||
opts.add_option(
|
||||
'-k', '--klipper_dir', type='string', dest='klipperdir', default='~/klipper', help='main klipper directory'
|
||||
)
|
||||
options, args = opts.parse_args()
|
||||
if len(args) < 1:
|
||||
opts.error('Incorrect number of arguments')
|
||||
if options.output is None:
|
||||
opts.error('You must specify an output file.png to use the script (option -o)')
|
||||
if options.max_smoothing is not None and options.max_smoothing < 0.05:
|
||||
opts.error('Too small max_smoothing specified (must be at least 0.05)')
|
||||
|
||||
fig = shaper_calibration(args, options.klipperdir, options.max_smoothing, options.scv, options.max_freq)
|
||||
fig.savefig(options.output, dpi=150)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
841
shaketune/graph_creators/graph_vibrations.py
Normal file
@@ -0,0 +1,841 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
##################################################
|
||||
#### DIRECTIONAL VIBRATIONS PLOTTING SCRIPT ######
|
||||
##################################################
|
||||
# Written by Frix_x#0161 #
|
||||
|
||||
import math
|
||||
import optparse
|
||||
import os
|
||||
import re
|
||||
from collections import defaultdict
|
||||
from datetime import datetime
|
||||
|
||||
import matplotlib
|
||||
import matplotlib.font_manager
|
||||
import matplotlib.gridspec
|
||||
import matplotlib.pyplot as plt
|
||||
import matplotlib.ticker
|
||||
import numpy as np
|
||||
|
||||
matplotlib.use('Agg')
|
||||
|
||||
from ..helpers.common_func import (
|
||||
compute_mechanical_parameters,
|
||||
detect_peaks,
|
||||
identify_low_energy_zones,
|
||||
parse_log,
|
||||
setup_klipper_import,
|
||||
)
|
||||
from ..helpers.console_output import ConsoleOutput
|
||||
|
||||
PEAKS_DETECTION_THRESHOLD = 0.05
|
||||
PEAKS_RELATIVE_HEIGHT_THRESHOLD = 0.04
|
||||
CURVE_SIMILARITY_SIGMOID_K = 0.5
|
||||
SPEEDS_VALLEY_DETECTION_THRESHOLD = 0.7 # Lower is more sensitive
|
||||
SPEEDS_AROUND_PEAK_DELETION = 3 # to delete +-3mm/s around a peak
|
||||
ANGLES_VALLEY_DETECTION_THRESHOLD = 1.1 # Lower is more sensitive
|
||||
|
||||
KLIPPAIN_COLORS = {
|
||||
'purple': '#70088C',
|
||||
'orange': '#FF8D32',
|
||||
'dark_purple': '#150140',
|
||||
'dark_orange': '#F24130',
|
||||
'red_pink': '#F2055C',
|
||||
}
|
||||
|
||||
|
||||
######################################################################
|
||||
# Computation
|
||||
######################################################################
|
||||
|
||||
|
||||
# Call to the official Klipper input shaper object to do the PSD computation
|
||||
def calc_freq_response(data):
|
||||
helper = shaper_calibrate.ShaperCalibrate(printer=None)
|
||||
return helper.process_accelerometer_data(data)
|
||||
|
||||
|
||||
# Calculate motor frequency profiles based on the Power Spectral Density (PSD) measured at the machine kinematics'
|
||||
# main angles, and then create a global motor profile as a weighted average (by their own vibration energy) of all calculated profiles
|
||||
def compute_motor_profiles(freqs, psds, all_angles_energy, measured_angles=None, energy_amplification_factor=2):
|
||||
if measured_angles is None:
|
||||
measured_angles = [0, 90]
|
||||
|
||||
motor_profiles = {}
|
||||
weighted_sum_profiles = np.zeros_like(freqs)
|
||||
total_weight = 0
|
||||
conv_filter = np.ones(20) / 20
|
||||
|
||||
# Create the PSD motor profiles for each angle
|
||||
for angle in measured_angles:
|
||||
# Calculate the sum of PSDs for the current angle and then convolve
|
||||
sum_curve = np.sum(np.array([psds[angle][speed] for speed in psds[angle]]), axis=0)
|
||||
motor_profiles[angle] = np.convolve(sum_curve / len(psds[angle]), conv_filter, mode='same')
|
||||
|
||||
# Calculate weights
|
||||
angle_energy = (
|
||||
all_angles_energy[angle] ** energy_amplification_factor
|
||||
) # First weighting factor is based on the total vibrations of the machine at the specified angle
|
||||
curve_area = (
|
||||
np.trapz(motor_profiles[angle], freqs) ** energy_amplification_factor
|
||||
) # Additional weighting factor is based on the area under the current motor profile at this specified angle
|
||||
total_angle_weight = angle_energy * curve_area
|
||||
|
||||
# Update weighted sum profiles to get the global motor profile
|
||||
weighted_sum_profiles += motor_profiles[angle] * total_angle_weight
|
||||
total_weight += total_angle_weight
|
||||
|
||||
# Creating a global average motor profile that is the weighted average of all the PSD motor profiles
|
||||
global_motor_profile = weighted_sum_profiles / total_weight if total_weight != 0 else weighted_sum_profiles
|
||||
|
||||
return motor_profiles, global_motor_profile
|
||||
|
||||
|
||||
# Since it was discovered that there is no non-linear mixing in the stepper "steps" vibrations, instead of measuring
|
||||
# the effect of each speed at each angle, this function simplifies it by using only the main motor axes (X/Y for Cartesian
|
||||
# printers and A/B for CoreXY) measurements and projects each point onto the [0, 360] degrees range using trigonometry
|
||||
# to "sum" the vibration impact of each axis at every points of the generated spectrogram. The result is very similar at the end.
|
||||
def compute_dir_speed_spectrogram(measured_speeds, data, kinematics='cartesian', measured_angles=None):
|
||||
if measured_angles is None:
|
||||
measured_angles = [0, 90]
|
||||
|
||||
# We want to project the motor vibrations measured on their own axes on the [0, 360] range
|
||||
spectrum_angles = np.linspace(0, 360, 720) # One point every 0.5 degrees
|
||||
spectrum_speeds = np.linspace(min(measured_speeds), max(measured_speeds), len(measured_speeds) * 6)
|
||||
spectrum_vibrations = np.zeros((len(spectrum_angles), len(spectrum_speeds)))
|
||||
|
||||
def get_interpolated_vibrations(data, speed, speeds):
|
||||
idx = np.clip(np.searchsorted(speeds, speed, side='left'), 1, len(speeds) - 1)
|
||||
lower_speed = speeds[idx - 1]
|
||||
upper_speed = speeds[idx]
|
||||
lower_vibrations = data.get(lower_speed, 0)
|
||||
upper_vibrations = data.get(upper_speed, 0)
|
||||
return lower_vibrations + (speed - lower_speed) * (upper_vibrations - lower_vibrations) / (
|
||||
upper_speed - lower_speed
|
||||
)
|
||||
|
||||
# Precompute trigonometric values and constant before the loop
|
||||
angle_radians = np.deg2rad(spectrum_angles)
|
||||
cos_vals = np.cos(angle_radians)
|
||||
sin_vals = np.sin(angle_radians)
|
||||
sqrt_2_inv = 1 / math.sqrt(2)
|
||||
|
||||
# Compute the spectrum vibrations for each angle and speed combination
|
||||
for target_angle_idx, (cos_val, sin_val) in enumerate(zip(cos_vals, sin_vals)):
|
||||
for target_speed_idx, target_speed in enumerate(spectrum_speeds):
|
||||
if kinematics == 'cartesian':
|
||||
speed_1 = np.abs(target_speed * cos_val)
|
||||
speed_2 = np.abs(target_speed * sin_val)
|
||||
elif kinematics == 'corexy':
|
||||
speed_1 = np.abs(target_speed * (cos_val + sin_val) * sqrt_2_inv)
|
||||
speed_2 = np.abs(target_speed * (cos_val - sin_val) * sqrt_2_inv)
|
||||
|
||||
vibrations_1 = get_interpolated_vibrations(data[measured_angles[0]], speed_1, measured_speeds)
|
||||
vibrations_2 = get_interpolated_vibrations(data[measured_angles[1]], speed_2, measured_speeds)
|
||||
spectrum_vibrations[target_angle_idx, target_speed_idx] = vibrations_1 + vibrations_2
|
||||
|
||||
return spectrum_angles, spectrum_speeds, spectrum_vibrations
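# Worked example of the projection above (illustrative numbers only, not part of the computation): for a target
# toolhead speed of 100 mm/s at a 45 degree angle, the Cartesian case yields speed_1 = speed_2 = |100 * cos(45)| ~= 70.7 mm/s
# (both X and Y motors contribute), while the CoreXY case yields speed_1 = |100 * (cos45 + sin45)| / sqrt(2) = 100 mm/s
# and speed_2 = 0 mm/s (only the A motor moves). Each spectrogram point then sums the vibrations measured at those two motor speeds.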
|
||||
|
||||
|
||||
def compute_angle_powers(spectrogram_data):
|
||||
angles_powers = np.trapz(spectrogram_data, axis=1)
|
||||
|
||||
# Since we want to plot it on a continuous polar plot later on, we need to append parts of
|
||||
# the array to the start and end of it to smooth transitions when doing the convolution
|
||||
# and get the same value at modulo 360. Then we return the array without the extras
|
||||
extended_angles_powers = np.concatenate([angles_powers[-9:], angles_powers, angles_powers[:9]])
|
||||
convolved_extended = np.convolve(extended_angles_powers, np.ones(15) / 15, mode='same')
|
||||
|
||||
return convolved_extended[9:-9]
|
||||
|
||||
|
||||
def compute_speed_powers(spectrogram_data, smoothing_window=15):
|
||||
min_values = np.amin(spectrogram_data, axis=0)
|
||||
max_values = np.amax(spectrogram_data, axis=0)
|
||||
var_values = np.var(spectrogram_data, axis=0)
|
||||
|
||||
# rescale the variance to the same range as max_values to plot it on the same graph
|
||||
var_values = var_values / var_values.max() * max_values.max()
|
||||
|
||||
# Create a vibration metric that is the product of the max values and the variance to quantify the best
|
||||
# speeds that have both a low global energy level and a level that stays consistent at every angle
|
||||
vibration_metric = max_values * var_values
|
||||
|
||||
# utility function to pad and smooth the data avoiding edge effects
|
||||
conv_filter = np.ones(smoothing_window) / smoothing_window
|
||||
window = int(smoothing_window / 2)
|
||||
|
||||
def pad_and_smooth(data):
|
||||
data_padded = np.pad(data, (window,), mode='edge')
|
||||
smoothed_data = np.convolve(data_padded, conv_filter, mode='valid')
|
||||
return smoothed_data
|
||||
|
||||
# Stack the arrays and apply padding and smoothing in batch
|
||||
data_arrays = np.stack([min_values, max_values, var_values, vibration_metric])
|
||||
smoothed_arrays = np.array([pad_and_smooth(data) for data in data_arrays])
|
||||
|
||||
return smoothed_arrays
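# Note (informal reading of the metric above): because the vibration metric is the *product* of the per-speed maximum
# and the (rescaled) variance across angles, a speed only scores low when it is both globally quiet and uniformly quiet
# at every angle; a speed that is quiet on one axis but noisy on another is still penalized.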
|
||||
|
||||
|
||||
# Function that filters and splits the good_speed ranges. The goal is to remove some zones around
|
||||
# additional small detected peaks in order to suppress them: if there is a peak, even a low one,
|
||||
# it's probably due to a crossing in the motor resonance pattern that still needs to be removed
|
||||
def filter_and_split_ranges(all_speeds, good_speeds, peak_speed_indices, deletion_range):
|
||||
# Process each range to filter out and split based on peak indices
|
||||
filtered_good_speeds = []
|
||||
for start, end, energy in good_speeds:
|
||||
start_speed, end_speed = all_speeds[start], all_speeds[end]
|
||||
# Identify peaks that intersect with the current speed range
|
||||
intersecting_peaks_indices = [
|
||||
idx for speed, idx in peak_speed_indices.items() if start_speed <= speed <= end_speed
|
||||
]
|
||||
|
||||
if not intersecting_peaks_indices:
|
||||
filtered_good_speeds.append((start, end, energy))
|
||||
else:
|
||||
intersecting_peaks_indices.sort()
|
||||
current_start = start
|
||||
|
||||
for peak_index in intersecting_peaks_indices:
|
||||
before_peak_end = max(current_start, peak_index - deletion_range)
|
||||
if current_start < before_peak_end:
|
||||
filtered_good_speeds.append((current_start, before_peak_end, energy))
|
||||
current_start = peak_index + deletion_range + 1
|
||||
|
||||
if current_start < end:
|
||||
filtered_good_speeds.append((current_start, end, energy))
|
||||
|
||||
# Sorting by start point once and then merge overlapping ranges
|
||||
sorted_ranges = sorted(filtered_good_speeds, key=lambda x: x[0])
|
||||
merged_ranges = [sorted_ranges[0]]
|
||||
|
||||
for current in sorted_ranges[1:]:
|
||||
last_merged_start, last_merged_end, last_merged_energy = merged_ranges[-1]
|
||||
if current[0] <= last_merged_end:
|
||||
new_end = max(last_merged_end, current[1])
|
||||
new_energy = min(last_merged_energy, current[2])
|
||||
merged_ranges[-1] = (last_merged_start, new_end, new_energy)
|
||||
else:
|
||||
merged_ranges.append(current)
|
||||
|
||||
return merged_ranges
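# Example (illustrative, hypothetical numbers): with all_speeds spaced every 0.5 mm/s, a deletion_range of 6 indices
# (~3 mm/s) and a good range covering indices 100-200 that contains a peak at index 150, the range is split into
# (100, 144) and (157, 200); overlapping sub-ranges produced by nearby peaks are then merged back together above.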
|
||||
|
||||
|
||||
# This function allows the computation of a symmetry score that reflects the spectrogram's apparent symmetry between
|
||||
# the measured axes, based on both the shape of the signal and the energy level consistency across both sides of the signal
|
||||
def compute_symmetry_analysis(all_angles, spectrogram_data, measured_angles=None):
|
||||
if measured_angles is None:
|
||||
measured_angles = [0, 90]
|
||||
|
||||
total_spectrogram_angles = len(all_angles)
|
||||
half_spectrogram_angles = total_spectrogram_angles // 2
|
||||
|
||||
# Extend the spectrogram by adding half to the beginning (in order to not get an out of bounds error later)
|
||||
extended_spectrogram = np.concatenate((spectrogram_data[-half_spectrogram_angles:], spectrogram_data), axis=0)
|
||||
|
||||
# Calculate the split index directly within the slicing
|
||||
midpoint_angle = np.mean(measured_angles)
|
||||
split_index = int(midpoint_angle * (total_spectrogram_angles / 360) + half_spectrogram_angles)
|
||||
half_segment_length = half_spectrogram_angles // 2
|
||||
|
||||
# Slice out the two segments of the spectrogram and flatten them for comparison
|
||||
segment_1_flattened = extended_spectrogram[split_index - half_segment_length : split_index].flatten()
|
||||
segment_2_flattened = extended_spectrogram[split_index : split_index + half_segment_length].flatten()
|
||||
|
||||
# Compute the correlation coefficient between the two segments of spectrogram
|
||||
correlation = np.corrcoef(segment_1_flattened, segment_2_flattened)[0, 1]
|
||||
percentage_correlation_biased = (100 * np.power(correlation, 0.75)) + 10
|
||||
|
||||
return np.clip(percentage_correlation_biased, 0, 100)  # clamp the score to a valid 0-100% range
|
||||
|
||||
|
||||
######################################################################
|
||||
# Graphing
|
||||
######################################################################
|
||||
|
||||
|
||||
def plot_angle_profile_polar(ax, angles, angles_powers, low_energy_zones, symmetry_factor):
|
||||
angles_radians = np.deg2rad(angles)
|
||||
|
||||
ax.set_title('Polar angle energy profile', fontsize=14, color=KLIPPAIN_COLORS['dark_orange'], weight='bold')
|
||||
ax.set_theta_zero_location('E')
|
||||
ax.set_theta_direction(1)
|
||||
|
||||
ax.plot(angles_radians, angles_powers, color=KLIPPAIN_COLORS['purple'], zorder=5)
|
||||
ax.fill(angles_radians, angles_powers, color=KLIPPAIN_COLORS['purple'], alpha=0.3)
|
||||
ax.set_xlim([0, np.deg2rad(360)])
|
||||
ymax = angles_powers.max() * 1.05
|
||||
ax.set_ylim([0, ymax])
|
||||
ax.set_thetagrids([theta * 15 for theta in range(360 // 15)])
|
||||
|
||||
ax.text(
|
||||
0,
|
||||
0,
|
||||
f'Symmetry: {symmetry_factor:.1f}%',
|
||||
ha='center',
|
||||
va='center',
|
||||
color=KLIPPAIN_COLORS['red_pink'],
|
||||
fontsize=12,
|
||||
fontweight='bold',
|
||||
zorder=6,
|
||||
)
|
||||
|
||||
for _, (start, end, _) in enumerate(low_energy_zones):
|
||||
ax.axvline(
|
||||
angles_radians[start],
|
||||
angles_powers[start] / ymax,
|
||||
color=KLIPPAIN_COLORS['red_pink'],
|
||||
linestyle='dotted',
|
||||
linewidth=1.5,
|
||||
)
|
||||
ax.axvline(
|
||||
angles_radians[end],
|
||||
angles_powers[end] / ymax,
|
||||
color=KLIPPAIN_COLORS['red_pink'],
|
||||
linestyle='dotted',
|
||||
linewidth=1.5,
|
||||
)
|
||||
ax.fill_between(
|
||||
angles_radians[start:end], angles_powers[start:end], angles_powers.max() * 1.05, color='green', alpha=0.2
|
||||
)
|
||||
|
||||
ax.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
|
||||
ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
|
||||
ax.grid(which='major', color='grey')
|
||||
ax.grid(which='minor', color='lightgrey')
|
||||
|
||||
# Polar plot doesn't follow the gridspec margin, so we adjust it manually here
|
||||
pos = ax.get_position()
|
||||
new_pos = [pos.x0 - 0.01, pos.y0 - 0.01, pos.width, pos.height]
|
||||
ax.set_position(new_pos)
|
||||
|
||||
return
|
||||
|
||||
|
||||
def plot_global_speed_profile(
|
||||
ax,
|
||||
all_speeds,
|
||||
sp_min_energy,
|
||||
sp_max_energy,
|
||||
sp_variance_energy,
|
||||
vibration_metric,
|
||||
num_peaks,
|
||||
peaks,
|
||||
low_energy_zones,
|
||||
):
|
||||
ax.set_title('Global speed energy profile', fontsize=14, color=KLIPPAIN_COLORS['dark_orange'], weight='bold')
|
||||
ax.set_xlabel('Speed (mm/s)')
|
||||
ax.set_ylabel('Energy')
|
||||
ax2 = ax.twinx()
|
||||
ax2.yaxis.set_visible(False)
|
||||
|
||||
ax.plot(all_speeds, sp_min_energy, label='Minimum', color=KLIPPAIN_COLORS['dark_purple'], zorder=5)
|
||||
ax.plot(all_speeds, sp_max_energy, label='Maximum', color=KLIPPAIN_COLORS['purple'], zorder=5)
|
||||
ax.plot(all_speeds, sp_variance_energy, label='Variance', color=KLIPPAIN_COLORS['orange'], zorder=5, linestyle='--')
|
||||
ax2.plot(
|
||||
all_speeds,
|
||||
vibration_metric,
|
||||
label=f'Vibration metric ({num_peaks} bad peaks)',
|
||||
color=KLIPPAIN_COLORS['red_pink'],
|
||||
zorder=5,
|
||||
)
|
||||
|
||||
ax.set_xlim([all_speeds.min(), all_speeds.max()])
|
||||
ax.set_ylim([0, sp_max_energy.max() * 1.15])
|
||||
|
||||
y2min = -(vibration_metric.max() * 0.025)
|
||||
y2max = vibration_metric.max() * 1.07
|
||||
ax2.set_ylim([y2min, y2max])
|
||||
|
||||
if peaks is not None and len(peaks) > 0:
|
||||
ax2.plot(all_speeds[peaks], vibration_metric[peaks], 'x', color='black', markersize=8, zorder=10)
|
||||
for idx, peak in enumerate(peaks):
|
||||
ax2.annotate(
|
||||
f'{idx+1}',
|
||||
(all_speeds[peak], vibration_metric[peak]),
|
||||
textcoords='offset points',
|
||||
xytext=(5, 5),
|
||||
fontweight='bold',
|
||||
ha='left',
|
||||
fontsize=13,
|
||||
color=KLIPPAIN_COLORS['red_pink'],
|
||||
zorder=10,
|
||||
)
|
||||
|
||||
for idx, (start, end, _) in enumerate(low_energy_zones):
|
||||
# ax2.axvline(all_speeds[start], color=KLIPPAIN_COLORS['red_pink'], linestyle='dotted', linewidth=1.5, zorder=8)
|
||||
# ax2.axvline(all_speeds[end], color=KLIPPAIN_COLORS['red_pink'], linestyle='dotted', linewidth=1.5, zorder=8)
|
||||
ax2.fill_between(
|
||||
all_speeds[start:end],
|
||||
y2min,
|
||||
vibration_metric[start:end],
|
||||
color='green',
|
||||
alpha=0.2,
|
||||
label=f'Zone {idx+1}: {all_speeds[start]:.1f} to {all_speeds[end]:.1f} mm/s',
|
||||
)
|
||||
|
||||
ax.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
|
||||
ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
|
||||
ax.grid(which='major', color='grey')
|
||||
ax.grid(which='minor', color='lightgrey')
|
||||
|
||||
fontP = matplotlib.font_manager.FontProperties()
|
||||
fontP.set_size('small')
|
||||
ax.legend(loc='upper left', prop=fontP)
|
||||
ax2.legend(loc='upper right', prop=fontP)
|
||||
|
||||
return
|
||||
|
||||
|
||||
def plot_angular_speed_profiles(ax, speeds, angles, spectrogram_data, kinematics='cartesian'):
|
||||
ax.set_title('Angular speed energy profiles', fontsize=14, color=KLIPPAIN_COLORS['dark_orange'], weight='bold')
|
||||
ax.set_xlabel('Speed (mm/s)')
|
||||
ax.set_ylabel('Energy')
|
||||
|
||||
# Define mappings for labels and colors to simplify plotting commands
|
||||
angle_settings = {
|
||||
0: ('X (0 deg)', 'purple', 10),
|
||||
90: ('Y (90 deg)', 'dark_purple', 5),
|
||||
45: ('A (45 deg)' if kinematics == 'corexy' else '45 deg', 'orange', 10),
|
||||
135: ('B (135 deg)' if kinematics == 'corexy' else '135 deg', 'dark_orange', 5),
|
||||
}
|
||||
|
||||
# Plot each angle using settings from the dictionary
|
||||
for angle, (label, color, zorder) in angle_settings.items():
|
||||
idx = np.searchsorted(angles, angle, side='left')
|
||||
ax.plot(speeds, spectrogram_data[idx], label=label, color=KLIPPAIN_COLORS[color], zorder=zorder)
|
||||
|
||||
ax.set_xlim([speeds.min(), speeds.max()])
|
||||
max_value = max(spectrogram_data[angle].max() for angle in [0, 45, 90, 135])
|
||||
ax.set_ylim([0, max_value * 1.1])
|
||||
|
||||
ax.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
|
||||
ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
|
||||
ax.grid(which='major', color='grey')
|
||||
ax.grid(which='minor', color='lightgrey')
|
||||
|
||||
fontP = matplotlib.font_manager.FontProperties()
|
||||
fontP.set_size('small')
|
||||
ax.legend(loc='upper right', prop=fontP)
|
||||
|
||||
return
|
||||
|
||||
|
||||
def plot_motor_profiles(ax, freqs, main_angles, motor_profiles, global_motor_profile, max_freq):
|
||||
ax.set_title('Motor frequency profile', fontsize=14, color=KLIPPAIN_COLORS['dark_orange'], weight='bold')
|
||||
ax.set_ylabel('Energy')
|
||||
ax.set_xlabel('Frequency (Hz)')
|
||||
|
||||
ax2 = ax.twinx()
|
||||
ax2.yaxis.set_visible(False)
|
||||
|
||||
# Global weighted average motor profile
|
||||
ax.plot(freqs, global_motor_profile, label='Combined', color=KLIPPAIN_COLORS['purple'], zorder=5)
|
||||
max_value = global_motor_profile.max()
|
||||
|
||||
# Mapping of angles to axis names
|
||||
angle_settings = {0: 'X', 90: 'Y', 45: 'A', 135: 'B'}
|
||||
|
||||
# And then plot the motor profiles at each measured angle
|
||||
for angle in main_angles:
|
||||
profile_max = motor_profiles[angle].max()
|
||||
if profile_max > max_value:
|
||||
max_value = profile_max
|
||||
label = f'{angle_settings[angle]} ({angle} deg)' if angle in angle_settings else f'{angle} deg'
|
||||
ax.plot(freqs, motor_profiles[angle], linestyle='--', label=label, zorder=2)
|
||||
|
||||
ax.set_xlim([0, max_freq])
|
||||
ax.set_ylim([0, max_value * 1.1])
|
||||
ax.ticklabel_format(axis='y', style='scientific', scilimits=(0, 0))
|
||||
|
||||
# Then add the motor resonance peak to the graph and print some info about it
|
||||
motor_fr, motor_zeta, motor_res_idx, lowfreq_max = compute_mechanical_parameters(global_motor_profile, freqs, 30)
|
||||
if lowfreq_max:
|
||||
ConsoleOutput.print(
|
||||
'[WARNING] There are a lot of low frequency vibrations that can alter the readings. This is probably due to the test being performed at too high an acceleration!'
|
||||
)
|
||||
ConsoleOutput.print(
|
||||
'Try lowering the ACCEL value and/or increasing the SIZE value before restarting the macro to ensure that only constant speeds are being recorded and that the dynamic behavior of the machine is not affecting the measurements'
|
||||
)
|
||||
if motor_zeta is not None:
|
||||
ConsoleOutput.print(
|
||||
'Motors have a main resonant frequency at %.1fHz with an estimated damping ratio of %.3f'
|
||||
% (motor_fr, motor_zeta)
|
||||
)
|
||||
else:
|
||||
ConsoleOutput.print(
|
||||
'Motors have a main resonant frequency at %.1fHz but it was impossible to estimate a damping ratio.'
|
||||
% (motor_fr)
|
||||
)
|
||||
|
||||
ax.plot(freqs[motor_res_idx], global_motor_profile[motor_res_idx], 'x', color='black', markersize=10)
|
||||
ax.annotate(
|
||||
'R',
|
||||
(freqs[motor_res_idx], global_motor_profile[motor_res_idx]),
|
||||
textcoords='offset points',
|
||||
xytext=(15, 5),
|
||||
ha='right',
|
||||
fontsize=14,
|
||||
color=KLIPPAIN_COLORS['red_pink'],
|
||||
weight='bold',
|
||||
)
|
||||
|
||||
ax2.plot([], [], ' ', label='Motor resonant frequency (ω0): %.1fHz' % (motor_fr))
|
||||
if motor_zeta is not None:
|
||||
ax2.plot([], [], ' ', label='Motor damping ratio (ζ): %.3f' % (motor_zeta))
|
||||
else:
|
||||
ax2.plot([], [], ' ', label='No damping ratio computed')
|
||||
|
||||
ax.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
|
||||
ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
|
||||
ax.grid(which='major', color='grey')
|
||||
ax.grid(which='minor', color='lightgrey')
|
||||
|
||||
fontP = matplotlib.font_manager.FontProperties()
|
||||
fontP.set_size('small')
|
||||
ax.legend(loc='upper left', prop=fontP)
|
||||
ax2.legend(loc='upper right', prop=fontP)
|
||||
|
||||
return
|
||||
|
||||
|
||||
def plot_vibration_spectrogram_polar(ax, angles, speeds, spectrogram_data):
|
||||
angles_radians = np.radians(angles)
|
||||
|
||||
# Assuming speeds defines the radial distance from the center, we need to create a meshgrid
|
||||
# for both angles and speeds to map the spectrogram data onto a polar plot correctly
|
||||
radius, theta = np.meshgrid(speeds, angles_radians)
|
||||
|
||||
ax.set_title(
|
||||
'Polar vibrations heatmap', fontsize=14, color=KLIPPAIN_COLORS['dark_orange'], weight='bold', va='bottom'
|
||||
)
|
||||
ax.set_theta_zero_location('E')
|
||||
ax.set_theta_direction(1)
|
||||
|
||||
ax.pcolormesh(theta, radius, spectrogram_data, norm=matplotlib.colors.LogNorm(), cmap='inferno', shading='auto')
|
||||
ax.set_thetagrids([theta * 15 for theta in range(360 // 15)])
|
||||
ax.tick_params(axis='y', which='both', colors='white', labelsize='medium')
|
||||
ax.set_ylim([0, max(speeds)])
|
||||
|
||||
# Polar plot doesn't follow the gridspec margin, so we adjust it manually here
|
||||
pos = ax.get_position()
|
||||
new_pos = [pos.x0 - 0.01, pos.y0 - 0.01, pos.width, pos.height]
|
||||
ax.set_position(new_pos)
|
||||
|
||||
return
|
||||
|
||||
|
||||
def plot_vibration_spectrogram(ax, angles, speeds, spectrogram_data, peaks):
|
||||
ax.set_title('Vibrations heatmap', fontsize=14, color=KLIPPAIN_COLORS['dark_orange'], weight='bold')
|
||||
ax.set_xlabel('Speed (mm/s)')
|
||||
ax.set_ylabel('Angle (deg)')
|
||||
|
||||
ax.imshow(
|
||||
spectrogram_data,
|
||||
norm=matplotlib.colors.LogNorm(),
|
||||
cmap='inferno',
|
||||
aspect='auto',
|
||||
extent=[speeds[0], speeds[-1], angles[0], angles[-1]],
|
||||
origin='lower',
|
||||
interpolation='antialiased',
|
||||
)
|
||||
|
||||
# Add peak lines to the spectrogram to hint at the peaks found in the first graph
|
||||
if peaks is not None and len(peaks) > 0:
|
||||
for idx, peak in enumerate(peaks):
|
||||
ax.axvline(speeds[peak], color='cyan', linewidth=0.75)
|
||||
ax.annotate(
|
||||
f'Peak {idx+1}',
|
||||
(speeds[peak], angles[-1] * 0.9),
|
||||
textcoords='data',
|
||||
color='cyan',
|
||||
rotation=90,
|
||||
fontsize=10,
|
||||
verticalalignment='top',
|
||||
horizontalalignment='right',
|
||||
)
|
||||
|
||||
return
|
||||
|
||||
|
||||
def plot_motor_config_txt(fig, motors, differences):
|
||||
motor_details = [(motors[0], 'X motor'), (motors[1], 'Y motor')]
|
||||
|
||||
distance = 0.12
|
||||
if motors[0].get_property('autotune_enabled'):
|
||||
distance = 0.24
|
||||
config_blocks = [
|
||||
f"| {lbl}: {mot.get_property('motor').upper()} on {mot.get_property('tmc').upper()} @ {mot.get_property('voltage')}V {mot.get_property('run_current')}A"
|
||||
for mot, lbl in motor_details
|
||||
]
|
||||
config_blocks.append('| TMC Autotune enabled')
|
||||
else:
|
||||
config_blocks = [
|
||||
f"| {lbl}: {mot.get_property('tmc').upper()} @ {mot.get_property('run_current')}A"
|
||||
for mot, lbl in motor_details
|
||||
]
|
||||
config_blocks.append('| TMC Autotune not detected')
|
||||
|
||||
for idx, block in enumerate(config_blocks):
|
||||
fig.text(
|
||||
0.40, 0.990 - 0.015 * idx, block, ha='left', va='top', fontsize=10, color=KLIPPAIN_COLORS['dark_purple']
|
||||
)
|
||||
|
||||
tmc_registers = motors[0].get_registers()
|
||||
idx = -1
|
||||
for idx, (register, settings) in enumerate(tmc_registers.items()):
|
||||
settings_str = ' '.join(f'{k}={v}' for k, v in settings.items())
|
||||
tmc_block = f'| {register.upper()}: {settings_str}'
|
||||
fig.text(
|
||||
0.40 + distance,
|
||||
0.990 - 0.015 * idx,
|
||||
tmc_block,
|
||||
ha='left',
|
||||
va='top',
|
||||
fontsize=10,
|
||||
color=KLIPPAIN_COLORS['dark_purple'],
|
||||
)
|
||||
|
||||
if differences is not None:
|
||||
differences_text = f'| Y motor diff: {differences}'
|
||||
fig.text(
|
||||
0.40 + distance,
|
||||
0.990 - 0.015 * (idx + 1),
|
||||
differences_text,
|
||||
ha='left',
|
||||
va='top',
|
||||
fontsize=10,
|
||||
color=KLIPPAIN_COLORS['dark_purple'],
|
||||
)
|
||||
|
||||
|
||||
######################################################################
|
||||
# Startup and main routines
|
||||
######################################################################
|
||||
|
||||
|
||||
def extract_angle_and_speed(logname):
|
||||
try:
|
||||
match = re.search(r'an(\d+)_\d+sp(\d+)_\d+', os.path.basename(logname))
|
||||
if match:
|
||||
angle = match.group(1)
|
||||
speed = match.group(2)
|
||||
else:
|
||||
raise ValueError(f'File {logname} does not match expected format. Clean your /tmp folder and start again!')
|
||||
except AttributeError as err:
|
||||
raise ValueError(
|
||||
f'File {logname} does not match expected format. Clean your /tmp folder and start again!'
|
||||
) from err
|
||||
return float(angle), float(speed)
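# Example (illustrative filename, not a real measurement): a file named 'vibr_an90_1sp150_2.csv'
# matches the regex above and returns (90.0, 150.0), i.e. a recording taken at a 90 degree angle
# and a 150 mm/s speed.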
|
||||
|
||||
|
||||
def vibrations_profile(
|
||||
lognames, klipperdir='~/klipper', kinematics='cartesian', accel=None, max_freq=1000.0, st_version=None, motors=None
|
||||
):
|
||||
global shaper_calibrate
|
||||
shaper_calibrate = setup_klipper_import(klipperdir)
|
||||
|
||||
if kinematics == 'cartesian':
|
||||
main_angles = [0, 90]
|
||||
elif kinematics == 'corexy':
|
||||
main_angles = [45, 135]
|
||||
else:
|
||||
raise ValueError('Only Cartesian and CoreXY kinematics are supported by this tool at the moment!')
|
||||
|
||||
psds = defaultdict(lambda: defaultdict(list))
|
||||
psds_sum = defaultdict(lambda: defaultdict(list))
|
||||
target_freqs_initialized = False
|
||||
|
||||
for logname in lognames:
|
||||
data = parse_log(logname)
|
||||
if data is None:
|
||||
continue # File is not in the expected format, skip it
|
||||
angle, speed = extract_angle_and_speed(logname)
|
||||
freq_response = calc_freq_response(data)
|
||||
first_freqs = freq_response.freq_bins
|
||||
psd_sum = freq_response.psd_sum
|
||||
|
||||
if not target_freqs_initialized:
|
||||
target_freqs = first_freqs[first_freqs <= max_freq]
|
||||
target_freqs_initialized = True
|
||||
|
||||
psd_sum = psd_sum[first_freqs <= max_freq]
|
||||
first_freqs = first_freqs[first_freqs <= max_freq]
|
||||
|
||||
# Store the interpolated PSD and integral values
|
||||
psds[angle][speed] = np.interp(target_freqs, first_freqs, psd_sum)
|
||||
psds_sum[angle][speed] = np.trapz(psd_sum, first_freqs)
|
||||
|
||||
measured_angles = sorted(psds_sum.keys())
|
||||
measured_speeds = sorted({speed for angle_speeds in psds_sum.values() for speed in angle_speeds.keys()})
|
||||
|
||||
for main_angle in main_angles:
|
||||
if main_angle not in measured_angles:
|
||||
raise ValueError('Measurements not taken at the correct angles for the specified kinematics!')
|
||||
|
||||
# Precompute the variables used in plot functions
|
||||
all_angles, all_speeds, spectrogram_data = compute_dir_speed_spectrogram(
|
||||
measured_speeds, psds_sum, kinematics, main_angles
|
||||
)
|
||||
all_angles_energy = compute_angle_powers(spectrogram_data)
|
||||
sp_min_energy, sp_max_energy, sp_variance_energy, vibration_metric = compute_speed_powers(spectrogram_data)
|
||||
motor_profiles, global_motor_profile = compute_motor_profiles(target_freqs, psds, all_angles_energy, main_angles)
|
||||
|
||||
# symmetry_factor = compute_symmetry_analysis(all_angles, all_angles_energy)
|
||||
symmetry_factor = compute_symmetry_analysis(all_angles, spectrogram_data, main_angles)
|
||||
ConsoleOutput.print(f'Machine estimated vibration symmetry: {symmetry_factor:.1f}%')
|
||||
|
||||
# Analyze low variance ranges of vibration energy across all angles for each speed to identify clean speeds
|
||||
# and highlight them. Also find the peaks to identify speeds to avoid due to high resonances
|
||||
num_peaks, vibration_peaks, peaks_speeds = detect_peaks(
|
||||
vibration_metric,
|
||||
all_speeds,
|
||||
PEAKS_DETECTION_THRESHOLD * vibration_metric.max(),
|
||||
PEAKS_RELATIVE_HEIGHT_THRESHOLD,
|
||||
10,
|
||||
10,
|
||||
)
|
||||
formated_peaks_speeds = ['{:.1f}'.format(pspeed) for pspeed in peaks_speeds]
|
||||
ConsoleOutput.print(
|
||||
'Vibrations peaks detected: %d @ %s mm/s (avoid setting a speed near these values in your slicer print profile)'
|
||||
% (num_peaks, ', '.join(map(str, formated_peaks_speeds)))
|
||||
)
|
||||
|
||||
good_speeds = identify_low_energy_zones(vibration_metric, SPEEDS_VALLEY_DETECTION_THRESHOLD)
|
||||
if good_speeds is not None:
|
||||
deletion_range = int(SPEEDS_AROUND_PEAK_DELETION / (all_speeds[1] - all_speeds[0]))
|
||||
peak_speed_indices = {pspeed: np.where(all_speeds == pspeed)[0][0] for pspeed in set(peaks_speeds)}
|
||||
|
||||
# Filter and split ranges based on peak indices, avoiding overlaps
|
||||
good_speeds = filter_and_split_ranges(all_speeds, good_speeds, peak_speed_indices, deletion_range)
|
||||
|
||||
# Add some logging about the good speeds found
|
||||
ConsoleOutput.print(f'Lowest vibration speeds ({len(good_speeds)} ranges, sorted from best to worst):')
|
||||
for idx, (start, end, _) in enumerate(good_speeds):
|
||||
ConsoleOutput.print(f'{idx+1}: {all_speeds[start]:.1f} to {all_speeds[end]:.1f} mm/s')
|
||||
|
||||
# Identify the angle low-energy valleys (good angle ranges) and print them to the console
|
||||
good_angles = identify_low_energy_zones(all_angles_energy, ANGLES_VALLEY_DETECTION_THRESHOLD)
|
||||
if good_angles is not None:
|
||||
ConsoleOutput.print(f'Lowest vibration angles ({len(good_angles)} ranges, sorted from best to worst):')
|
||||
for idx, (start, end, energy) in enumerate(good_angles):
|
||||
ConsoleOutput.print(
|
||||
f'{idx+1}: {all_angles[start]:.1f}° to {all_angles[end]:.1f}° (mean vibrations energy: {energy:.2f}% of max)'
|
||||
)
|
||||
|
||||
# Create graph layout
|
||||
fig, ((ax1, ax2, ax3), (ax4, ax5, ax6)) = plt.subplots(
|
||||
2,
|
||||
3,
|
||||
gridspec_kw={
|
||||
'height_ratios': [1, 1],
|
||||
'width_ratios': [4, 8, 6],
|
||||
'bottom': 0.050,
|
||||
'top': 0.890,
|
||||
'left': 0.040,
|
||||
'right': 0.985,
|
||||
'hspace': 0.166,
|
||||
'wspace': 0.138,
|
||||
},
|
||||
)
|
||||
|
||||
# Transform ax1 and ax4 into polar plots
|
||||
ax1.remove()
|
||||
ax1 = fig.add_subplot(2, 3, 1, projection='polar')
|
||||
ax4.remove()
|
||||
ax4 = fig.add_subplot(2, 3, 4, projection='polar')
|
||||
|
||||
# Set the global .png figure size
|
||||
fig.set_size_inches(20, 11.5)
|
||||
|
||||
# Add title
|
||||
title_line1 = 'MACHINE VIBRATIONS ANALYSIS TOOL'
|
||||
fig.text(
|
||||
0.060, 0.965, title_line1, ha='left', va='bottom', fontsize=20, color=KLIPPAIN_COLORS['purple'], weight='bold'
|
||||
)
|
||||
try:
|
||||
filename_parts = (lognames[0].split('/')[-1]).split('_')
|
||||
dt = datetime.strptime(f"{filename_parts[1]} {filename_parts[2].split('-')[0]}", '%Y%m%d %H%M%S')
|
||||
title_line2 = dt.strftime('%x %X')
|
||||
if accel is not None:
|
||||
title_line2 += ' at ' + str(accel) + ' mm/s² -- ' + kinematics.upper() + ' kinematics'
|
||||
except Exception:
|
||||
ConsoleOutput.print('Warning: CSV filenames appear to be different than expected (%s)' % (lognames[0]))
|
||||
title_line2 = lognames[0].split('/')[-1]
|
||||
fig.text(0.060, 0.957, title_line2, ha='left', va='top', fontsize=16, color=KLIPPAIN_COLORS['dark_purple'])
|
||||
|
||||
# Add the motor info to the top of the graph
|
||||
if motors is not None and len(motors) == 2:
|
||||
differences = motors[0].compare_to(motors[1])
|
||||
plot_motor_config_txt(fig, motors, differences)
|
||||
if differences is not None and kinematics == 'corexy':
|
||||
ConsoleOutput.print(f'Warning: motors have different TMC configurations!\n{differences}')
|
||||
|
||||
# Plot the graphs
|
||||
plot_angle_profile_polar(ax1, all_angles, all_angles_energy, good_angles, symmetry_factor)
|
||||
plot_vibration_spectrogram_polar(ax4, all_angles, all_speeds, spectrogram_data)
|
||||
|
||||
plot_global_speed_profile(
|
||||
ax2,
|
||||
all_speeds,
|
||||
sp_min_energy,
|
||||
sp_max_energy,
|
||||
sp_variance_energy,
|
||||
vibration_metric,
|
||||
num_peaks,
|
||||
vibration_peaks,
|
||||
good_speeds,
|
||||
)
|
||||
plot_angular_speed_profiles(ax3, all_speeds, all_angles, spectrogram_data, kinematics)
|
||||
plot_vibration_spectrogram(ax5, all_angles, all_speeds, spectrogram_data, vibration_peaks)
|
||||
|
||||
plot_motor_profiles(ax6, target_freqs, main_angles, motor_profiles, global_motor_profile, max_freq)
|
||||
|
||||
# Adding a small Klippain logo to the top left corner of the figure
|
||||
ax_logo = fig.add_axes([0.001, 0.924, 0.075, 0.075], anchor='NW')
|
||||
ax_logo.imshow(plt.imread(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'klippain.png')))
|
||||
ax_logo.axis('off')
|
||||
|
||||
# Adding Shake&Tune version in the top right corner
|
||||
if st_version != 'unknown':
|
||||
fig.text(0.995, 0.985, st_version, ha='right', va='bottom', fontsize=8, color=KLIPPAIN_COLORS['purple'])
|
||||
|
||||
return fig
|
||||
|
||||
|
||||
def main():
|
||||
# Parse command-line arguments
|
||||
usage = '%prog [options] <raw logs>'
|
||||
opts = optparse.OptionParser(usage)
|
||||
opts.add_option('-o', '--output', type='string', dest='output', default=None, help='filename of output graph')
|
||||
opts.add_option(
|
||||
'-c', '--accel', type='int', dest='accel', default=None, help='accel value to be printed on the graph'
|
||||
)
|
||||
opts.add_option('-f', '--max_freq', type='float', default=1000.0, help='maximum frequency to graph')
|
||||
opts.add_option(
|
||||
'-k', '--klipper_dir', type='string', dest='klipperdir', default='~/klipper', help='main klipper directory'
|
||||
)
|
||||
opts.add_option(
|
||||
'-m',
|
||||
'--kinematics',
|
||||
type='string',
|
||||
dest='kinematics',
|
||||
default='cartesian',
|
||||
help='machine kinematics configuration',
|
||||
)
|
||||
options, args = opts.parse_args()
|
||||
if len(args) < 1:
|
||||
opts.error('No CSV file(s) to analyse')
|
||||
if options.output is None:
|
||||
opts.error('You must specify an output file.png to use the script (option -o)')
|
||||
if options.kinematics not in ['cartesian', 'corexy']:
|
||||
opts.error('Only cartesian and corexy kinematics are supported by this tool at the moment!')
|
||||
|
||||
fig = vibrations_profile(args, options.klipperdir, options.kinematics, options.accel, options.max_freq)
|
||||
fig.savefig(options.output, dpi=150)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
BIN
shaketune/graph_creators/klippain.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 607 KiB |
0
shaketune/helpers/__init__.py
Normal file
257
shaketune/helpers/common_func.py
Normal file
@@ -0,0 +1,257 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Common functions for the Shake&Tune package
|
||||
# Written by Frix_x#0161 #
|
||||
|
||||
import math
|
||||
import os
|
||||
import sys
|
||||
from importlib import import_module
|
||||
from pathlib import Path
|
||||
|
||||
import numpy as np
|
||||
from scipy.signal import spectrogram
|
||||
from .console_output import ConsoleOutput
|
||||
|
||||
|
||||
def parse_log(logname):
|
||||
try:
|
||||
with open(logname) as f:
|
||||
header = None
|
||||
for line in f:
|
||||
cleaned_line = line.strip()
|
||||
|
||||
# Check for a PSD file generated by Klipper and raise a warning
|
||||
if cleaned_line.startswith('#freq,psd_x,psd_y,psd_z,psd_xyz'):
|
||||
ConsoleOutput.print(
|
||||
'Warning: %s does not contain raw accelerometer data. '
|
||||
'Please use the official Klipper script to process it instead. '
|
||||
'It will be ignored by Shake&Tune!' % (logname,)
|
||||
)
|
||||
return None
|
||||
|
||||
# Check for the expected header for Shake&Tune (raw accelerometer data from Klipper)
|
||||
elif cleaned_line.startswith('#time,accel_x,accel_y,accel_z'):
|
||||
header = cleaned_line
|
||||
break
|
||||
|
||||
if not header:
|
||||
ConsoleOutput.print(
|
||||
'Warning: file %s has an incorrect header and will be ignored by Shake&Tune!\n'
|
||||
"Expected '#time,accel_x,accel_y,accel_z', but got '%s'." % (logname, header.strip())
|
||||
)
|
||||
return None
|
||||
|
||||
# If we have the correct raw data header, proceed to load the data
|
||||
data = np.loadtxt(logname, comments='#', delimiter=',', skiprows=1)
|
||||
if data.ndim == 1 or data.shape[1] != 4:
|
||||
ConsoleOutput.print(
|
||||
'Warning: %s does not have the correct data format; expected 4 columns. '
|
||||
'It will be ignored by Shake&Tune!' % (logname,)
|
||||
)
|
||||
return None
|
||||
|
||||
return data
|
||||
|
||||
except Exception as err:
|
||||
ConsoleOutput.print(f'Error while reading {logname}: {err}. It will be ignored by Shake&Tune!')
|
||||
return None
|
||||
|
||||
|
||||
def setup_klipper_import(kdir):
|
||||
kdir = os.path.expanduser(kdir)
|
||||
sys.path.append(os.path.join(kdir, 'klippy'))
|
||||
return import_module('.shaper_calibrate', 'extras')
|
||||
|
||||
|
||||
# This is used to print the current S&T version on top of the png graph file
|
||||
def get_git_version():
|
||||
try:
|
||||
# Get the absolute path of the script, resolving any symlinks
|
||||
# Then go up two parent levels to reach the git root folder
|
||||
from git import GitCommandError, Repo
|
||||
script_path = Path(__file__).resolve()
|
||||
repo_path = script_path.parents[1]
|
||||
repo = Repo(repo_path)
|
||||
|
||||
try:
|
||||
version = repo.git.describe('--tags')
|
||||
except GitCommandError:
|
||||
# If no tag is found, use the simplified commit SHA instead
|
||||
version = repo.head.commit.hexsha[:7]
|
||||
return version
|
||||
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
|
||||
# This is Klipper's spectrogram generation function adapted to use Scipy
|
||||
def compute_spectrogram(data):
|
||||
N = data.shape[0]
|
||||
Fs = N / (data[-1, 0] - data[0, 0])
|
||||
# Round up to a power of 2 for faster FFT
|
||||
M = 1 << int(0.5 * Fs - 1).bit_length()
|
||||
window = np.kaiser(M, 6.0)
|
||||
|
||||
def _specgram(x):
|
||||
return spectrogram(
|
||||
x, fs=Fs, window=window, nperseg=M, noverlap=M // 2, detrend='constant', scaling='density', mode='psd'
|
||||
)
|
||||
|
||||
d = {'x': data[:, 1], 'y': data[:, 2], 'z': data[:, 3]}
|
||||
f, t, pdata = _specgram(d['x'])
|
||||
for axis in 'yz':
|
||||
pdata += _specgram(d[axis])[2]
|
||||
return pdata, t, f
|
||||
|
||||
|
||||
# Compute natural resonant frequency and damping ratio by using the half power bandwidth method with interpolated frequencies
|
||||
def compute_mechanical_parameters(psd, freqs, min_freq=None):
|
||||
max_under_min_freq = False
|
||||
|
||||
if min_freq is not None:
|
||||
min_freq_index = np.searchsorted(freqs, min_freq, side='left')
|
||||
if min_freq_index >= len(freqs):
|
||||
return None, None, None, max_under_min_freq
|
||||
if np.argmax(psd) < min_freq_index:
|
||||
max_under_min_freq = True
|
||||
else:
|
||||
min_freq_index = 0
|
||||
|
||||
# Consider only the part of the signal above min_freq
|
||||
psd_above_min_freq = psd[min_freq_index:]
|
||||
if len(psd_above_min_freq) == 0:
|
||||
return None, None, None, max_under_min_freq
|
||||
|
||||
max_power_index_above_min_freq = np.argmax(psd_above_min_freq)
|
||||
max_power_index = max_power_index_above_min_freq + min_freq_index
|
||||
fr = freqs[max_power_index]
|
||||
max_power = psd[max_power_index]
|
||||
|
||||
half_power = max_power / math.sqrt(2)
|
||||
indices_below = np.where(psd[:max_power_index] <= half_power)[0]
|
||||
indices_above = np.where(psd[max_power_index:] <= half_power)[0]
|
||||
|
||||
# If we are not able to find points around the half power, we can't compute the damping ratio and return None instead
|
||||
if len(indices_below) == 0 or len(indices_above) == 0:
|
||||
return fr, None, max_power_index, max_under_min_freq
|
||||
|
||||
idx_below = indices_below[-1]
|
||||
idx_above = indices_above[0] + max_power_index
|
||||
freq_below_half_power = freqs[idx_below] + (half_power - psd[idx_below]) * (
|
||||
freqs[idx_below + 1] - freqs[idx_below]
|
||||
) / (psd[idx_below + 1] - psd[idx_below])
|
||||
freq_above_half_power = freqs[idx_above - 1] + (half_power - psd[idx_above - 1]) * (
|
||||
freqs[idx_above] - freqs[idx_above - 1]
|
||||
) / (psd[idx_above] - psd[idx_above - 1])
|
||||
|
||||
bandwidth = freq_above_half_power - freq_below_half_power
|
||||
bw1 = math.pow(bandwidth / fr, 2)
|
||||
bw2 = math.pow(bandwidth / fr, 4)
|
||||
|
||||
try:
|
||||
zeta = math.sqrt(0.5 - math.sqrt(1 / (4 + 4 * bw1 - bw2)))
|
||||
except ValueError:
|
||||
# If a math problem arises, such as a negative sqrt term, we also return None for the damping ratio
|
||||
return fr, None, max_power_index, max_under_min_freq
|
||||
|
||||
return fr, zeta, max_power_index, max_under_min_freq
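# Note on the expression above: with r = bandwidth / fr, the damping ratio used here is
# zeta = sqrt(0.5 - sqrt(1 / (4 + 4*r**2 - r**4))), which for small r reduces to the familiar
# half-power approximation zeta ~= bandwidth / (2 * fr). For example (illustrative numbers),
# fr = 50 Hz with a 5 Hz half-power bandwidth gives zeta ~= 0.05.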
|
||||
|
||||
|
||||
# This finds all the peaks in a curve by looking at where the derivative goes from positive to negative
|
||||
# Then only the peaks found above a threshold are kept to avoid capturing peaks in the low amplitude noise of a signal
|
||||
def detect_peaks(data, indices, detection_threshold, relative_height_threshold=None, window_size=5, vicinity=3):
|
||||
# Smooth the curve using a moving average to avoid catching peaks everywhere in noisy signals
|
||||
kernel = np.ones(window_size) / window_size
|
||||
smoothed_data = np.convolve(data, kernel, mode='valid')
|
||||
mean_pad = [np.mean(data[:window_size])] * (window_size // 2)
|
||||
smoothed_data = np.concatenate((mean_pad, smoothed_data))
|
||||
|
||||
# Find peaks on the smoothed curve
|
||||
smoothed_peaks = (
|
||||
np.where((smoothed_data[:-2] < smoothed_data[1:-1]) & (smoothed_data[1:-1] > smoothed_data[2:]))[0] + 1
|
||||
)
|
||||
smoothed_peaks = smoothed_peaks[smoothed_data[smoothed_peaks] > detection_threshold]
|
||||
|
||||
# Additional validation for peaks based on relative height
|
||||
valid_peaks = smoothed_peaks
|
||||
if relative_height_threshold is not None:
|
||||
valid_peaks = []
|
||||
for peak in smoothed_peaks:
|
||||
peak_height = smoothed_data[peak] - np.min(
|
||||
smoothed_data[max(0, peak - vicinity) : min(len(smoothed_data), peak + vicinity + 1)]
|
||||
)
|
||||
if peak_height > relative_height_threshold * smoothed_data[peak]:
|
||||
valid_peaks.append(peak)
|
||||
|
||||
# Refine peak positions on the original curve
|
||||
refined_peaks = []
|
||||
for peak in valid_peaks:
|
||||
local_max = peak + np.argmax(data[max(0, peak - vicinity) : min(len(data), peak + vicinity + 1)]) - vicinity
|
||||
refined_peaks.append(local_max)
|
||||
|
||||
num_peaks = len(refined_peaks)
|
||||
|
||||
return num_peaks, np.array(refined_peaks), indices[refined_peaks]
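# Example call (illustrative): detect_peaks(psd, freqs, 0.05 * psd.max()) returns the number of peaks,
# their indices refined on the original (unsmoothed) curve, and the corresponding values taken from `indices`
# (frequencies or speeds depending on the caller), which is how the graph creators above use it.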
|
||||
|
||||
|
||||
# The goal is to find zones outside of peaks (flat low-energy zones) in a signal
|
||||
def identify_low_energy_zones(power_total, detection_threshold=0.1):
|
||||
valleys = []
|
||||
|
||||
# Calculate the a "mean + 1/4" and standard deviation of the entire power_total
|
||||
mean_energy = np.mean(power_total) + (np.max(power_total) - np.min(power_total)) / 4
|
||||
std_energy = np.std(power_total)
|
||||
|
||||
# Define a threshold value as "mean + 1/4" minus a certain number of standard deviations
|
||||
threshold_value = mean_energy - detection_threshold * std_energy
|
||||
|
||||
# Find valleys in power_total based on the threshold
|
||||
in_valley = False
|
||||
start_idx = 0
|
||||
for i, value in enumerate(power_total):
|
||||
if not in_valley and value < threshold_value:
|
||||
in_valley = True
|
||||
start_idx = i
|
||||
elif in_valley and value >= threshold_value:
|
||||
in_valley = False
|
||||
valleys.append((start_idx, i))
|
||||
|
||||
# If the last point is still in a valley, close the valley
|
||||
if in_valley:
|
||||
valleys.append((start_idx, len(power_total) - 1))
|
||||
|
||||
max_signal = np.max(power_total)
|
||||
|
||||
# Calculate mean energy for each valley as a percentage of the maximum of the signal
|
||||
valley_means_percentage = []
|
||||
for start, end in valleys:
|
||||
if not np.isnan(np.mean(power_total[start:end])):
|
||||
valley_means_percentage.append((start, end, (np.mean(power_total[start:end]) / max_signal) * 100))
|
||||
|
||||
# Sort valleys based on mean percentage values
|
||||
sorted_valleys = sorted(valley_means_percentage, key=lambda x: x[2])
|
||||
|
||||
return sorted_valleys
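# Note (informal): the threshold above is "mean plus a quarter of the signal range, minus detection_threshold
# standard deviations", so a smaller detection_threshold keeps the bar higher and reports more (and larger) valleys,
# while a larger value is stricter. Each returned valley is (start_idx, end_idx, mean energy as a % of the signal max).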
|
||||
|
||||
|
||||
# Calculate or estimate a "similarity" factor between two PSD curves and scale it to a percentage. This is
|
||||
# used here to quantify how close the two belt paths' behaviors and responses are to each other.
|
||||
def compute_curve_similarity_factor(x1, y1, x2, y2, sim_sigmoid_k=0.6):
|
||||
# Interpolate PSDs to match the same frequency bins and do a cross-correlation
|
||||
y2_interp = np.interp(x1, x2, y2)
|
||||
cross_corr = np.correlate(y1, y2_interp, mode='full')
|
||||
|
||||
# Find the peak of the cross-correlation and compute a similarity normalized by the energy of the signals
|
||||
peak_value = np.max(cross_corr)
|
||||
similarity = peak_value / (np.sqrt(np.sum(y1**2) * np.sum(y2_interp**2)))
|
||||
|
||||
# Apply sigmoid scaling to get better numbers and get a final percentage value
|
||||
scaled_similarity = sigmoid_scale(-np.log(1 - similarity), sim_sigmoid_k)
|
||||
|
||||
return scaled_similarity
|
||||
|
||||
|
||||
# Simple helper to compute a sigmoid scalling (from 0 to 100%)
|
||||
def sigmoid_scale(x, k=1):
|
||||
return 1 / (1 + np.exp(-k * x)) * 100
|
||||
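# Illustrative sketch (not part of the original file): comparing two made-up PSD curves with
# compute_curve_similarity_factor(). Frequencies and amplitudes are arbitrary; the closer the two
# curves are in shape, the closer the result gets to 100%.
#
# freqs_a = np.linspace(0, 200, 500)
# freqs_b = np.linspace(0, 200, 450)                                 # different binning on purpose
# psd_a = np.exp(-((freqs_a - 60) ** 2) / 40)
# psd_b = 0.8 * np.exp(-((freqs_b - 65) ** 2) / 40)                  # slightly shifted and weaker
# psd_c = np.exp(-((freqs_b - 150) ** 2) / 2)                        # a very different response
# print(compute_curve_similarity_factor(freqs_a, psd_a, freqs_b, psd_b))  # high, close to 100%
# print(compute_curve_similarity_factor(freqs_a, psd_a, freqs_b, psd_c))  # noticeably lower
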
24
shaketune/helpers/console_output.py
Normal file
@@ -0,0 +1,24 @@
import io
from typing import Callable, Optional


class ConsoleOutput:
    """
    Print output to stdout, or route it to an alternative sink (like the Klipper console) through a registered callback
    """

    _output_func: Optional[Callable[[str], None]] = None

    @classmethod
    def register_output_callback(cls, output_func: Optional[Callable[[str], None]]):
        cls._output_func = output_func

    @classmethod
    def print(cls, *args, **kwargs):
        if not cls._output_func:
            print(*args, **kwargs)
            return

        with io.StringIO() as mem_output:
            print(*args, file=mem_output, **kwargs)
            cls._output_func(mem_output.getvalue())
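# Illustrative sketch (not part of the original file): by default ConsoleOutput.print() behaves like
# the builtin print(); once a callback is registered, the same formatted text is handed to that
# callback instead. The gcode.respond_info wiring below is only an assumption of how a Klipper host
# object could route the messages to the console.
#
# ConsoleOutput.print('hello')                                                    # goes to stdout
# ConsoleOutput.register_output_callback(lambda msg: gcode.respond_info(msg, log=False))
# ConsoleOutput.print('measuring X axis...')                                      # goes to the Klipper console
# ConsoleOutput.register_output_callback(None)                                    # back to the stdout fallback
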
38
shaketune/helpers/filemanager.py
Normal file
@@ -0,0 +1,38 @@
#!/usr/bin/env python3

# Common file management functions for the Shake&Tune package
# Written by Frix_x#0161 #

import os
import time
from pathlib import Path


def wait_file_ready(filepath: Path, timeout: int = 60) -> None:
    file_busy = True
    loop_count = 0

    while file_busy:
        if loop_count >= timeout:
            raise TimeoutError(f'Klipper is taking too long to release the CSV file ({filepath})!')

        # Try to open the file in write-only mode to check if it is in use
        # If we successfully open and close the file, it is not in use
        try:
            fd = os.open(filepath, os.O_WRONLY)
            os.close(fd)
            file_busy = False
        except OSError:
            # If OSError is caught, it indicates the file is still being used
            pass
        except Exception:
            # If another exception is raised, it's not a problem, we just loop again
            pass

        loop_count += 1
        time.sleep(1)


def ensure_folders_exist(folders: list[Path]) -> None:
    for folder in folders:
        folder.mkdir(parents=True, exist_ok=True)
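# Illustrative sketch (not part of the original file): typical call pattern around a Klipper
# accelerometer CSV dump. The paths below are placeholders.
#
# results_dir = Path.home() / 'printer_data/config/K-ShakeTune_results/belts'
# ensure_folders_exist([results_dir])
# csv_file = Path('/tmp/raw_data_axis=1.000,1.000_b.csv')
# wait_file_ready(csv_file, timeout=60)   # blocks until Klipper has closed the file (or raises TimeoutError)
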
205
shaketune/helpers/motorlogparser.py
Normal file
@@ -0,0 +1,205 @@
#!/usr/bin/env python3

# Classes to parse the Klipper log and its TMC dumps in order to extract the relevant information
# Written by Frix_x#0161 #

import re
from pathlib import Path
from typing import Any, Dict, List, Optional, Union


class Motor:
    def __init__(self, name: str):
        self._name: str = name
        self._registers: Dict[str, Dict[str, Any]] = {}
        self._properties: Dict[str, Any] = {}

    def set_register(self, register: str, value: Any) -> None:
        # Special parsing for CHOPCONF to extract meaningful values
        if register == 'CHOPCONF':
            # Add intpol=0 if missing from the register dump
            if 'intpol=' not in value:
                value += ' intpol=0'
            # Simplify the microstep resolution format
            mres_match = re.search(r'mres=\d+\((\d+)usteps\)', value)
            if mres_match:
                value = re.sub(r'mres=\d+\(\d+usteps\)', f'mres={mres_match.group(1)}', value)

        # Special parsing for PWMCONF to avoid the pwm_ prefix before each value
        if register == 'PWMCONF':
            parts = value.split()
            new_parts = []
            for part in parts:
                key, val = part.split('=', 1)
                if key.startswith('pwm_'):
                    key = key[4:]
                new_parts.append(f'{key}={val}')
            value = ' '.join(new_parts)

        # General cleaning to remove extraneous labels and colons before parsing the whole string into Motor._registers
        cleaned_values = re.sub(r'\b\w+:\s+\S+\s+', '', value)

        # Then fill the registers while merging all the thresholds into the same THRS virtual register
        if register in ['TPWMTHRS', 'TCOOLTHRS']:
            existing_thrs = self._registers.get('THRS', {})
            new_values = self._parse_register_values(cleaned_values)
            merged_values = {**existing_thrs, **new_values}
            self._registers['THRS'] = merged_values
        else:
            self._registers[register] = self._parse_register_values(cleaned_values)

    def _parse_register_values(self, register_string: str) -> Dict[str, Any]:
        parsed = {}
        parts = register_string.split()
        for part in parts:
            if '=' in part:
                k, v = part.split('=', 1)
                parsed[k] = v
        return parsed

    def get_register(self, register: str) -> Optional[Dict[str, Any]]:
        return self._registers.get(register)

    def get_registers(self) -> Dict[str, Dict[str, Any]]:
        return self._registers

    def set_property(self, property: str, value: Any) -> None:
        self._properties[property] = value

    def get_property(self, property: str) -> Optional[Any]:
        return self._properties.get(property)

    def __str__(self):
        return f'Stepper: {self._name}\nKlipper config: {self._properties}\nTMC Registers: {self._registers}'

    # Return the other motor's properties and registers that differ from the current motor
    def compare_to(self, other: 'Motor') -> Optional[Dict[str, Dict[str, Any]]]:
        differences = {'properties': {}, 'registers': {}}

        # Compare properties
        all_keys = self._properties.keys() | other._properties.keys()
        for key in all_keys:
            val1 = self._properties.get(key)
            val2 = other._properties.get(key)
            if val1 != val2:
                differences['properties'][key] = val2

        # Compare registers
        all_keys = self._registers.keys() | other._registers.keys()
        for key in all_keys:
            reg1 = self._registers.get(key, {})
            reg2 = other._registers.get(key, {})
            if reg1 != reg2:
                reg_diffs = {}
                sub_keys = reg1.keys() | reg2.keys()
                for sub_key in sub_keys:
                    reg_val1 = reg1.get(sub_key)
                    reg_val2 = reg2.get(sub_key)
                    if reg_val1 != reg_val2:
                        reg_diffs[sub_key] = reg_val2
                if reg_diffs:
                    differences['registers'][key] = reg_diffs

        # Clean up: remove empty sections if there are no differences
        if not differences['properties']:
            del differences['properties']
        if not differences['registers']:
            del differences['registers']

        if not differences:
            return None

        return differences

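# Illustrative sketch (not part of the original file): how a raw DUMP_TMC line is digested by
# set_register(), and how two motors are diffed. The register strings below are shortened, made-up
# CHOPCONF dumps, only meant to show the cleaning and the resulting dicts.
#
# motor_a = Motor('stepper_x')
# motor_a.set_register('CHOPCONF', 'toff=3 hstrt=4 hend=1 tbl=2 mres=4(16usteps)')
# print(motor_a.get_register('CHOPCONF'))
# # -> {'toff': '3', 'hstrt': '4', 'hend': '1', 'tbl': '2', 'mres': '16', 'intpol': '0'}
#
# motor_b = Motor('stepper_y')
# motor_b.set_register('CHOPCONF', 'toff=3 hstrt=4 hend=1 tbl=2 mres=8(32usteps)')
# print(motor_a.compare_to(motor_b))
# # -> {'registers': {'CHOPCONF': {'mres': '32'}}}
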
class MotorLogParser:
    _section_pattern: str = r'DUMP_TMC stepper_(x|y)'
    _register_patterns: Dict[str, str] = {
        'CHOPCONF': r'CHOPCONF:\s+\S+\s+(.*)',
        'PWMCONF': r'PWMCONF:\s+\S+\s+(.*)',
        'COOLCONF': r'COOLCONF:\s+(.*)',
        'TPWMTHRS': r'TPWMTHRS:\s+\S+\s+(.*)',
        'TCOOLTHRS': r'TCOOLTHRS:\s+\S+\s+(.*)',
    }

    def __init__(self, filepath: Path, config_string: Optional[str] = None):
        self._filepath = filepath

        self._motors: List[Motor] = []
        self._config = self._parse_config(config_string) if config_string else {}

        self._parse_registers()

    def _parse_config(self, config_string: str) -> Dict[str, Any]:
        config = {}
        entries = config_string.split('|')
        for entry in entries:
            if entry:
                key, value = entry.split(':')
                config[key.strip()] = self._convert_value(value.strip())
        return config

    def _convert_value(self, value: str) -> Union[int, float, bool, str]:
        if value.isdigit():
            return int(value)
        try:
            return float(value)
        except ValueError:
            if value.lower() in ['true', 'false']:
                return value.lower() == 'true'
            return value

    def _parse_registers(self) -> None:
        with open(self._filepath, 'r') as file:
            log_content = file.read()

        sections = re.split(self._section_pattern, log_content)

        # Keep only the latest dump of each stepper from the log (to ignore potential previous and outdated dumps)
        last_sections: Dict[str, int] = {}
        for i in range(1, len(sections), 2):
            stepper_name = 'stepper_' + sections[i].strip()
            last_sections[stepper_name] = i

        for stepper_name, index in last_sections.items():
            content = sections[index + 1]
            motor = Motor(stepper_name)

            # Apply the general properties coming from the config string
            for key, value in self._config.items():
                if stepper_name in key:
                    prop_key = key.replace(stepper_name + '_', '')
                    motor.set_property(prop_key, value)
                elif 'autotune' in key:
                    motor.set_property(key, value)

            # Parse the TMC registers
            for key, pattern in self._register_patterns.items():
                match = re.search(pattern, content)
                if match:
                    values = match.group(1).strip()
                    motor.set_register(key, values)

            self._motors.append(motor)

    # Find and return a motor by its name
    def get_motor(self, motor_name: str) -> Optional[Motor]:
        for motor in self._motors:
            if motor._name == motor_name:
                return motor
        return None

    # Get the whole motor list at once
    def get_motors(self) -> List[Motor]:
        return self._motors

# # Usage example:
# config_string = "stepper_x_tmc:tmc2240|stepper_x_run_current:0.9|stepper_x_hold_current:0.9|stepper_y_tmc:tmc2240|stepper_y_run_current:0.9|stepper_y_hold_current:0.9|autotune_enabled:True|stepper_x_motor:ldo-35sth48-1684ah|stepper_x_voltage:|stepper_y_motor:ldo-35sth48-1684ah|stepper_y_voltage:|"
# parser = MotorLogParser('/path/to/your/logfile.log', config_string)

# stepper_x = parser.get_motor('stepper_x')
# stepper_y = parser.get_motor('stepper_y')

# print(stepper_x)
# print(stepper_y)