Code cleanup before release (#114)
This commit is contained in:
8
shaketune/graph_creators/__init__.py
Normal file
8
shaketune/graph_creators/__init__.py
Normal file
@@ -0,0 +1,8 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
from .axes_map_graph_creator import AxesMapGraphCreator as AxesMapGraphCreator
|
||||
from .belts_graph_creator import BeltsGraphCreator as BeltsGraphCreator
|
||||
from .graph_creator import GraphCreator as GraphCreator
|
||||
from .shaper_graph_creator import ShaperGraphCreator as ShaperGraphCreator
|
||||
from .static_graph_creator import StaticGraphCreator as StaticGraphCreator
|
||||
from .vibrations_graph_creator import VibrationsGraphCreator as VibrationsGraphCreator
|
||||
481
shaketune/graph_creators/axes_map_graph_creator.py
Normal file
481
shaketune/graph_creators/axes_map_graph_creator.py
Normal file
@@ -0,0 +1,481 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
######################################
|
||||
###### AXE_MAP DETECTION SCRIPT ######
|
||||
######################################
|
||||
# Written by Frix_x#0161 #
|
||||
|
||||
import optparse
|
||||
import os
|
||||
from datetime import datetime
|
||||
from typing import List, Optional, Tuple
|
||||
|
||||
import matplotlib
|
||||
import matplotlib.colors
|
||||
import matplotlib.font_manager
|
||||
import matplotlib.pyplot as plt
|
||||
import matplotlib.ticker
|
||||
import numpy as np
|
||||
import pywt
|
||||
from scipy import stats
|
||||
|
||||
matplotlib.use('Agg')
|
||||
|
||||
from ..helpers.common_func import parse_log
|
||||
from ..helpers.console_output import ConsoleOutput
|
||||
from ..shaketune_config import ShakeTuneConfig
|
||||
from .graph_creator import GraphCreator
|
||||
|
||||
# Klippain brand colors used consistently across all Shake&Tune graphs
KLIPPAIN_COLORS = {
    'purple': '#70088C',
    'orange': '#FF8D32',
    'dark_purple': '#150140',
    'dark_orange': '#F24130',
    'red_pink': '#F2055C',
}
# Machine axes, in the order the calibration segments are recorded
MACHINE_AXES = ['x', 'y', 'z']
|
||||
|
||||
|
||||
class AxesMapGraphCreator(GraphCreator):
    """Generates the axes map detection graph from the recorded accelerometer CSVs."""

    def __init__(self, config: ShakeTuneConfig):
        super().__init__(config, 'axes map')
        self._accel: Optional[int] = None
        self._segment_length: Optional[float] = None

    def configure(self, accel: int, segment_length: float) -> None:
        # Record the measurement parameters used later by create_graph()
        self._accel = accel
        self._segment_length = segment_length

    def create_graph(self) -> None:
        # Collect the three axesmap CSVs (one per machine axis) and build the figure
        lognames = self._move_and_prepare_files(
            glob_pattern='shaketune-axesmap_*.csv',
            min_files_required=3,
            custom_name_func=lambda f: f.stem.split('_')[1].upper(),
        )
        fig = axesmap_calibration(
            lognames=[str(path) for path in lognames],
            accel=self._accel,
            fixed_length=self._segment_length,
            st_version=self._version,
        )
        self._save_figure_and_cleanup(fig, lognames)

    def clean_old_files(self, keep_results: int = 3) -> None:
        # Keep the most recent result PNGs (and their source CSVs), delete everything older
        pngs = sorted(self._folder.glob('*.png'), key=lambda f: f.stat().st_mtime, reverse=True)
        if len(pngs) <= keep_results:
            return  # Nothing old enough to delete
        for stale_png in pngs[keep_results:]:
            date_part = '_'.join(stale_png.stem.split('_')[1:3])
            for axis_suffix in ('X', 'Y', 'Z'):
                (self._folder / f'axesmap_{date_part}_{axis_suffix}.csv').unlink(missing_ok=True)
            stale_png.unlink()
|
||||
|
||||
|
||||
######################################################################
|
||||
# Computation
|
||||
######################################################################
|
||||
|
||||
|
||||
def wavelet_denoise(data: np.ndarray, wavelet: str = 'db1', level: int = 1) -> Tuple[np.ndarray, np.ndarray]:
    """Denoise *data* with a wavelet soft-threshold and return (denoised, noise).

    The threshold follows the universal-threshold rule: a noise sigma estimate of
    median(|c|)/0.6745 over one coefficient band, scaled by sqrt(2*log(N)).

    NOTE(review): `level` only selects which coefficient band (coeffs[-level]) the
    sigma estimate is taken from; it is NOT passed to pywt.wavedec, so the
    decomposition depth is always pywt's default — confirm this is intended.
    """
    coeffs = pywt.wavedec(data, wavelet, mode='smooth')
    threshold = np.median(np.abs(coeffs[-level])) / 0.6745 * np.sqrt(2 * np.log(len(data)))
    new_coeffs = [pywt.threshold(c, threshold, mode='soft') for c in coeffs]
    denoised_data = pywt.waverec(new_coeffs, wavelet)

    # Compute noise by subtracting denoised data from original data
    # (slice defends against the reconstruction being longer than the input)
    noise = data - denoised_data[: len(data)]
    return denoised_data, noise
|
||||
|
||||
|
||||
def integrate_trapz(accel: np.ndarray, time: np.ndarray) -> np.ndarray:
    """Cumulatively integrate *accel* over *time* with the trapezoidal rule.

    Returns an array of len(time) - 1 running integrals, where element k equals
    np.trapz(accel[:k + 2], time[:k + 2]) — i.e. the same values the previous
    per-prefix implementation produced.
    """
    # Vectorized cumulative trapezoid: O(n) instead of recomputing the whole
    # prefix integral for every sample (previously O(n^2)).
    return np.cumsum(np.diff(time) * (accel[:-1] + accel[1:]) / 2.0)
|
||||
|
||||
|
||||
def process_acceleration_data(
    time: np.ndarray, accel_x: np.ndarray, accel_y: np.ndarray, accel_z: np.ndarray
) -> Tuple[float, float, float, np.ndarray, np.ndarray, np.ndarray, float]:
    """Turn raw accelerometer samples into estimated 3D positions.

    Returns (offset_x, offset_y, offset_z, position_x, position_y, position_z,
    noise_intensity): the per-axis mean (gravity) components, the doubly
    integrated positions, and the average std of the wavelet-extracted noise.

    NOTE: the gravity-offset subtraction below is done *in place* (`-=`), so the
    caller's arrays are modified as a side effect; axesmap_calibration relies on
    this to plot the offset-removed acceleration afterwards.
    """
    # Calculate the constant offset (gravity component)
    offset_x = np.mean(accel_x)
    offset_y = np.mean(accel_y)
    offset_z = np.mean(accel_z)

    # Remove the constant offset from acceleration data (in place, see docstring)
    accel_x -= offset_x
    accel_y -= offset_y
    accel_z -= offset_z

    # Apply wavelet denoising
    accel_x, noise_x = wavelet_denoise(accel_x)
    accel_y, noise_y = wavelet_denoise(accel_y)
    accel_z, noise_z = wavelet_denoise(accel_z)

    # Integrate acceleration to get velocity using trapezoidal rule
    velocity_x = integrate_trapz(accel_x, time)
    velocity_y = integrate_trapz(accel_y, time)
    velocity_z = integrate_trapz(accel_z, time)

    # Correct drift in velocity by resetting to zero at the beginning and end
    velocity_x -= np.linspace(velocity_x[0], velocity_x[-1], len(velocity_x))
    velocity_y -= np.linspace(velocity_y[0], velocity_y[-1], len(velocity_y))
    velocity_z -= np.linspace(velocity_z[0], velocity_z[-1], len(velocity_z))

    # Integrate velocity to get position using trapezoidal rule
    # (velocity has one sample fewer than acceleration, hence time[1:])
    position_x = integrate_trapz(velocity_x, time[1:])
    position_y = integrate_trapz(velocity_y, time[1:])
    position_z = integrate_trapz(velocity_z, time[1:])

    noise_intensity = np.mean([np.std(noise_x), np.std(noise_y), np.std(noise_z)])

    return offset_x, offset_y, offset_z, position_x, position_y, position_z, noise_intensity
|
||||
|
||||
|
||||
def scale_positions_to_fixed_length(
    position_x: np.ndarray, position_y: np.ndarray, position_z: np.ndarray, fixed_length: float
) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
    """Scale the 3D path so its total traveled length equals *fixed_length*.

    The input arrays are scaled in place and also returned for convenience.
    """
    # Calculate the total distance traveled in 3D space
    total_distance = np.sqrt(np.diff(position_x) ** 2 + np.diff(position_y) ** 2 + np.diff(position_z) ** 2).sum()
    if total_distance == 0:
        # Degenerate path (no movement recorded): avoid a division by zero that
        # would turn every position into NaN and poison the later regression
        return position_x, position_y, position_z
    scale_factor = fixed_length / total_distance

    # Apply the scale factor to the positions
    position_x *= scale_factor
    position_y *= scale_factor
    position_z *= scale_factor

    return position_x, position_y, position_z
|
||||
|
||||
|
||||
def find_nearest_perfect_vector(average_direction_vector: np.ndarray) -> Tuple[np.ndarray, float]:
    """Snap a (unit) direction vector to the nearest signed machine axis.

    Returns the nearest of the six axis-aligned unit vectors and the angular
    error (in degrees) between it and *average_direction_vector*.
    """
    # Define the perfect vectors
    perfect_vectors = np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1], [-1, 0, 0], [0, -1, 0], [0, 0, -1]])

    # Find the nearest perfect vector
    dot_products = perfect_vectors @ average_direction_vector
    nearest_vector_idx = np.argmax(dot_products)
    nearest_vector = perfect_vectors[nearest_vector_idx]

    # Calculate the angle error. Floating point error can push the dot product of
    # two unit vectors slightly outside [-1, 1], which would make arccos return
    # NaN, so clip it first.
    angle_error = np.arccos(np.clip(dot_products[nearest_vector_idx], -1.0, 1.0)) * 180 / np.pi

    return nearest_vector, angle_error
|
||||
|
||||
|
||||
def linear_regression_direction(
    position_x: np.ndarray, position_y: np.ndarray, position_z: np.ndarray, trim_length: float = 0.25
) -> np.ndarray:
    """Estimate the normalized movement direction of a recorded segment.

    The first and last *trim_length* fraction of the samples are discarded (the
    start and stop of a move are noisy and not always perfectly aligned), then a
    per-axis linear regression over the remaining samples gives the direction.
    """
    sample_count = len(position_x)
    keep = slice(int(sample_count * trim_length), int(sample_count * (1 - trim_length)))
    trimmed_axes = (position_x[keep], position_y[keep], position_z[keep])

    # Fit each axis against the sample index; the displacement between the fitted
    # end point and the intercept gives that axis' component of the direction
    t = np.arange(len(trimmed_axes[0]))
    components = []
    for axis_positions in trimmed_axes:
        slope, intercept, _, _, _ = stats.linregress(t, axis_positions)
        components.append((slope * t[-1] + intercept) - intercept)

    direction_vector = np.array(components)
    return direction_vector / np.linalg.norm(direction_vector)
|
||||
|
||||
|
||||
######################################################################
|
||||
# Graphing
|
||||
######################################################################
|
||||
|
||||
|
||||
def plot_compare_frequency(
    ax: plt.Axes, time: np.ndarray, accel_x: np.ndarray, accel_y: np.ndarray, accel_z: np.ndarray, offset: float, i: int
) -> None:
    """Draw one measured segment's X/Y/Z acceleration traces on *ax*.

    Called once per machine axis; *i* is the segment index (0..2). Legend labels
    are only added on the first call, and the trace matching the moved axis is
    raised above the others via its zorder. *offset* is the measured gravity
    magnitude (mm/s²), shown in a secondary legend on the first call only.
    """
    # Plot acceleration data
    ax.plot(
        time,
        accel_x,
        label='X' if i == 0 else '',
        color=KLIPPAIN_COLORS['purple'],
        linewidth=0.5,
        zorder=50 if i == 0 else 10,
    )
    ax.plot(
        time,
        accel_y,
        label='Y' if i == 0 else '',
        color=KLIPPAIN_COLORS['orange'],
        linewidth=0.5,
        zorder=50 if i == 1 else 10,
    )
    ax.plot(
        time,
        accel_z,
        label='Z' if i == 0 else '',
        color=KLIPPAIN_COLORS['red_pink'],
        linewidth=0.5,
        zorder=50 if i == 2 else 10,
    )

    # Setting axis parameters, grid and graph title
    ax.set_xlabel('Time (s)')
    ax.set_ylabel('Acceleration (mm/s²)')

    ax.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
    ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
    ax.ticklabel_format(axis='y', style='scientific', scilimits=(0, 0))
    ax.grid(which='major', color='grey')
    ax.grid(which='minor', color='lightgrey')
    fontP = matplotlib.font_manager.FontProperties()
    fontP.set_size('small')
    ax.set_title(
        'Acceleration (gravity offset removed)',
        fontsize=14,
        color=KLIPPAIN_COLORS['dark_orange'],
        weight='bold',
    )

    ax.legend(loc='upper left', prop=fontP)

    # Add gravity offset to the graph
    if i == 0:
        ax2 = ax.twinx()  # To split the legends in two box
        ax2.yaxis.set_visible(False)
        ax2.plot([], [], ' ', label=f'Measured gravity: {offset / 1000:0.3f} m/s²')
        ax2.legend(loc='upper right', prop=fontP)
|
||||
|
||||
|
||||
def plot_3d_path(
    ax: plt.Axes,
    i: int,
    position_x: np.ndarray,
    position_y: np.ndarray,
    position_z: np.ndarray,
    average_direction_vector: np.ndarray,
    angle_error: float,
) -> None:
    """Draw one segment's reconstructed 3D path and its fitted direction on *ax*.

    *i* is the segment index (0..2), used both for the start-point number and to
    pick the machine axis name in the legend. *angle_error* is the angle (in
    degrees) between the fitted direction and the nearest machine axis.
    """
    # Dotted line: the reconstructed path; marker: the segment's start point
    ax.plot(position_x, position_y, position_z, color=KLIPPAIN_COLORS['orange'], linestyle=':', linewidth=2)
    ax.scatter(position_x[0], position_y[0], position_z[0], color=KLIPPAIN_COLORS['red_pink'], zorder=10)
    ax.text(
        position_x[0] + 1,
        position_y[0],
        position_z[0],
        str(i + 1),
        color='black',
        fontsize=16,
        fontweight='bold',
        zorder=20,
    )

    # Plot the average direction vector
    start_position = np.array([position_x[0], position_y[0], position_z[0]])
    end_position = start_position + average_direction_vector * np.linalg.norm(
        [position_x[-1] - position_x[0], position_y[-1] - position_y[0], position_z[-1] - position_z[0]]
    )
    axes = ['X', 'Y', 'Z']
    ax.plot(
        [start_position[0], end_position[0]],
        [start_position[1], end_position[1]],
        [start_position[2], end_position[2]],
        label=f'{axes[i]} angle: {angle_error:0.2f}°',
        color=KLIPPAIN_COLORS['purple'],
        linestyle='-',
        linewidth=2,
    )

    # Setting axis parameters, grid and graph title
    ax.set_xlabel('X Position (mm)')
    ax.set_ylabel('Y Position (mm)')
    ax.set_zlabel('Z Position (mm)')

    ax.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
    ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
    ax.grid(which='major', color='grey')
    ax.grid(which='minor', color='lightgrey')
    fontP = matplotlib.font_manager.FontProperties()
    fontP.set_size('small')
    ax.set_title(
        'Estimated movement in 3D space',
        fontsize=14,
        color=KLIPPAIN_COLORS['dark_orange'],
        weight='bold',
    )

    ax.legend(loc='upper left', prop=fontP)
|
||||
|
||||
|
||||
def format_direction_vector(vectors: List[np.ndarray], axes: Optional[List[str]] = None) -> str:
    """Format a list of signed axis-aligned unit vectors as an axes_map string.

    Each vector contributes the name of its first non-zero component, prefixed
    with '-' for a negative direction (e.g. [0, -1, 0] -> '-y'). *axes* allows
    overriding the axis labels and defaults to the machine axes ('x', 'y', 'z').
    """
    axes = MACHINE_AXES if axes is None else axes
    formatted_vector = []
    for vector in vectors:
        # Only the first non-zero component matters: these are snapped axis vectors
        for component, axis_name in zip(vector, axes):
            if component > 0:
                formatted_vector.append(axis_name)
                break
            elif component < 0:
                formatted_vector.append(f'-{axis_name}')
                break
    return ', '.join(formatted_vector)
|
||||
|
||||
|
||||
######################################################################
|
||||
# Startup and main routines
|
||||
######################################################################
|
||||
|
||||
|
||||
def axesmap_calibration(
    lognames: List[str], fixed_length: float, accel: Optional[float] = None, st_version: str = 'unknown'
) -> plt.Figure:
    """Build the axes map calibration figure from the per-axis accelerometer CSVs.

    *lognames* must contain three parsable CSV files, one per machine axis,
    identified by the _X/_Y/_Z suffix of the filename. For each segment the
    acceleration is integrated twice to reconstruct the 3D path, a direction
    vector is fitted and snapped to the nearest machine axis, and the resulting
    axes_map plus noise level are printed to the console and drawn on the figure.

    Raises ValueError when fewer than three parsable CSVs are provided or a
    machine axis has no matching file. Returns the matplotlib figure.
    """
    # Parse data from the log files while ignoring CSV in the wrong format (sorted by axis name)
    raw_datas = {}
    for logname in lognames:
        data = parse_log(logname)
        if data is not None:
            _axis = logname.split('_')[-1].split('.')[0].lower()
            raw_datas[_axis] = data

    if len(raw_datas) != 3:
        raise ValueError('This tool needs 3 CSVs to work with (like axesmap_X.csv, axesmap_Y.csv and axesmap_Z.csv)')

    fig, ((ax1, ax2)) = plt.subplots(
        1,
        2,
        gridspec_kw={
            'width_ratios': [5, 3],
            'bottom': 0.080,
            'top': 0.840,
            'left': 0.055,
            'right': 0.960,
            'hspace': 0.166,
            'wspace': 0.060,
        },
    )
    fig.set_size_inches(15, 7)
    # Replace the right subplot with a 3D projection for the path plot
    ax2.remove()
    ax2 = fig.add_subplot(122, projection='3d')

    # Each segment is drawn starting where the previous one ended
    cumulative_start_position = np.array([0, 0, 0])
    direction_vectors = []
    total_noise_intensity = 0.0
    for i, machine_axis in enumerate(MACHINE_AXES):
        if machine_axis not in raw_datas:
            raise ValueError(f'Missing CSV file for axis {machine_axis}')

        # Get the accel data according to the current axes_map
        time = raw_datas[machine_axis][:, 0]
        accel_x = raw_datas[machine_axis][:, 1]
        accel_y = raw_datas[machine_axis][:, 2]
        accel_z = raw_datas[machine_axis][:, 3]

        offset_x, offset_y, offset_z, position_x, position_y, position_z, noise_intensity = process_acceleration_data(
            time, accel_x, accel_y, accel_z
        )
        position_x, position_y, position_z = scale_positions_to_fixed_length(
            position_x, position_y, position_z, fixed_length
        )
        position_x += cumulative_start_position[0]
        position_y += cumulative_start_position[1]
        position_z += cumulative_start_position[2]

        gravity = np.linalg.norm(np.array([offset_x, offset_y, offset_z]))
        average_direction_vector = linear_regression_direction(position_x, position_y, position_z)
        direction_vector, angle_error = find_nearest_perfect_vector(average_direction_vector)
        ConsoleOutput.print(
            f'Machine axis {machine_axis.upper()} -> nearest accelerometer direction vector: {direction_vector} (angle error: {angle_error:.2f}°)'
        )
        direction_vectors.append(direction_vector)

        total_noise_intensity += noise_intensity

        # Note: accel_x/y/z were offset-corrected in place by process_acceleration_data
        plot_compare_frequency(ax1, time, accel_x, accel_y, accel_z, gravity, i)
        plot_3d_path(ax2, i, position_x, position_y, position_z, average_direction_vector, angle_error)

        # Update the cumulative start position for the next segment
        cumulative_start_position = np.array([position_x[-1], position_y[-1], position_z[-1]])

    average_noise_intensity = total_noise_intensity / len(raw_datas)
    if average_noise_intensity <= 350:
        average_noise_intensity_text = '-> OK'
    elif 350 < average_noise_intensity <= 700:
        average_noise_intensity_text = '-> WARNING: accelerometer noise is a bit high'
    else:
        average_noise_intensity_text = '-> ERROR: accelerometer noise is too high!'

    formatted_direction_vector = format_direction_vector(direction_vectors)
    ConsoleOutput.print(f'--> Detected axes_map: {formatted_direction_vector}')
    ConsoleOutput.print(
        f'Average accelerometer noise level: {average_noise_intensity:.2f} mm/s² {average_noise_intensity_text}'
    )

    # Add title
    title_line1 = 'AXES MAP CALIBRATION TOOL'
    fig.text(
        0.060, 0.947, title_line1, ha='left', va='bottom', fontsize=20, color=KLIPPAIN_COLORS['purple'], weight='bold'
    )
    try:
        # Second title line: the measurement date/time, extracted from the filename
        filename = lognames[0].split('/')[-1]
        dt = datetime.strptime(f"{filename.split('_')[1]} {filename.split('_')[2]}", '%Y%m%d %H%M%S')
        title_line2 = dt.strftime('%x %X')
        if accel is not None:
            title_line2 += f' -- at {accel:0.0f} mm/s²'
    except Exception:
        ConsoleOutput.print(
            'Warning: CSV filenames look to be different than expected (%s , %s, %s)'
            % (lognames[0], lognames[1], lognames[2])
        )
        title_line2 = lognames[0].split('/')[-1] + ' ...'
    fig.text(0.060, 0.939, title_line2, ha='left', va='top', fontsize=16, color=KLIPPAIN_COLORS['dark_purple'])

    title_line3 = f'| Detected axes_map: {formatted_direction_vector}'
    title_line4 = f'| Accelerometer noise level: {average_noise_intensity:.2f} mm/s² {average_noise_intensity_text}'
    fig.text(0.50, 0.985, title_line3, ha='left', va='top', fontsize=14, color=KLIPPAIN_COLORS['dark_purple'])
    fig.text(0.50, 0.950, title_line4, ha='left', va='top', fontsize=11, color=KLIPPAIN_COLORS['dark_purple'])

    # Adding a small Klippain logo to the top left corner of the figure
    ax_logo = fig.add_axes([0.001, 0.894, 0.105, 0.105], anchor='NW')
    ax_logo.imshow(plt.imread(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'klippain.png')))
    ax_logo.axis('off')

    # Adding Shake&Tune version in the top right corner
    if st_version != 'unknown':
        fig.text(0.995, 0.980, st_version, ha='right', va='bottom', fontsize=8, color=KLIPPAIN_COLORS['purple'])

    return fig
|
||||
|
||||
|
||||
def main():
    """Command-line entry point: parse options, build the graph and save it as a PNG."""
    usage = '%prog [options] <raw logs>'
    opts = optparse.OptionParser(usage)
    for flags, kwargs in (
        (('-o', '--output'), {'type': 'string', 'dest': 'output', 'default': None, 'help': 'filename of output graph'}),
        (
            ('-a', '--accel'),
            {'type': 'string', 'dest': 'accel', 'default': None, 'help': 'acceleration value used to do the movements'},
        ),
        (
            ('-l', '--length'),
            {'type': 'float', 'dest': 'length', 'default': None, 'help': 'recorded length for each segment'},
        ),
    ):
        opts.add_option(*flags, **kwargs)
    options, args = opts.parse_args()

    # Validate the command line (opts.error() prints the message and exits)
    if not args:
        opts.error('No CSV file(s) to analyse')
    if options.accel is None:
        opts.error('You must specify the acceleration value used when generating the CSV file (option -a)')
    try:
        accel_value = float(options.accel)
    except ValueError:
        opts.error('Invalid acceleration value. It should be a numeric value.')
    if options.length is None:
        opts.error('You must specify the length of the measured segments (option -l)')
    try:
        length_value = float(options.length)
    except ValueError:
        opts.error('Invalid length value. It should be a numeric value.')
    if options.output is None:
        opts.error('You must specify an output file.png to use the script (option -o)')

    fig = axesmap_calibration(args, length_value, accel_value, 'unknown')
    fig.savefig(options.output, dpi=150)


if __name__ == '__main__':
    main()
|
||||
636
shaketune/graph_creators/belts_graph_creator.py
Normal file
636
shaketune/graph_creators/belts_graph_creator.py
Normal file
@@ -0,0 +1,636 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
#################################################
|
||||
######## CoreXY BELTS CALIBRATION SCRIPT ########
|
||||
#################################################
|
||||
# Written by Frix_x#0161 #
|
||||
|
||||
import optparse
|
||||
import os
|
||||
from datetime import datetime
|
||||
from typing import List, NamedTuple, Optional, Tuple
|
||||
|
||||
import matplotlib
|
||||
import matplotlib.colors
|
||||
import matplotlib.font_manager
|
||||
import matplotlib.pyplot as plt
|
||||
import matplotlib.ticker
|
||||
import numpy as np
|
||||
|
||||
matplotlib.use('Agg')
|
||||
|
||||
from ..helpers.common_func import detect_peaks, parse_log, setup_klipper_import
|
||||
from ..helpers.console_output import ConsoleOutput
|
||||
from ..shaketune_config import ShakeTuneConfig
|
||||
from .graph_creator import GraphCreator
|
||||
|
||||
ALPHABET = (
    'αβγδεζηθικλμνξοπρστυφχψω'  # For paired peak names (using the Greek alphabet to avoid confusion with belt names)
)

PEAKS_DETECTION_THRESHOLD = 0.1  # Threshold to detect peaks in the PSD signal (10% of max)
DC_MAX_PEAKS = 2  # Maximum ideal number of peaks
DC_MAX_UNPAIRED_PEAKS_ALLOWED = 0  # No unpaired peaks are tolerated

# Klippain brand colors used consistently across all Shake&Tune graphs
KLIPPAIN_COLORS = {
    'purple': '#70088C',
    'orange': '#FF8D32',
    'dark_purple': '#150140',
    'dark_orange': '#F24130',
    'red_pink': '#F2055C',
}
|
||||
|
||||
|
||||
# Define the SignalData type to store the data of a signal (PSD, peaks, etc.)
class SignalData(NamedTuple):
    """PSD curve of one belt: frequency bins, power values and detected peaks."""

    freqs: np.ndarray
    psd: np.ndarray
    peaks: np.ndarray
    # Peaks matched with the other belt's curve, as ((idx, freq, psd), (idx, freq, psd)) tuples
    paired_peaks: Optional[List[Tuple[Tuple[int, float, float], Tuple[int, float, float]]]] = None
    # Indices of peaks with no close counterpart on the other belt
    unpaired_peaks: Optional[List[int]] = None
|
||||
|
||||
|
||||
# Define the PeakPairingResult type to store the result of the peak pairing function
class PeakPairingResult(NamedTuple):
    """Result of pair_peaks(): matched peak pairs plus the leftovers from each belt."""

    paired_peaks: List[Tuple[Tuple[int, float, float], Tuple[int, float, float]]]
    unpaired_peaks1: List[int]
    unpaired_peaks2: List[int]
|
||||
|
||||
|
||||
class BeltsGraphCreator(GraphCreator):
    """Generates the CoreXY belts comparison graph from the two recorded belt CSVs."""

    def __init__(self, config: ShakeTuneConfig):
        super().__init__(config, 'belts comparison')
        self._kinematics: Optional[str] = None
        self._accel_per_hz: Optional[float] = None

    def configure(self, kinematics: Optional[str] = None, accel_per_hz: Optional[float] = None) -> None:
        # Record the measurement parameters used later by create_graph()
        self._kinematics = kinematics
        self._accel_per_hz = accel_per_hz

    def create_graph(self) -> None:
        # Collect the two belt CSVs (one per belt) and build the figure
        lognames = self._move_and_prepare_files(
            glob_pattern='shaketune-belt_*.csv',
            min_files_required=2,
            custom_name_func=lambda f: f.stem.split('_')[1].upper(),
        )
        fig = belts_calibration(
            lognames=[str(path) for path in lognames],
            kinematics=self._kinematics,
            klipperdir=str(self._config.klipper_folder),
            accel_per_hz=self._accel_per_hz,
            st_version=self._version,
        )
        self._save_figure_and_cleanup(fig, lognames)

    def clean_old_files(self, keep_results: int = 3) -> None:
        # Keep the most recent result PNGs (and their source CSVs), delete everything older
        pngs = sorted(self._folder.glob('*.png'), key=lambda f: f.stat().st_mtime, reverse=True)
        if len(pngs) <= keep_results:
            return  # Nothing old enough to delete
        for stale_png in pngs[keep_results:]:
            date_part = '_'.join(stale_png.stem.split('_')[1:3])
            for belt_suffix in ('A', 'B'):
                (self._folder / f'beltscomparison_{date_part}_{belt_suffix}.csv').unlink(missing_ok=True)
            stale_png.unlink()
|
||||
|
||||
|
||||
######################################################################
|
||||
# Computation of the PSD graph
|
||||
######################################################################
|
||||
|
||||
|
||||
# This function create pairs of peaks that are close in frequency on two curves (that are known
# to be resonances points and must be similar on both belts on a CoreXY kinematic)
def pair_peaks(
    peaks1: np.ndarray, freqs1: np.ndarray, psd1: np.ndarray, peaks2: np.ndarray, freqs2: np.ndarray, psd2: np.ndarray
) -> PeakPairingResult:
    """Pair peaks that are close in frequency between the two belt PSD curves.

    Peaks are matched greedily (closest remaining pair first) using a dynamic
    threshold derived from the spread of all pairwise frequency distances,
    clipped to a maximum of 10 Hz.
    """
    # Nothing to pair if either curve has no detected peak (also avoids the
    # empty-array warning and NaN threshold from np.median below)
    if len(peaks1) == 0 or len(peaks2) == 0:
        return PeakPairingResult(paired_peaks=[], unpaired_peaks1=list(peaks1), unpaired_peaks2=list(peaks2))

    # Compute a dynamic detection threshold to filter and pair peaks efficiently
    # even if the signal is very noisy (this get clipped to a maximum of 10Hz diff)
    # Broadcasting builds the full pairwise distance matrix in one vectorized step
    distances = np.abs(freqs1[peaks1][:, np.newaxis] - freqs2[peaks2]).ravel()

    median_distance = np.median(distances)
    iqr = np.percentile(distances, 75) - np.percentile(distances, 25)

    threshold = median_distance + 1.5 * iqr
    threshold = min(threshold, 10)

    # Pair the peaks using the dynamic threshold: repeatedly take the closest
    # remaining pair until none is below the threshold
    paired_peaks = []
    unpaired_peaks1 = list(peaks1)
    unpaired_peaks2 = list(peaks2)

    while unpaired_peaks1 and unpaired_peaks2:
        min_distance = threshold + 1
        pair = None

        for p1 in unpaired_peaks1:
            for p2 in unpaired_peaks2:
                distance = abs(freqs1[p1] - freqs2[p2])
                if distance < min_distance:
                    min_distance = distance
                    pair = (p1, p2)

        if pair is None:  # No more pairs below the threshold
            break

        p1, p2 = pair
        paired_peaks.append(((p1, freqs1[p1], psd1[p1]), (p2, freqs2[p2], psd2[p2])))
        unpaired_peaks1.remove(p1)
        unpaired_peaks2.remove(p2)

    return PeakPairingResult(
        paired_peaks=paired_peaks, unpaired_peaks1=unpaired_peaks1, unpaired_peaks2=unpaired_peaks2
    )
|
||||
|
||||
|
||||
######################################################################
|
||||
# Computation of the differential spectrogram
|
||||
######################################################################
|
||||
|
||||
|
||||
def compute_mhi(similarity_factor: float, signal1: SignalData, signal2: SignalData) -> str:
    """Compute the Mechanical Health Indicator and return its textual label.

    Starts from the belts *similarity_factor* (a percentage), reduces it when
    there are more paired peaks than DC_MAX_PEAKS, penalizes unpaired peaks
    weighted by their relative PSD amplitude, clips the result to [0, 100] and
    maps it through mhi_lut().
    """
    num_unpaired_peaks = len(signal1.unpaired_peaks) + len(signal2.unpaired_peaks)
    num_paired_peaks = len(signal1.paired_peaks)
    # Combine unpaired peaks from both signals, tagging each peak with its respective signal
    combined_unpaired_peaks = [(peak, signal1) for peak in signal1.unpaired_peaks] + [
        (peak, signal2) for peak in signal2.unpaired_peaks
    ]
    psd_highest_max = max(signal1.psd.max(), signal2.psd.max())

    # Start with the similarity factor directly scaled to a percentage
    mhi = similarity_factor

    # Bonus for ideal number of total peaks (1 or 2)
    if num_paired_peaks >= DC_MAX_PEAKS:
        mhi *= DC_MAX_PEAKS / num_paired_peaks  # Reduce MHI if more than ideal number of peaks

    # Penalty from unpaired peaks weighted by their amplitude relative to the maximum PSD amplitude
    unpaired_peak_penalty = 0
    if num_unpaired_peaks > DC_MAX_UNPAIRED_PEAKS_ALLOWED:
        for peak, signal in combined_unpaired_peaks:
            unpaired_peak_penalty += (signal.psd[peak] / psd_highest_max) * 30
        mhi -= unpaired_peak_penalty

    # Ensure the result lies between 0 and 100 by clipping the computed value
    mhi = np.clip(mhi, 0, 100)

    return mhi_lut(mhi)
|
||||
|
||||
|
||||
# LUT to transform the MHI into a textual value easy to understand for the users of the script
def mhi_lut(mhi: float) -> str:
    """Map a numeric mechanical health indicator (0-100) to a human readable label."""
    bands = (
        (70, 100, 'Excellent mechanical health'),
        (55, 70, 'Good mechanical health'),
        (45, 55, 'Acceptable mechanical health'),
        (30, 45, 'Potential signs of a mechanical issue'),
        (15, 30, 'Likely a mechanical issue'),
        (0, 15, 'Mechanical issue detected'),
    )
    # Clip to [1, 100] so any input falls into exactly one of the bands above
    clipped = np.clip(mhi, 1, 100)
    return next(
        (message for lower, upper, message in bands if lower < clipped <= upper),
        'Unknown mechanical health',  # Should never happen
    )
|
||||
|
||||
|
||||
######################################################################
|
||||
# Graphing
|
||||
######################################################################
|
||||
|
||||
|
||||
def plot_compare_frequency(
    ax: plt.Axes, signal1: SignalData, signal2: SignalData, signal1_belt: str, signal2_belt: str, max_freq: float
) -> None:
    """Plot both belts' PSD curves on *ax*, annotating paired and unpaired peaks.

    Paired peaks are labeled with Greek letters (suffixed 1/2 for each belt) and
    their frequency/amplitude deltas are summarized in a table overlay; unpaired
    peaks are numbered in bold red. *max_freq* bounds the x axis.
    """
    # Plot the two belts PSD signals
    ax.plot(signal1.freqs, signal1.psd, label='Belt ' + signal1_belt, color=KLIPPAIN_COLORS['purple'])
    ax.plot(signal2.freqs, signal2.psd, label='Belt ' + signal2_belt, color=KLIPPAIN_COLORS['orange'])

    psd_highest_max = max(signal1.psd.max(), signal2.psd.max())

    # Trace and annotate the peaks on the graph
    paired_peak_count = 0
    unpaired_peak_count = 0
    offsets_table_data = []

    for _, (peak1, peak2) in enumerate(signal1.paired_peaks):
        label = ALPHABET[paired_peak_count]
        # amplitude_offset = abs(
        #     ((signal2.psd[peak2[0]] - signal1.psd[peak1[0]]) / max(signal1.psd[peak1[0]], signal2.psd[peak2[0]])) * 100
        # )
        # Amplitude delta is expressed relative to the highest PSD value of either belt
        amplitude_offset = abs(((signal2.psd[peak2[0]] - signal1.psd[peak1[0]]) / psd_highest_max) * 100)
        frequency_offset = abs(signal2.freqs[peak2[0]] - signal1.freqs[peak1[0]])
        offsets_table_data.append([f'Peaks {label}', f'{frequency_offset:.1f} Hz', f'{amplitude_offset:.1f} %'])

        # Mark both peaks and link them with a dotted line
        ax.plot(signal1.freqs[peak1[0]], signal1.psd[peak1[0]], 'x', color='black')
        ax.plot(signal2.freqs[peak2[0]], signal2.psd[peak2[0]], 'x', color='black')
        ax.plot(
            [signal1.freqs[peak1[0]], signal2.freqs[peak2[0]]],
            [signal1.psd[peak1[0]], signal2.psd[peak2[0]]],
            ':',
            color='gray',
        )

        ax.annotate(
            label + '1',
            (signal1.freqs[peak1[0]], signal1.psd[peak1[0]]),
            textcoords='offset points',
            xytext=(8, 5),
            ha='left',
            fontsize=13,
            color='black',
        )
        ax.annotate(
            label + '2',
            (signal2.freqs[peak2[0]], signal2.psd[peak2[0]]),
            textcoords='offset points',
            xytext=(8, 5),
            ha='left',
            fontsize=13,
            color='black',
        )
        paired_peak_count += 1

    # Unpaired peaks get a continuous numbering across both belts
    for peak in signal1.unpaired_peaks:
        ax.plot(signal1.freqs[peak], signal1.psd[peak], 'x', color='black')
        ax.annotate(
            str(unpaired_peak_count + 1),
            (signal1.freqs[peak], signal1.psd[peak]),
            textcoords='offset points',
            xytext=(8, 5),
            ha='left',
            fontsize=13,
            color='red',
            weight='bold',
        )
        unpaired_peak_count += 1

    for peak in signal2.unpaired_peaks:
        ax.plot(signal2.freqs[peak], signal2.psd[peak], 'x', color='black')
        ax.annotate(
            str(unpaired_peak_count + 1),
            (signal2.freqs[peak], signal2.psd[peak]),
            textcoords='offset points',
            xytext=(8, 5),
            ha='left',
            fontsize=13,
            color='red',
            weight='bold',
        )
        unpaired_peak_count += 1

    # Add estimated similarity to the graph
    ax2 = ax.twinx()  # To split the legends in two box
    ax2.yaxis.set_visible(False)
    ax2.plot([], [], ' ', label=f'Number of unpaired peaks: {unpaired_peak_count}')

    # Setting axis parameters, grid and graph title
    ax.set_xlabel('Frequency (Hz)')
    ax.set_xlim([0, max_freq])
    ax.set_ylabel('Power spectral density')
    ax.set_ylim([0, psd_highest_max * 1.1])

    ax.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
    ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
    ax.ticklabel_format(axis='x', style='scientific', scilimits=(0, 0))
    ax.grid(which='major', color='grey')
    ax.grid(which='minor', color='lightgrey')
    fontP = matplotlib.font_manager.FontProperties()
    fontP.set_size('small')
    ax.set_title(
        'Belts frequency profiles',
        fontsize=14,
        color=KLIPPAIN_COLORS['dark_orange'],
        weight='bold',
    )

    # Print the table of offsets ontop of the graph below the original legend (upper right)
    if len(offsets_table_data) > 0:
        columns = [
            '',
            'Frequency delta',
            'Amplitude delta',
        ]
        offset_table = ax.table(
            cellText=offsets_table_data,
            colLabels=columns,
            bbox=[0.66, 0.79, 0.33, 0.15],
            loc='upper right',
            cellLoc='center',
        )
        offset_table.auto_set_font_size(False)
        offset_table.set_fontsize(8)
        offset_table.auto_set_column_width([0, 1, 2])
        offset_table.set_zorder(100)
        cells = [key for key in offset_table.get_celld().keys()]
        for cell in cells:
            offset_table[cell].set_facecolor('white')
            offset_table[cell].set_alpha(0.6)

    ax.legend(loc='upper left', prop=fontP)
    ax2.legend(loc='upper right', prop=fontP)

    return
|
||||
|
||||
|
||||
# Compute quantile-quantile plot to compare the two belts
|
||||
def plot_versus_belts(
    ax: plt.Axes,
    common_freqs: np.ndarray,
    signal1: SignalData,
    signal2: SignalData,
    interp_psd1: np.ndarray,
    interp_psd2: np.ndarray,
    signal1_belt: str,
    signal2_belt: str,
) -> None:
    """Draw the cross-belts comparison (quantile-quantile style) plot on *ax*.

    Each point is (PSD of belt 1, PSD of belt 2) at the same frequency bin of
    *common_freqs*. Two identical belts would fall on the y=x "ideal line";
    paired and unpaired peaks from both signals are highlighted and labeled.

    Args:
        ax: target matplotlib axes.
        common_freqs: common frequency bins both PSDs were re-interpolated onto.
        signal1 / signal2: per-belt signal data (freqs, psd, peaks, paired/unpaired peaks).
        interp_psd1 / interp_psd2: PSDs of signal1/signal2 interpolated on common_freqs.
        signal1_belt / signal2_belt: belt display names used for the axis labels.
    """
    ax.set_title('Cross-belts comparison plot', fontsize=14, color=KLIPPAIN_COLORS['dark_orange'], weight='bold')

    # The "good zone" is a band above the ideal y=x line whose width shrinks
    # exponentially as the PSD magnitude grows (tolerance is relative at low power)
    max_psd = max(np.max(interp_psd1), np.max(interp_psd2))
    ideal_line = np.linspace(0, max_psd * 1.1, 500)
    green_boundary = ideal_line + (0.35 * max_psd * np.exp(-ideal_line / (0.6 * max_psd)))
    # Filled on both sides of y=x (betweenx mirrors the band below the line)
    ax.fill_betweenx(ideal_line, ideal_line, green_boundary, color='green', alpha=0.15)
    ax.fill_between(ideal_line, ideal_line, green_boundary, color='green', alpha=0.15, label='Good zone')
    ax.plot(
        ideal_line,
        ideal_line,
        '--',
        label='Ideal line',
        color='red',
        linewidth=2,
    )

    # The actual PSD1-vs-PSD2 curve, with a light fill between it and the y axis
    ax.plot(interp_psd1, interp_psd2, color='dimgrey', marker='o', markersize=1.5)
    ax.fill_betweenx(interp_psd2, interp_psd1, color=KLIPPAIN_COLORS['red_pink'], alpha=0.1)

    paired_peak_count = 0
    unpaired_peak_count = 0

    # Paired peaks: one marker if both peaks land on the same common-frequency bin,
    # otherwise two markers (one per belt) at the cross-evaluated PSD positions
    for _, (peak1, peak2) in enumerate(signal1.paired_peaks):
        label = ALPHABET[paired_peak_count]
        freq1 = signal1.freqs[peak1[0]]
        freq2 = signal2.freqs[peak2[0]]
        nearest_idx1 = np.argmin(np.abs(common_freqs - freq1))
        nearest_idx2 = np.argmin(np.abs(common_freqs - freq2))

        if nearest_idx1 == nearest_idx2:
            psd1_peak_value = interp_psd1[nearest_idx1]
            psd2_peak_value = interp_psd2[nearest_idx1]
            ax.plot(psd1_peak_value, psd2_peak_value, marker='o', color='black', markersize=7)
            ax.annotate(
                f'{label}1/{label}2',
                (psd1_peak_value, psd2_peak_value),
                textcoords='offset points',
                xytext=(-7, 7),
                fontsize=13,
                color='black',
            )
        else:
            psd1_peak_value = interp_psd1[nearest_idx1]
            psd1_on_peak = interp_psd1[nearest_idx2]
            psd2_peak_value = interp_psd2[nearest_idx2]
            psd2_on_peak = interp_psd2[nearest_idx1]
            ax.plot(psd1_on_peak, psd2_peak_value, marker='o', color=KLIPPAIN_COLORS['orange'], markersize=7)
            ax.plot(psd1_peak_value, psd2_on_peak, marker='o', color=KLIPPAIN_COLORS['purple'], markersize=7)
            ax.annotate(
                f'{label}1',
                (psd1_peak_value, psd2_on_peak),
                textcoords='offset points',
                xytext=(0, 7),
                fontsize=13,
                color='black',
            )
            ax.annotate(
                f'{label}2',
                (psd1_on_peak, psd2_peak_value),
                textcoords='offset points',
                xytext=(0, 7),
                fontsize=13,
                color='black',
            )
        paired_peak_count += 1

    # Unpaired peaks of belt 1 (purple markers, numbered)
    # NOTE(review): freq2/nearest_idx2 are computed but never used in this loop,
    # and peak_index (an index into signal1.freqs) is also applied to
    # signal2.freqs — harmless only if both freq arrays share the same bins; confirm
    for _, peak_index in enumerate(signal1.unpaired_peaks):
        freq1 = signal1.freqs[peak_index]
        freq2 = signal2.freqs[peak_index]
        nearest_idx1 = np.argmin(np.abs(common_freqs - freq1))
        nearest_idx2 = np.argmin(np.abs(common_freqs - freq2))
        psd1_peak_value = interp_psd1[nearest_idx1]
        psd2_peak_value = interp_psd2[nearest_idx1]
        ax.plot(psd1_peak_value, psd2_peak_value, marker='o', color=KLIPPAIN_COLORS['purple'], markersize=7)
        ax.annotate(
            str(unpaired_peak_count + 1),
            (psd1_peak_value, psd2_peak_value),
            textcoords='offset points',
            fontsize=13,
            weight='bold',
            color=KLIPPAIN_COLORS['red_pink'],
            xytext=(0, 7),
        )
        unpaired_peak_count += 1

    # Unpaired peaks of belt 2 (orange markers, numbering continues from belt 1)
    # NOTE(review): same unused freq2/nearest_idx2 pattern as the loop above, and
    # the marker is placed using nearest_idx1 (belt 1's frequency) — confirm intent
    for _, peak_index in enumerate(signal2.unpaired_peaks):
        freq1 = signal1.freqs[peak_index]
        freq2 = signal2.freqs[peak_index]
        nearest_idx1 = np.argmin(np.abs(common_freqs - freq1))
        nearest_idx2 = np.argmin(np.abs(common_freqs - freq2))
        psd1_peak_value = interp_psd1[nearest_idx1]
        psd2_peak_value = interp_psd2[nearest_idx1]
        ax.plot(psd1_peak_value, psd2_peak_value, marker='o', color=KLIPPAIN_COLORS['orange'], markersize=7)
        ax.annotate(
            str(unpaired_peak_count + 1),
            (psd1_peak_value, psd2_peak_value),
            textcoords='offset points',
            fontsize=13,
            weight='bold',
            color=KLIPPAIN_COLORS['red_pink'],
            xytext=(0, 7),
        )
        unpaired_peak_count += 1

    # Setting axis parameters, grid and legend
    ax.set_xlabel(f'Belt {signal1_belt}')
    ax.set_ylabel(f'Belt {signal2_belt}')
    ax.set_xlim([0, max_psd * 1.1])
    ax.set_ylim([0, max_psd * 1.1])

    ax.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
    ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
    ax.ticklabel_format(axis='y', style='scientific', scilimits=(0, 0))
    ax.grid(which='major', color='grey')
    ax.grid(which='minor', color='lightgrey')

    fontP = matplotlib.font_manager.FontProperties()
    fontP.set_size('medium')
    ax.legend(loc='upper left', prop=fontP)

    return
|
||||
|
||||
|
||||
######################################################################
|
||||
# Custom tools
|
||||
######################################################################
|
||||
|
||||
|
||||
# Original Klipper function to get the PSD data of a raw accelerometer signal
|
||||
def compute_signal_data(data: np.ndarray, max_freq: float) -> SignalData:
    """Compute the PSD of a raw accelerometer signal and detect its resonance peaks.

    Uses Klipper's ShaperCalibrate helper to turn the raw samples into a PSD,
    trims the spectrum to frequencies <= max_freq, and runs the automatic
    peak detection on the result.
    """
    calibration_data = shaper_calibrate.ShaperCalibrate(printer=None).process_accelerometer_data(data)

    # Keep only the part of the spectrum below the requested maximum frequency
    freq_mask = calibration_data.freq_bins <= max_freq
    freqs = calibration_data.freq_bins[freq_mask]
    psd = calibration_data.get_psd('all')[freq_mask]

    # Automatic peak detection (threshold is relative to the spectrum maximum)
    _, peaks, _ = detect_peaks(psd, freqs, PEAKS_DETECTION_THRESHOLD * psd.max())

    return SignalData(freqs=freqs, psd=psd, peaks=peaks)
|
||||
|
||||
|
||||
######################################################################
|
||||
# Startup and main routines
|
||||
######################################################################
|
||||
|
||||
|
||||
def belts_calibration(
    lognames: List[str],
    kinematics: Optional[str],
    klipperdir: str = '~/klipper',
    max_freq: float = 200.0,
    accel_per_hz: Optional[float] = None,
    st_version: str = 'unknown',
) -> plt.Figure:
    """Create the relative belts calibration figure from two accelerometer CSV logs.

    Computes the PSD of each belt, pairs their resonance peaks, estimates a
    similarity factor and an experimental mechanical health indicator (MHI),
    then draws the frequency-profile and cross-belts comparison plots.

    Args:
        lognames: paths of exactly two raw accelerometer CSV files (one per belt).
        kinematics: printer kinematics name (similarity/MHI shown only for corexy/corexz).
        klipperdir: Klipper installation folder (for the shaper_calibrate import).
        max_freq: upper frequency bound of the graphs.
        accel_per_hz: accel_per_hz used during the measurement (displayed in the title).
        st_version: Shake&Tune version string added to the figure.

    Returns:
        The assembled matplotlib Figure.

    Raises:
        ValueError: if the parsed logs do not yield exactly two valid datasets.
    """
    global shaper_calibrate
    shaper_calibrate = setup_klipper_import(klipperdir)

    # Parse data from the log files while ignoring CSV in the wrong format
    datas = [data for data in (parse_log(fn) for fn in lognames) if data is not None]
    # Fix: the previous `> 2` check let a single valid CSV through, which later
    # crashed with an opaque IndexError at datas[1] instead of this clear error
    if len(datas) != 2:
        raise ValueError('Incorrect number of .csv files used (this function needs exactly two files to compare them)!')

    # Get the belts name for the legend to avoid putting the full file name
    belt_info = {'A': ' (axis 1,-1)', 'B': ' (axis 1, 1)'}
    signal1_belt = (lognames[0].split('/')[-1]).split('_')[-1][0]
    signal2_belt = (lognames[1].split('/')[-1]).split('_')[-1][0]
    signal1_belt += belt_info.get(signal1_belt, '')
    signal2_belt += belt_info.get(signal2_belt, '')

    # Compute calibration data for the two datasets with automatic peaks detection
    signal1 = compute_signal_data(datas[0], max_freq)
    signal2 = compute_signal_data(datas[1], max_freq)
    del datas  # free the raw samples early, PSDs are all that is needed from here

    # Pair the peaks across the two datasets
    pairing_result = pair_peaks(signal1.peaks, signal1.freqs, signal1.psd, signal2.peaks, signal2.freqs, signal2.psd)
    signal1 = signal1._replace(paired_peaks=pairing_result.paired_peaks, unpaired_peaks=pairing_result.unpaired_peaks1)
    signal2 = signal2._replace(paired_peaks=pairing_result.paired_peaks, unpaired_peaks=pairing_result.unpaired_peaks2)

    # Re-interpolate the PSD signals to a common frequency range to be able to plot them one against the other point by point
    common_freqs = np.linspace(0, max_freq, 500)
    interp_psd1 = np.interp(common_freqs, signal1.freqs, signal1.psd)
    interp_psd2 = np.interp(common_freqs, signal2.freqs, signal2.psd)

    # Calculating R^2 to y=x line to compute the similarity between the two belts
    ss_res = np.sum((interp_psd2 - interp_psd1) ** 2)
    ss_tot = np.sum((interp_psd2 - np.mean(interp_psd2)) ** 2)
    similarity_factor = (1 - (ss_res / ss_tot)) * 100
    ConsoleOutput.print(f'Belts estimated similarity: {similarity_factor:.1f}%')

    # Experimental mechanical health indicator derived from similarity and peak pairing
    mhi = compute_mhi(similarity_factor, signal1, signal2)
    ConsoleOutput.print(f'[experimental] Mechanical health: {mhi}')

    # Two-panel layout: frequency profiles (left, wider) and cross-belts plot (right)
    fig, ((ax1, ax3)) = plt.subplots(
        1,
        2,
        gridspec_kw={
            'width_ratios': [5, 3],
            'bottom': 0.080,
            'top': 0.840,
            'left': 0.050,
            'right': 0.985,
            'hspace': 0.166,
            'wspace': 0.138,
        },
    )
    fig.set_size_inches(15, 7)

    # Add title
    title_line1 = 'RELATIVE BELTS CALIBRATION TOOL'
    fig.text(
        0.060, 0.947, title_line1, ha='left', va='bottom', fontsize=20, color=KLIPPAIN_COLORS['purple'], weight='bold'
    )
    try:
        # The measurement date/time is encoded in the CSV filename (..._YYYYMMDD_HHMMSS_...)
        filename = lognames[0].split('/')[-1]
        dt = datetime.strptime(f"{filename.split('_')[1]} {filename.split('_')[2]}", '%Y%m%d %H%M%S')
        title_line2 = dt.strftime('%x %X')
        if kinematics is not None:
            title_line2 += ' -- ' + kinematics.upper() + ' kinematics'
    except Exception:
        # Fall back to the raw filenames if they don't follow the expected pattern
        ConsoleOutput.print(
            'Warning: CSV filenames look to be different than expected (%s , %s)' % (lognames[0], lognames[1])
        )
        title_line2 = lognames[0].split('/')[-1] + ' / ' + lognames[1].split('/')[-1]
    fig.text(0.060, 0.939, title_line2, ha='left', va='top', fontsize=16, color=KLIPPAIN_COLORS['dark_purple'])

    # We add the estimated similarity and the MHI value to the title only if the kinematics is CoreXY
    # as it makes no sense to compute these values for other kinematics that don't have paired belts
    if kinematics in ['corexy', 'corexz']:
        title_line3 = f'| Estimated similarity: {similarity_factor:.1f}%'
        title_line4 = f'| {mhi} (experimental)'
        fig.text(0.55, 0.985, title_line3, ha='left', va='top', fontsize=14, color=KLIPPAIN_COLORS['dark_purple'])
        fig.text(0.55, 0.950, title_line4, ha='left', va='top', fontsize=14, color=KLIPPAIN_COLORS['dark_purple'])

    # Add the accel_per_hz value to the title
    title_line5 = f'| Accel per Hz used: {accel_per_hz} mm/s²/Hz'
    fig.text(0.55, 0.915, title_line5, ha='left', va='top', fontsize=14, color=KLIPPAIN_COLORS['dark_purple'])

    # Plot the graphs
    plot_compare_frequency(ax1, signal1, signal2, signal1_belt, signal2_belt, max_freq)
    plot_versus_belts(ax3, common_freqs, signal1, signal2, interp_psd1, interp_psd2, signal1_belt, signal2_belt)

    # Adding a small Klippain logo to the top left corner of the figure
    ax_logo = fig.add_axes([0.001, 0.894, 0.105, 0.105], anchor='NW')
    ax_logo.imshow(plt.imread(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'klippain.png')))
    ax_logo.axis('off')

    # Adding Shake&Tune version in the top right corner
    if st_version != 'unknown':
        fig.text(0.995, 0.980, st_version, ha='right', va='bottom', fontsize=8, color=KLIPPAIN_COLORS['purple'])

    return fig
|
||||
|
||||
|
||||
def main():
    """Command-line entry point: parse the options and save the belts graph as a PNG."""
    parser = optparse.OptionParser('%prog [options] <raw logs>')
    parser.add_option('-o', '--output', type='string', dest='output', default=None, help='filename of output graph')
    parser.add_option('-f', '--max_freq', type='float', default=200.0, help='maximum frequency to graph')
    parser.add_option('--accel_per_hz', type='float', default=None, help='accel_per_hz used during the measurement')
    parser.add_option(
        '-k', '--klipper_dir', type='string', dest='klipperdir', default='~/klipper', help='main klipper directory'
    )
    parser.add_option(
        '-m',
        '--kinematics',
        type='string',
        dest='kinematics',
        help='machine kinematics configuration',
    )
    opts, args = parser.parse_args()

    # Validate the command line before doing any work
    if len(args) < 1:
        parser.error('Incorrect number of arguments')
    if opts.output is None:
        parser.error('You must specify an output file.png to use the script (option -o)')

    figure = belts_calibration(args, opts.kinematics, opts.klipperdir, opts.max_freq, opts.accel_per_hz, 'unknown')
    figure.savefig(opts.output, dpi=150)
|
||||
|
||||
|
||||
# Allow the module to be run directly as a standalone script
if __name__ == '__main__':
    main()
|
||||
74
shaketune/graph_creators/graph_creator.py
Normal file
74
shaketune/graph_creators/graph_creator.py
Normal file
@@ -0,0 +1,74 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import abc
|
||||
import shutil
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import Callable, List, Optional
|
||||
|
||||
from matplotlib.figure import Figure
|
||||
|
||||
from ..shaketune_config import ShakeTuneConfig
|
||||
|
||||
|
||||
class GraphCreator(abc.ABC):
    """Abstract base class for all Shake&Tune graph creators.

    Handles the shared plumbing: timestamped naming, moving the raw CSV files
    from /tmp into the per-type results folder, saving the final figure, and
    keeping or removing the CSVs afterwards. Subclasses implement the actual
    graph generation (create_graph) and results-folder housekeeping
    (clean_old_files).
    """

    def __init__(self, config: ShakeTuneConfig, graph_type: str):
        self._config = config
        # Timestamp shared by all files (CSVs and PNG) produced by this run
        self._graph_date = datetime.now().strftime('%Y%m%d_%H%M%S')
        self._version = ShakeTuneConfig.get_git_version()
        self._type = graph_type
        self._folder = self._config.get_results_folder(graph_type)

    def _move_and_prepare_files(
        self,
        glob_pattern: str,
        min_files_required: Optional[int] = None,
        custom_name_func: Optional[Callable[[Path], str]] = None,
    ) -> List[Path]:
        """Move the newest CSVs matching *glob_pattern* from /tmp into the results folder.

        Only the *min_files_required* most recent matches are kept (newest first).
        Returns the new paths of the moved files.

        Raises:
            FileNotFoundError: if no file matches, or fewer than required are found.
        """
        tmp_path = Path('/tmp')
        globbed_files = list(tmp_path.glob(glob_pattern))

        # If min_files_required is not set, use the number of globbed files as the minimum
        # (note: an explicit 0 is falsy and is treated the same as "not set")
        min_files_required = min_files_required or len(globbed_files)

        if not globbed_files:
            raise FileNotFoundError(f'no CSV files found in the /tmp folder to create the {self._type} graphs!')
        if len(globbed_files) < min_files_required:
            raise FileNotFoundError(f'{min_files_required} CSV files are needed to create the {self._type} graphs!')

        lognames = []
        for filename in sorted(globbed_files, key=lambda f: f.stat().st_mtime, reverse=True)[:min_files_required]:
            custom_name = custom_name_func(filename) if custom_name_func else filename.name
            new_file = self._folder / f"{self._type.replace(' ', '')}_{self._graph_date}_{custom_name}.csv"
            # shutil.move() is needed to move the file across filesystems (mainly for BTT CB1 Pi default OS image)
            shutil.move(filename, new_file)
            lognames.append(new_file)
        return lognames

    def _save_figure_and_cleanup(self, fig: Figure, lognames: List[Path], axis_label: Optional[str] = None) -> None:
        """Save *fig* as a PNG in the results folder, then keep or delete the CSVs per config."""
        axis_suffix = f'_{axis_label}' if axis_label else ''
        png_filename = self._folder / f"{self._type.replace(' ', '')}_{self._graph_date}{axis_suffix}.png"
        fig.savefig(png_filename, dpi=self._config.dpi)

        if self._config.keep_csv:
            self._archive_files(lognames)
        else:
            self._remove_files(lognames)

    def _archive_files(self, lognames: List[Path]) -> None:
        # Intentionally a no-op: the CSVs were already moved into the results
        # folder by _move_and_prepare_files, so "keeping" them means leaving
        # them in place. NOTE(review): confirm nothing more was intended here.
        return

    def _remove_files(self, lognames: List[Path]) -> None:
        """Delete the given CSV files (missing files are ignored)."""
        for csv in lognames:
            csv.unlink(missing_ok=True)

    def get_type(self) -> str:
        """Return the graph type name of this creator (e.g. 'input shaper')."""
        return self._type

    @abc.abstractmethod
    def create_graph(self) -> None:
        """Generate the graph(s) for this creator. Must be implemented by subclasses."""
        pass

    @abc.abstractmethod
    def clean_old_files(self, keep_results: int) -> None:
        """Delete old result files, keeping only the *keep_results* most recent ones."""
        pass
|
||||
BIN
shaketune/graph_creators/klippain.png
Normal file
BIN
shaketune/graph_creators/klippain.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 607 KiB |
495
shaketune/graph_creators/shaper_graph_creator.py
Normal file
495
shaketune/graph_creators/shaper_graph_creator.py
Normal file
@@ -0,0 +1,495 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
#################################################
|
||||
######## INPUT SHAPER CALIBRATION SCRIPT ########
|
||||
#################################################
|
||||
# Derived from the calibrate_shaper.py official Klipper script
|
||||
# Copyright (C) 2020 Dmitry Butyugin <dmbutyugin@google.com>
|
||||
# Copyright (C) 2020 Kevin O'Connor <kevin@koconnor.net>
|
||||
# Highly modified and improved by Frix_x#0161 #
|
||||
|
||||
import optparse
|
||||
import os
|
||||
from datetime import datetime
|
||||
from typing import List, Optional
|
||||
|
||||
import matplotlib
|
||||
import matplotlib.font_manager
|
||||
import matplotlib.pyplot as plt
|
||||
import matplotlib.ticker
|
||||
import numpy as np
|
||||
|
||||
matplotlib.use('Agg')
|
||||
|
||||
from ..helpers.common_func import (
|
||||
compute_mechanical_parameters,
|
||||
compute_spectrogram,
|
||||
detect_peaks,
|
||||
parse_log,
|
||||
setup_klipper_import,
|
||||
)
|
||||
from ..helpers.console_output import ConsoleOutput
|
||||
from ..shaketune_config import ShakeTuneConfig
|
||||
from .graph_creator import GraphCreator
|
||||
|
||||
PEAKS_DETECTION_THRESHOLD = 0.05
|
||||
PEAKS_EFFECT_THRESHOLD = 0.12
|
||||
SPECTROGRAM_LOW_PERCENTILE_FILTER = 5
|
||||
MAX_VIBRATIONS = 5.0
|
||||
|
||||
KLIPPAIN_COLORS = {
|
||||
'purple': '#70088C',
|
||||
'orange': '#FF8D32',
|
||||
'dark_purple': '#150140',
|
||||
'dark_orange': '#F24130',
|
||||
'red_pink': '#F2055C',
|
||||
}
|
||||
|
||||
|
||||
class ShaperGraphCreator(GraphCreator):
    """Graph creator for the input shaper calibration graphs.

    Must be configured (at least with the square corner velocity) via
    configure() before calling create_graph().
    """

    def __init__(self, config: ShakeTuneConfig):
        super().__init__(config, 'input shaper')
        # Measurement/tuning parameters, set later through configure()
        self._max_smoothing: Optional[float] = None
        self._scv: Optional[float] = None
        self._accel_per_hz: Optional[float] = None

    def configure(
        self, scv: float, max_smoothing: Optional[float] = None, accel_per_hz: Optional[float] = None
    ) -> None:
        """Store the parameters needed by the shaper calibration (scv is mandatory)."""
        self._scv = scv
        self._max_smoothing = max_smoothing
        self._accel_per_hz = accel_per_hz

    def create_graph(self) -> None:
        """Move the measured axis CSV from /tmp, compute the shaper graph and save it.

        Raises:
            ValueError: if configure() was not called with an scv value first.
        """
        # NOTE(review): a falsy scv (0 or 0.0) is rejected here the same as None — confirm intended
        if not self._scv:
            raise ValueError('scv must be set to create the input shaper graph!')

        # The axis name (e.g. X or Y) is extracted from the CSV filename and
        # used both for the archived CSV name and the PNG suffix
        lognames = self._move_and_prepare_files(
            glob_pattern='shaketune-axis_*.csv',
            min_files_required=1,
            custom_name_func=lambda f: f.stem.split('_')[1].upper(),
        )
        fig = shaper_calibration(
            lognames=[str(path) for path in lognames],
            klipperdir=str(self._config.klipper_folder),
            max_smoothing=self._max_smoothing,
            scv=self._scv,
            accel_per_hz=self._accel_per_hz,
            st_version=self._version,
        )
        self._save_figure_and_cleanup(fig, lognames, lognames[0].stem.split('_')[-1])

    def clean_old_files(self, keep_results: int = 3) -> None:
        """Keep only the most recent results (two PNGs per run: one per measured axis)."""
        files = sorted(self._folder.glob('*.png'), key=lambda f: f.stat().st_mtime, reverse=True)
        # 2 * keep_results because each tuning session produces a graph per axis
        if len(files) <= 2 * keep_results:
            return  # No need to delete any files
        for old_file in files[2 * keep_results :]:
            # Delete the matching CSV (if kept) together with the old PNG
            csv_file = old_file.with_suffix('.csv')
            csv_file.unlink(missing_ok=True)
            old_file.unlink()
|
||||
|
||||
|
||||
######################################################################
|
||||
# Computation
|
||||
######################################################################
|
||||
|
||||
|
||||
# Find the best shaper parameters using Klipper's official algorithm selection with
# a proper precomputed damping ratio (zeta) and using the configured printer SCV value
|
||||
def calibrate_shaper(datas: List[np.ndarray], max_smoothing: Optional[float], scv: float, max_freq: float):
    """Run Klipper's shaper selection on the measured data.

    Computes the PSD, estimates the resonance frequency and damping ratio,
    then asks Klipper's helper for the best shaper using those parameters.
    Falls back to the legacy find_best_shaper() signature on older Klipper
    versions that don't accept the extended keyword arguments.

    Returns:
        (shaper_name, all_shapers, calibration_data, fr, zeta, compat) where
        compat is True when the legacy compatibility path was used.
    """
    helper = shaper_calibrate.ShaperCalibrate(printer=None)
    calibration_data = helper.process_accelerometer_data(datas)
    calibration_data.normalize_to_frequencies()

    # Estimate the main resonance frequency and damping ratio from the summed PSD
    fr, zeta, _, _ = compute_mechanical_parameters(calibration_data.psd_sum, calibration_data.freq_bins)

    # If the damping ratio computation fails, we use Klipper's default value instead
    if zeta is None:
        zeta = 0.1

    compat = False
    try:
        # Newer Klipper: pass the measured damping ratio and the printer's SCV
        shaper, all_shapers = helper.find_best_shaper(
            calibration_data,
            shapers=None,
            damping_ratio=zeta,
            scv=scv,
            shaper_freqs=None,
            max_smoothing=max_smoothing,
            test_damping_ratios=None,
            max_freq=max_freq,
            logger=ConsoleOutput.print,
        )
    except TypeError:
        # Older Klipper rejects the keyword arguments above: warn and fall back
        # to the legacy positional signature (results may be slightly off)
        ConsoleOutput.print(
            '[WARNING] You seem to be using an older version of Klipper that is not compatible with all the latest Shake&Tune features!'
        )
        ConsoleOutput.print(
            'Shake&Tune now runs in compatibility mode: be aware that the results may be slightly off, since the real damping ratio cannot be used to create the filter recommendations'
        )
        compat = True
        shaper, all_shapers = helper.find_best_shaper(calibration_data, max_smoothing, ConsoleOutput.print)

    ConsoleOutput.print(
        '\n-> Recommended shaper is %s @ %.1f Hz (when using a square corner velocity of %.1f and a damping ratio of %.3f)'
        % (shaper.name.upper(), shaper.freq, scv, zeta)
    )

    return shaper.name, all_shapers, calibration_data, fr, zeta, compat
|
||||
|
||||
|
||||
######################################################################
|
||||
# Graphing
|
||||
######################################################################
|
||||
|
||||
|
||||
def plot_freq_response(
    ax: plt.Axes,
    calibration_data,
    shapers,
    klipper_shaper_choice: str,
    peaks: np.ndarray,
    peaks_freqs: np.ndarray,
    peaks_threshold: List[float],
    fr: float,
    zeta: float,
    max_freq: float,
) -> None:
    """Draw the axis frequency response plot with shaper recommendations.

    Plots the per-axis and summed PSDs, overlays every candidate shaper's
    filter curve, highlights the detected resonance peaks with their
    detection/warning thresholds, and builds a two-part legend (data on the
    main axes, shaper info on a twin axes).

    Args:
        ax: target matplotlib axes.
        calibration_data: Klipper calibration data (freqs, psd_sum, psd_x/y/z, already trimmed).
        shapers: candidate shaper results from Klipper's find_best_shaper.
        klipper_shaper_choice: name of the shaper recommended by Klipper.
        peaks: indices of detected peaks in psd_sum.
        peaks_freqs: frequencies of the detected peaks.
        peaks_threshold: [detection_threshold, warning_threshold] PSD levels.
        fr: estimated main resonance frequency (shown in the title).
        zeta: estimated damping ratio (shown in the title and legend).
        max_freq: x-axis upper bound.
    """
    freqs = calibration_data.freqs
    psd = calibration_data.psd_sum
    px = calibration_data.psd_x
    py = calibration_data.psd_y
    pz = calibration_data.psd_z

    fontP = matplotlib.font_manager.FontProperties()
    fontP.set_size('x-small')

    ax.set_xlabel('Frequency (Hz)')
    ax.set_xlim([0, max_freq])
    ax.set_ylabel('Power spectral density')
    ax.set_ylim([0, psd.max() + psd.max() * 0.05])

    # Raw spectral data: per-axis curves plus their sum (drawn on top)
    ax.plot(freqs, psd, label='X+Y+Z', color='purple', zorder=5)
    ax.plot(freqs, px, label='X', color='red')
    ax.plot(freqs, py, label='Y', color='green')
    ax.plot(freqs, pz, label='Z', color='blue')

    ax.xaxis.set_minor_locator(matplotlib.ticker.MultipleLocator(5))
    ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
    ax.ticklabel_format(axis='y', style='scientific', scilimits=(0, 0))
    ax.grid(which='major', color='grey')
    ax.grid(which='minor', color='lightgrey')

    # Twin axes used only to hold the second legend box (shaper info)
    ax2 = ax.twinx()
    ax2.yaxis.set_visible(False)

    # Draw the shaper curves and add their specific parameters in the legend
    perf_shaper_choice = None
    perf_shaper_vals = None
    perf_shaper_freq = None
    perf_shaper_accel = 0
    for shaper in shapers:
        # Round the max accel to the nearest hundred for display
        shaper_max_accel = round(shaper.max_accel / 100.0) * 100.0
        label = '%s (%.1f Hz, vibr=%.1f%%, sm~=%.2f, accel<=%.f)' % (
            shaper.name.upper(),
            shaper.freq,
            shaper.vibrs * 100.0,
            shaper.smoothing,
            shaper_max_accel,
        )
        ax2.plot(freqs, shaper.vals, label=label, linestyle='dotted')

        # Get the Klipper recommended shaper (usually it's a good low vibration compromise)
        # NOTE(review): klipper_shaper_freq/vals/accel are only bound here; code below
        # assumes klipper_shaper_choice always names one of the shapers in the list
        if shaper.name == klipper_shaper_choice:
            klipper_shaper_freq = shaper.freq
            klipper_shaper_vals = shaper.vals
            klipper_shaper_accel = shaper_max_accel

        # Find the shaper with the highest accel but with vibrs under MAX_VIBRATIONS as it's
        # a good performance compromise when injecting the SCV and damping ratio in the computation
        if perf_shaper_accel < shaper_max_accel and shaper.vibrs * 100 < MAX_VIBRATIONS:
            perf_shaper_choice = shaper.name
            perf_shaper_accel = shaper_max_accel
            perf_shaper_freq = shaper.freq
            perf_shaper_vals = shaper.vals

    # Recommendations are added to the legend: one is Klipper's original suggestion that is usually good for low vibrations
    # and the other one is the custom "performance" recommendation that looks for a suitable shaper that doesn't have excessive
    # vibrations level but have higher accelerations. If both recommendations are the same shaper, or if no suitable "performance"
    # shaper is found, then only a single line as the "best shaper" recommendation is added to the legend
    if (
        perf_shaper_choice is not None
        and perf_shaper_choice != klipper_shaper_choice
        and perf_shaper_accel >= klipper_shaper_accel
    ):
        ax2.plot(
            [],
            [],
            ' ',
            label='Recommended performance shaper: %s @ %.1f Hz' % (perf_shaper_choice.upper(), perf_shaper_freq),
        )
        ax.plot(
            freqs,
            psd * perf_shaper_vals,
            label='With %s applied' % (perf_shaper_choice.upper()),
            color='cyan',
        )
        ax2.plot(
            [],
            [],
            ' ',
            label='Recommended low vibrations shaper: %s @ %.1f Hz'
            % (klipper_shaper_choice.upper(), klipper_shaper_freq),
        )
        ax.plot(
            freqs, psd * klipper_shaper_vals, label='With %s applied' % (klipper_shaper_choice.upper()), color='lime'
        )
    else:
        ax2.plot(
            [],
            [],
            ' ',
            label='Recommended best shaper: %s @ %.1f Hz' % (klipper_shaper_choice.upper(), klipper_shaper_freq),
        )
        ax.plot(
            freqs,
            psd * klipper_shaper_vals,
            label='With %s applied' % (klipper_shaper_choice.upper()),
            color='cyan',
        )

    # And the estimated damping ratio is finally added at the end of the legend
    ax2.plot([], [], ' ', label='Estimated damping ratio (ζ): %.3f' % (zeta))

    # Draw the detected peaks and name them
    # This also draws the detection threshold and warning threshold (aka "effect zone")
    ax.plot(peaks_freqs, psd[peaks], 'x', color='black', markersize=8)
    for idx, peak in enumerate(peaks):
        # Peaks above the warning threshold are emphasized in red/bold
        if psd[peak] > peaks_threshold[1]:
            fontcolor = 'red'
            fontweight = 'bold'
        else:
            fontcolor = 'black'
            fontweight = 'normal'
        ax.annotate(
            f'{idx+1}',
            (freqs[peak], psd[peak]),
            textcoords='offset points',
            xytext=(8, 5),
            ha='left',
            fontsize=13,
            color=fontcolor,
            weight=fontweight,
        )
    ax.axhline(y=peaks_threshold[0], color='black', linestyle='--', linewidth=0.5)
    ax.axhline(y=peaks_threshold[1], color='black', linestyle='--', linewidth=0.5)
    ax.fill_between(freqs, 0, peaks_threshold[0], color='green', alpha=0.15, label='Relax Region')
    ax.fill_between(freqs, peaks_threshold[0], peaks_threshold[1], color='orange', alpha=0.2, label='Warning Region')

    # Add the main resonant frequency and damping ratio of the axis to the graph title
    ax.set_title(
        'Axis Frequency Profile (ω0=%.1fHz, ζ=%.3f)' % (fr, zeta),
        fontsize=14,
        color=KLIPPAIN_COLORS['dark_orange'],
        weight='bold',
    )
    ax.legend(loc='upper left', prop=fontP)
    ax2.legend(loc='upper right', prop=fontP)

    return
|
||||
|
||||
|
||||
# Plot a time-frequency spectrogram to see how the system responds over time during the
# resonance test. This can highlight hidden spots from the standard PSD graph from other harmonics
|
||||
def plot_spectrogram(
    ax: plt.Axes, t: np.ndarray, bins: np.ndarray, pdata: np.ndarray, peaks: np.ndarray, max_freq: float
) -> None:
    """Draw the time-frequency spectrogram of the resonance test on *ax*.

    Args:
        ax: target matplotlib axes.
        t: values plotted along the x-axis — labeled 'Frequency (Hz)' below,
           despite the name (assumed frequency bins — TODO confirm against compute_spectrogram).
        bins: values plotted along the y-axis — labeled 'Time (s)' below.
        pdata: 2D spectrogram power data (transposed before display).
        peaks: peak positions (assumed frequencies in Hz — TODO confirm) to mark
               as vertical lines, or None to skip.
        max_freq: x-axis upper bound.
    """
    ax.set_title('Time-Frequency Spectrogram', fontsize=14, color=KLIPPAIN_COLORS['dark_orange'], weight='bold')

    # We need to normalize the data to get a proper signal on the spectrogram.
    # However, while using "LogNorm" provides too much background noise, using
    # "Normalize" makes only the resonance appear and hides interesting elements.
    # So we need to filter out the lower part of the data (ie. find the proper vmin for LogNorm)
    vmin_value = np.percentile(pdata, SPECTROGRAM_LOW_PERCENTILE_FILTER)

    # Draw the spectrogram using imshow, which is better suited here than pcolormesh since its result is
    # already rasterized and we don't need to keep vector graphics when saving to a final .png file.
    # Using it also allows saving ~150-200MB of RAM during the "fig.savefig" operation.
    cm = 'inferno'
    norm = matplotlib.colors.LogNorm(vmin=vmin_value)
    ax.imshow(
        pdata.T,
        norm=norm,
        cmap=cm,
        aspect='auto',
        extent=[t[0], t[-1], bins[0], bins[-1]],
        origin='lower',
        interpolation='antialiased',
    )

    ax.set_xlim([0.0, max_freq])
    ax.set_ylabel('Time (s)')
    ax.set_xlabel('Frequency (Hz)')

    # Add peaks lines in the spectrogram to get a hint from peaks found in the first graph
    if peaks is not None:
        for idx, peak in enumerate(peaks):
            ax.axvline(peak, color='cyan', linestyle='dotted', linewidth=1)
            # Label is rotated and anchored near the top of the plot, just left of the line
            ax.annotate(
                f'Peak {idx+1}',
                (peak, bins[-1] * 0.9),
                textcoords='data',
                color='cyan',
                rotation=90,
                fontsize=10,
                verticalalignment='top',
                horizontalalignment='right',
            )

    return
|
||||
|
||||
|
||||
######################################################################
|
||||
# Startup and main routines
|
||||
######################################################################
|
||||
|
||||
|
||||
def shaper_calibration(
    lognames: List[str],
    klipperdir: str = '~/klipper',
    max_smoothing: Optional[float] = None,
    scv: float = 5.0,
    max_freq: float = 200.0,
    accel_per_hz: Optional[float] = None,
    st_version: str = 'unknown',
) -> plt.Figure:
    """Build the complete input shaper calibration figure from a resonance test CSV.

    Uses Klipper's own helpers (imported at runtime from *klipperdir*) to compute the
    shaper recommendations, PSD curves and spectrogram, then lays out the frequency
    response plot (top) and the spectrogram (bottom) with a descriptive title block.
    Returns the assembled matplotlib figure.
    """
    global shaper_calibrate
    shaper_calibrate = setup_klipper_import(klipperdir)

    # Parse data from the log files while ignoring CSV in the wrong format
    measurements = [m for m in (parse_log(logname) for logname in lognames) if m is not None]
    if len(measurements) > 1:
        ConsoleOutput.print('Warning: incorrect number of .csv files detected. Only the first one will be used!')

    # Compute shapers, PSD outputs and spectrogram from the first measurement
    klipper_shaper_choice, shapers, calibration_data, fr, zeta, compat = calibrate_shaper(
        measurements[0], max_smoothing, scv, max_freq
    )
    pdata, bins, t = compute_spectrogram(measurements[0])
    del measurements  # free the raw samples as soon as possible

    # Keep only the frequency range of interest in the PSD data
    freqs = calibration_data.freq_bins
    in_range = freqs <= max_freq
    calibration_data.psd_sum = calibration_data.psd_sum[in_range]
    calibration_data.psd_x = calibration_data.psd_x[in_range]
    calibration_data.psd_y = calibration_data.psd_y[in_range]
    calibration_data.psd_z = calibration_data.psd_z[in_range]
    calibration_data.freqs = freqs[in_range]

    # Peak detection: first threshold finds the peaks, second flags the significant ones
    peaks_threshold = [
        PEAKS_DETECTION_THRESHOLD * calibration_data.psd_sum.max(),
        PEAKS_EFFECT_THRESHOLD * calibration_data.psd_sum.max(),
    ]
    num_peaks, peaks, peaks_freqs = detect_peaks(calibration_data.psd_sum, calibration_data.freqs, peaks_threshold[0])

    # Report the detected peaks in the console
    formatted_peak_freqs = ', '.join(f'{f:.1f}' for f in peaks_freqs)
    num_peaks_above_effect_threshold = np.sum(calibration_data.psd_sum[peaks] > peaks_threshold[1])
    ConsoleOutput.print(
        f'\nPeaks detected on the graph: {num_peaks} @ {formatted_peak_freqs} Hz '
        f'({num_peaks_above_effect_threshold} above effect threshold)'
    )

    # Create graph layout: frequency response on top, spectrogram below
    fig, (ax_response, ax_spectrogram) = plt.subplots(
        2,
        1,
        gridspec_kw={
            'height_ratios': [4, 3],
            'bottom': 0.050,
            'top': 0.890,
            'left': 0.085,
            'right': 0.966,
            'hspace': 0.169,
            'wspace': 0.200,
        },
    )
    fig.set_size_inches(8.3, 11.6)

    # Title block: the filename encodes the measurement date and axis
    fig.text(
        0.12,
        0.965,
        'INPUT SHAPER CALIBRATION TOOL',
        ha='left',
        va='bottom',
        fontsize=20,
        color=KLIPPAIN_COLORS['purple'],
        weight='bold',
    )
    try:
        name_parts = (lognames[0].split('/')[-1]).split('_')
        measured_at = datetime.strptime(f'{name_parts[1]} {name_parts[2]}', '%Y%m%d %H%M%S')
        title_line2 = measured_at.strftime('%x %X') + ' -- ' + name_parts[3].upper().split('.')[0] + ' axis'
        title_line5 = f'| Accel per Hz used: {accel_per_hz} mm/s²/Hz' if accel_per_hz is not None else ''
        if compat:
            # Older Klipper versions do not use damping ratio / SCV for the recommendations
            title_line3 = '| Older Klipper version detected, damping ratio'
            title_line4 = '| and SCV are not used for filter recommendations!'
        else:
            title_line3 = f'| Square corner velocity: {scv} mm/s'
            title_line4 = f'| Max allowed smoothing: {max_smoothing}'
    except Exception:
        ConsoleOutput.print(f'Warning: CSV filename look to be different than expected ({lognames[0]})')
        title_line2 = lognames[0].split('/')[-1]
        title_line3 = ''
        title_line4 = ''
        title_line5 = ''
    fig.text(0.12, 0.957, title_line2, ha='left', va='top', fontsize=16, color=KLIPPAIN_COLORS['dark_purple'])
    fig.text(0.58, 0.963, title_line3, ha='left', va='top', fontsize=10, color=KLIPPAIN_COLORS['dark_purple'])
    fig.text(0.58, 0.948, title_line4, ha='left', va='top', fontsize=10, color=KLIPPAIN_COLORS['dark_purple'])
    fig.text(0.58, 0.933, title_line5, ha='left', va='top', fontsize=10, color=KLIPPAIN_COLORS['dark_purple'])

    # Plot the graphs
    plot_freq_response(
        ax_response, calibration_data, shapers, klipper_shaper_choice, peaks, peaks_freqs, peaks_threshold, fr, zeta, max_freq
    )
    plot_spectrogram(ax_spectrogram, t, bins, pdata, peaks_freqs, max_freq)

    # Small Klippain logo in the top left corner of the figure
    ax_logo = fig.add_axes([0.001, 0.8995, 0.1, 0.1], anchor='NW')
    ax_logo.imshow(plt.imread(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'klippain.png')))
    ax_logo.axis('off')

    # Shake&Tune version in the top right corner
    if st_version != 'unknown':
        fig.text(0.995, 0.985, st_version, ha='right', va='bottom', fontsize=8, color=KLIPPAIN_COLORS['purple'])

    return fig
||||
|
||||
|
||||
def main():
    """Command-line entry point: parse the options, build the graph and save the PNG."""
    parser = optparse.OptionParser('%prog [options] <logs>')
    parser.add_option('-o', '--output', type='string', dest='output', default=None, help='filename of output graph')
    parser.add_option('-f', '--max_freq', type='float', default=200.0, help='maximum frequency to graph')
    parser.add_option('-s', '--max_smoothing', type='float', default=None, help='maximum shaper smoothing to allow')
    parser.add_option(
        '--scv', '--square_corner_velocity', type='float', dest='scv', default=5.0, help='square corner velocity'
    )
    parser.add_option('--accel_per_hz', type='float', default=None, help='accel_per_hz used during the measurement')
    parser.add_option(
        '-k', '--klipper_dir', type='string', dest='klipperdir', default='~/klipper', help='main klipper directory'
    )
    options, args = parser.parse_args()

    # Basic CLI validation before doing any heavy work
    if not args:
        parser.error('Incorrect number of arguments')
    if options.output is None:
        parser.error('You must specify an output file.png to use the script (option -o)')
    if options.max_smoothing is not None and options.max_smoothing < 0.05:
        parser.error('Too small max_smoothing specified (must be at least 0.05)')

    fig = shaper_calibration(
        args, options.klipperdir, options.max_smoothing, options.scv, options.max_freq, options.accel_per_hz, 'unknown'
    )
    fig.savefig(options.output, dpi=150)


if __name__ == '__main__':
    main()
|
||||
217
shaketune/graph_creators/static_graph_creator.py
Normal file
217
shaketune/graph_creators/static_graph_creator.py
Normal file
@@ -0,0 +1,217 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import optparse
|
||||
import os
|
||||
from datetime import datetime
|
||||
from typing import List, Optional
|
||||
|
||||
import matplotlib
|
||||
import matplotlib.font_manager
|
||||
import matplotlib.pyplot as plt
|
||||
import matplotlib.ticker
|
||||
import numpy as np
|
||||
|
||||
matplotlib.use('Agg')
|
||||
|
||||
from ..helpers.common_func import compute_spectrogram, parse_log
|
||||
from ..helpers.console_output import ConsoleOutput
|
||||
from ..shaketune_config import ShakeTuneConfig
|
||||
from .graph_creator import GraphCreator
|
||||
|
||||
PEAKS_DETECTION_THRESHOLD = 0.05
|
||||
PEAKS_EFFECT_THRESHOLD = 0.12
|
||||
SPECTROGRAM_LOW_PERCENTILE_FILTER = 5
|
||||
MAX_VIBRATIONS = 5.0
|
||||
|
||||
KLIPPAIN_COLORS = {
|
||||
'purple': '#70088C',
|
||||
'orange': '#FF8D32',
|
||||
'dark_purple': '#150140',
|
||||
'dark_orange': '#F24130',
|
||||
'red_pink': '#F2055C',
|
||||
}
|
||||
|
||||
|
||||
class StaticGraphCreator(GraphCreator):
    """Creates the static frequency graph from a maintained-frequency measurement."""

    def __init__(self, config: ShakeTuneConfig):
        super().__init__(config, 'static frequency')
        # Test parameters, filled later by configure()
        self._freq: Optional[float] = None
        self._duration: Optional[float] = None
        self._accel_per_hz: Optional[float] = None

    def configure(self, freq: float, duration: float, accel_per_hz: Optional[float] = None) -> None:
        """Record the excitation parameters used during the measurement."""
        self._freq = freq
        self._duration = duration
        self._accel_per_hz = accel_per_hz

    def create_graph(self) -> None:
        """Collect the measurement CSV and render + save the static frequency figure."""
        if not (self._freq and self._duration and self._accel_per_hz):
            raise ValueError('freq, duration and accel_per_hz must be set to create the static frequency graph!')

        lognames = self._move_and_prepare_files(
            glob_pattern='shaketune-staticfreq_*.csv',
            min_files_required=1,
            custom_name_func=lambda f: f.stem.split('_')[1].upper(),
        )
        fig = static_frequency_tool(
            lognames=[str(path) for path in lognames],
            klipperdir=str(self._config.klipper_folder),
            freq=self._freq,
            duration=self._duration,
            max_freq=200.0,
            accel_per_hz=self._accel_per_hz,
            st_version=self._version,
        )
        self._save_figure_and_cleanup(fig, lognames, lognames[0].stem.split('_')[-1])

    def clean_old_files(self, keep_results: int = 3) -> None:
        """Keep only the newest *keep_results* graphs (and their CSVs) in the output folder."""
        graphs = sorted(self._folder.glob('*.png'), key=lambda f: f.stat().st_mtime, reverse=True)
        # An empty slice when there is nothing beyond keep_results makes the early
        # return of the original unnecessary
        for stale_graph in graphs[keep_results:]:
            stale_graph.with_suffix('.csv').unlink(missing_ok=True)
            stale_graph.unlink()
|
||||
|
||||
|
||||
######################################################################
|
||||
# Graphing
|
||||
######################################################################
|
||||
|
||||
|
||||
def plot_spectrogram(ax: plt.Axes, t: np.ndarray, bins: np.ndarray, pdata: np.ndarray, max_freq: float) -> None:
    """Draw the time/frequency spectrogram of the measurement on *ax*.

    The color scale uses a LogNorm whose floor is set by a low percentile of the
    data, so the background noise is filtered out without hiding the resonances.
    """
    ax.set_title('Time-Frequency Spectrogram', fontsize=14, color=KLIPPAIN_COLORS['dark_orange'], weight='bold')

    # Percentile-based floor for the log color scale
    noise_floor = np.percentile(pdata, SPECTROGRAM_LOW_PERCENTILE_FILTER)

    # imshow is preferred over pcolormesh here: the result is already rasterized,
    # which saves a lot of RAM when saving the final .png file
    ax.imshow(
        pdata.T,
        norm=matplotlib.colors.LogNorm(vmin=noise_floor),
        cmap='inferno',
        aspect='auto',
        extent=[t[0], t[-1], bins[0], bins[-1]],
        origin='lower',
        interpolation='antialiased',
    )

    ax.set_xlim([0.0, max_freq])
    ax.set_ylabel('Time (s)')
    ax.set_xlabel('Frequency (Hz)')
|
||||
|
||||
|
||||
def plot_energy_accumulation(ax: plt.Axes, t: np.ndarray, bins: np.ndarray, pdata: np.ndarray) -> None:
    """Plot the vibration energy integrated over all frequencies, against time (vertical axis)."""
    # Integrate the energy over the frequency bins for each time step
    cumulative_energy = np.trapz(pdata, t, axis=0)
    ax.plot(cumulative_energy, bins, color=KLIPPAIN_COLORS['orange'])

    ax.set_title('Vibrations', fontsize=14, color=KLIPPAIN_COLORS['dark_orange'], weight='bold')
    ax.set_xlabel('Cumulative Energy')
    ax.set_ylabel('Time (s)')
    ax.set_ylim([bins[0], bins[-1]])

    # Minor grid + scientific notation on the energy axis for readability
    ax.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
    ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
    ax.ticklabel_format(axis='x', style='scientific', scilimits=(0, 0))
    ax.grid(which='major', color='grey')
    ax.grid(which='minor', color='lightgrey')
|
||||
|
||||
|
||||
######################################################################
|
||||
# Startup and main routines
|
||||
######################################################################
|
||||
|
||||
|
||||
def static_frequency_tool(
    lognames: List[str],
    klipperdir: str = '~/klipper',
    freq: Optional[float] = None,
    duration: Optional[float] = None,
    max_freq: float = 500.0,
    accel_per_hz: Optional[float] = None,
    st_version: str = 'unknown',
) -> plt.Figure:
    """Render the static frequency figure: spectrogram (left) + cumulative energy (right).

    Raises ValueError when *freq* or *duration* is missing. Returns the matplotlib figure.
    """
    if freq is None or duration is None:
        raise ValueError('Error: missing frequency or duration parameters!')

    # Parse the CSV files, skipping any that are not in the expected format
    measurements = [m for m in (parse_log(logname) for logname in lognames) if m is not None]
    if len(measurements) > 1:
        ConsoleOutput.print('Warning: incorrect number of .csv files detected. Only the first one will be used!')

    pdata, bins, t = compute_spectrogram(measurements[0])
    del measurements  # free the raw samples as soon as possible

    # Create graph layout: spectrogram on the left, energy accumulation on the right
    fig, (ax_spectrogram, ax_energy) = plt.subplots(
        1,
        2,
        gridspec_kw={
            'width_ratios': [5, 3],
            'bottom': 0.080,
            'top': 0.840,
            'left': 0.050,
            'right': 0.985,
            'hspace': 0.166,
            'wspace': 0.138,
        },
    )
    fig.set_size_inches(15, 7)

    # Title block: the filename encodes the measurement date and axis
    fig.text(
        0.060,
        0.947,
        'STATIC FREQUENCY HELPER TOOL',
        ha='left',
        va='bottom',
        fontsize=20,
        color=KLIPPAIN_COLORS['purple'],
        weight='bold',
    )
    try:
        name_parts = (lognames[0].split('/')[-1]).split('_')
        measured_at = datetime.strptime(f'{name_parts[1]} {name_parts[2]}', '%Y%m%d %H%M%S')
        title_line2 = measured_at.strftime('%x %X') + ' -- ' + name_parts[3].upper().split('.')[0] + ' axis'
        title_line3 = f'| Maintained frequency: {freq}Hz for {duration}s'
        title_line4 = f'| Accel per Hz used: {accel_per_hz} mm/s²/Hz' if accel_per_hz is not None else ''
    except Exception:
        ConsoleOutput.print(f'Warning: CSV filename look to be different than expected ({lognames[0]})')
        title_line2 = lognames[0].split('/')[-1]
        title_line3 = ''
        title_line4 = ''
    fig.text(0.060, 0.939, title_line2, ha='left', va='top', fontsize=16, color=KLIPPAIN_COLORS['dark_purple'])
    fig.text(0.55, 0.985, title_line3, ha='left', va='top', fontsize=14, color=KLIPPAIN_COLORS['dark_purple'])
    fig.text(0.55, 0.950, title_line4, ha='left', va='top', fontsize=11, color=KLIPPAIN_COLORS['dark_purple'])

    plot_spectrogram(ax_spectrogram, t, bins, pdata, max_freq)
    plot_energy_accumulation(ax_energy, t, bins, pdata)

    # Small Klippain logo in the top left corner of the figure
    ax_logo = fig.add_axes([0.001, 0.894, 0.105, 0.105], anchor='NW')
    ax_logo.imshow(plt.imread(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'klippain.png')))
    ax_logo.axis('off')

    # Shake&Tune version in the top right corner
    if st_version != 'unknown':
        fig.text(0.995, 0.980, st_version, ha='right', va='bottom', fontsize=8, color=KLIPPAIN_COLORS['purple'])

    return fig
|
||||
|
||||
|
||||
def main():
    """Command-line entry point: parse the options, build the graph and save the PNG."""
    parser = optparse.OptionParser('%prog [options] <logs>')
    parser.add_option('-o', '--output', type='string', dest='output', default=None, help='filename of output graph')
    parser.add_option('-f', '--freq', type='float', default=None, help='frequency maintained during the measurement')
    parser.add_option('-d', '--duration', type='float', default=None, help='duration of the measurement')
    parser.add_option('--max_freq', type='float', default=500.0, help='maximum frequency to graph')
    parser.add_option('--accel_per_hz', type='float', default=None, help='accel_per_hz used during the measurement')
    parser.add_option(
        '-k', '--klipper_dir', type='string', dest='klipperdir', default='~/klipper', help='main klipper directory'
    )
    options, args = parser.parse_args()

    # Basic CLI validation before doing any heavy work
    if not args:
        parser.error('Incorrect number of arguments')
    if options.output is None:
        parser.error('You must specify an output file.png to use the script (option -o)')

    fig = static_frequency_tool(
        args, options.klipperdir, options.freq, options.duration, options.max_freq, options.accel_per_hz, 'unknown'
    )
    fig.savefig(options.output, dpi=150)


if __name__ == '__main__':
    main()
|
||||
934
shaketune/graph_creators/vibrations_graph_creator.py
Normal file
934
shaketune/graph_creators/vibrations_graph_creator.py
Normal file
@@ -0,0 +1,934 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
##################################################
|
||||
#### DIRECTIONAL VIBRATIONS PLOTTING SCRIPT ######
|
||||
##################################################
|
||||
# Written by Frix_x#0161 #
|
||||
|
||||
import math
|
||||
import optparse
|
||||
import os
|
||||
import re
|
||||
import tarfile
|
||||
from collections import defaultdict
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import List, Optional, Tuple
|
||||
|
||||
import matplotlib
|
||||
import matplotlib.font_manager
|
||||
import matplotlib.gridspec
|
||||
import matplotlib.pyplot as plt
|
||||
import matplotlib.ticker
|
||||
import numpy as np
|
||||
|
||||
matplotlib.use('Agg')
|
||||
|
||||
from ..helpers.common_func import (
|
||||
compute_mechanical_parameters,
|
||||
detect_peaks,
|
||||
identify_low_energy_zones,
|
||||
parse_log,
|
||||
setup_klipper_import,
|
||||
)
|
||||
from ..helpers.console_output import ConsoleOutput
|
||||
from ..helpers.motors_config_parser import MotorsConfigParser
|
||||
from ..shaketune_config import ShakeTuneConfig
|
||||
from .graph_creator import GraphCreator
|
||||
|
||||
PEAKS_DETECTION_THRESHOLD = 0.05
|
||||
PEAKS_RELATIVE_HEIGHT_THRESHOLD = 0.04
|
||||
CURVE_SIMILARITY_SIGMOID_K = 0.5
|
||||
SPEEDS_VALLEY_DETECTION_THRESHOLD = 0.7 # Lower is more sensitive
|
||||
SPEEDS_AROUND_PEAK_DELETION = 3 # to delete +-3mm/s around a peak
|
||||
ANGLES_VALLEY_DETECTION_THRESHOLD = 1.1 # Lower is more sensitive
|
||||
|
||||
KLIPPAIN_COLORS = {
|
||||
'purple': '#70088C',
|
||||
'orange': '#FF8D32',
|
||||
'dark_purple': '#150140',
|
||||
'dark_orange': '#F24130',
|
||||
'red_pink': '#F2055C',
|
||||
}
|
||||
|
||||
|
||||
class VibrationsGraphCreator(GraphCreator):
    """Creates the directional vibrations profile graph from a batch of measurements."""

    def __init__(self, config: ShakeTuneConfig):
        super().__init__(config, 'vibrations profile')
        # Machine parameters, filled later by configure()
        self._kinematics: Optional[str] = None
        self._accel: Optional[float] = None
        self._motors: Optional[List[MotorsConfigParser]] = None

    def configure(self, kinematics: str, accel: float, motor_config_parser: MotorsConfigParser) -> None:
        """Record the machine parameters needed to build the graph."""
        self._kinematics = kinematics
        self._accel = accel
        self._motors = motor_config_parser.get_motors()

    def _archive_files(self, lognames: List[Path]) -> None:
        """Bundle all measurement CSVs into a single .tar.gz and delete the originals."""
        archive_path = self._folder / f'{self._type}_{self._graph_date}.tar.gz'
        with tarfile.open(archive_path, 'w:gz') as tar:
            for measurement_csv in lognames:
                tar.add(measurement_csv, arcname=measurement_csv.name, recursive=False)
                measurement_csv.unlink()

    def create_graph(self) -> None:
        """Collect the measurement CSVs and render + save the vibrations profile figure."""
        if not self._accel or not self._kinematics:
            raise ValueError('accel and kinematics must be set to create the vibrations profile graph!')

        lognames = self._move_and_prepare_files(
            glob_pattern='shaketune-vib_*.csv',
            min_files_required=None,
            custom_name_func=lambda f: re.search(r'shaketune-vib_(.*?)_\d{8}_\d{6}', f.name).group(1),
        )
        fig = vibrations_profile(
            lognames=[str(path) for path in lognames],
            klipperdir=str(self._config.klipper_folder),
            kinematics=self._kinematics,
            accel=self._accel,
            st_version=self._version,
            motors=self._motors,
        )
        self._save_figure_and_cleanup(fig, lognames)

    def clean_old_files(self, keep_results: int = 3) -> None:
        """Keep only the newest *keep_results* graphs (and their archives) in the output folder."""
        graphs = sorted(self._folder.glob('*.png'), key=lambda f: f.stat().st_mtime, reverse=True)
        # An empty slice when there is nothing beyond keep_results makes the early
        # return of the original unnecessary
        for stale_graph in graphs[keep_results:]:
            stale_graph.unlink()
            stale_graph.with_suffix('.tar.gz').unlink(missing_ok=True)
|
||||
|
||||
|
||||
######################################################################
|
||||
# Computation
|
||||
######################################################################
|
||||
|
||||
|
||||
# Delegate the PSD computation to the official Klipper input shaper helper
def calc_freq_response(data) -> Tuple[np.ndarray, np.ndarray]:
    """Process raw accelerometer samples with Klipper's ShaperCalibrate helper.

    Relies on the module-level `shaper_calibrate` import set up at runtime.
    """
    calibrator = shaper_calibrate.ShaperCalibrate(printer=None)
    return calibrator.process_accelerometer_data(data)
|
||||
|
||||
|
||||
# Build per-angle motor frequency profiles from the measured PSDs for the machine main
# angles, then derive a global motor profile as a weighted average of those profiles
# (each angle weighted by its own measured vibration energy and profile area)
def compute_motor_profiles(
    freqs: np.ndarray,
    psds: dict,
    all_angles_energy: dict,
    measured_angles: Optional[List[int]] = None,
    energy_amplification_factor: int = 2,
) -> Tuple[dict, np.ndarray]:
    """Return ({angle: smoothed PSD profile}, global weighted-average profile).

    Args:
        freqs: frequency axis shared by all PSD curves.
        psds: {angle: {speed: psd_curve}} measured power spectral densities.
        all_angles_energy: {angle: total vibration energy} used for weighting.
        measured_angles: angles to process (defaults to [0, 90]).
        energy_amplification_factor: exponent applied to both weighting factors.
    """
    if measured_angles is None:
        measured_angles = [0, 90]

    smoothing_kernel = np.ones(20) / 20
    motor_profiles = {}
    weighted_profile_sum = np.zeros_like(freqs)
    weight_total = 0

    for angle in measured_angles:
        # Average the PSD curves over all measured speeds, then smooth the result
        speed_curves = np.array([psds[angle][speed] for speed in psds[angle]])
        motor_profiles[angle] = np.convolve(np.sum(speed_curves, axis=0) / len(psds[angle]), smoothing_kernel, mode='same')

        # Weight = (machine vibration energy at this angle)^k * (area under this profile)^k
        angle_weight = (all_angles_energy[angle] ** energy_amplification_factor) * (
            np.trapz(motor_profiles[angle], freqs) ** energy_amplification_factor
        )
        weighted_profile_sum += motor_profiles[angle] * angle_weight
        weight_total += angle_weight

    # Global profile is the weighted average of all the per-angle profiles
    global_motor_profile = weighted_profile_sum / weight_total if weight_total != 0 else weighted_profile_sum

    return motor_profiles, global_motor_profile
|
||||
|
||||
|
||||
# There is no non-linear mixing in the stepper "steps" vibrations, so instead of
# measuring every speed at every angle, this only uses the main motor axes
# measurements (X/Y for Cartesian, A/B for CoreXY) and projects each point onto the
# [0, 360] degree range with trigonometry, summing the per-axis contributions.
def compute_dir_speed_spectrogram(
    measured_speeds: List[float], data: dict, kinematics: str = 'cartesian', measured_angles: Optional[List[int]] = None
) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
    """Return (angles, speeds, vibrations) of the projected directional spectrogram.

    Args:
        measured_speeds: speeds at which the axes were measured.
        data: {angle: {speed: vibration level}} per-axis measurements.
        kinematics: 'cartesian', 'corexz' or 'corexy' projection model.
        measured_angles: the two measured axis angles (defaults to [0, 90]).
    """
    if measured_angles is None:
        measured_angles = [0, 90]

    spectrum_angles = np.linspace(0, 360, 720)  # one sample every 0.5 degree
    spectrum_speeds = np.linspace(min(measured_speeds), max(measured_speeds), len(measured_speeds) * 6)
    spectrum_vibrations = np.zeros((len(spectrum_angles), len(spectrum_speeds)))

    def interpolate_vibrations(angle_data: dict, speed: float, speeds: List[float]) -> float:
        # Linear interpolation (or extrapolation at the edges) between measured speeds
        pos = np.clip(np.searchsorted(speeds, speed, side='left'), 1, len(speeds) - 1)
        s_low, s_high = speeds[pos - 1], speeds[pos]
        v_low, v_high = angle_data.get(s_low, 0), angle_data.get(s_high, 0)
        return v_low + (speed - s_low) * (v_high - v_low) / (s_high - s_low)

    # Precompute the trigonometric values and the constant used inside the loops
    angle_radians = np.deg2rad(spectrum_angles)
    cos_vals = np.cos(angle_radians)
    sin_vals = np.sin(angle_radians)
    inv_sqrt2 = 1 / math.sqrt(2)

    for angle_idx, (cos_val, sin_val) in enumerate(zip(cos_vals, sin_vals)):
        for speed_idx, target_speed in enumerate(spectrum_speeds):
            # Project the target direction/speed onto the two measured axes
            if kinematics in ('cartesian', 'corexz'):
                axis_speed_1 = abs(target_speed * cos_val)
                axis_speed_2 = abs(target_speed * sin_val)
            elif kinematics == 'corexy':
                axis_speed_1 = abs(target_speed * (cos_val + sin_val) * inv_sqrt2)
                axis_speed_2 = abs(target_speed * (cos_val - sin_val) * inv_sqrt2)

            spectrum_vibrations[angle_idx, speed_idx] = interpolate_vibrations(
                data[measured_angles[0]], axis_speed_1, measured_speeds
            ) + interpolate_vibrations(data[measured_angles[1]], axis_speed_2, measured_speeds)

    return spectrum_angles, spectrum_speeds, spectrum_vibrations
|
||||
|
||||
|
||||
def compute_angle_powers(spectrogram_data: np.ndarray) -> np.ndarray:
    """Integrate the spectrogram over speeds to get a smoothed per-angle energy profile.

    The array ends are wrapped around before convolving so the smoothed profile is
    continuous at 0/360 degrees on the polar plot; the wrapped samples are stripped
    before returning.
    """
    raw_powers = np.trapz(spectrogram_data, axis=1)

    wrapped_powers = np.concatenate([raw_powers[-9:], raw_powers, raw_powers[:9]])
    smoothed_powers = np.convolve(wrapped_powers, np.ones(15) / 15, mode='same')

    return smoothed_powers[9:-9]
|
||||
|
||||
|
||||
def compute_speed_powers(spectrogram_data: np.ndarray, smoothing_window: int = 15) -> np.ndarray:
    """Return smoothed per-speed curves stacked as [min, max, rescaled variance, metric].

    The variance is rescaled into the same range as the max values so both can share
    one plot; the vibration metric (max * variance) highlights speeds that are both
    low-energy and consistent across angles.
    """
    mins = np.amin(spectrogram_data, axis=0)
    maxs = np.amax(spectrogram_data, axis=0)
    variances = np.var(spectrogram_data, axis=0)

    # Rescale the variance to the max range to plot them on the same graph
    variances = variances / variances.max() * maxs.max()

    # Best speeds have at the same time a low global energy level and a level that
    # stays consistent at every angle
    vibration_metric = maxs * variances

    smoothing_kernel = np.ones(smoothing_window) / smoothing_window
    half_window = int(smoothing_window / 2)

    def _smooth(curve: np.ndarray) -> np.ndarray:
        # Edge-pad first so the convolution does not ramp down at the borders
        return np.convolve(np.pad(curve, (half_window,), mode='edge'), smoothing_kernel, mode='valid')

    return np.array([_smooth(curve) for curve in (mins, maxs, variances, vibration_metric)])
|
||||
|
||||
|
||||
# Filter and split the good_speed ranges: remove a small zone around every detected
# peak that falls inside a range, because even a low peak there is probably a motor
# resonance crossing that must not be advertised as a "good" speed
def filter_and_split_ranges(
    all_speeds: np.ndarray, good_speeds: List[Tuple[int, int, float]], peak_speed_indices: dict, deletion_range: int
) -> List[Tuple[int, int, float]]:
    """Split *good_speeds* ranges around detected peaks and merge the leftovers.

    Args:
        all_speeds: the full speed axis (indexed by the range bounds below).
        good_speeds: list of (start_index, end_index, energy) candidate ranges.
        peak_speed_indices: mapping of peak speed -> index in *all_speeds*.
        deletion_range: number of indices to drop on each side of a peak.

    Returns:
        The filtered ranges sorted by start index, with overlapping ranges merged
        (a merged range keeps the lowest energy of its parts). An empty input, or
        ranges entirely consumed by peak deletion, yields an empty list.
    """
    # Process each range: keep it untouched when no peak intersects it, otherwise
    # split it around every intersecting peak
    filtered_good_speeds = []
    for start, end, energy in good_speeds:
        start_speed, end_speed = all_speeds[start], all_speeds[end]
        # Identify peaks that intersect with the current speed range
        intersecting_peaks_indices = [
            idx for speed, idx in peak_speed_indices.items() if start_speed <= speed <= end_speed
        ]

        if not intersecting_peaks_indices:
            filtered_good_speeds.append((start, end, energy))
        else:
            intersecting_peaks_indices.sort()
            current_start = start
            for peak_index in intersecting_peaks_indices:
                before_peak_end = max(current_start, peak_index - deletion_range)
                if current_start < before_peak_end:
                    filtered_good_speeds.append((current_start, before_peak_end, energy))
                current_start = peak_index + deletion_range + 1
            if current_start < end:
                filtered_good_speeds.append((current_start, end, energy))

    # Guard against an empty result: the original code indexed sorted_ranges[0]
    # unconditionally and raised IndexError when no range survived (or none was given)
    if not filtered_good_speeds:
        return []

    # Sort by start point once, then merge overlapping ranges
    sorted_ranges = sorted(filtered_good_speeds, key=lambda x: x[0])
    merged_ranges = [sorted_ranges[0]]

    for current in sorted_ranges[1:]:
        last_merged_start, last_merged_end, last_merged_energy = merged_ranges[-1]
        if current[0] <= last_merged_end:
            new_end = max(last_merged_end, current[1])
            new_energy = min(last_merged_energy, current[2])
            merged_ranges[-1] = (last_merged_start, new_end, new_energy)
        else:
            merged_ranges.append(current)

    return merged_ranges
|
||||
|
||||
|
||||
# Compute a symmetry score reflecting the apparent symmetry of the spectrogram
# between the measured axes, based both on signal shape and energy consistency
def compute_symmetry_analysis(
    all_angles: np.ndarray, spectrogram_data: np.ndarray, measured_angles: Optional[List[int]] = None
) -> float:
    """Return a 0-100 symmetry score of *spectrogram_data* around the measured axes.

    Args:
        all_angles: angle axis of the spectrogram (only its length is used).
        spectrogram_data: 2D array of vibration energy, one row per angle.
        measured_angles: the two measured axis angles (defaults to [0, 90]).
    """
    if measured_angles is None:
        measured_angles = [0, 90]

    total_spectrogram_angles = len(all_angles)
    half_spectrogram_angles = total_spectrogram_angles // 2

    # Extend the spectrogram by prepending its second half so the segment slices
    # below can never run out of bounds, whatever the midpoint angle is
    extended_spectrogram = np.concatenate((spectrogram_data[-half_spectrogram_angles:], spectrogram_data), axis=0)

    # Locate the split point between the two measured axes in the extended array
    midpoint_angle = np.mean(measured_angles)
    split_index = int(midpoint_angle * (total_spectrogram_angles / 360) + half_spectrogram_angles)
    half_segment_length = half_spectrogram_angles // 2

    # Slice out the two segments of the spectrogram and flatten them for comparison
    segment_1_flattened = extended_spectrogram[split_index - half_segment_length : split_index].flatten()
    segment_2_flattened = extended_spectrogram[split_index : split_index + half_segment_length].flatten()

    # Correlation coefficient between the two sides, biased upward so that even
    # moderate correlations map to a readable percentage
    correlation = np.corrcoef(segment_1_flattened, segment_2_flattened)[0, 1]
    percentage_correlation_biased = (100 * np.power(correlation, 0.75)) + 10

    # Bug fix: np.clip takes (value, low, high) - the original call np.clip(0, 100, x)
    # clipped the constant 0 against bounds (100, x) instead of bounding the score
    return np.clip(percentage_correlation_biased, 0, 100)
|
||||
|
||||
|
||||
######################################################################
|
||||
# Graphing
|
||||
######################################################################
|
||||
|
||||
|
||||
def plot_angle_profile_polar(
    ax: plt.Axes,
    angles: np.ndarray,
    angles_powers: np.ndarray,
    low_energy_zones: List[Tuple[int, int, float]],
    symmetry_factor: float,
) -> None:
    """Draw the angular energy profile on a polar axis and highlight the low energy zones."""
    theta = np.deg2rad(angles)

    ax.set_title('Polar angle energy profile', fontsize=14, color=KLIPPAIN_COLORS['dark_orange'], weight='bold')
    ax.set_theta_zero_location('E')
    ax.set_theta_direction(1)

    # Main energy curve with a translucent fill under it
    ax.plot(theta, angles_powers, color=KLIPPAIN_COLORS['purple'], zorder=5)
    ax.fill(theta, angles_powers, color=KLIPPAIN_COLORS['purple'], alpha=0.3)
    ax.set_xlim([0, np.deg2rad(360)])
    ymax = angles_powers.max() * 1.05
    ax.set_ylim([0, ymax])
    ax.set_thetagrids([t * 15 for t in range(360 // 15)])

    # Print the symmetry score at the center of the polar plot
    ax.text(
        0,
        0,
        f'Symmetry: {symmetry_factor:.1f}%',
        ha='center',
        va='center',
        color=KLIPPAIN_COLORS['red_pink'],
        fontsize=12,
        fontweight='bold',
        zorder=6,
    )

    # Mark each low energy zone with dotted boundary lines and a green shaded area
    for start, end, _ in low_energy_zones:
        for boundary in (start, end):
            ax.axvline(
                theta[boundary],
                angles_powers[boundary] / ymax,
                color=KLIPPAIN_COLORS['red_pink'],
                linestyle='dotted',
                linewidth=1.5,
            )
        ax.fill_between(
            theta[start:end], angles_powers[start:end], angles_powers.max() * 1.05, color='green', alpha=0.2
        )

    ax.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
    ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
    ax.grid(which='major', color='grey')
    ax.grid(which='minor', color='lightgrey')

    # Polar plot doesn't follow the gridspec margin, so we adjust it manually here
    pos = ax.get_position()
    ax.set_position([pos.x0 - 0.01, pos.y0 - 0.01, pos.width, pos.height])
|
||||
|
||||
|
||||
def plot_global_speed_profile(
    ax: plt.Axes,
    all_speeds: np.ndarray,
    sp_min_energy: np.ndarray,
    sp_max_energy: np.ndarray,
    sp_variance_energy: np.ndarray,
    vibration_metric: np.ndarray,
    num_peaks: int,
    peaks: np.ndarray,
    low_energy_zones: List[Tuple[int, int, float]],
) -> None:
    """Plot the min/max/variance energy curves and the vibration metric over speed,
    marking the detected peaks and shading the good (low energy) speed ranges."""
    ax.set_title('Global speed energy profile', fontsize=14, color=KLIPPAIN_COLORS['dark_orange'], weight='bold')
    ax.set_xlabel('Speed (mm/s)')
    ax.set_ylabel('Energy')

    # Hidden secondary Y axis used to overlay the vibration metric curve
    metric_ax = ax.twinx()
    metric_ax.yaxis.set_visible(False)

    ax.plot(all_speeds, sp_min_energy, label='Minimum', color=KLIPPAIN_COLORS['dark_purple'], zorder=5)
    ax.plot(all_speeds, sp_max_energy, label='Maximum', color=KLIPPAIN_COLORS['purple'], zorder=5)
    ax.plot(all_speeds, sp_variance_energy, label='Variance', color=KLIPPAIN_COLORS['orange'], zorder=5, linestyle='--')
    metric_ax.plot(
        all_speeds,
        vibration_metric,
        label=f'Vibration metric ({num_peaks} bad peaks)',
        color=KLIPPAIN_COLORS['red_pink'],
        zorder=5,
    )

    ax.set_xlim([all_speeds.min(), all_speeds.max()])
    ax.set_ylim([0, sp_max_energy.max() * 1.15])

    # Slight negative floor so the green zones visually detach from the X axis
    metric_floor = -(vibration_metric.max() * 0.025)
    metric_ceiling = vibration_metric.max() * 1.07
    metric_ax.set_ylim([metric_floor, metric_ceiling])

    # Mark and number the resonance peaks found on the vibration metric
    if peaks is not None and len(peaks) > 0:
        metric_ax.plot(all_speeds[peaks], vibration_metric[peaks], 'x', color='black', markersize=8, zorder=10)
        for idx, peak in enumerate(peaks):
            metric_ax.annotate(
                f'{idx+1}',
                (all_speeds[peak], vibration_metric[peak]),
                textcoords='offset points',
                xytext=(5, 5),
                fontweight='bold',
                ha='left',
                fontsize=13,
                color=KLIPPAIN_COLORS['red_pink'],
                zorder=10,
            )

    # Shade each good speed range in green and expose its bounds in the legend
    for idx, (start, end, _) in enumerate(low_energy_zones):
        metric_ax.fill_between(
            all_speeds[start:end],
            metric_floor,
            vibration_metric[start:end],
            color='green',
            alpha=0.2,
            label=f'Zone {idx+1}: {all_speeds[start]:.1f} to {all_speeds[end]:.1f} mm/s',
        )

    ax.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
    ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
    ax.grid(which='major', color='grey')
    ax.grid(which='minor', color='lightgrey')

    legend_font = matplotlib.font_manager.FontProperties()
    legend_font.set_size('small')
    ax.legend(loc='upper left', prop=legend_font)
    metric_ax.legend(loc='upper right', prop=legend_font)
|
||||
|
||||
|
||||
def plot_angular_speed_profiles(
    ax: plt.Axes, speeds: np.ndarray, angles: np.ndarray, spectrogram_data: np.ndarray, kinematics: str = 'cartesian'
) -> None:
    """Plot the per-angle speed energy profiles (0/45/90/135 deg rows of the spectrogram).

    Args:
        ax: target matplotlib axes.
        speeds: speed values (mm/s) of each spectrogram column.
        angles: angle values (deg) of each spectrogram row (sorted, as required by searchsorted).
        spectrogram_data: 2D energy array indexed [angle_row, speed_column].
        kinematics: used only to label the 45/135 deg curves as A/B on CoreXY machines.
    """
    ax.set_title('Angular speed energy profiles', fontsize=14, color=KLIPPAIN_COLORS['dark_orange'], weight='bold')
    ax.set_xlabel('Speed (mm/s)')
    ax.set_ylabel('Energy')

    # Define mappings for labels and colors to simplify plotting commands
    angle_settings = {
        0: ('X (0 deg)', 'purple', 10),
        90: ('Y (90 deg)', 'dark_purple', 5),
        45: ('A (45 deg)' if kinematics == 'corexy' else '45 deg', 'orange', 10),
        135: ('B (135 deg)' if kinematics == 'corexy' else '135 deg', 'dark_orange', 5),
    }

    # Plot each angle using settings from the dictionary, remembering the rows actually used
    plotted_rows = []
    for angle, (label, color, zorder) in angle_settings.items():
        idx = np.searchsorted(angles, angle, side='left')
        plotted_rows.append(idx)
        ax.plot(speeds, spectrogram_data[idx], label=label, color=KLIPPAIN_COLORS[color], zorder=zorder)

    ax.set_xlim([speeds.min(), speeds.max()])
    # Bug fix: scale the Y axis on the rows that were plotted above (searchsorted indices).
    # The previous code indexed spectrogram_data directly with the raw angle values
    # (0/45/90/135), which is only correct when the angle axis happens to start at 0
    # with a 1 degree resolution.
    max_value = max(spectrogram_data[row].max() for row in plotted_rows)
    ax.set_ylim([0, max_value * 1.1])

    ax.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
    ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
    ax.grid(which='major', color='grey')
    ax.grid(which='minor', color='lightgrey')

    fontP = matplotlib.font_manager.FontProperties()
    fontP.set_size('small')
    ax.legend(loc='upper right', prop=fontP)
|
||||
|
||||
|
||||
def plot_motor_profiles(
    ax: plt.Axes,
    freqs: np.ndarray,
    main_angles: List[int],
    motor_profiles: dict,
    global_motor_profile: np.ndarray,
    max_freq: float,
) -> None:
    """Plot the per-angle and combined motor frequency profiles and annotate the
    motor resonance peak (frequency and damping ratio when computable)."""
    ax.set_title('Motor frequency profile', fontsize=14, color=KLIPPAIN_COLORS['dark_orange'], weight='bold')
    ax.set_ylabel('Energy')
    ax.set_xlabel('Frequency (Hz)')

    # Hidden twin axis, only used to host text-only legend entries on the right
    legend_ax = ax.twinx()
    legend_ax.yaxis.set_visible(False)

    # Global weighted average motor profile
    ax.plot(freqs, global_motor_profile, label='Combined', color=KLIPPAIN_COLORS['purple'], zorder=5)

    # Mapping of angles to axis names
    axis_names = {0: 'X', 90: 'Y', 45: 'A', 135: 'B'}

    # And then plot the motor profiles at each measured angle
    for angle in main_angles:
        axis_name = axis_names.get(angle)
        label = f'{axis_name} ({angle} deg)' if axis_name is not None else f'{angle} deg'
        ax.plot(freqs, motor_profiles[angle], linestyle='--', label=label, zorder=2)

    # Y scale covers the combined profile and every per-angle profile
    max_value = max([global_motor_profile.max()] + [motor_profiles[angle].max() for angle in main_angles])
    ax.set_xlim([0, max_freq])
    ax.set_ylim([0, max_value * 1.1])
    ax.ticklabel_format(axis='y', style='scientific', scilimits=(0, 0))

    # Then add the motor resonance peak to the graph and print some infos about it
    motor_fr, motor_zeta, motor_res_idx, lowfreq_max = compute_mechanical_parameters(global_motor_profile, freqs, 30)
    if lowfreq_max:
        ConsoleOutput.print(
            '[WARNING] There are a lot of low frequency vibrations that can alter the readings. This is probably due to the test being performed at too high an acceleration!'
        )
        ConsoleOutput.print(
            'Try lowering the ACCEL value and/or increasing the SIZE value before restarting the macro to ensure that only constant speeds are being recorded and that the dynamic behavior of the machine is not affecting the measurements'
        )
    if motor_zeta is not None:
        ConsoleOutput.print(
            'Motors have a main resonant frequency at %.1fHz with an estimated damping ratio of %.3f'
            % (motor_fr, motor_zeta)
        )
    else:
        ConsoleOutput.print(
            'Motors have a main resonant frequency at %.1fHz but it was impossible to estimate a damping ratio.'
            % (motor_fr)
        )

    # Mark the resonance on the combined curve with an 'R' annotation
    ax.plot(freqs[motor_res_idx], global_motor_profile[motor_res_idx], 'x', color='black', markersize=10)
    ax.annotate(
        'R',
        (freqs[motor_res_idx], global_motor_profile[motor_res_idx]),
        textcoords='offset points',
        xytext=(15, 5),
        ha='right',
        fontsize=14,
        color=KLIPPAIN_COLORS['red_pink'],
        weight='bold',
    )

    # Empty plots = text-only legend entries summarizing the resonance parameters
    legend_ax.plot([], [], ' ', label='Motor resonant frequency (ω0): %.1fHz' % (motor_fr))
    if motor_zeta is not None:
        legend_ax.plot([], [], ' ', label='Motor damping ratio (ζ): %.3f' % (motor_zeta))
    else:
        legend_ax.plot([], [], ' ', label='No damping ratio computed')

    ax.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
    ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
    ax.grid(which='major', color='grey')
    ax.grid(which='minor', color='lightgrey')

    legend_font = matplotlib.font_manager.FontProperties()
    legend_font.set_size('small')
    ax.legend(loc='upper left', prop=legend_font)
    legend_ax.legend(loc='upper right', prop=legend_font)
|
||||
|
||||
|
||||
def plot_vibration_spectrogram_polar(
    ax: plt.Axes, angles: np.ndarray, speeds: np.ndarray, spectrogram_data: np.ndarray
) -> None:
    """Draw the vibrations heatmap on a polar axis (angle as direction, speed as radius)."""
    # Map the spectrogram onto polar coordinates: a meshgrid of both angles and speeds is
    # needed so each energy value lands at its (direction, radius) position
    radius_grid, theta_grid = np.meshgrid(speeds, np.radians(angles))

    ax.set_title(
        'Polar vibrations heatmap', fontsize=14, color=KLIPPAIN_COLORS['dark_orange'], weight='bold', va='bottom'
    )
    ax.set_theta_zero_location('E')
    ax.set_theta_direction(1)

    # Log color scale to keep low energy details visible next to the strong peaks
    ax.pcolormesh(
        theta_grid, radius_grid, spectrogram_data, norm=matplotlib.colors.LogNorm(), cmap='inferno', shading='auto'
    )
    ax.set_thetagrids(list(range(0, 360, 15)))
    ax.tick_params(axis='y', which='both', colors='white', labelsize='medium')
    ax.set_ylim([0, max(speeds)])

    # Polar plot doesn't follow the gridspec margin, so we adjust it manually here
    pos = ax.get_position()
    ax.set_position([pos.x0 - 0.01, pos.y0 - 0.01, pos.width, pos.height])
|
||||
|
||||
|
||||
def plot_vibration_spectrogram(
    ax: plt.Axes, angles: np.ndarray, speeds: np.ndarray, spectrogram_data: np.ndarray, peaks: np.ndarray
) -> None:
    """Draw the rectangular vibrations heatmap and mark the detected peak speeds."""
    ax.set_title('Vibrations heatmap', fontsize=14, color=KLIPPAIN_COLORS['dark_orange'], weight='bold')
    ax.set_xlabel('Speed (mm/s)')
    ax.set_ylabel('Angle (deg)')

    # Log color scale to keep low energy details visible next to the strong peaks
    ax.imshow(
        spectrogram_data,
        norm=matplotlib.colors.LogNorm(),
        cmap='inferno',
        aspect='auto',
        extent=[speeds[0], speeds[-1], angles[0], angles[-1]],
        origin='lower',
        interpolation='antialiased',
    )

    # Add peaks lines in the spectrogram to get hint from peaks found in the first graph
    if peaks is None or len(peaks) == 0:
        return
    for idx, peak in enumerate(peaks):
        ax.axvline(speeds[peak], color='cyan', linewidth=0.75)
        ax.annotate(
            f'Peak {idx+1}',
            (speeds[peak], angles[-1] * 0.9),
            textcoords='data',
            color='cyan',
            rotation=90,
            fontsize=10,
            verticalalignment='top',
            horizontalalignment='right',
        )
|
||||
|
||||
|
||||
def plot_motor_config_txt(fig: plt.Figure, motors: List[MotorsConfigParser], differences: Optional[str]) -> None:
    """Print the TMC motor configurations (and their differences, if any) at the top of the figure."""

    def write_line(x_pos: float, row: int, text: str) -> None:
        # One line of configuration text; rows are stacked downward from the top of the figure
        fig.text(x_pos, 0.990 - 0.015 * row, text, ha='left', va='top', fontsize=10, color=KLIPPAIN_COLORS['dark_purple'])

    motor_details = [(motors[0], 'X motor'), (motors[1], 'Y motor')]

    # The TMC registers column is pushed further right when the longer autotune lines are printed
    distance = 0.12
    if motors[0].get_config('autotune_enabled'):
        distance = 0.27
        config_blocks = [
            f"| {lbl}: {mot.get_config('motor').upper()} on {mot.get_config('tmc').upper()} @ {mot.get_config('voltage'):0.1f}V {mot.get_config('run_current'):0.2f}A - {mot.get_config('microsteps')}usteps"
            for mot, lbl in motor_details
        ]
        config_blocks.append(
            f'| TMC Autotune enabled (PWM freq target: X={int(motors[0].get_config("pwm_freq_target")/1000)}kHz / Y={int(motors[1].get_config("pwm_freq_target")/1000)}kHz)'
        )
    else:
        config_blocks = [
            f"| {lbl}: {mot.get_config('tmc').upper()} @ {mot.get_config('run_current'):0.2f}A - {mot.get_config('microsteps')}usteps"
            for mot, lbl in motor_details
        ]
        config_blocks.append('| TMC Autotune not detected')

    for row, block in enumerate(config_blocks):
        write_line(0.41, row, block)

    # Print all the TMC registers of the X motor in a second column
    last_row = -1
    for last_row, (register, settings) in enumerate(motors[0].get_registers().items()):
        settings_str = ' '.join(f'{k}={v}' for k, v in settings.items())
        write_line(0.41 + distance, last_row, f'| {register.upper()}: {settings_str}')

    # And finally the Y motor differences (if any) right below the registers list
    if differences is not None:
        write_line(0.41 + distance, last_row + 1, f'| Y motor diff: {differences}')
|
||||
|
||||
|
||||
######################################################################
|
||||
# Startup and main routines
|
||||
######################################################################
|
||||
|
||||
|
||||
def extract_angle_and_speed(logname: str) -> Tuple[float, float]:
    """Extract the measurement angle and speed encoded in a raw log filename.

    Filenames are expected to contain a pattern like 'an45_1sp60_2'
    (angle 45 deg, speed 60 mm/s).

    Args:
        logname: path of the raw log file.

    Returns:
        A (angle, speed) tuple of floats.

    Raises:
        ValueError: if the filename does not match the expected pattern.
    """
    # The previous version wrapped this in a try/except AttributeError, but no statement
    # in that path can raise AttributeError (match.group is only reached when the match
    # succeeded), so the dead branch was removed: match-or-raise directly instead.
    match = re.search(r'an(\d+)_\d+sp(\d+)_\d+', os.path.basename(logname))
    if match is None:
        raise ValueError(f'File {logname} does not match expected format. Clean your /tmp folder and start again!')
    return float(match.group(1)), float(match.group(2))
|
||||
|
||||
|
||||
def vibrations_profile(
    lognames: List[str],
    klipperdir: str = '~/klipper',
    kinematics: str = 'cartesian',
    accel: Optional[float] = None,
    max_freq: float = 1000.0,
    st_version: Optional[str] = None,
    motors: Optional[List[MotorsConfigParser]] = None,
) -> plt.Figure:
    """Build the full machine vibrations analysis figure from a set of raw accelerometer logs.

    Args:
        lognames: paths of the raw log files, one per (angle, speed) measurement; the angle
            and speed are encoded in each filename (see extract_angle_and_speed()).
        klipperdir: main Klipper directory, used to import Klipper's shaper calibration code.
        kinematics: machine kinematics ('cartesian', 'corexy' or 'corexz').
        accel: test acceleration; only used for the graph title when provided.
        st_version: Shake&Tune version string printed on the figure ('unknown' hides it).
        max_freq: maximum frequency kept in the PSD computations.
        motors: optional list of two TMC motor configurations (X and Y) printed on the figure.

    Returns:
        The assembled matplotlib figure (2x3 subplot grid, two of them polar).

    Raises:
        ValueError: if the kinematics is unsupported, or if the measurements do not cover
            the main angles required by the selected kinematics.
    """
    # NOTE(review): shaper_calibrate is published as a module-level global, presumably so
    # helper functions (e.g. calc_freq_response) can use it -- confirm against the rest
    # of the module before refactoring.
    global shaper_calibrate
    shaper_calibrate = setup_klipper_import(klipperdir)

    # The pair of "main" measurement axes depends on the kinematics
    if kinematics == 'cartesian' or kinematics == 'corexz':
        main_angles = [0, 90]
    elif kinematics == 'corexy':
        main_angles = [45, 135]
    else:
        raise ValueError('Only Cartesian, CoreXY and CoreXZ kinematics are supported by this tool at the moment!')

    # psds[angle][speed] -> interpolated PSD curve; psds_sum[angle][speed] -> its integral
    psds = defaultdict(lambda: defaultdict(list))
    psds_sum = defaultdict(lambda: defaultdict(list))
    target_freqs_initialized = False

    for logname in lognames:
        data = parse_log(logname)
        if data is None:
            continue  # File is not in the expected format, skip it
        angle, speed = extract_angle_and_speed(logname)
        freq_response = calc_freq_response(data)
        first_freqs = freq_response.freq_bins
        psd_sum = freq_response.psd_sum

        # The frequency bins of the first parsed file define the common interpolation grid
        if not target_freqs_initialized:
            target_freqs = first_freqs[first_freqs <= max_freq]
            target_freqs_initialized = True

        psd_sum = psd_sum[first_freqs <= max_freq]
        first_freqs = first_freqs[first_freqs <= max_freq]

        # Store the interpolated PSD and integral values
        psds[angle][speed] = np.interp(target_freqs, first_freqs, psd_sum)
        psds_sum[angle][speed] = np.trapz(psd_sum, first_freqs)

    measured_angles = sorted(psds_sum.keys())
    measured_speeds = sorted({speed for angle_speeds in psds_sum.values() for speed in angle_speeds.keys()})

    # Every main angle of the selected kinematics must be present in the measurements
    for main_angle in main_angles:
        if main_angle not in measured_angles:
            raise ValueError('Measurements not taken at the correct angles for the specified kinematics!')

    # Precompute the variables used in plot functions
    all_angles, all_speeds, spectrogram_data = compute_dir_speed_spectrogram(
        measured_speeds, psds_sum, kinematics, main_angles
    )
    all_angles_energy = compute_angle_powers(spectrogram_data)
    sp_min_energy, sp_max_energy, sp_variance_energy, vibration_metric = compute_speed_powers(spectrogram_data)
    motor_profiles, global_motor_profile = compute_motor_profiles(target_freqs, psds, all_angles_energy, main_angles)

    # symmetry_factor = compute_symmetry_analysis(all_angles, all_angles_energy)
    symmetry_factor = compute_symmetry_analysis(all_angles, spectrogram_data, main_angles)
    ConsoleOutput.print(f'Machine estimated vibration symmetry: {symmetry_factor:.1f}%')

    # Analyze low variance ranges of vibration energy across all angles for each speed to identify clean speeds
    # and highlight them. Also find the peaks to identify speeds to avoid due to high resonances
    num_peaks, vibration_peaks, peaks_speeds = detect_peaks(
        vibration_metric,
        all_speeds,
        PEAKS_DETECTION_THRESHOLD * vibration_metric.max(),
        PEAKS_RELATIVE_HEIGHT_THRESHOLD,
        10,
        10,
    )
    formated_peaks_speeds = ['{:.1f}'.format(pspeed) for pspeed in peaks_speeds]
    ConsoleOutput.print(
        'Vibrations peaks detected: %d @ %s mm/s (avoid setting a speed near these values in your slicer print profile)'
        % (num_peaks, ', '.join(map(str, formated_peaks_speeds)))
    )

    good_speeds = identify_low_energy_zones(vibration_metric, SPEEDS_VALLEY_DETECTION_THRESHOLD)
    if good_speeds is not None:
        # Exclude a window of speeds around each detected peak from the good speed ranges
        deletion_range = int(SPEEDS_AROUND_PEAK_DELETION / (all_speeds[1] - all_speeds[0]))
        peak_speed_indices = {pspeed: np.where(all_speeds == pspeed)[0][0] for pspeed in set(peaks_speeds)}

        # Filter and split ranges based on peak indices, avoiding overlaps
        good_speeds = filter_and_split_ranges(all_speeds, good_speeds, peak_speed_indices, deletion_range)

        # Add some logging about the good speeds found
        ConsoleOutput.print(f'Lowest vibrations speeds ({len(good_speeds)} ranges sorted from best to worse):')
        for idx, (start, end, _) in enumerate(good_speeds):
            ConsoleOutput.print(f'{idx+1}: {all_speeds[start]:.1f} to {all_speeds[end]:.1f} mm/s')

    # Angle low energy valleys identification (good angles ranges) and print them to the console
    good_angles = identify_low_energy_zones(all_angles_energy, ANGLES_VALLEY_DETECTION_THRESHOLD)
    if good_angles is not None:
        ConsoleOutput.print(f'Lowest vibrations angles ({len(good_angles)} ranges sorted from best to worse):')
        for idx, (start, end, energy) in enumerate(good_angles):
            ConsoleOutput.print(
                f'{idx+1}: {all_angles[start]:.1f}° to {all_angles[end]:.1f}° (mean vibrations energy: {energy:.2f}% of max)'
            )

    # Create graph layout
    fig, ((ax1, ax2, ax3), (ax4, ax5, ax6)) = plt.subplots(
        2,
        3,
        gridspec_kw={
            'height_ratios': [1, 1],
            'width_ratios': [4, 8, 6],
            'bottom': 0.050,
            'top': 0.890,
            'left': 0.040,
            'right': 0.985,
            'hspace': 0.166,
            'wspace': 0.138,
        },
    )

    # Transform ax1 and ax4 to polar plots (matplotlib cannot change the projection of an
    # existing axes, so they are removed and re-added)
    ax1.remove()
    ax1 = fig.add_subplot(2, 3, 1, projection='polar')
    ax4.remove()
    ax4 = fig.add_subplot(2, 3, 4, projection='polar')

    # Set the global .png figure size
    fig.set_size_inches(20, 11.5)

    # Add title
    title_line1 = 'MACHINE VIBRATIONS ANALYSIS TOOL'
    fig.text(
        0.060, 0.965, title_line1, ha='left', va='bottom', fontsize=20, color=KLIPPAIN_COLORS['purple'], weight='bold'
    )
    try:
        # The date/time of the measurement is recovered from the first CSV filename
        filename_parts = (lognames[0].split('/')[-1]).split('_')
        dt = datetime.strptime(f"{filename_parts[1]} {filename_parts[2].split('-')[0]}", '%Y%m%d %H%M%S')
        title_line2 = dt.strftime('%x %X')
        if accel is not None:
            title_line2 += ' at ' + str(accel) + ' mm/s² -- ' + kinematics.upper() + ' kinematics'
    except Exception:
        # Fall back to the raw filename when the date cannot be parsed out of it
        ConsoleOutput.print('Warning: CSV filenames appear to be different than expected (%s)' % (lognames[0]))
        title_line2 = lognames[0].split('/')[-1]
    fig.text(0.060, 0.957, title_line2, ha='left', va='top', fontsize=16, color=KLIPPAIN_COLORS['dark_purple'])

    # Add the motors infos to the top of the graph
    if motors is not None and len(motors) == 2:
        differences = motors[0].compare_to(motors[1])
        plot_motor_config_txt(fig, motors, differences)
        if differences is not None and kinematics == 'corexy':
            ConsoleOutput.print(f'Warning: motors have different TMC configurations!\n{differences}')

    # Plot the graphs
    plot_angle_profile_polar(ax1, all_angles, all_angles_energy, good_angles, symmetry_factor)
    plot_vibration_spectrogram_polar(ax4, all_angles, all_speeds, spectrogram_data)

    plot_global_speed_profile(
        ax2,
        all_speeds,
        sp_min_energy,
        sp_max_energy,
        sp_variance_energy,
        vibration_metric,
        num_peaks,
        vibration_peaks,
        good_speeds,
    )
    plot_angular_speed_profiles(ax3, all_speeds, all_angles, spectrogram_data, kinematics)
    plot_vibration_spectrogram(ax5, all_angles, all_speeds, spectrogram_data, vibration_peaks)

    plot_motor_profiles(ax6, target_freqs, main_angles, motor_profiles, global_motor_profile, max_freq)

    # Adding a small Klippain logo to the top left corner of the figure
    ax_logo = fig.add_axes([0.001, 0.924, 0.075, 0.075], anchor='NW')
    ax_logo.imshow(plt.imread(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'klippain.png')))
    ax_logo.axis('off')

    # Adding Shake&Tune version in the top right corner
    if st_version != 'unknown':
        fig.text(0.995, 0.985, st_version, ha='right', va='bottom', fontsize=8, color=KLIPPAIN_COLORS['purple'])

    return fig
|
||||
|
||||
|
||||
def main():
    """Command-line entry point: parse options, build the vibrations graph and save it as PNG."""
    usage = '%prog [options] <raw logs>'
    parser = optparse.OptionParser(usage)
    parser.add_option('-o', '--output', type='string', dest='output', default=None, help='filename of output graph')
    parser.add_option(
        '-c', '--accel', type='int', dest='accel', default=None, help='accel value to be printed on the graph'
    )
    parser.add_option('-f', '--max_freq', type='float', default=1000.0, help='maximum frequency to graph')
    parser.add_option(
        '-k', '--klipper_dir', type='string', dest='klipperdir', default='~/klipper', help='main klipper directory'
    )
    parser.add_option(
        '-m',
        '--kinematics',
        type='string',
        dest='kinematics',
        default='cartesian',
        help='machine kinematics configuration',
    )
    options, args = parser.parse_args()

    # Input validation: at least one CSV, a mandatory output file and a supported kinematics
    if not args:
        parser.error('No CSV file(s) to analyse')
    if options.output is None:
        parser.error('You must specify an output file.png to use the script (option -o)')
    if options.kinematics not in {'cartesian', 'corexy', 'corexz'}:
        parser.error('Only cartesian, corexy and corexz kinematics are supported by this tool at the moment!')

    fig = vibrations_profile(args, options.klipperdir, options.kinematics, options.accel, options.max_freq)
    fig.savefig(options.output, dpi=150)


if __name__ == '__main__':
    main()
|
||||
Reference in New Issue
Block a user