AXES_MAP detection reworked (#110)
@@ -1,7 +1,5 @@
 GitPython==3.1.40
-matplotlib==3.8.2 ; python_version >= '3.9'
-matplotlib==3.3.4 ; python_version < '3.9'
-numpy==1.26.2 ; python_version >= '3.9'
-numpy==1.19.5 ; python_version < '3.9'
-scipy==1.11.4 ; python_version >= '3.9'
-scipy==1.7.3 ; python_version < '3.9'
+matplotlib==3.8.2
+numpy==1.26.2
+scipy==1.11.4
+PyWavelets==1.6.0

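Note: the dependency changes line up with the new analysis code below: PyWavelets is added for the wavelet denoising that replaces the previous scipy Butterworth filtering, and the separate pins for Python < 3.9 are dropped, which presumably makes Python 3.9+ the supported baseline.
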
@@ -23,7 +23,6 @@ gcode:
     {% set dummy = params.SPEED|default(80) %}
     {% set dummy = params.ACCEL|default(1500) %}
     {% set dummy = params.TRAVEL_SPEED|default(120) %}
-    {% set dummy = params.ACCEL_CHIP %}
     _AXES_MAP_CALIBRATION {rawparams}
 
 

@@ -5,6 +5,8 @@ from ..helpers.console_output import ConsoleOutput
 from ..shaketune_process import ShakeTuneProcess
 from .accelerometer import Accelerometer
 
+SEGMENT_LENGTH = 30  # mm
+
 
 def axes_map_calibration(gcmd, config, st_process: ShakeTuneProcess) -> None:
     z_height = gcmd.get_float('Z_HEIGHT', default=20.0)

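Note: SEGMENT_LENGTH is the commanded 30 mm travel of each calibration move. It is also passed to the graph creator (see creator.configure(accel, SEGMENT_LENGTH) below) so the analysis can rescale the double-integrated accelerometer path to a known physical length. A quick illustrative sketch of that idea (not from the diff, numbers are made up):

import numpy as np

# Double integration of accelerometer data yields a path with arbitrary
# scale and drift; forcing it to span the commanded 30 mm restores units.
position = np.linspace(0.0, 1.7, 100)           # un-scaled integrated position
traveled = np.abs(np.diff(position)).sum()      # raw path length
scaled = position * (30.0 / traveled)           # rescale to the known length
print(round(np.abs(np.diff(scaled)).sum(), 1))  # -> 30.0
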
@@ -21,6 +23,12 @@ def axes_map_calibration(gcmd, config, st_process: ShakeTuneProcess) -> None:
     k_accelerometer = printer.lookup_object(accel_chip, None)
     if k_accelerometer is None:
         gcmd.error('Error: multi-accelerometer configurations are not supported for this macro!')
+    pconfig = printer.lookup_object('configfile')
+    current_axes_map = pconfig.status_raw_config[accel_chip]['axes_map']
+    if current_axes_map.strip().replace(' ', '') != 'x,y,z':
+        gcmd.error(
+            f'Error: The parameter axes_map is already set in your {accel_chip} configuration! Please remove it (or set it to "x,y,z")!'
+        )
     accelerometer = Accelerometer(k_accelerometer)
 
     toolhead_info = toolhead.get_status(systime)

@@ -44,19 +52,27 @@ def axes_map_calibration(gcmd, config, st_process: ShakeTuneProcess) -> None:
     _, _, _, E = toolhead.get_position()
 
     # Going to the start position
-    toolhead.move([mid_x - 15, mid_y - 15, z_height, E], feedrate_travel)
+    toolhead.move([mid_x - SEGMENT_LENGTH / 2, mid_y - SEGMENT_LENGTH / 2, z_height, E], feedrate_travel)
     toolhead.dwell(0.5)
 
     # Start the measurements and do the movements (+X, +Y and then +Z)
     accelerometer.start_measurement()
-    toolhead.dwell(1)
-    toolhead.move([mid_x + 15, mid_y - 15, z_height, E], speed)
-    toolhead.dwell(1)
-    toolhead.move([mid_x + 15, mid_y + 15, z_height, E], speed)
-    toolhead.dwell(1)
-    toolhead.move([mid_x + 15, mid_y + 15, z_height + 15, E], speed)
-    toolhead.dwell(1)
-    accelerometer.stop_measurement('axemap')
+    toolhead.dwell(0.5)
+    toolhead.move([mid_x + SEGMENT_LENGTH / 2, mid_y - SEGMENT_LENGTH / 2, z_height, E], speed)
+    toolhead.dwell(0.5)
+    accelerometer.stop_measurement('axesmap_X', append_time=True)
+    toolhead.dwell(0.5)
+    accelerometer.start_measurement()
+    toolhead.dwell(0.5)
+    toolhead.move([mid_x + SEGMENT_LENGTH / 2, mid_y + SEGMENT_LENGTH / 2, z_height, E], speed)
+    toolhead.dwell(0.5)
+    accelerometer.stop_measurement('axesmap_Y', append_time=True)
+    toolhead.dwell(0.5)
+    accelerometer.start_measurement()
+    toolhead.dwell(0.5)
+    toolhead.move([mid_x + SEGMENT_LENGTH / 2, mid_y + SEGMENT_LENGTH / 2, z_height + SEGMENT_LENGTH, E], speed)
+    toolhead.dwell(0.5)
+    accelerometer.stop_measurement('axesmap_Z', append_time=True)
 
     # Re-enable the input shaper if it was active
     if input_shaper is not None:

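Note: each axis move is now bracketed by its own start/stop pair, producing three separate recordings (axesmap_X, axesmap_Y, axesmap_Z) with 0.5 s of quiet dwell on both sides of every 30 mm move, instead of one long 'axemap' capture. The recurring pattern, extracted as a hypothetical helper (the real code inlines these calls against the project's Accelerometer wrapper):

def record_segment(accelerometer, toolhead, target, speed, label):
    # Record one segment into its own CSV, with a quiet lead-in and tail
    accelerometer.start_measurement()
    toolhead.dwell(0.5)
    toolhead.move(target, speed)
    toolhead.dwell(0.5)
    accelerometer.stop_measurement(label, append_time=True)
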
@@ -71,5 +87,5 @@ def axes_map_calibration(gcmd, config, st_process: ShakeTuneProcess) -> None:
     # Run post-processing
     ConsoleOutput.print('Analysis of the movements...')
     creator = st_process.get_graph_creator()
-    creator.configure(accel)
+    creator.configure(accel, SEGMENT_LENGTH)
     st_process.run()

@@ -6,13 +6,31 @@
 # Written by Frix_x#0161 #
 
 import optparse
+import os
+from datetime import datetime
 
+import matplotlib
+import matplotlib.colors
+import matplotlib.font_manager
+import matplotlib.pyplot as plt
+import matplotlib.ticker
 import numpy as np
-from scipy.signal import butter, filtfilt
+import pywt
+from scipy import stats
 
+matplotlib.use('Agg')
+
+from ..helpers.common_func import parse_log
 from ..helpers.console_output import ConsoleOutput
 
-NUM_POINTS = 500
+KLIPPAIN_COLORS = {
+    'purple': '#70088C',
+    'orange': '#FF8D32',
+    'dark_purple': '#150140',
+    'dark_orange': '#F24130',
+    'red_pink': '#F2055C',
+}
+MACHINE_AXES = ['x', 'y', 'z']
 
 
 ######################################################################

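Note: matplotlib.use('Agg') selects the non-interactive Agg backend before any figure is drawn, which is what allows rendering PNGs on a headless printer host. A minimal standalone sketch:

import matplotlib

matplotlib.use('Agg')  # must run before pyplot draws anything
import matplotlib.pyplot as plt

fig, ax = plt.subplots()
ax.plot([0, 1], [0, 1])
fig.savefig('/tmp/example.png', dpi=150)  # no display server needed
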
@@ -20,58 +38,230 @@ NUM_POINTS = 500
 ######################################################################
 
 
-def accel_signal_filter(data, cutoff=2, fs=100, order=5):
-    nyq = 0.5 * fs
-    normal_cutoff = cutoff / nyq
-    b, a = butter(order, normal_cutoff, btype='low', analog=False)
-    filtered_data = filtfilt(b, a, data)
-    filtered_data -= np.mean(filtered_data)
-    return filtered_data
+def wavelet_denoise(data, wavelet='db1', level=1):
+    coeffs = pywt.wavedec(data, wavelet, mode='smooth')
+    threshold = np.median(np.abs(coeffs[-level])) / 0.6745 * np.sqrt(2 * np.log(len(data)))
+    new_coeffs = [pywt.threshold(c, threshold, mode='soft') for c in coeffs]
+    denoised_data = pywt.waverec(new_coeffs, wavelet)
+
+    # Compute noise by subtracting denoised data from original data
+    noise = data - denoised_data[: len(data)]
+    return denoised_data, noise
 
 
-def find_first_spike(data):
-    min_index, max_index = np.argmin(data), np.argmax(data)
-    return ('-', min_index) if min_index < max_index else ('', max_index)
+def integrate_trapz(accel, time):
+    return np.array([np.trapz(accel[:i], time[:i]) for i in range(2, len(time) + 1)])
 
 
-def get_movement_vector(data, start_idx, end_idx):
-    if start_idx < end_idx:
-        vector = []
-        for i in range(3):
-            vector.append(np.mean(data[i][start_idx:end_idx], axis=0))
-        return vector
-    else:
-        return np.zeros(3)
+def process_acceleration_data(time, accel_x, accel_y, accel_z):
+    # Calculate the constant offset (gravity component)
+    offset_x = np.mean(accel_x)
+    offset_y = np.mean(accel_y)
+    offset_z = np.mean(accel_z)
+
+    # Remove the constant offset from acceleration data
+    accel_x -= offset_x
+    accel_y -= offset_y
+    accel_z -= offset_z
+
+    # Apply wavelet denoising
+    accel_x, noise_x = wavelet_denoise(accel_x)
+    accel_y, noise_y = wavelet_denoise(accel_y)
+    accel_z, noise_z = wavelet_denoise(accel_z)
+
+    # Integrate acceleration to get velocity using trapezoidal rule
+    velocity_x = integrate_trapz(accel_x, time)
+    velocity_y = integrate_trapz(accel_y, time)
+    velocity_z = integrate_trapz(accel_z, time)
+
+    # Correct drift in velocity by resetting to zero at the beginning and end
+    velocity_x -= np.linspace(velocity_x[0], velocity_x[-1], len(velocity_x))
+    velocity_y -= np.linspace(velocity_y[0], velocity_y[-1], len(velocity_y))
+    velocity_z -= np.linspace(velocity_z[0], velocity_z[-1], len(velocity_z))
+
+    # Integrate velocity to get position using trapezoidal rule
+    position_x = integrate_trapz(velocity_x, time[1:])
+    position_y = integrate_trapz(velocity_y, time[1:])
+    position_z = integrate_trapz(velocity_z, time[1:])
+
+    noise_intensity = np.mean([np.std(noise_x), np.std(noise_y), np.std(noise_z)])
+
+    return offset_x, offset_y, offset_z, position_x, position_y, position_z, noise_intensity
 
 
-def angle_between(v1, v2):
-    v1_u = v1 / np.linalg.norm(v1)
-    v2_u = v2 / np.linalg.norm(v2)
-    return np.arccos(np.clip(np.dot(v1_u, v2_u), -1.0, 1.0))
+def scale_positions_to_fixed_length(position_x, position_y, position_z, fixed_length):
+    # Calculate the total distance traveled in 3D space
+    total_distance = np.sqrt(np.diff(position_x) ** 2 + np.diff(position_y) ** 2 + np.diff(position_z) ** 2).sum()
+    scale_factor = fixed_length / total_distance
+
+    # Apply the scale factor to the positions
+    position_x *= scale_factor
+    position_y *= scale_factor
+    position_z *= scale_factor
+
+    return position_x, position_y, position_z
 
 
-def compute_errors(filtered_data, spikes_sorted, accel_value, num_points):
-    # Get the movement start points in the correct order from the sorted bag of spikes
-    movement_starts = [spike[0][1] for spike in spikes_sorted]
+def find_nearest_perfect_vector(average_direction_vector):
+    # Define the perfect vectors
+    perfect_vectors = np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1], [-1, 0, 0], [0, -1, 0], [0, 0, -1]])
 
-    # Theoretical unit vectors for X, Y, Z printer axes
-    printer_axes = {'x': np.array([1, 0, 0]), 'y': np.array([0, 1, 0]), 'z': np.array([0, 0, 1])}
+    # Find the nearest perfect vector
+    dot_products = perfect_vectors @ average_direction_vector
+    nearest_vector_idx = np.argmax(dot_products)
+    nearest_vector = perfect_vectors[nearest_vector_idx]
 
-    alignment_errors = {}
-    sensitivity_errors = {}
-    for i, axis in enumerate(['x', 'y', 'z']):
-        movement_start = movement_starts[i]
-        movement_end = movement_start + num_points
-        movement_vector = get_movement_vector(filtered_data, movement_start, movement_end)
-        alignment_errors[axis] = angle_between(movement_vector, printer_axes[axis])
+    # Calculate the angle error
+    angle_error = np.arccos(dot_products[nearest_vector_idx]) * 180 / np.pi
 
-        measured_accel_magnitude = np.linalg.norm(movement_vector)
-        if accel_value != 0:
-            sensitivity_errors[axis] = abs(measured_accel_magnitude - accel_value) / accel_value * 100
-        else:
-            sensitivity_errors[axis] = None
+    return nearest_vector, angle_error
 
-    return alignment_errors, sensitivity_errors
+
+def linear_regression_direction(position_x, position_y, position_z, trim_length=0.25):
+    # Trim the start and end of the position data to keep only the center of the segment
+    # as the start and stop positions are not always perfectly aligned and can be a bit noisy
+    t = len(position_x)
+    trim_start = int(t * trim_length)
+    trim_end = int(t * (1 - trim_length))
+    position_x = position_x[trim_start:trim_end]
+    position_y = position_y[trim_start:trim_end]
+    position_z = position_z[trim_start:trim_end]
+
+    # Compute the direction vector using linear regression over the position data
+    time = np.arange(len(position_x))
+    slope_x, intercept_x, _, _, _ = stats.linregress(time, position_x)
+    slope_y, intercept_y, _, _, _ = stats.linregress(time, position_y)
+    slope_z, intercept_z, _, _, _ = stats.linregress(time, position_z)
+    end_position = np.array(
+        [slope_x * time[-1] + intercept_x, slope_y * time[-1] + intercept_y, slope_z * time[-1] + intercept_z]
+    )
+    direction_vector = end_position - np.array([intercept_x, intercept_y, intercept_z])
+    direction_vector = direction_vector / np.linalg.norm(direction_vector)
+    return direction_vector
+
+
+######################################################################
+# Graphing
+######################################################################
+
+
+def plot_compare_frequency(ax, time, accel_x, accel_y, accel_z, offset, i):
+    # Plot acceleration data
+    ax.plot(
+        time,
+        accel_x,
+        label='X' if i == 0 else '',
+        color=KLIPPAIN_COLORS['purple'],
+        linewidth=0.5,
+        zorder=50 if i == 0 else 10,
+    )
+    ax.plot(
+        time,
+        accel_y,
+        label='Y' if i == 0 else '',
+        color=KLIPPAIN_COLORS['orange'],
+        linewidth=0.5,
+        zorder=50 if i == 1 else 10,
+    )
+    ax.plot(
+        time,
+        accel_z,
+        label='Z' if i == 0 else '',
+        color=KLIPPAIN_COLORS['red_pink'],
+        linewidth=0.5,
+        zorder=50 if i == 2 else 10,
+    )
+
+    # Setting axis parameters, grid and graph title
+    ax.set_xlabel('Time (s)')
+    ax.set_ylabel('Acceleration (mm/s²)')
+
+    ax.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
+    ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
+    ax.ticklabel_format(axis='y', style='scientific', scilimits=(0, 0))
+    ax.grid(which='major', color='grey')
+    ax.grid(which='minor', color='lightgrey')
+    fontP = matplotlib.font_manager.FontProperties()
+    fontP.set_size('small')
+    ax.set_title(
+        'Acceleration (gravity offset removed)',
+        fontsize=14,
+        color=KLIPPAIN_COLORS['dark_orange'],
+        weight='bold',
+    )
+
+    ax.legend(loc='upper left', prop=fontP)
+
+    # Add gravity offset to the graph
+    if i == 0:
+        ax2 = ax.twinx()  # To split the legends in two box
+        ax2.yaxis.set_visible(False)
+        ax2.plot([], [], ' ', label=f'Measured gravity: {offset / 1000:0.3f} m/s²')
+        ax2.legend(loc='upper right', prop=fontP)
+
+
+def plot_3d_path(ax, i, position_x, position_y, position_z, average_direction_vector, angle_error):
+    ax.plot(position_x, position_y, position_z, color=KLIPPAIN_COLORS['orange'], linestyle=':', linewidth=2)
+    ax.scatter(position_x[0], position_y[0], position_z[0], color=KLIPPAIN_COLORS['red_pink'], zorder=10)
+    ax.text(
+        position_x[0] + 1,
+        position_y[0],
+        position_z[0],
+        str(i + 1),
+        color='black',
+        fontsize=16,
+        fontweight='bold',
+        zorder=20,
+    )
+
+    # Plot the average direction vector
+    start_position = np.array([position_x[0], position_y[0], position_z[0]])
+    end_position = start_position + average_direction_vector * np.linalg.norm(
+        [position_x[-1] - position_x[0], position_y[-1] - position_y[0], position_z[-1] - position_z[0]]
+    )
+    axes = ['X', 'Y', 'Z']
+    ax.plot(
+        [start_position[0], end_position[0]],
+        [start_position[1], end_position[1]],
+        [start_position[2], end_position[2]],
+        label=f'{axes[i]} angle: {angle_error:0.2f}°',
+        color=KLIPPAIN_COLORS['purple'],
+        linestyle='-',
+        linewidth=2,
+    )
+
+    # Setting axis parameters, grid and graph title
+    ax.set_xlabel('X Position (mm)')
+    ax.set_ylabel('Y Position (mm)')
+    ax.set_zlabel('Z Position (mm)')
+
+    ax.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
+    ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
+    ax.grid(which='major', color='grey')
+    ax.grid(which='minor', color='lightgrey')
+    fontP = matplotlib.font_manager.FontProperties()
+    fontP.set_size('small')
+    ax.set_title(
+        'Estimated movement in 3D space',
+        fontsize=14,
+        color=KLIPPAIN_COLORS['dark_orange'],
+        weight='bold',
+    )
+
+    ax.legend(loc='upper left', prop=fontP)
+
+
+def format_direction_vector(vectors):
+    formatted_vector = []
+    for vector in vectors:
+        for i in range(len(vector)):
+            if vector[i] > 0:
+                formatted_vector.append(MACHINE_AXES[i])
+                break
+            elif vector[i] < 0:
+                formatted_vector.append(f'-{MACHINE_AXES[i]}')
+                break
+    return ', '.join(formatted_vector)
 
 
 ######################################################################

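Note: wavelet_denoise() above is a standard soft-thresholding recipe: the noise level is estimated from the detail coefficients with a median absolute deviation (the 0.6745 factor), then the universal threshold sigma * sqrt(2 * ln(N)) is applied to all levels before reconstruction. A self-contained sketch of the same recipe on synthetic data (not from the diff):

import numpy as np
import pywt

rng = np.random.default_rng(42)
t = np.linspace(0, 1, 1024)
noisy = np.sin(2 * np.pi * 5 * t) + rng.normal(0, 0.3, t.size)

coeffs = pywt.wavedec(noisy, 'db1', mode='smooth')
sigma = np.median(np.abs(coeffs[-1])) / 0.6745        # MAD noise estimate
threshold = sigma * np.sqrt(2 * np.log(noisy.size))   # universal threshold
denoised = pywt.waverec([pywt.threshold(c, threshold, mode='soft') for c in coeffs], 'db1')
print(f'residual std: {np.std(noisy - denoised[: noisy.size]):.3f}')
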
@@ -79,50 +269,122 @@ def compute_errors(filtered_data, spikes_sorted, accel_value, num_points):
 ######################################################################
 
 
-def parse_log(logname):
-    with open(logname) as f:
-        for header in f:
-            if not header.startswith('#'):
-                break
-        if not header.startswith('freq,psd_x,psd_y,psd_z,psd_xyz'):
-            # Raw accelerometer data
-            return np.loadtxt(logname, comments='#', delimiter=',')
-    # Power spectral density data or shaper calibration data
-    raise ValueError(
-        'File %s does not contain raw accelerometer data and therefore '
-        'is not supported by this script. Please use the official Klipper '
-        'calibrate_shaper.py script to process it instead.' % (logname,)
+def axesmap_calibration(lognames, fixed_length, accel=None, st_version='unknown'):
+    # Parse data from the log files while ignoring CSV in the wrong format (sorted by axis name)
+    raw_datas = {}
+    for logname in lognames:
+        data = parse_log(logname)
+        if data is not None:
+            _axis = logname.split('_')[-1].split('.')[0].lower()
+            raw_datas[_axis] = data
+
+    if len(raw_datas) != 3:
+        raise ValueError('This tool needs 3 CSVs to work with (like axesmap_X.csv, axesmap_Y.csv and axesmap_Z.csv)')
+
+    fig, ((ax1, ax2)) = plt.subplots(
+        1,
+        2,
+        gridspec_kw={
+            'width_ratios': [5, 3],
+            'bottom': 0.080,
+            'top': 0.840,
+            'left': 0.055,
+            'right': 0.960,
+            'hspace': 0.166,
+            'wspace': 0.060,
+        },
+    )
+    fig.set_size_inches(15, 7)
+    ax2.remove()
+    ax2 = fig.add_subplot(122, projection='3d')
+
+    cumulative_start_position = np.array([0, 0, 0])
+    direction_vectors = []
+    total_noise_intensity = 0.0
+    for i, machine_axis in enumerate(MACHINE_AXES):
+        if machine_axis not in raw_datas:
+            raise ValueError(f'Missing CSV file for axis {machine_axis}')
+
+        # Get the accel data according to the current axes_map
+        time = raw_datas[machine_axis][:, 0]
+        accel_x = raw_datas[machine_axis][:, 1]
+        accel_y = raw_datas[machine_axis][:, 2]
+        accel_z = raw_datas[machine_axis][:, 3]
+
+        offset_x, offset_y, offset_z, position_x, position_y, position_z, noise_intensity = process_acceleration_data(
+            time, accel_x, accel_y, accel_z
+        )
+        position_x, position_y, position_z = scale_positions_to_fixed_length(
+            position_x, position_y, position_z, fixed_length
+        )
+        position_x += cumulative_start_position[0]
+        position_y += cumulative_start_position[1]
+        position_z += cumulative_start_position[2]
+
+        gravity = np.linalg.norm(np.array([offset_x, offset_y, offset_z]))
+        average_direction_vector = linear_regression_direction(position_x, position_y, position_z)
+        direction_vector, angle_error = find_nearest_perfect_vector(average_direction_vector)
+        ConsoleOutput.print(
+            f'Machine axis {machine_axis.upper()} -> nearest accelerometer direction vector: {direction_vector} (angle error: {angle_error:.2f}°)'
+        )
+        direction_vectors.append(direction_vector)
+
+        total_noise_intensity += noise_intensity
+
+        plot_compare_frequency(ax1, time, accel_x, accel_y, accel_z, gravity, i)
+        plot_3d_path(ax2, i, position_x, position_y, position_z, average_direction_vector, angle_error)
+
+        # Update the cumulative start position for the next segment
+        cumulative_start_position = np.array([position_x[-1], position_y[-1], position_z[-1]])
+
+    average_noise_intensity = total_noise_intensity / len(raw_datas)
+    if average_noise_intensity <= 350:
+        average_noise_intensity_text = '-> OK'
+    elif 350 < average_noise_intensity <= 700:
+        average_noise_intensity_text = '-> WARNING: accelerometer noise is a bit high'
+    else:
+        average_noise_intensity_text = '-> ERROR: accelerometer noise is too high!'
+
+    formatted_direction_vector = format_direction_vector(direction_vectors)
+    ConsoleOutput.print(f'--> Detected axes_map: {formatted_direction_vector}')
+    ConsoleOutput.print(
+        f'Average accelerometer noise level: {average_noise_intensity:.2f} mm/s² {average_noise_intensity_text}'
     )
 
+    # Add title
+    title_line1 = 'AXES MAP CALIBRATION TOOL'
+    fig.text(
+        0.060, 0.947, title_line1, ha='left', va='bottom', fontsize=20, color=KLIPPAIN_COLORS['purple'], weight='bold'
+    )
+    try:
+        filename = lognames[0].split('/')[-1]
+        dt = datetime.strptime(f"{filename.split('_')[1]} {filename.split('_')[2]}", '%Y%m%d %H%M%S')
+        title_line2 = dt.strftime('%x %X')
+        if accel is not None:
+            title_line2 += f' -- at {accel:0.0f} mm/s²'
+    except Exception:
+        ConsoleOutput.print(
+            'Warning: CSV filenames look to be different than expected (%s , %s, %s)'
+            % (lognames[0], lognames[1], lognames[2])
+        )
+        title_line2 = lognames[0].split('/')[-1] + ' ...'
+    fig.text(0.060, 0.939, title_line2, ha='left', va='top', fontsize=16, color=KLIPPAIN_COLORS['dark_purple'])
 
-def axesmap_calibration(lognames, accel=None):
-    # Parse the raw data and get them ready for analysis
-    raw_datas = [parse_log(filename) for filename in lognames]
-    if len(raw_datas) > 1:
-        raise ValueError('Analysis of multiple CSV files at once is not possible with this script')
+    title_line3 = f'| Detected axes_map: {formatted_direction_vector}'
+    title_line4 = f'| Accelerometer noise level: {average_noise_intensity:.2f} mm/s² {average_noise_intensity_text}'
+    fig.text(0.50, 0.985, title_line3, ha='left', va='top', fontsize=14, color=KLIPPAIN_COLORS['dark_purple'])
+    fig.text(0.50, 0.950, title_line4, ha='left', va='top', fontsize=11, color=KLIPPAIN_COLORS['dark_purple'])
 
-    filtered_data = [accel_signal_filter(raw_datas[0][:, i + 1]) for i in range(3)]
-    spikes = [find_first_spike(filtered_data[i]) for i in range(3)]
-    spikes_sorted = sorted([(spikes[0], 'x'), (spikes[1], 'y'), (spikes[2], 'z')], key=lambda x: x[0][1])
+    # Adding a small Klippain logo to the top left corner of the figure
+    ax_logo = fig.add_axes([0.001, 0.894, 0.105, 0.105], anchor='NW')
+    ax_logo.imshow(plt.imread(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'klippain.png')))
+    ax_logo.axis('off')
 
-    # Using the previous variables to get the axes_map and errors
-    axes_map = ','.join([f'{spike[0][0]}{spike[1]}' for spike in spikes_sorted])
-    # alignment_error, sensitivity_error = compute_errors(filtered_data, spikes_sorted, accel, NUM_POINTS)
+    # Adding Shake&Tune version in the top right corner
+    if st_version != 'unknown':
+        fig.text(0.995, 0.980, st_version, ha='right', va='bottom', fontsize=8, color=KLIPPAIN_COLORS['purple'])
 
-    results = f'Be aware that this macro is experimental and has been known to sometimes produce incorrect results. Use it with caution and always check the results!\n'
-    results += f'Detected axes_map:\n {axes_map}\n'
-
-    # TODO: work on this function that is currently not giving good results...
-    # results += "Accelerometer angle deviation:\n"
-    # for axis, angle in alignment_error.items():
-    #     angle_degrees = np.degrees(angle)  # Convert radians to degrees
-    #     results += f"  {axis.upper()} axis: {angle_degrees:.2f} degrees\n"
-
-    # results += "Accelerometer sensitivity error:\n"
-    # for axis, error in sensitivity_error.items():
-    #     results += f"  {axis.upper()} axis: {error:.2f}%\n"
-
-    return results
+    return fig
 
 
 def main():

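Note: the per-axis pipeline is parse -> remove gravity offset -> denoise -> integrate twice -> rescale to the known segment length -> fit a direction by linear regression -> snap it to the nearest signed unit vector. The velocity drift correction is the subtle step: any residual constant bias integrates into a linear ramp, and because the toolhead is at rest before and after each move, subtracting the straight line through the first and last velocity samples re-anchors both ends at zero. A small sketch (not from the diff):

import numpy as np

time = np.linspace(0, 1, 200)
accel = np.where((time > 0.3) & (time < 0.5), 1000.0, 0.0)   # accelerate
accel -= np.where((time > 0.5) & (time < 0.7), 1000.0, 0.0)  # decelerate
accel += 2.0  # small constant bias, e.g. imperfect gravity removal

velocity = np.array([np.trapz(accel[:i], time[:i]) for i in range(2, len(time) + 1)])
velocity -= np.linspace(velocity[0], velocity[-1], len(velocity))  # drift correction
print(f'end velocity after correction: {velocity[-1]:.3f} mm/s')   # -> 0.000
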
@@ -133,6 +395,9 @@ def main():
     opts.add_option(
         '-a', '--accel', type='string', dest='accel', default=None, help='acceleration value used to do the movements'
     )
+    opts.add_option(
+        '-l', '--length', type='float', dest='length', default=None, help='recorded length for each segment'
+    )
     options, args = opts.parse_args()
     if len(args) < 1:
         opts.error('No CSV file(s) to analyse')

@@ -142,13 +407,17 @@ def main():
         accel_value = float(options.accel)
     except ValueError:
         opts.error('Invalid acceleration value. It should be a numeric value.')
+    if options.length is None:
+        opts.error('You must specify the length of the measured segments (option -l)')
+    try:
+        length_value = float(options.length)
+    except ValueError:
+        opts.error('Invalid length value. It should be a numeric value.')
+    if options.output is None:
+        opts.error('You must specify an output file.png to use the script (option -o)')
 
-    results = axesmap_calibration(args, accel_value)
-    ConsoleOutput.print(results)
+    fig = axesmap_calibration(args, length_value, accel_value, 'unknown')
+    fig.savefig(options.output, dpi=150)
 
-    if options.output is not None:
-        with open(options.output, 'w') as f:
-            f.write(results)
-
 
 if __name__ == '__main__':

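Note: the standalone entry point now renders a PNG instead of printing text, so -o (output) and the new -l (recorded segment length) are both mandatory. Calling the analysis directly looks roughly like this (a sketch; the import path depends on where the package is installed):

from analyze_axesmap import axesmap_calibration  # adjust to the actual package layout

fig = axesmap_calibration(
    lognames=[
        '/tmp/shaketune-axesmap_X.csv',
        '/tmp/shaketune-axesmap_Y.csv',
        '/tmp/shaketune-axesmap_Z.csv',
    ],
    fixed_length=30,  # must match the SEGMENT_LENGTH used while recording
    accel=1500,
)
fig.savefig('axesmap.png', dpi=150)
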
@@ -10,7 +10,6 @@ from typing import Callable, Optional
 
 from matplotlib.figure import Figure
 
-from ..helpers.console_output import ConsoleOutput
 from ..measurement.motorsconfigparser import MotorsConfigParser
 from ..shaketune_config import ShakeTuneConfig
 from .analyze_axesmap import axesmap_calibration

@@ -238,41 +237,41 @@ class AxesMapFinder(GraphCreator):
     def __init__(self, config: ShakeTuneConfig):
         super().__init__(config)
 
-        self._graph_date = datetime.now().strftime('%Y%m%d_%H%M%S')
-        self._type = 'axesmap'
-        self._folder = config.get_results_folder()
-
         self._accel = None
+        self._segment_length = None
+        self._graph_date = datetime.now().strftime('%Y%m%d_%H%M%S')
 
-    def configure(self, accel: int) -> None:
+        self._setup_folder('axesmap')
+
+    def configure(self, accel: int, segment_length: float) -> None:
         self._accel = accel
+        self._segment_length = segment_length
 
-    def find_axesmap(self) -> None:
-        tmp_folder = Path('/tmp')
-        globbed_files = list(tmp_folder.glob('shaketune-axemap_*.csv'))
-
-        if not globbed_files:
-            raise FileNotFoundError('no CSV files found in the /tmp folder to find the axes map!')
-
-        # Find the CSV files with the latest timestamp and process it
-        logname = sorted(globbed_files, key=lambda f: f.stat().st_mtime, reverse=True)[0]
-        results = axesmap_calibration(
-            lognames=[str(logname)],
-            accel=self._accel,
-        )
-        ConsoleOutput.print(results)
-
-        result_filename = self._folder / f'{self._type}_{self._graph_date}.txt'
-        with result_filename.open('w') as f:
-            f.write(results)
-
-    # While the AxesMapFinder doesn't directly create a graph, we need to implement this
-    # method to allow using it seemlessly like all the other GraphCreator objects
     def create_graph(self) -> None:
-        self.find_axesmap()
+        lognames = self._move_and_prepare_files(
+            glob_pattern='shaketune-axesmap_*.csv',
+            min_files_required=3,
+            custom_name_func=lambda f: f.stem.split('_')[1].upper(),
+        )
+        fig = axesmap_calibration(
+            lognames=[str(path) for path in lognames],
+            accel=self._accel,
+            fixed_length=self._segment_length,
+            st_version=self._version,
+        )
+        self._save_figure_and_cleanup(fig, lognames)
 
-    def clean_old_files(self, keep_results: int) -> None:
-        tmp_folder = Path('/tmp')
-        globbed_files = list(tmp_folder.glob('shaketune-axemap_*.csv'))
-        for csv_file in globbed_files:
-            csv_file.unlink()
+    def clean_old_files(self, keep_results: int = 3) -> None:
+        # Get all PNG files in the directory as a list of Path objects
+        files = sorted(self._folder.glob('*.png'), key=lambda f: f.stat().st_mtime, reverse=True)
+        if len(files) <= keep_results:
+            return  # No need to delete any files
+
+        # Delete the older files
+        for old_file in files[keep_results:]:
+            file_date = '_'.join(old_file.stem.split('_')[1:3])
+            for suffix in ['X', 'Y', 'Z']:
+                csv_file = self._folder / f'axesmap_{file_date}_{suffix}.csv'
+                csv_file.unlink(missing_ok=True)
+            old_file.unlink()

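Note: AxesMapFinder now behaves like the other GraphCreator subclasses: create_graph() gathers the three shaketune-axesmap_*.csv recordings, renders one figure, and saves it through the shared helpers, while clean_old_files() keeps only the newest PNGs and removes each deleted result's three companion axesmap_<date>_<axis>.csv files, so graphs and raw data age out together.
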
@@ -7,7 +7,7 @@ from .helpers.console_output import ConsoleOutput
|
|||||||
KLIPPER_FOLDER = Path.home() / 'klipper'
|
KLIPPER_FOLDER = Path.home() / 'klipper'
|
||||||
KLIPPER_LOG_FOLDER = Path.home() / 'printer_data/logs'
|
KLIPPER_LOG_FOLDER = Path.home() / 'printer_data/logs'
|
||||||
RESULTS_BASE_FOLDER = Path.home() / 'printer_data/config/K-ShakeTune_results'
|
RESULTS_BASE_FOLDER = Path.home() / 'printer_data/config/K-ShakeTune_results'
|
||||||
RESULTS_SUBFOLDERS = {'belts': 'belts', 'shaper': 'inputshaper', 'vibrations': 'vibrations'}
|
RESULTS_SUBFOLDERS = {'axesmap': 'axesmap', 'belts': 'belts', 'shaper': 'inputshaper', 'vibrations': 'vibrations'}
|
||||||
|
|
||||||
|
|
||||||
class ShakeTuneConfig:
|
class ShakeTuneConfig:
|
||||||
|
|||||||
@@ -53,7 +53,7 @@ class ShakeTuneProcess:
         # Trying to reduce Shake&Tune process priority to avoid slowing down the main Klipper process
         # as this could lead to random "Timer too close" errors when already running CANbus, etc...
         try:
-            os.nice(15)
+            os.nice(19)
         except Exception:
             ConsoleOutput.print('Warning: failed reducing Shake&Tune process priority, continuing...')
 

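Note: os.nice(19) is the weakest scheduling priority Linux allows (up from 15), further reducing the chance that post-processing starves the main Klipper thread and triggers "Timer too close" errors.
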
@@ -76,8 +76,7 @@ class ShakeTuneProcess:
 
         graph_creator.clean_old_files(self._config.keep_n_results)
 
-        if graph_creator.get_type() != 'axesmap':
-            ConsoleOutput.print(f'{graph_creator.get_type()} graphs created successfully!')
-            ConsoleOutput.print(
-                f'Cleaned up the output folder (only the last {self._config.keep_n_results} results were kept)!'
-            )
+        ConsoleOutput.print(f'{graph_creator.get_type()} graphs created successfully!')
+        ConsoleOutput.print(
+            f'Cleaned up the output folder (only the last {self._config.keep_n_results} results were kept)!'
+        )