Compare commits
100 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
47770e2d34 | ||
|
|
7f46da1708 | ||
|
|
cf2cb2cf2f | ||
|
|
bc80aa0be1 | ||
|
|
ca45745a0c | ||
|
|
ea11c262ff | ||
|
|
46dd0c2ca6 | ||
|
|
19bc62a6b7 | ||
|
|
178fa2ea3b | ||
|
|
f3ed4cd1a9 | ||
|
|
31a5ed8db2 | ||
|
|
abc20fdf41 | ||
|
|
99b719051c | ||
|
|
94e110736a | ||
|
|
6184233b03 | ||
|
|
f0f12a613a | ||
|
|
2cc9ac63e6 | ||
|
|
e4810f82d0 | ||
|
|
bf6adcd93c | ||
|
|
1ce9fd5c2b | ||
|
|
385ee01d34 | ||
|
|
ab6e76ea11 | ||
|
|
915e69d420 | ||
|
|
656f6d0d9e | ||
|
|
43ac2911a2 | ||
|
|
c01704437e | ||
|
|
ef006dbd1e | ||
|
|
41590be745 | ||
|
|
8336b62f97 | ||
|
|
24fb5398c8 | ||
|
|
8b0e80c583 | ||
|
|
7652f0d8e7 | ||
|
|
bde8577d0e | ||
|
|
53bee00517 | ||
|
|
51f2efb5f8 | ||
|
|
086293618a | ||
|
|
cbc43f7e24 | ||
|
|
fa41637ac9 | ||
|
|
c2c05e51ae | ||
|
|
617a47f968 | ||
|
|
83588029f1 | ||
|
|
4297aef0f5 | ||
|
|
37195051e4 | ||
|
|
0a25344b0c | ||
|
|
bf7a98d98b | ||
|
|
82b91c1b40 | ||
|
|
536c3c0eff | ||
|
|
73672fd694 | ||
|
|
312a9c9ffa | ||
|
|
f4e700a1ff | ||
|
|
80c8da622d | ||
|
|
b42e377ac6 | ||
|
|
7cfd02a7c6 | ||
|
|
9fa07a12c4 | ||
|
|
1a4fea3c8c | ||
|
|
eab10ce5da | ||
|
|
0696a60b7f | ||
|
|
ac96cb2eb7 | ||
|
|
84c406b407 | ||
|
|
3d07904556 | ||
|
|
16fabdc895 | ||
|
|
fe0fa1856a | ||
|
|
f3f2a7951a | ||
|
|
d71e385ad9 | ||
|
|
a7cd005f5b | ||
|
|
f846534f0f | ||
|
|
db57300eb2 | ||
|
|
680c3053f6 | ||
|
|
32047dbdba | ||
|
|
e056ec2249 | ||
|
|
0170e34cab | ||
|
|
0ff63edec8 | ||
|
|
f385bd98e3 | ||
|
|
d1394ad841 | ||
|
|
2a84f9c849 | ||
|
|
2a627a1fac | ||
|
|
cf57d5dd5c | ||
|
|
8216af87f1 | ||
|
|
c7e39da528 | ||
|
|
1a0ee0a162 | ||
|
|
87cb9015fa | ||
|
|
b32abe2eca | ||
|
|
7050018274 | ||
|
|
8721488d8c | ||
|
|
7ba692954f | ||
|
|
9ce3677a00 | ||
|
|
3a9cb57f31 | ||
|
|
43a205d036 | ||
|
|
a1e9269ba3 | ||
|
|
8e304a71ca | ||
|
|
5d54db0ca0 | ||
|
|
d52680738f | ||
|
|
f95c55230b | ||
|
|
0f7fa66af4 | ||
|
|
da10593ca7 | ||
|
|
060a800cc3 | ||
|
|
7c76be5077 | ||
|
|
a4c2ead732 | ||
|
|
6e884528c0 | ||
|
|
17ccddfa0f |
1
.git-blame-ignore-revs
Normal file
@@ -0,0 +1 @@
|
|||||||
|
ef006dbd1e31cc7cae2fae978401a818ee2025d1
|
||||||
163
.gitignore
vendored
Normal file
@@ -0,0 +1,163 @@
|
|||||||
|
# Byte-compiled / optimized / DLL files
|
||||||
|
__pycache__/
|
||||||
|
*.py[cod]
|
||||||
|
*$py.class
|
||||||
|
|
||||||
|
# C extensions
|
||||||
|
*.so
|
||||||
|
|
||||||
|
# Distribution / packaging
|
||||||
|
.Python
|
||||||
|
build/
|
||||||
|
develop-eggs/
|
||||||
|
dist/
|
||||||
|
downloads/
|
||||||
|
eggs/
|
||||||
|
.eggs/
|
||||||
|
lib/
|
||||||
|
lib64/
|
||||||
|
parts/
|
||||||
|
sdist/
|
||||||
|
var/
|
||||||
|
wheels/
|
||||||
|
share/python-wheels/
|
||||||
|
*.egg-info/
|
||||||
|
.installed.cfg
|
||||||
|
*.egg
|
||||||
|
MANIFEST
|
||||||
|
|
||||||
|
# PyInstaller
|
||||||
|
# Usually these files are written by a python script from a template
|
||||||
|
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||||
|
*.manifest
|
||||||
|
*.spec
|
||||||
|
|
||||||
|
# Installer logs
|
||||||
|
pip-log.txt
|
||||||
|
pip-delete-this-directory.txt
|
||||||
|
|
||||||
|
# Unit test / coverage reports
|
||||||
|
htmlcov/
|
||||||
|
.tox/
|
||||||
|
.nox/
|
||||||
|
.coverage
|
||||||
|
.coverage.*
|
||||||
|
.cache
|
||||||
|
nosetests.xml
|
||||||
|
coverage.xml
|
||||||
|
*.cover
|
||||||
|
*.py,cover
|
||||||
|
.hypothesis/
|
||||||
|
.pytest_cache/
|
||||||
|
cover/
|
||||||
|
|
||||||
|
# Translations
|
||||||
|
*.mo
|
||||||
|
*.pot
|
||||||
|
|
||||||
|
# Django stuff:
|
||||||
|
*.log
|
||||||
|
local_settings.py
|
||||||
|
db.sqlite3
|
||||||
|
db.sqlite3-journal
|
||||||
|
|
||||||
|
# Flask stuff:
|
||||||
|
instance/
|
||||||
|
.webassets-cache
|
||||||
|
|
||||||
|
# Scrapy stuff:
|
||||||
|
.scrapy
|
||||||
|
|
||||||
|
# Sphinx documentation
|
||||||
|
docs/_build/
|
||||||
|
|
||||||
|
# PyBuilder
|
||||||
|
.pybuilder/
|
||||||
|
target/
|
||||||
|
|
||||||
|
# Jupyter Notebook
|
||||||
|
.ipynb_checkpoints
|
||||||
|
|
||||||
|
# IPython
|
||||||
|
profile_default/
|
||||||
|
ipython_config.py
|
||||||
|
|
||||||
|
# pyenv
|
||||||
|
# For a library or package, you might want to ignore these files since the code is
|
||||||
|
# intended to run in multiple environments; otherwise, check them in:
|
||||||
|
# .python-version
|
||||||
|
|
||||||
|
# pipenv
|
||||||
|
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
||||||
|
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
||||||
|
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
||||||
|
# install all needed dependencies.
|
||||||
|
#Pipfile.lock
|
||||||
|
|
||||||
|
# poetry
|
||||||
|
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
||||||
|
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
||||||
|
# commonly ignored for libraries.
|
||||||
|
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
||||||
|
#poetry.lock
|
||||||
|
|
||||||
|
# pdm
|
||||||
|
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
||||||
|
#pdm.lock
|
||||||
|
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
|
||||||
|
# in version control.
|
||||||
|
# https://pdm.fming.dev/#use-with-ide
|
||||||
|
.pdm.toml
|
||||||
|
|
||||||
|
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
||||||
|
__pypackages__/
|
||||||
|
|
||||||
|
# Celery stuff
|
||||||
|
celerybeat-schedule
|
||||||
|
celerybeat.pid
|
||||||
|
|
||||||
|
# SageMath parsed files
|
||||||
|
*.sage.py
|
||||||
|
|
||||||
|
# Environments
|
||||||
|
.env
|
||||||
|
.venv
|
||||||
|
env/
|
||||||
|
venv/
|
||||||
|
ENV/
|
||||||
|
env.bak/
|
||||||
|
venv.bak/
|
||||||
|
|
||||||
|
# Spyder project settings
|
||||||
|
.spyderproject
|
||||||
|
.spyproject
|
||||||
|
|
||||||
|
# Rope project settings
|
||||||
|
.ropeproject
|
||||||
|
|
||||||
|
# mkdocs documentation
|
||||||
|
/site
|
||||||
|
|
||||||
|
# mypy
|
||||||
|
.mypy_cache/
|
||||||
|
.dmypy.json
|
||||||
|
dmypy.json
|
||||||
|
|
||||||
|
# Pyre type checker
|
||||||
|
.pyre/
|
||||||
|
|
||||||
|
# pytype static type analyzer
|
||||||
|
.pytype/
|
||||||
|
|
||||||
|
# Cython debug symbols
|
||||||
|
cython_debug/
|
||||||
|
|
||||||
|
# PyCharm
|
||||||
|
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
||||||
|
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
||||||
|
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
||||||
|
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
||||||
|
#.idea/
|
||||||
|
|
||||||
|
test/
|
||||||
|
.vscode/
|
||||||
@@ -1,97 +0,0 @@
|
|||||||
################################################
|
|
||||||
###### STANDARD INPUT_SHAPER CALIBRATIONS ######
|
|
||||||
################################################
|
|
||||||
# Written by Frix_x#0161 #
|
|
||||||
# @version: 1.4
|
|
||||||
|
|
||||||
# CHANGELOG:
|
|
||||||
# v1.4: added possibility to only run one axis at a time for the axes shaper calibration
|
|
||||||
# v1.3: added possibility to override the default parameters
|
|
||||||
# v1.2: added EXCITATE_AXIS_AT_FREQ to hold a specific excitating frequency on an axis and diagnose mechanical problems
|
|
||||||
# v1.1: added M400 to validate that the files are correctly saved to disk
|
|
||||||
# v1.0: first version of the automatic input shaper workflow
|
|
||||||
|
|
||||||
|
|
||||||
### What is it ? ###
|
|
||||||
# This macro helps you to configure the input shaper algorithm of Klipper by running the tests sequencially and calling an automatic script
|
|
||||||
# that generate the graphs, manage the files and so on. It's basically a fully automatic input shaper calibration workflow.
|
|
||||||
# Results can be found in your config folder using FLuidd/Maisail file manager.
|
|
||||||
|
|
||||||
# The goal is to make it easy to set, share and use it.
|
|
||||||
|
|
||||||
# Usage:
|
|
||||||
# 1. Call the AXES_SHAPER_CALIBRATION macro, wait for it to end and compute the graphs. Then look for the results in the results folder.
|
|
||||||
# 2. Call the BELTS_SHAPER_CALIBRATION macro, wait for it to end and compute the graphs. Then look for the results in the results folder.
|
|
||||||
# 3. If you find out some strange noise, you can use the EXCITATE_AXIS_AT_FREQ macro to diagnose the origin
|
|
||||||
|
|
||||||
|
|
||||||
[gcode_macro AXES_SHAPER_CALIBRATION]
|
|
||||||
description: Run standard input shaper test for all axes
|
|
||||||
gcode:
|
|
||||||
{% set verbose = params.VERBOSE|default(true) %}
|
|
||||||
{% set min_freq = params.FREQ_START|default(5)|float %}
|
|
||||||
{% set max_freq = params.FREQ_END|default(133.3)|float %}
|
|
||||||
{% set hz_per_sec = params.HZ_PER_SEC|default(1)|float %}
|
|
||||||
{% set axis = params.AXIS|default("all")|string|lower %}
|
|
||||||
|
|
||||||
{% set X, Y = False, False %}
|
|
||||||
|
|
||||||
{% if axis == "all" %}
|
|
||||||
{% set X, Y = True, True %}
|
|
||||||
{% elif axis == "x" %}
|
|
||||||
{% set X = True %}
|
|
||||||
{% elif axis == "y" %}
|
|
||||||
{% set Y = True %}
|
|
||||||
{% else %}
|
|
||||||
{ action_raise_error("AXIS selection invalid. Should be either all, x or y!") }
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
{% if X %}
|
|
||||||
TEST_RESONANCES AXIS=X OUTPUT=raw_data NAME=x FREQ_START={min_freq} FREQ_END={max_freq} HZ_PER_SEC={hz_per_sec}
|
|
||||||
M400
|
|
||||||
|
|
||||||
{% if verbose %}
|
|
||||||
RESPOND MSG="X axis shaper graphs generation..."
|
|
||||||
{% endif %}
|
|
||||||
RUN_SHELL_COMMAND CMD=plot_graph PARAMS=SHAPER
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
{% if Y %}
|
|
||||||
TEST_RESONANCES AXIS=Y OUTPUT=raw_data NAME=y FREQ_START={min_freq} FREQ_END={max_freq} HZ_PER_SEC={hz_per_sec}
|
|
||||||
M400
|
|
||||||
|
|
||||||
{% if verbose %}
|
|
||||||
RESPOND MSG="Y axis shaper graphs generation..."
|
|
||||||
{% endif %}
|
|
||||||
RUN_SHELL_COMMAND CMD=plot_graph PARAMS=SHAPER
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
|
|
||||||
[gcode_macro BELTS_SHAPER_CALIBRATION]
|
|
||||||
description: Run custom demi-axe test to analyze belts on CoreXY printers
|
|
||||||
gcode:
|
|
||||||
{% set verbose = params.VERBOSE|default(true) %}
|
|
||||||
{% set min_freq = params.FREQ_START|default(5)|float %}
|
|
||||||
{% set max_freq = params.FREQ_END|default(133.33)|float %}
|
|
||||||
{% set hz_per_sec = params.HZ_PER_SEC|default(1)|float %}
|
|
||||||
|
|
||||||
TEST_RESONANCES AXIS=1,1 OUTPUT=raw_data NAME=b FREQ_START={min_freq} FREQ_END={max_freq} HZ_PER_SEC={hz_per_sec}
|
|
||||||
M400
|
|
||||||
TEST_RESONANCES AXIS=1,-1 OUTPUT=raw_data NAME=a FREQ_START={min_freq} FREQ_END={max_freq} HZ_PER_SEC={hz_per_sec}
|
|
||||||
M400
|
|
||||||
|
|
||||||
{% if verbose %}
|
|
||||||
RESPOND MSG="Belts graphs generation..."
|
|
||||||
{% endif %}
|
|
||||||
RUN_SHELL_COMMAND CMD=plot_graph PARAMS=BELTS
|
|
||||||
|
|
||||||
|
|
||||||
[gcode_macro EXCITATE_AXIS_AT_FREQ]
|
|
||||||
description: Maintain a specified input shaper excitating frequency for some time to diagnose vibrations
|
|
||||||
gcode:
|
|
||||||
{% set FREQUENCY = params.FREQUENCY|default(25)|int %}
|
|
||||||
{% set TIME = params.TIME|default(10)|int %}
|
|
||||||
{% set AXIS = params.AXIS|default("x")|string|lower %}
|
|
||||||
|
|
||||||
TEST_RESONANCES OUTPUT=raw_data AXIS={AXIS} FREQ_START={FREQUENCY-1} FREQ_END={FREQUENCY+1} HZ_PER_SEC={1/(TIME/3)}
|
|
||||||
M400
|
|
||||||
@@ -1,191 +0,0 @@
|
|||||||
################################################
|
|
||||||
###### VIBRATIONS AND SPEED OPTIMIZATIONS ######
|
|
||||||
################################################
|
|
||||||
# Written by Frix_x#0161 #
|
|
||||||
# @version: 2.1
|
|
||||||
|
|
||||||
# CHANGELOG:
|
|
||||||
# v2.1: allow decimal entries for speed and increment and added the E axis as an option to be neasured
|
|
||||||
# v2.0: added the possibility to measure mutliple axis
|
|
||||||
# v1.0: first speed and vibrations optimization macro
|
|
||||||
|
|
||||||
|
|
||||||
### What is it ? ###
|
|
||||||
# This macro helps you to identify the speed settings that exacerbate the vibrations of the machine (ie. where the frame resonate badly).
|
|
||||||
# It also helps to find the clean speed ranges where the machine is silent.
|
|
||||||
# I had some strong vibrations at very specific speeds on my machine (52mm/s for example) and I wanted to find all these problematic speeds
|
|
||||||
# to avoid them in my slicer profile and finally get the silent machine I was dreaming!
|
|
||||||
|
|
||||||
# It works by moving the toolhead at different speed settings while recording the vibrations using the ADXL chip. Then the macro call a custom script
|
|
||||||
# to compute and find the best speed settings. The results can be found in your config folder using Fluidd/Mainsail file manager.
|
|
||||||
|
|
||||||
# The goal is to make it easy to set, share and use it.
|
|
||||||
|
|
||||||
# This macro is parametric and most of the values can be adjusted with their respective input parameters.
|
|
||||||
# It can be called without any parameters - in which case the default values would be used - or with any combination of parameters as desired.
|
|
||||||
|
|
||||||
# Usage:
|
|
||||||
# 1. DO YOUR INPUT SHAPER CALIBRATION FIRST !!! This macro should not be used before as it would be useless and the results invalid.
|
|
||||||
# 2. Call the VIBRATIONS_CALIBRATION macro with the speed range you want to measure (default 20 to 200mm/s with 2mm/s increment).
|
|
||||||
# Be carefull about the Z_HEIGHT variable that default to 20mm -> if your ADXL is under the nozzle, increase it to avoid a crash of the ADXL on the bed of the machine.
|
|
||||||
# 3. Wait for it to finish all the measurement and compute the graph. Then look at it in the results folder.
|
|
||||||
|
|
||||||
|
|
||||||
[gcode_macro VIBRATIONS_CALIBRATION]
|
|
||||||
gcode:
|
|
||||||
#
|
|
||||||
# PARAMETERS
|
|
||||||
#
|
|
||||||
{% set size = params.SIZE|default(60)|int %} # size of the area where the movements are done
|
|
||||||
{% set direction = params.DIRECTION|default('XY') %} # can be set to either XY, AB, ABXY, A, B, X, Y, Z
|
|
||||||
{% set z_height = params.Z_HEIGHT|default(20)|int %} # z height to put the toolhead before starting the movements
|
|
||||||
{% set verbose = params.VERBOSE|default(true) %} # Wether to log the current speed in the console
|
|
||||||
|
|
||||||
{% set min_speed = params.MIN_SPEED|default(20)|float * 60 %} # minimum feedrate for the movements
|
|
||||||
{% set max_speed = params.MAX_SPEED|default(200)|float * 60 %} # maximum feedrate for the movements
|
|
||||||
{% set speed_increment = params.SPEED_INCREMENT|default(2)|float * 60 %} # feedrate increment between each move
|
|
||||||
{% set feedrate_travel = params.TRAVEL_SPEED|default(200)|int * 60 %} # travel feedrate between moves
|
|
||||||
|
|
||||||
{% set accel_chip = params.ACCEL_CHIP|default("adxl345") %} # ADXL chip name in the config
|
|
||||||
|
|
||||||
#
|
|
||||||
# COMPUTED VALUES
|
|
||||||
#
|
|
||||||
{% set mid_x = printer.toolhead.axis_maximum.x|float / 2 %}
|
|
||||||
{% set mid_y = printer.toolhead.axis_maximum.y|float / 2 %}
|
|
||||||
{% set nb_samples = ((max_speed - min_speed) / speed_increment + 1) | int %}
|
|
||||||
|
|
||||||
{% set direction_factor = {
|
|
||||||
'XY' : {
|
|
||||||
'start' : {'x': -0.5, 'y': -0.5 },
|
|
||||||
'move_factors' : {
|
|
||||||
'0' : {'x': 0.5, 'y': -0.5, 'z': 0.0 },
|
|
||||||
'1' : {'x': 0.5, 'y': 0.5, 'z': 0.0 },
|
|
||||||
'2' : {'x': -0.5, 'y': 0.5, 'z': 0.0 },
|
|
||||||
'3' : {'x': -0.5, 'y': -0.5, 'z': 0.0 }
|
|
||||||
}
|
|
||||||
},
|
|
||||||
'AB' : {
|
|
||||||
'start' : {'x': 0.0, 'y': 0.0 },
|
|
||||||
'move_factors' : {
|
|
||||||
'0' : {'x': 0.5, 'y': -0.5, 'z': 0.0 },
|
|
||||||
'1' : {'x': -0.5, 'y': 0.5, 'z': 0.0 },
|
|
||||||
'2' : {'x': 0.0, 'y': 0.0, 'z': 0.0 },
|
|
||||||
'3' : {'x': 0.5, 'y': 0.5, 'z': 0.0 },
|
|
||||||
'4' : {'x': -0.5, 'y': -0.5, 'z': 0.0 },
|
|
||||||
'5' : {'x': 0.0, 'y': 0.0, 'z': 0.0 }
|
|
||||||
}
|
|
||||||
},
|
|
||||||
'ABXY' : {
|
|
||||||
'start' : {'x': -0.5, 'y': 0.5 },
|
|
||||||
'move_factors' : {
|
|
||||||
'0' : {'x': -0.5, 'y': -0.5, 'z': 0.0 },
|
|
||||||
'1' : {'x': 0.5, 'y': -0.5, 'z': 0.0 },
|
|
||||||
'2' : {'x': -0.5, 'y': 0.5, 'z': 0.0 },
|
|
||||||
'3' : {'x': 0.5, 'y': 0.5, 'z': 0.0 },
|
|
||||||
'4' : {'x': -0.5, 'y': -0.5, 'z': 0.0 },
|
|
||||||
'5' : {'x': -0.5, 'y': 0.5, 'z': 0.0 }
|
|
||||||
}
|
|
||||||
},
|
|
||||||
'B' : {
|
|
||||||
'start' : {'x': 0.5, 'y': 0.5 },
|
|
||||||
'move_factors' : {
|
|
||||||
'0' : {'x': -0.5, 'y': -0.5, 'z': 0.0 },
|
|
||||||
'1' : {'x': 0.5, 'y': 0.5, 'z': 0.0 }
|
|
||||||
}
|
|
||||||
},
|
|
||||||
'A' : {
|
|
||||||
'start' : {'x': -0.5, 'y': 0.5 },
|
|
||||||
'move_factors' : {
|
|
||||||
'0' : {'x': 0.5, 'y': -0.5, 'z': 0.0 },
|
|
||||||
'1' : {'x': -0.5, 'y': 0.5, 'z': 0.0 }
|
|
||||||
}
|
|
||||||
},
|
|
||||||
'X' : {
|
|
||||||
'start' : {'x': -0.5, 'y': 0.0 },
|
|
||||||
'move_factors' : {
|
|
||||||
'0' : {'x': 0.5, 'y': 0.0, 'z': 0.0 },
|
|
||||||
'1' : {'x': -0.5, 'y': 0.0, 'z': 0.0 }
|
|
||||||
}
|
|
||||||
},
|
|
||||||
'Y' : {
|
|
||||||
'start' : {'x': 0.0, 'y': 0.5 },
|
|
||||||
'move_factors' : {
|
|
||||||
'0' : {'x': 0.0, 'y': -0.5, 'z': 0.0 },
|
|
||||||
'1' : {'x': 0.0, 'y': 0.5, 'z': 0.0 }
|
|
||||||
}
|
|
||||||
},
|
|
||||||
'Z' : {
|
|
||||||
'start' : {'x': 0.0, 'y': 0.0 },
|
|
||||||
'move_factors' : {
|
|
||||||
'0' : {'x': 0.0, 'y': 0.0, 'z': 1.0 },
|
|
||||||
'1' : {'x': 0.0, 'y': 0.0, 'z': 0.0 }
|
|
||||||
}
|
|
||||||
},
|
|
||||||
'E' : {
|
|
||||||
'start' : {'x': 0.0, 'y': 0.0 },
|
|
||||||
'move_factor' : 0.05
|
|
||||||
}
|
|
||||||
}
|
|
||||||
%}
|
|
||||||
|
|
||||||
#
|
|
||||||
# STARTING...
|
|
||||||
#
|
|
||||||
{% if not 'xyz' in printer.toolhead.homed_axes %}
|
|
||||||
{ action_raise_error("Must Home printer first!") }
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
{% if params.SPEED_INCREMENT|default(2)|float * 100 != (params.SPEED_INCREMENT|default(2)|float * 100)|int %}
|
|
||||||
{ action_raise_error("Only 2 decimal digits are allowed for SPEED_INCREMENT") }
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
{% if (size / (max_speed / 60)) < 0.25 and direction != 'E' %}
|
|
||||||
{ action_raise_error("SIZE is too small for this MAX_SPEED. Increase SIZE or decrease MAX_SPEED!") }
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
{% if not (direction in direction_factor) %}
|
|
||||||
{ action_raise_error("DIRECTION is not valid. Only XY, AB, ABXY, A, B, X, Y, Z or E is allowed!") }
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
{action_respond_info("")}
|
|
||||||
{action_respond_info("Starting speed and vibration calibration")}
|
|
||||||
{action_respond_info("This operation can not be interrupted by normal means. Hit the \"emergency stop\" button to stop it if needed")}
|
|
||||||
{action_respond_info("")}
|
|
||||||
|
|
||||||
SAVE_GCODE_STATE NAME=STATE_VIBRATIONS_CALIBRATION
|
|
||||||
|
|
||||||
M83
|
|
||||||
G90
|
|
||||||
|
|
||||||
# Going to the start position
|
|
||||||
G1 Z{z_height}
|
|
||||||
G1 X{mid_x + (size * direction_factor[direction].start.x) } Y{mid_y + (size * direction_factor[direction].start.y)} F{feedrate_travel}
|
|
||||||
|
|
||||||
# vibration pattern for each frequency
|
|
||||||
{% for curr_sample in range(0, nb_samples) %}
|
|
||||||
{% set curr_speed = min_speed + curr_sample * speed_increment %}
|
|
||||||
{% if verbose %}
|
|
||||||
RESPOND MSG="{"Current speed: %.2f mm/s" % (curr_speed / 60)|float}"
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
ACCELEROMETER_MEASURE CHIP={accel_chip}
|
|
||||||
{% if direction == 'E' %}
|
|
||||||
G0 E{curr_speed*direction_factor[direction].move_factor} F{curr_speed}
|
|
||||||
{% else %}
|
|
||||||
{% for key, factor in direction_factor[direction].move_factors|dictsort %}
|
|
||||||
G1 X{mid_x + (size * factor.x) } Y{mid_y + (size * factor.y)} Z{z_height + (size * factor.z)} F{curr_speed}
|
|
||||||
{% endfor %}
|
|
||||||
{% endif %}
|
|
||||||
ACCELEROMETER_MEASURE CHIP={accel_chip} NAME=sp{("%.2f" % (curr_speed / 60)|float)|replace('.','_')}n1
|
|
||||||
|
|
||||||
G4 P300
|
|
||||||
M400
|
|
||||||
{% endfor %}
|
|
||||||
|
|
||||||
{% if verbose %}
|
|
||||||
RESPOND MSG="Graphs generation... Please wait a minute or two and look in the configured folder."
|
|
||||||
{% endif %}
|
|
||||||
RUN_SHELL_COMMAND CMD=plot_graph PARAMS="VIBRATIONS {direction}"
|
|
||||||
|
|
||||||
RESTORE_GCODE_STATE NAME=STATE_VIBRATIONS_CALIBRATION
|
|
||||||
@@ -1,4 +0,0 @@
|
|||||||
[gcode_shell_command plot_graph]
|
|
||||||
command: ~/printer_data/config/K-ShakeTune/scripts/is_workflow.py
|
|
||||||
timeout: 600.0
|
|
||||||
verbose: True
|
|
||||||
60
K-ShakeTune/K-SnT_axes_map.cfg
Normal file
@@ -0,0 +1,60 @@
|
|||||||
|
############################################################
|
||||||
|
###### AXE_MAP DETECTION AND ACCELEROMETER VALIDATION ######
|
||||||
|
############################################################
|
||||||
|
# Written by Frix_x#0161 #
|
||||||
|
|
||||||
|
[gcode_macro AXES_MAP_CALIBRATION]
|
||||||
|
gcode:
|
||||||
|
{% set z_height = params.Z_HEIGHT|default(20)|int %} # z height to put the toolhead before starting the movements
|
||||||
|
{% set speed = params.SPEED|default(80)|float * 60 %} # feedrate for the movements
|
||||||
|
{% set accel = params.ACCEL|default(1500)|int %} # accel value used to move on the pattern
|
||||||
|
{% set feedrate_travel = params.TRAVEL_SPEED|default(120)|int * 60 %} # travel feedrate between moves
|
||||||
|
{% set accel_chip = params.ACCEL_CHIP|default("adxl345") %} # ADXL chip name in the config
|
||||||
|
|
||||||
|
{% set mid_x = printer.toolhead.axis_maximum.x|float / 2 %}
|
||||||
|
{% set mid_y = printer.toolhead.axis_maximum.y|float / 2 %}
|
||||||
|
|
||||||
|
{% set accel = [accel, printer.configfile.settings.printer.max_accel]|min %}
|
||||||
|
{% set old_accel = printer.toolhead.max_accel %}
|
||||||
|
{% set old_cruise_ratio = printer.toolhead.minimum_cruise_ratio %}
|
||||||
|
{% set old_sqv = printer.toolhead.square_corner_velocity %}
|
||||||
|
|
||||||
|
|
||||||
|
{% if not 'xyz' in printer.toolhead.homed_axes %}
|
||||||
|
{ action_raise_error("Must Home printer first!") }
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{action_respond_info("")}
|
||||||
|
{action_respond_info("Starting accelerometer axe_map calibration")}
|
||||||
|
{action_respond_info("This operation can not be interrupted by normal means. Hit the \"emergency stop\" button to stop it if needed")}
|
||||||
|
{action_respond_info("")}
|
||||||
|
|
||||||
|
SAVE_GCODE_STATE NAME=STATE_AXESMAP_CALIBRATION
|
||||||
|
|
||||||
|
G90
|
||||||
|
|
||||||
|
# Set the wanted acceleration values (not too high to avoid oscillation, not too low to be able to reach constant speed on each segments)
|
||||||
|
SET_VELOCITY_LIMIT ACCEL={accel} MINIMUM_CRUISE_RATIO=0 SQUARE_CORNER_VELOCITY={[(accel / 1000), 5.0]|max}
|
||||||
|
|
||||||
|
# Going to the start position
|
||||||
|
G1 Z{z_height} F{feedrate_travel / 8}
|
||||||
|
G1 X{mid_x - 15} Y{mid_y - 15} F{feedrate_travel}
|
||||||
|
G4 P500
|
||||||
|
|
||||||
|
ACCELEROMETER_MEASURE CHIP={accel_chip}
|
||||||
|
G4 P1000 # This first waiting time is to record the background accelerometer noise before moving
|
||||||
|
G1 X{mid_x + 15} F{speed}
|
||||||
|
G4 P1000
|
||||||
|
G1 Y{mid_y + 15} F{speed}
|
||||||
|
G4 P1000
|
||||||
|
G1 Z{z_height + 15} F{speed}
|
||||||
|
G4 P1000
|
||||||
|
ACCELEROMETER_MEASURE CHIP={accel_chip} NAME=axemap
|
||||||
|
|
||||||
|
RESPOND MSG="Analysis of the movements..."
|
||||||
|
RUN_SHELL_COMMAND CMD=shaketune PARAMS="--type axesmap --accel {accel|int} --chip_name {accel_chip}"
|
||||||
|
|
||||||
|
# Restore the previous acceleration values
|
||||||
|
SET_VELOCITY_LIMIT ACCEL={old_accel} MINIMUM_CRUISE_RATIO={old_cruise_ratio} SQUARE_CORNER_VELOCITY={old_sqv}
|
||||||
|
|
||||||
|
RESTORE_GCODE_STATE NAME=STATE_AXESMAP_CALIBRATION
|
||||||
54
K-ShakeTune/K-SnT_axis.cfg
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
################################################
|
||||||
|
###### STANDARD INPUT_SHAPER CALIBRATIONS ######
|
||||||
|
################################################
|
||||||
|
# Written by Frix_x#0161 #
|
||||||
|
|
||||||
|
[gcode_macro AXES_SHAPER_CALIBRATION]
|
||||||
|
description: Perform standard axis input shaper tests on one or both XY axes to select the best input shaper filter
|
||||||
|
gcode:
|
||||||
|
{% set min_freq = params.FREQ_START|default(5)|float %}
|
||||||
|
{% set max_freq = params.FREQ_END|default(133.3)|float %}
|
||||||
|
{% set hz_per_sec = params.HZ_PER_SEC|default(1)|float %}
|
||||||
|
{% set axis = params.AXIS|default("all")|string|lower %}
|
||||||
|
{% set scv = params.SCV|default(None) %}
|
||||||
|
{% set max_sm = params.MAX_SMOOTHING|default(None) %}
|
||||||
|
{% set keep_results = params.KEEP_N_RESULTS|default(3)|int %}
|
||||||
|
{% set keep_csv = params.KEEP_CSV|default(0)|int %}
|
||||||
|
|
||||||
|
{% set X, Y = False, False %}
|
||||||
|
|
||||||
|
{% if axis == "all" %}
|
||||||
|
{% set X, Y = True, True %}
|
||||||
|
{% elif axis == "x" %}
|
||||||
|
{% set X = True %}
|
||||||
|
{% elif axis == "y" %}
|
||||||
|
{% set Y = True %}
|
||||||
|
{% else %}
|
||||||
|
{ action_raise_error("AXIS selection invalid. Should be either all, x or y!") }
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if scv is none or scv == "" %}
|
||||||
|
{% set scv = printer.toolhead.square_corner_velocity %}
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if max_sm == "" %}
|
||||||
|
{% set max_sm = none %}
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if X %}
|
||||||
|
TEST_RESONANCES AXIS=X OUTPUT=raw_data NAME=x FREQ_START={min_freq} FREQ_END={max_freq} HZ_PER_SEC={hz_per_sec}
|
||||||
|
M400
|
||||||
|
|
||||||
|
RESPOND MSG="X axis frequency profile generation..."
|
||||||
|
RESPOND MSG="This may take some time (1-3min)"
|
||||||
|
RUN_SHELL_COMMAND CMD=shaketune PARAMS="--type shaper --scv {scv} {% if max_sm is not none %}--max_smoothing {max_sm}{% endif %} {% if keep_csv %}--keep_csv{% endif %} --keep_results {keep_results}"
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if Y %}
|
||||||
|
TEST_RESONANCES AXIS=Y OUTPUT=raw_data NAME=y FREQ_START={min_freq} FREQ_END={max_freq} HZ_PER_SEC={hz_per_sec}
|
||||||
|
M400
|
||||||
|
|
||||||
|
RESPOND MSG="Y axis frequency profile generation..."
|
||||||
|
RESPOND MSG="This may take some time (1-3min)"
|
||||||
|
RUN_SHELL_COMMAND CMD=shaketune PARAMS="--type shaper --scv {scv} {% if max_sm is not none %}--max_smoothing {max_sm}{% endif %} {% if keep_csv %}--keep_csv{% endif %} --keep_results {keep_results}"
|
||||||
|
{% endif %}
|
||||||
23
K-ShakeTune/K-SnT_belts.cfg
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
################################################
|
||||||
|
###### STANDARD INPUT_SHAPER CALIBRATIONS ######
|
||||||
|
################################################
|
||||||
|
# Written by Frix_x#0161 #
|
||||||
|
|
||||||
|
[gcode_macro COMPARE_BELTS_RESPONSES]
|
||||||
|
description: Perform a custom half-axis test to analyze and compare the frequency profiles of individual belts on CoreXY printers
|
||||||
|
gcode:
|
||||||
|
{% set min_freq = params.FREQ_START|default(5)|float %}
|
||||||
|
{% set max_freq = params.FREQ_END|default(133.33)|float %}
|
||||||
|
{% set hz_per_sec = params.HZ_PER_SEC|default(1)|float %}
|
||||||
|
{% set keep_results = params.KEEP_N_RESULTS|default(3)|int %}
|
||||||
|
{% set keep_csv = params.KEEP_CSV|default(0)|int %}
|
||||||
|
|
||||||
|
TEST_RESONANCES AXIS=1,1 OUTPUT=raw_data NAME=b FREQ_START={min_freq} FREQ_END={max_freq} HZ_PER_SEC={hz_per_sec}
|
||||||
|
M400
|
||||||
|
|
||||||
|
TEST_RESONANCES AXIS=1,-1 OUTPUT=raw_data NAME=a FREQ_START={min_freq} FREQ_END={max_freq} HZ_PER_SEC={hz_per_sec}
|
||||||
|
M400
|
||||||
|
|
||||||
|
RESPOND MSG="Belts comparative frequency profile generation..."
|
||||||
|
RESPOND MSG="This may take some time (3-5min)"
|
||||||
|
RUN_SHELL_COMMAND CMD=shaketune PARAMS="--type belts {% if keep_csv %}--keep_csv{% endif %} --keep_results {keep_results}"
|
||||||
24
K-ShakeTune/K-SnT_static_freq.cfg
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
################################################
|
||||||
|
###### STANDARD INPUT_SHAPER CALIBRATIONS ######
|
||||||
|
################################################
|
||||||
|
# Written by Frix_x#0161 #
|
||||||
|
|
||||||
|
[gcode_macro EXCITATE_AXIS_AT_FREQ]
|
||||||
|
description: Maintain a specified excitation frequency for a period of time to diagnose and locate a source of vibration
|
||||||
|
gcode:
|
||||||
|
{% set frequency = params.FREQUENCY|default(25)|int %}
|
||||||
|
{% set time = params.TIME|default(10)|int %}
|
||||||
|
{% set axis = params.AXIS|default("x")|string|lower %}
|
||||||
|
|
||||||
|
{% if axis not in ["x", "y", "a", "b"] %}
|
||||||
|
{ action_raise_error("AXIS selection invalid. Should be either x, y, a or b!") }
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if axis == "a" %}
|
||||||
|
{% set axis = "1,-1" %}
|
||||||
|
{% elif axis == "b" %}
|
||||||
|
{% set axis = "1,1" %}
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
TEST_RESONANCES OUTPUT=raw_data AXIS={axis} FREQ_START={frequency-1} FREQ_END={frequency+1} HZ_PER_SEC={1/(time/3)}
|
||||||
|
M400
|
||||||
214
K-ShakeTune/K-SnT_vibrations.cfg
Normal file
@@ -0,0 +1,214 @@
|
|||||||
|
#########################################
|
||||||
|
###### MACHINE VIBRATIONS ANALYSIS ######
|
||||||
|
#########################################
|
||||||
|
# Written by Frix_x#0161 #
|
||||||
|
|
||||||
|
# Sweep the toolhead back and forth on angled lines at gradually increasing speeds while
# recording the accelerometer, then invoke the shaketune shell command to build a machine
# vibrations profile. Only Cartesian and CoreXY kinematics are supported.
[gcode_macro CREATE_VIBRATIONS_PROFILE]
gcode:
    {% set size = params.SIZE|default(100)|int %} # size of the circle where the angled lines are done
    {% set z_height = params.Z_HEIGHT|default(20)|int %} # z height to put the toolhead before starting the movements
    {% set max_speed = params.MAX_SPEED|default(200)|float * 60 %} # maximum feedrate for the movements
    {% set speed_increment = params.SPEED_INCREMENT|default(2)|float * 60 %} # feedrate increment between each move

    {% set feedrate_travel = params.TRAVEL_SPEED|default(200)|int * 60 %} # travel feedrate between moves
    {% set accel = params.ACCEL|default(3000)|int %} # accel value used to move on the pattern
    {% set accel_chip = params.ACCEL_CHIP|default("adxl345") %} # ADXL chip name in the config

    {% set keep_results = params.KEEP_N_RESULTS|default(3)|int %} # number of result sets to keep on disk
    {% set keep_csv = params.KEEP_CSV|default(0)|int %} # keep the raw CSV files after processing when non-zero

    # Middle of the bed: every measurement line is centered on this point
    {% set mid_x = printer.toolhead.axis_maximum.x|float / 2 %}
    {% set mid_y = printer.toolhead.axis_maximum.y|float / 2 %}
    {% set min_speed = 2 * 60 %} # minimum feedrate for the movements is set to 2mm/s
    {% set nb_speed_samples = ((max_speed - min_speed) / speed_increment + 1) | int %}

    # Cap the requested accel to the printer's configured max_accel and remember the
    # current velocity limits so they can be restored at the end of the macro
    {% set accel = [accel, printer.configfile.settings.printer.max_accel]|min %}
    {% set old_accel = printer.toolhead.max_accel %}
    {% set old_cruise_ratio = printer.toolhead.minimum_cruise_ratio %}
    {% set old_sqv = printer.toolhead.square_corner_velocity %}

    {% set kinematics = printer.configfile.settings.printer.kinematics %}

    {% if not 'xyz' in printer.toolhead.homed_axes %}
        { action_raise_error("Must Home printer first!") }
    {% endif %}

    # The post-processing only supports speed increments with at most 2 decimal digits
    {% if params.SPEED_INCREMENT|default(2)|float * 100 != (params.SPEED_INCREMENT|default(2)|float * 100)|int %}
        { action_raise_error("Only 2 decimal digits are allowed for SPEED_INCREMENT") }
    {% endif %}

    # Each segment must take at least 0.25s at max speed to record enough accelerometer data
    {% if (size / (max_speed / 60)) < 0.25 %}
        { action_raise_error("SIZE is too small for this MAX_SPEED. Increase SIZE or decrease MAX_SPEED!") }
    {% endif %}

    {action_respond_info("")}
    {action_respond_info("Starting machine vibrations profile measurement")}
    {action_respond_info("This operation can not be interrupted by normal means. Hit the \"emergency stop\" button to stop it if needed")}
    {action_respond_info("")}

    SAVE_GCODE_STATE NAME=CREATE_VIBRATIONS_PROFILE

    G90

    # Set the wanted acceleration values (not too high to avoid oscillation, not too low to be able to reach constant speed on each segments)
    SET_VELOCITY_LIMIT ACCEL={accel} MINIMUM_CRUISE_RATIO=0 SQUARE_CORNER_VELOCITY={[(accel / 1000), 5.0]|max}

    # Going to the start position
    G1 Z{z_height} F{feedrate_travel / 10}
    G1 X{mid_x } Y{mid_y} F{feedrate_travel}

    {% if kinematics == "cartesian" %}
        # Cartesian motors are on X and Y axis directly
        RESPOND MSG="Cartesian kinematics mode"
        {% set main_angles = [0, 90] %}
    {% elif kinematics == "corexy" %}
        # CoreXY motors are on A and B axis (45 and 135 degrees)
        RESPOND MSG="CoreXY kinematics mode"
        {% set main_angles = [45, 135] %}
    {% else %}
        { action_raise_error("Only Cartesian and CoreXY kinematics are supported at the moment for the vibrations measurement tool!") }
    {% endif %}

    {% set pi = (3.141592653589793) | float %}
    {% set tau = (pi * 2) | float %}

    {% for curr_angle in main_angles %}
        {% for curr_speed_sample in range(0, nb_speed_samples) %}
            {% set curr_speed = min_speed + curr_speed_sample * speed_increment %}
            {% set rad_angle_full = (curr_angle|float * pi / 180) %}

            # -----------------------------------------------------------------------------------------------------------
            # Here are some maths to approximate the sin and cos values of rad_angle in Jinja
            # Thanks a lot to Aubey! for sharing the idea of using hardcoded Taylor series and
            # the associated bit of code to do it easily! This is pure madness!
            # First fold the angle into a range where the truncated series converge well and
            # keep the sign correction in final_mult
            {% set rad_angle = ((rad_angle_full % tau) - (tau / 2)) | float %}

            {% if rad_angle < (-(tau / 4)) %}
                {% set rad_angle = (rad_angle + (tau / 2)) | float %}
                {% set final_mult = (-1) %}
            {% elif rad_angle > (tau / 4) %}
                {% set rad_angle = (rad_angle - (tau / 2)) | float %}
                {% set final_mult = (-1) %}
            {% else %}
                {% set final_mult = (1) %}
            {% endif %}

            # Taylor series terms of sin(x) = x - x^3/3! + x^5/5! - ...
            {% set sin0 = (rad_angle) %}
            {% set sin1 = ((rad_angle ** 3) / 6) | float %}
            {% set sin2 = ((rad_angle ** 5) / 120) | float %}
            {% set sin3 = ((rad_angle ** 7) / 5040) | float %}
            {% set sin4 = ((rad_angle ** 9) / 362880) | float %}
            {% set sin5 = ((rad_angle ** 11) / 39916800) | float %}
            {% set sin6 = ((rad_angle ** 13) / 6227020800) | float %}
            {% set sin7 = ((rad_angle ** 15) / 1307674368000) | float %}
            {% set sin = (-(sin0 - sin1 + sin2 - sin3 + sin4 - sin5 + sin6 - sin7) * final_mult) | float %}

            # Taylor series terms of cos(x) = 1 - x^2/2! + x^4/4! - ...
            {% set cos0 = (1) | float %}
            {% set cos1 = ((rad_angle ** 2) / 2) | float %}
            {% set cos2 = ((rad_angle ** 4) / 24) | float %}
            {% set cos3 = ((rad_angle ** 6) / 720) | float %}
            {% set cos4 = ((rad_angle ** 8) / 40320) | float %}
            {% set cos5 = ((rad_angle ** 10) / 3628800) | float %}
            {% set cos6 = ((rad_angle ** 12) / 479001600) | float %}
            {% set cos7 = ((rad_angle ** 14) / 87178291200) | float %}
            {% set cos = (-(cos0 - cos1 + cos2 - cos3 + cos4 - cos5 + cos6 - cos7) * final_mult) | float %}
            # -----------------------------------------------------------------------------------------------------------

            # Reduce the segments length for the lower speed range (0-100mm/s). The minimum length is 1/5 of the SIZE and is gradually increased
            # to the nominal SIZE at 100mm/s. No further size changes are made above this speed. The goal is to ensure that the print head moves
            # enough to collect enough data for vibration analysis, without doing unnecessary distance to save time. At higher speeds, the full
            # segments lengths are used because the head moves faster and travels more distance in the same amount of time and we want enough data
            {% if curr_speed < (100 * 60) %}
                {% set segment_length_multiplier = 1/5 + 4/5 * (curr_speed / 60) / 100 %}
            {% else %}
                {% set segment_length_multiplier = 1 %}
            {% endif %}

            # Calculate angle coordinates using trigonometry and length multiplier and move to start point
            {% set dx = (size / 2) * cos * segment_length_multiplier %}
            {% set dy = (size / 2) * sin * segment_length_multiplier %}
            G1 X{mid_x - dx} Y{mid_y - dy} F{feedrate_travel}

            # Adjust the number of back and forth movements based on speed to also save time on lower speed range
            # 3 movements are done by default, reduced to 2 between 150-250mm/s and to 1 under 150mm/s.
            {% set movements = 3 %}
            {% if curr_speed < (150 * 60) %}
                {% set movements = 1 %}
            {% elif curr_speed < (250 * 60) %}
                {% set movements = 2 %}
            {% endif %}

            ACCELEROMETER_MEASURE CHIP={accel_chip}

            # Back and forth movements to record the vibrations at constant speed in both direction
            {% for n in range(movements) %}
                G1 X{mid_x + dx} Y{mid_y + dy} F{curr_speed}
                G1 X{mid_x - dx} Y{mid_y - dy} F{curr_speed}
            {% endfor %}

            # Stop the measurement, naming the file after the angle and speed of this sample
            ACCELEROMETER_MEASURE CHIP={accel_chip} NAME=an{("%.2f" % curr_angle|float)|replace('.','_')}sp{("%.2f" % (curr_speed / 60)|float)|replace('.','_')}
            G4 P300

            M400
        {% endfor %}
    {% endfor %}

    # Restore the previous acceleration values
    SET_VELOCITY_LIMIT ACCEL={old_accel} MINIMUM_CRUISE_RATIO={old_cruise_ratio} SQUARE_CORNER_VELOCITY={old_sqv}

    # Extract the TMC names and configuration
    {% set ns_x = namespace(path='') %}
    {% set ns_y = namespace(path='') %}

    {% for item in printer %}
        {% set parts = item.split() %}
        {% if parts|length == 2 and parts[0].startswith('tmc') and parts[0][3:].isdigit() %}
            {% if parts[1] == 'stepper_x' %}
                {% set ns_x.path = parts[0] %}
            {% elif parts[1] == 'stepper_y' %}
                {% set ns_y.path = parts[0] %}
            {% endif %}
        {% endif %}
    {% endfor %}

    # Build a "key:value|" metadata string about the X/Y TMC drivers for the shaketune command
    {% if ns_x.path and ns_y.path %}
        {% set metadata =
        "stepper_x_tmc:" ~ ns_x.path ~ "|"
        "stepper_x_run_current:" ~ (printer[ns_x.path + ' stepper_x'].run_current | round(2) | string) ~ "|"
        "stepper_x_hold_current:" ~ (printer[ns_x.path + ' stepper_x'].hold_current | round(2) | string) ~ "|"
        "stepper_y_tmc:" ~ ns_y.path ~ "|"
        "stepper_y_run_current:" ~ (printer[ns_y.path + ' stepper_y'].run_current | round(2) | string) ~ "|"
        "stepper_y_hold_current:" ~ (printer[ns_y.path + ' stepper_y'].hold_current | round(2) | string) ~ "|"
        %}

        # Add the autotune_tmc info when that plugin is configured (voltage defaults to 24.0 if unset)
        {% set autotune_x = printer.configfile.config['autotune_tmc stepper_x'] if 'autotune_tmc stepper_x' in printer.configfile.config else none %}
        {% set autotune_y = printer.configfile.config['autotune_tmc stepper_y'] if 'autotune_tmc stepper_y' in printer.configfile.config else none %}
        {% if autotune_x and autotune_y %}
            {% set stepper_x_voltage = autotune_x.voltage if autotune_x.voltage else '24.0' %}
            {% set stepper_y_voltage = autotune_y.voltage if autotune_y.voltage else '24.0' %}
            {% set metadata = metadata ~
            "autotune_enabled:True|"
            "stepper_x_motor:" ~ autotune_x.motor ~ "|"
            "stepper_x_voltage:" ~ stepper_x_voltage ~ "|"
            "stepper_y_motor:" ~ autotune_y.motor ~ "|"
            "stepper_y_voltage:" ~ stepper_y_voltage ~ "|"
            %}
        {% else %}
            {% set metadata = metadata ~ "autotune_enabled:False|" %}
        {% endif %}

        DUMP_TMC STEPPER=stepper_x
        DUMP_TMC STEPPER=stepper_y

    {% else %}
        { action_respond_info("No TMC drivers found for X and Y steppers") }
    {% endif %}

    RESPOND MSG="Machine vibrations profile generation..."
    RESPOND MSG="This may take some time (3-5min)"
    RUN_SHELL_COMMAND CMD=shaketune PARAMS="--type vibrations --accel {accel|int} --kinematics {kinematics} {% if metadata %}--metadata {metadata}{% endif %} --chip_name {accel_chip} {% if keep_csv %}--keep_csv{% endif %} --keep_results {keep_results}"

    RESTORE_GCODE_STATE NAME=CREATE_VIBRATIONS_PROFILE
|
||||||
@@ -1,633 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
|
|
||||||
#################################################
|
|
||||||
######## CoreXY BELTS CALIBRATION SCRIPT ########
|
|
||||||
#################################################
|
|
||||||
# Written by Frix_x#0161 #
|
|
||||||
# @version: 2.0
|
|
||||||
|
|
||||||
# CHANGELOG:
|
|
||||||
# v2.0: updated the script to align it to the new K-Shake&Tune module
|
|
||||||
# v1.0: first version of this tool for enhanced vizualisation of belt graphs
|
|
||||||
|
|
||||||
|
|
||||||
# Be sure to make this script executable using SSH: type 'chmod +x ./graph_belts.py' when in the folder!
|
|
||||||
|
|
||||||
#####################################################################
|
|
||||||
################ !!! DO NOT EDIT BELOW THIS LINE !!! ################
|
|
||||||
#####################################################################
|
|
||||||
|
|
||||||
import optparse, matplotlib, sys, importlib, os
|
|
||||||
from textwrap import wrap
|
|
||||||
from collections import namedtuple
|
|
||||||
import numpy as np
|
|
||||||
import matplotlib.pyplot, matplotlib.dates, matplotlib.font_manager
|
|
||||||
import matplotlib.ticker, matplotlib.gridspec, matplotlib.colors
|
|
||||||
import matplotlib.patches
|
|
||||||
import locale
|
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
matplotlib.use('Agg')
|
|
||||||
|
|
||||||
|
|
||||||
ALPHABET = "ABCDEFGHIJKLMNOPQRSTUVWXYZ" # For paired peaks names

# Tuning constants for peak detection and the "mechanical health indicator" computation
PEAKS_DETECTION_THRESHOLD = 0.20
CURVE_SIMILARITY_SIGMOID_K = 0.6
DC_GRAIN_OF_SALT_FACTOR = 0.75
DC_THRESHOLD_METRIC = 1.5e9
DC_MAX_UNPAIRED_PEAKS_ALLOWED = 4

# Define the SignalData namedtuple
# Fix: the typename now matches the assigned name (it previously was 'CalibrationData',
# which gave a misleading repr() and would break pickling of SignalData instances)
SignalData = namedtuple('SignalData', ['freqs', 'psd', 'peaks', 'paired_peaks', 'unpaired_peaks'])

# Klippain branding colors used in all the generated graphs
KLIPPAIN_COLORS = {
    "purple": "#70088C",
    "orange": "#FF8D32",
    "dark_purple": "#150140",
    "dark_orange": "#F24130",
    "red_pink": "#F2055C"
}
|
|
||||||
|
|
||||||
|
|
||||||
# Set the best locale for time and date formating (generation of the titles)
# NOTE(review): locale.getdefaultlocale() is deprecated since Python 3.11 — consider
# locale.setlocale(locale.LC_TIME, '') as the documented replacement; verify behavior first
try:
    locale.setlocale(locale.LC_TIME, locale.getdefaultlocale())
except locale.Error:
    # Fall back to the portable 'C' locale when the system default cannot be applied
    locale.setlocale(locale.LC_TIME, 'C')
|
|
||||||
|
|
||||||
# Klipper can choke on locale-formatted console output, so wrap the built-in print()
# to force the portable 'C' locale for the duration of every call
original_print = print

def print_with_c_locale(*args, **kwargs):
    """Proxy for the built-in print() that runs under the 'C' locale, then restores the previous locale."""
    saved_locale = locale.setlocale(locale.LC_ALL, None)
    locale.setlocale(locale.LC_ALL, 'C')
    original_print(*args, **kwargs)
    locale.setlocale(locale.LC_ALL, saved_locale)

print = print_with_c_locale
|
|
||||||
|
|
||||||
|
|
||||||
######################################################################
|
|
||||||
# Computation of the PSD graph
|
|
||||||
######################################################################
|
|
||||||
|
|
||||||
# Calculate an estimated "power spectral density" by delegating to Klipper's own
# shaper_calibrate helper (imported dynamically elsewhere in this script)
def calc_freq_response(data):
    """Return the processed frequency response of raw accelerometer *data*."""
    return shaper_calibrate.ShaperCalibrate(printer=None).process_accelerometer_data(data)
|
|
||||||
|
|
||||||
|
|
||||||
# Estimate a "similarity" factor between two PSD curves, scaled to a percentage. It is
# used to quantify how close the two belt path behaviors and responses are to each other.
def compute_curve_similarity_factor(signal1, signal2):
    """Return a percentage expressing how similar the PSD curves of *signal1* and *signal2* are."""
    # Resample the second PSD onto the first signal's frequency bins so both curves
    # share the same x-axis, then cross-correlate them
    resampled_psd2 = np.interp(signal1.freqs, signal2.freqs, signal2.psd)
    correlation = np.correlate(signal1.psd, resampled_psd2, mode='full')

    # Normalize the correlation peak by the combined signal energy to get a 0-1 similarity
    energy = np.sqrt(np.sum(signal1.psd**2) * np.sum(resampled_psd2**2))
    similarity = np.max(correlation) / energy

    # Stretch the raw similarity through a sigmoid to spread the useful range and get a percentage
    return sigmoid_scale(-np.log(1 - similarity), CURVE_SIMILARITY_SIGMOID_K)
|
|
||||||
|
|
||||||
|
|
||||||
# Find all the significant peaks in a curve: local maxima (derivative sign change + to -)
# above a threshold, to avoid capturing peaks in the low amplitude noise of the signal
def detect_peaks(psd, freqs, window_size=5, vicinity=3):
    """Detect significant peaks of *psd*.

    Returns a tuple (peak_indices, peak_frequencies) with the refined peak positions
    on the original (unsmoothed) curve and their corresponding frequencies.
    """
    # Smooth the curve using a moving average to avoid catching peaks everywhere in noisy signals
    kernel = np.ones(window_size) / window_size
    smoothed_psd = np.convolve(psd, kernel, mode='valid')
    mean_pad = [np.mean(psd[:window_size])] * (window_size // 2)
    smoothed_psd = np.concatenate((mean_pad, smoothed_psd))

    # Find local maxima on the smoothed curve and keep only those above the detection threshold
    smoothed_peaks = np.where((smoothed_psd[:-2] < smoothed_psd[1:-1]) & (smoothed_psd[1:-1] > smoothed_psd[2:]))[0] + 1
    detection_threshold = PEAKS_DETECTION_THRESHOLD * psd.max()
    smoothed_peaks = smoothed_peaks[smoothed_psd[smoothed_peaks] > detection_threshold]

    # Refine each peak position on the original curve by taking the true local maximum in a
    # small vicinity window. Fix: the refined index is computed from the clamped window start;
    # the previous `peak + argmax - vicinity` form produced wrong (possibly negative) indices
    # for peaks closer than `vicinity` to the start of the array.
    refined_peaks = []
    for peak in smoothed_peaks:
        start = max(0, peak - vicinity)
        end = min(len(psd), peak + vicinity + 1)
        refined_peaks.append(start + np.argmax(psd[start:end]))

    return np.array(refined_peaks), freqs[refined_peaks]
|
|
||||||
|
|
||||||
|
|
||||||
# Create pairs of peaks that are close in frequency on two curves (resonance points are
# expected to be similar on both belts of a CoreXY kinematic)
def pair_peaks(peaks1, freqs1, psd1, peaks2, freqs2, psd2):
    """Greedily pair peaks of two curves that are close in frequency.

    Returns (paired_peaks, unpaired_peaks1, unpaired_peaks2) where each paired entry is
    ((idx1, freq1, psd1_value), (idx2, freq2, psd2_value)).
    """
    # Nothing can be paired when either curve has no detected peaks. This also avoids the
    # RuntimeWarning (and nan threshold) from np.median() on an empty distances array.
    if len(peaks1) == 0 or len(peaks2) == 0:
        return [], list(peaks1), list(peaks2)

    # Compute a dynamic detection threshold to filter and pair peaks efficiently
    # even if the signal is very noisy (this get clipped to a maximum of 10Hz diff)
    distances = []
    for p1 in peaks1:
        for p2 in peaks2:
            distances.append(abs(freqs1[p1] - freqs2[p2]))
    distances = np.array(distances)

    median_distance = np.median(distances)
    iqr = np.percentile(distances, 75) - np.percentile(distances, 25)

    threshold = median_distance + 1.5 * iqr
    threshold = min(threshold, 10)

    # Greedily pair the globally closest remaining peaks until no pair is below the threshold
    paired_peaks = []
    unpaired_peaks1 = list(peaks1)
    unpaired_peaks2 = list(peaks2)

    while unpaired_peaks1 and unpaired_peaks2:
        min_distance = threshold + 1
        pair = None

        for p1 in unpaired_peaks1:
            for p2 in unpaired_peaks2:
                distance = abs(freqs1[p1] - freqs2[p2])
                if distance < min_distance:
                    min_distance = distance
                    pair = (p1, p2)

        if pair is None: # No more pairs below the threshold
            break

        p1, p2 = pair
        paired_peaks.append(((p1, freqs1[p1], psd1[p1]), (p2, freqs2[p2], psd2[p2])))
        unpaired_peaks1.remove(p1)
        unpaired_peaks2.remove(p2)

    return paired_peaks, unpaired_peaks1, unpaired_peaks2
|
|
||||||
|
|
||||||
|
|
||||||
######################################################################
|
|
||||||
# Computation of a basic signal spectrogram
|
|
||||||
######################################################################
|
|
||||||
|
|
||||||
def compute_spectrogram(data):
    """Compute a combined X+Y+Z spectrogram of raw accelerometer *data*.

    *data* columns are (time, ax, ay, az). Returns (pdata, bins, t) as produced by
    matplotlib.mlab.specgram, with the three axes' PSDs summed together.
    """
    nb_samples = data.shape[0]
    sample_rate = nb_samples / (data[-1, 0] - data[0, 0])

    # Round up to a power of 2 for faster FFT
    nfft = 1 << int(.5 * sample_rate - 1).bit_length()
    window = np.kaiser(nfft, 6.)

    def _specgram(signal):
        return matplotlib.mlab.specgram(
            signal, Fs=sample_rate, NFFT=nfft, noverlap=nfft // 2, window=window,
            mode='psd', detrend='mean', scale_by_freq=False)

    # Sum the PSDs of the three accelerometer axes into a single spectrogram
    pdata, bins, t = _specgram(data[:, 1])
    pdata += _specgram(data[:, 2])[0]
    pdata += _specgram(data[:, 3])[0]
    return pdata, bins, t
|
|
||||||
|
|
||||||
|
|
||||||
######################################################################
|
|
||||||
# Computation of the differential spectrogram
|
|
||||||
######################################################################
|
|
||||||
|
|
||||||
# Standard bilinear interpolation, used while re-aligning both belts spectrograms
# before combining them in the differential spectrogram
def bilinear_interpolate(x, y, points, values):
    """Interpolate the value at (x, y) inside the grid cell defined by
    points=((x1, x2), (y1, y2)) with corner values=((f11, f12), (f21, f22))."""
    (x1, x2), (y1, y2) = points
    (f11, f12), (f21, f22) = values

    cell_area = (x2 - x1) * (y2 - y1)
    weighted_sum = (f11 * (x2 - x) * (y2 - y)
                    + f21 * (x - x1) * (y2 - y)
                    + f12 * (x2 - x) * (y - y1)
                    + f22 * (x - x1) * (y - y1))
    return weighted_sum / cell_area
|
|
||||||
|
|
||||||
|
|
||||||
# Interpolate source_data (2D) to match target_x and target_y in order to get similar
# time and frequency dimensions for the differential spectrogram
def interpolate_2d(target_x, target_y, source_x, source_y, source_data):
    """Resample *source_data* (indexed by source_y rows and source_x columns) onto the
    (target_x, target_y) grid using bilinear interpolation. Returns a new 2D array of
    shape (len(target_y), len(target_x))."""
    interpolated_data = np.zeros((len(target_y), len(target_x)))

    for i, y in enumerate(target_y):
        for j, x in enumerate(target_x):
            # Find indices of surrounding points in source data and clamp them so that
            # both idx and idx+1 stay inside the arrays. Fix: the decrement is applied
            # for indices >= len - 2 (the previous `== len - 2` check missed the
            # idx == len - 1 case, which crashed with an IndexError on the right edge).
            x_indices = np.searchsorted(source_x, x) - 1
            y_indices = np.searchsorted(source_y, y) - 1
            x_indices = max(0, min(len(source_x) - 1, x_indices))
            y_indices = max(0, min(len(source_y) - 1, y_indices))

            if x_indices >= len(source_x) - 2:
                x_indices -= 1
            if y_indices >= len(source_y) - 2:
                y_indices -= 1

            x1, x2 = source_x[x_indices], source_x[x_indices + 1]
            y1, y2 = source_y[y_indices], source_y[y_indices + 1]

            f11 = source_data[y_indices, x_indices]
            f12 = source_data[y_indices, x_indices + 1]
            f21 = source_data[y_indices + 1, x_indices]
            f22 = source_data[y_indices + 1, x_indices + 1]

            interpolated_data[i, j] = bilinear_interpolate(x, y, ((x1, x2), (y1, y2)), ((f11, f12), (f21, f22)))

    return interpolated_data
|
|
||||||
|
|
||||||
|
|
||||||
# Identify a "ridge" of high gradient magnitude in a spectrogram - ie. a resonance diagonal
# line. Starting from the maximum value of the first column, it follows the direction of the
# strongest gradients in a small vicinity window, column by column.
def detect_ridge(pdata, n_average=3):
    """Trace the resonance ridge across *pdata* and return one row index per column."""
    grad_y, grad_x = np.gradient(pdata)
    magnitude = np.sqrt(grad_x**2 + grad_y**2)

    # Start at the strongest bin of the first column
    path = [np.argmax(pdata[:, 0])]

    # Follow the ridge across the remaining columns
    for col in range(1, pdata.shape[1]):
        # Restrict the search to a window around the previous ridge point
        low = max(0, path[-1] - n_average)
        high = min(pdata.shape[0], path[-1] + n_average + 1)
        window = magnitude[low:high, col]
        # Average the positions of the strongest few points inside the window
        strongest = np.argsort(window)[-n_average:]
        path.append(int(np.mean(strongest) + low))

    return np.array(path)
|
|
||||||
|
|
||||||
|
|
||||||
# Calculate the time offset between two resonance lines (ridge1 and ridge2) using circular
# cross-correlation computed in the frequency domain (FFT). The returned lag is used to
# re-align both belts spectrograms before building the combined spectrogram.
def compute_cross_correlation_offset(ridge1, ridge2):
    """Return the lag (in columns) at which *ridge1* and *ridge2* are most similar."""
    # Zero-pad the shorter sequence so both have the same length
    size = max(len(ridge1), len(ridge2))
    if len(ridge1) < size:
        ridge1 = np.pad(ridge1, (0, size - len(ridge1)))
    if len(ridge2) < size:
        ridge2 = np.pad(ridge2, (0, size - len(ridge2)))

    # Circular cross-correlation via the frequency domain
    spectrum = np.fft.fft(ridge1) * np.conj(np.fft.fft(ridge2))
    cross_corr = np.fft.fftshift(np.fft.ifft(spectrum))

    # Peak position relative to the (shifted) center gives the lag
    return np.argmax(np.abs(cross_corr)) - len(ridge1) // 2
|
|
||||||
|
|
||||||
|
|
||||||
# Shift a 2D array along its second dimension - ie. time here - by shift_amount columns,
# zero-filling the vacated columns and keeping the original shape
def shift_data_in_time(data, shift_amount):
    """Return *data* shifted right (positive) or left (negative) by *shift_amount* columns."""
    if shift_amount == 0:
        return data
    if shift_amount > 0:
        # Shift right: prepend zero columns and drop the overflow on the right
        return np.pad(data, ((0, 0), (shift_amount, 0)), mode='constant')[:, :-shift_amount]
    # Shift left: append zero columns and drop the overflow on the left
    return np.pad(data, ((0, 0), (0, -shift_amount)), mode='constant')[:, -shift_amount:]
|
|
||||||
|
|
||||||
|
|
||||||
# Combine two similar spectrograms - ie. from both belts paths - by detecting their resonance
# ridges, computing the time lag between them and re-aligning them. The aligned spectrograms
# are then subtracted: the result is a mostly zero-ed spectrogram where colored zones highlight
# differences in the belts paths.
def combined_spectrogram(data1, data2):
    """Return (combined_data, bins, t): the differential spectrogram of the two measurements."""
    pdata1, bins1, t1 = compute_spectrogram(data1)
    pdata2, _, _ = compute_spectrogram(data2)

    # Locate the resonance ridge in each spectrogram and measure their time lag
    offset = compute_cross_correlation_offset(detect_ridge(pdata1), detect_ridge(pdata2))

    # Re-align the second spectrogram on the first one and resample it on the same grid
    pdata2_aligned = shift_data_in_time(pdata2, offset)
    pdata2_interpolated = interpolate_2d(t1, bins1, t1, bins1, pdata2_aligned)

    # The absolute difference leaves only the belt path discrepancies visible
    return np.abs(pdata1 - pdata2_interpolated), bins1, t1
|
|
||||||
|
|
||||||
|
|
||||||
# Compute a composite and highly subjective value indicating the "mechanical health of the
# printer (0 to 100%)" - the likelihood of mechanical issues. It is based on the differential
# spectrogram sum of gradient, salted with a bit of the similarity cross-correlation from
# compute_curve_similarity_factor() and with a bit of the number of unpaired peaks. Above
# ~50% should be considered as probably problematic.
def compute_mhi(combined_data, similarity_coefficient, num_unpaired_peaks):
    """Return (percentage, text) where text is the human-readable verdict from mhi_lut()."""
    # Keep only the significant part of the differential spectrogram
    filtered_data = combined_data[combined_data > 100]

    # "Total variability metric": the sum of the gradient magnitude emphasizes regions with
    # rapid changes (edges of resonance peaks). Fix: np.gradient() requires at least 2 samples,
    # so an (almost) clean differential spectrogram - nothing left after the filter - is
    # treated as zero variability instead of raising a ValueError.
    if filtered_data.size < 2:
        total_variability_metric = 0.0
    else:
        total_variability_metric = np.sum(np.abs(np.gradient(filtered_data)))

    # Scale the metric to a percentage using the threshold (found empirically on a large number of user data shared to me)
    base_percentage = (np.log1p(total_variability_metric) / np.log1p(DC_THRESHOLD_METRIC)) * 100

    # Adjust the percentage based on the similarity_coefficient to add a grain of salt
    adjusted_percentage = base_percentage * (1 - DC_GRAIN_OF_SALT_FACTOR * (similarity_coefficient / 100))

    # Adjust the percentage again based on the number of unpaired peaks to add a second grain of salt
    peak_confidence = num_unpaired_peaks / DC_MAX_UNPAIRED_PEAKS_ALLOWED
    final_percentage = (1 - peak_confidence) * adjusted_percentage + peak_confidence * 100

    # Ensure the result lies between 0 and 100 by clipping the computed value
    final_percentage = np.clip(final_percentage, 0, 100)

    return final_percentage, mhi_lut(final_percentage)
|
|
||||||
|
|
||||||
|
|
||||||
# LUT to transform the MHI into a textual value easy to understand for the users of the script
def mhi_lut(mhi):
    """Map a mechanical health indicator percentage (0-100) to a human readable verdict."""
    bands = (
        (30, "Excellent mechanical health"),
        (45, "Good mechanical health"),
        (55, "Acceptable mechanical health"),
        (70, "Potential signs of a mechanical issue"),
        (85, "Likely a mechanical issue"),
        (100, "Mechanical issue detected"),
    )
    # Walk the bands in order and return the first verdict whose upper bound is not exceeded
    if 0 <= mhi <= 100:
        for upper_bound, verdict in bands:
            if mhi <= upper_bound:
                return verdict
|
|
||||||
|
|
||||||
|
|
||||||
######################################################################
|
|
||||||
# Graphing
|
|
||||||
######################################################################
|
|
||||||
|
|
||||||
def plot_compare_frequency(ax, lognames, signal1, signal2, max_freq):
    """Plot the two belts PSD curves on *ax* and annotate their paired/unpaired peaks.

    signal1/signal2 are SignalData-like tuples exposing .freqs, .psd,
    .paired_peaks and .unpaired_peaks (already filled in by the caller);
    lognames are the two source CSV paths (the belt letter comes from the name).
    Returns (similarity_factor, unpaired_peak_count).
    """
    # Get the belt name for the legend to avoid putting the full file name
    # (first character of the last '_'-separated token of the basename: expected 'A' or 'B')
    signal1_belt = (lognames[0].split('/')[-1]).split('_')[-1][0]
    signal2_belt = (lognames[1].split('/')[-1]).split('_')[-1][0]

    # Map each belt letter to its CoreXY motion axis in the legend
    if signal1_belt == 'A' and signal2_belt == 'B':
        signal1_belt += " (axis 1,-1)"
        signal2_belt += " (axis 1, 1)"
    elif signal1_belt == 'B' and signal2_belt == 'A':
        signal1_belt += " (axis 1, 1)"
        signal2_belt += " (axis 1,-1)"
    else:
        print("Warning: belts doesn't seem to have the correct name A and B (extracted from the filename.csv)")

    # Plot the two belts PSD signals
    ax.plot(signal1.freqs, signal1.psd, label="Belt " + signal1_belt, color=KLIPPAIN_COLORS['purple'])
    ax.plot(signal2.freqs, signal2.psd, label="Belt " + signal2_belt, color=KLIPPAIN_COLORS['orange'])

    # Trace the "relax region" (also used as a threshold to filter and detect the peaks)
    psd_lowest_max = min(signal1.psd.max(), signal2.psd.max())
    peaks_warning_threshold = PEAKS_DETECTION_THRESHOLD * psd_lowest_max
    ax.axhline(y=peaks_warning_threshold, color='black', linestyle='--', linewidth=0.5)
    ax.fill_between(signal1.freqs, 0, peaks_warning_threshold, color='green', alpha=0.15, label='Relax Region')

    # Trace and annotate the peaks on the graph
    paired_peak_count = 0
    unpaired_peak_count = 0
    offsets_table_data = []

    # Each paired peak entry is (peak1, peak2); peak[0] is used as a PSD bin index here
    # (presumably (bin index, frequency) tuples — see plot_difference_spectrogram which reads peak[1])
    for _, (peak1, peak2) in enumerate(signal1.paired_peaks):
        label = ALPHABET[paired_peak_count]
        # Relative amplitude delta (percent of the larger of the two peak amplitudes)
        amplitude_offset = abs(((signal2.psd[peak2[0]] - signal1.psd[peak1[0]]) / max(signal1.psd[peak1[0]], signal2.psd[peak2[0]])) * 100)
        frequency_offset = abs(signal2.freqs[peak2[0]] - signal1.freqs[peak1[0]])
        offsets_table_data.append([f"Peaks {label}", f"{frequency_offset:.1f} Hz", f"{amplitude_offset:.1f} %"])

        # Mark both peaks and link them with a dotted line
        ax.plot(signal1.freqs[peak1[0]], signal1.psd[peak1[0]], "x", color='black')
        ax.plot(signal2.freqs[peak2[0]], signal2.psd[peak2[0]], "x", color='black')
        ax.plot([signal1.freqs[peak1[0]], signal2.freqs[peak2[0]]], [signal1.psd[peak1[0]], signal2.psd[peak2[0]]], ":", color='gray')

        # Annotate them as "A1"/"A2", "B1"/"B2", ...
        ax.annotate(label + "1", (signal1.freqs[peak1[0]], signal1.psd[peak1[0]]),
                    textcoords="offset points", xytext=(8, 5),
                    ha='left', fontsize=13, color='black')
        ax.annotate(label + "2", (signal2.freqs[peak2[0]], signal2.psd[peak2[0]]),
                    textcoords="offset points", xytext=(8, 5),
                    ha='left', fontsize=13, color='black')
        paired_peak_count += 1

    # Unpaired peaks are numbered and highlighted in red (they hint at a problem on one belt only)
    for peak in signal1.unpaired_peaks:
        ax.plot(signal1.freqs[peak], signal1.psd[peak], "x", color='black')
        ax.annotate(str(unpaired_peak_count + 1), (signal1.freqs[peak], signal1.psd[peak]),
                    textcoords="offset points", xytext=(8, 5),
                    ha='left', fontsize=13, color='red', weight='bold')
        unpaired_peak_count += 1

    for peak in signal2.unpaired_peaks:
        ax.plot(signal2.freqs[peak], signal2.psd[peak], "x", color='black')
        ax.annotate(str(unpaired_peak_count + 1), (signal2.freqs[peak], signal2.psd[peak]),
                    textcoords="offset points", xytext=(8, 5),
                    ha='left', fontsize=13, color='red', weight='bold')
        unpaired_peak_count += 1

    # Compute the similarity (using cross-correlation of the PSD signals)
    ax2 = ax.twinx()  # To split the legends in two box
    ax2.yaxis.set_visible(False)
    similarity_factor = compute_curve_similarity_factor(signal1, signal2)
    # Empty plots with a label only: used to add free-text lines to the second legend box
    ax2.plot([], [], ' ', label=f'Estimated similarity: {similarity_factor:.1f}%')
    ax2.plot([], [], ' ', label=f'Number of unpaired peaks: {unpaired_peak_count}')
    print(f"Belts estimated similarity: {similarity_factor:.1f}%")

    # Setting axis parameters, grid and graph title
    ax.set_xlabel('Frequency (Hz)')
    ax.set_xlim([0, max_freq])
    ax.set_ylabel('Power spectral density')
    psd_highest_max = max(signal1.psd.max(), signal2.psd.max())
    ax.set_ylim([0, psd_highest_max + psd_highest_max * 0.05])

    ax.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
    ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
    ax.ticklabel_format(axis='y', style='scientific', scilimits=(0,0))
    ax.grid(which='major', color='grey')
    ax.grid(which='minor', color='lightgrey')
    fontP = matplotlib.font_manager.FontProperties()
    fontP.set_size('small')
    ax.set_title('Belts Frequency Profiles (estimated similarity: {:.1f}%)'.format(similarity_factor), fontsize=14, color=KLIPPAIN_COLORS['dark_orange'], weight='bold')

    # Print the table of offsets ontop of the graph below the original legend (upper right)
    if len(offsets_table_data) > 0:
        columns = ["", "Frequency delta", "Amplitude delta", ]
        offset_table = ax.table(cellText=offsets_table_data, colLabels=columns, bbox=[0.66, 0.75, 0.33, 0.15], loc='upper right', cellLoc='center')
        offset_table.auto_set_font_size(False)
        offset_table.set_fontsize(8)
        offset_table.auto_set_column_width([0, 1, 2])
        offset_table.set_zorder(100)
        # Semi-transparent white background so the table stays readable over the curves
        cells = [key for key in offset_table.get_celld().keys()]
        for cell in cells:
            offset_table[cell].set_facecolor('white')
            offset_table[cell].set_alpha(0.6)

    ax.legend(loc='upper left', prop=fontP)
    ax2.legend(loc='upper right', prop=fontP)

    return similarity_factor, unpaired_peak_count
|
|
||||||
|
|
||||||
|
|
||||||
def plot_difference_spectrogram(ax, data1, data2, signal1, signal2, similarity_factor, max_freq):
    """Draw the differential spectrogram of the two belts measurements on *ax*.

    Also computes and prints the experimental Mechanical Health Indicator (MHI),
    and marks the unpaired (red) and paired (purple) peak frequencies found in
    the belts frequency profiles.
    """
    combined_data, bins, t = combined_spectrogram(data1, data2)

    # Compute the MHI value from the differential spectrogram sum of gradient, salted with
    # the similarity factor and the number or unpaired peaks from the belts frequency profile
    # Be careful, this value is highly opinionated and is pretty experimental!
    mhi, textual_mhi = compute_mhi(combined_data, similarity_factor, len(signal1.unpaired_peaks) + len(signal2.unpaired_peaks))
    print(f"[experimental] Mechanical Health Indicator: {textual_mhi.lower()} ({mhi:.1f}%)")
    ax.set_title(f"Differential Spectrogram", fontsize=14, color=KLIPPAIN_COLORS['dark_orange'], weight='bold')
    # Empty plot with a label only: adds the MHI text line to the legend
    ax.plot([], [], ' ', label=f'{textual_mhi} (experimental)')

    # Draw the differential spectrogram with a specific norm to get light grey zero values and red for max values (vmin to vcenter is not used)
    norm = matplotlib.colors.TwoSlopeNorm(vcenter=np.min(combined_data), vmax=np.max(combined_data))
    ax.pcolormesh(bins, t, combined_data.T, cmap='RdBu_r', norm=norm, shading='gouraud')
    ax.set_xlabel('Frequency (hz)')
    ax.set_xlim([0., max_freq])
    ax.set_ylabel('Time (s)')
    ax.set_ylim([0, t[-1]])

    fontP = matplotlib.font_manager.FontProperties()
    fontP.set_size('medium')
    ax.legend(loc='best', prop=fontP)

    # Plot vertical lines for unpaired peaks
    unpaired_peak_count = 0
    for _, peak in enumerate(signal1.unpaired_peaks):
        ax.axvline(signal1.freqs[peak], color='red', linestyle='dotted', linewidth=1.5)
        ax.annotate(f"Peak {unpaired_peak_count + 1}", (signal1.freqs[peak], t[-1]*0.05),
                    textcoords="data", color='red', rotation=90, fontsize=10,
                    verticalalignment='bottom', horizontalalignment='right')
        unpaired_peak_count +=1

    for _, peak in enumerate(signal2.unpaired_peaks):
        ax.axvline(signal2.freqs[peak], color='red', linestyle='dotted', linewidth=1.5)
        ax.annotate(f"Peak {unpaired_peak_count + 1}", (signal2.freqs[peak], t[-1]*0.05),
                    textcoords="data", color='red', rotation=90, fontsize=10,
                    verticalalignment='bottom', horizontalalignment='right')
        unpaired_peak_count +=1

    # Plot vertical lines and zones for paired peaks
    for idx, (peak1, peak2) in enumerate(signal1.paired_peaks):
        label = ALPHABET[idx]
        # peak[1] is read as a frequency here — paired peak entries are presumably
        # (bin index, frequency) tuples; TODO confirm against pair_peaks()
        x_min = min(peak1[1], peak2[1])
        x_max = max(peak1[1], peak2[1])
        ax.axvline(x_min, color=KLIPPAIN_COLORS['purple'], linestyle='dotted', linewidth=1.5)
        ax.axvline(x_max, color=KLIPPAIN_COLORS['purple'], linestyle='dotted', linewidth=1.5)
        ax.fill_between([x_min, x_max], 0, np.max(combined_data), color=KLIPPAIN_COLORS['purple'], alpha=0.3)
        ax.annotate(f"Peaks {label}", (x_min, t[-1]*0.05),
                    textcoords="data", color=KLIPPAIN_COLORS['purple'], rotation=90, fontsize=10,
                    verticalalignment='bottom', horizontalalignment='right')

    return
|
|
||||||
|
|
||||||
|
|
||||||
######################################################################
|
|
||||||
# Custom tools
|
|
||||||
######################################################################
|
|
||||||
|
|
||||||
# Simple helper to compute a sigmoid scaling (from 0 to 100%)
def sigmoid_scale(x, k=1):
    """Squash *x* through a logistic sigmoid of steepness *k* and express the result as a percentage."""
    sigmoid = 1 / (1 + np.exp(-k * x))
    return sigmoid * 100
|
|
||||||
|
|
||||||
# Original Klipper function to get the PSD data of a raw accelerometer signal
def compute_signal_data(data, max_freq):
    """Compute the frequency response of raw accelerometer data and detect its peaks.

    Returns a SignalData tuple; paired_peaks/unpaired_peaks are left to None and
    are filled in later by the caller (after peak pairing).
    """
    calibration_data = calc_freq_response(data)
    freq_mask = calibration_data.freq_bins <= max_freq  # restrict to the plotted band
    freqs = calibration_data.freq_bins[freq_mask]
    psd = calibration_data.get_psd('all')[freq_mask]
    peaks, _ = detect_peaks(psd, freqs)
    return SignalData(freqs=freqs, psd=psd, peaks=peaks, paired_peaks=None, unpaired_peaks=None)
|
|
||||||
|
|
||||||
|
|
||||||
######################################################################
|
|
||||||
# Startup and main routines
|
|
||||||
######################################################################
|
|
||||||
|
|
||||||
def parse_log(logname):
    """Parse a Klipper accelerometer CSV file and return its raw samples as a numpy array.

    Raises ValueError if the file is empty or contains already-processed data
    (PSD or shaper calibration output) instead of raw accelerometer samples.
    """
    with open(logname) as f:
        # Skip the '#' comment header to peek at the first real line
        header = None
        for header in f:
            if not header.startswith('#'):
                break
        # Bugfix: on a completely empty file the loop never runs and `header`
        # used to be unbound (NameError) — raise a clear error instead
        if header is None:
            raise ValueError("File %s is empty and therefore "
                             "is not supported by this script." % (logname,))
        if not header.startswith('freq,psd_x,psd_y,psd_z,psd_xyz'):
            # Raw accelerometer data
            return np.loadtxt(logname, comments='#', delimiter=',')
    # Power spectral density data or shaper calibration data
    raise ValueError("File %s does not contain raw accelerometer data and therefore "
                     "is not supported by this script. Please use the official Klipper "
                     "graph_accelerometer.py script to process it instead." % (logname,))
|
|
||||||
|
|
||||||
|
|
||||||
def setup_klipper_import(kdir):
    """Add Klipper's klippy folder to sys.path and load its shaper_calibrate module.

    The loaded module is stored in the module-level `shaper_calibrate` global.
    """
    global shaper_calibrate
    klippy_path = os.path.join(os.path.expanduser(kdir), 'klippy')
    sys.path.append(klippy_path)
    shaper_calibrate = importlib.import_module('.shaper_calibrate', 'extras')
|
|
||||||
|
|
||||||
|
|
||||||
def belts_calibration(lognames, klipperdir="~/klipper", max_freq=200.):
    """Build and return the full belts comparison figure.

    Parameters:
      lognames: exactly two raw accelerometer CSV files (one per belt).
      klipperdir: Klipper installation folder (used to import shaper_calibrate).
      max_freq: upper bound of the plotted frequency range.
    Raises ValueError when the number of input files is not exactly two.
    """
    setup_klipper_import(klipperdir)

    # Parse data
    datas = [parse_log(fn) for fn in lognames]
    # Bugfix: the old "> 2" test let a single file through and crashed later with
    # an IndexError on datas[1] — exactly two files are required
    if len(datas) != 2:
        raise ValueError("Incorrect number of .csv files used (this function needs two files to compare them)")

    # Compute calibration data for the two datasets with automatic peaks detection
    signal1 = compute_signal_data(datas[0], max_freq)
    signal2 = compute_signal_data(datas[1], max_freq)

    # Pair the peaks across the two datasets
    paired_peaks, unpaired_peaks1, unpaired_peaks2 = pair_peaks(signal1.peaks, signal1.freqs, signal1.psd,
                                                                signal2.peaks, signal2.freqs, signal2.psd)
    signal1 = signal1._replace(paired_peaks=paired_peaks, unpaired_peaks=unpaired_peaks1)
    signal2 = signal2._replace(paired_peaks=paired_peaks, unpaired_peaks=unpaired_peaks2)

    fig = matplotlib.pyplot.figure()
    gs = matplotlib.gridspec.GridSpec(2, 1, height_ratios=[4, 3])
    ax1 = fig.add_subplot(gs[0])
    ax2 = fig.add_subplot(gs[1])

    # Add title
    title_line1 = "RELATIVE BELT CALIBRATION TOOL"
    fig.text(0.12, 0.965, title_line1, ha='left', va='bottom', fontsize=20, color=KLIPPAIN_COLORS['purple'], weight='bold')
    try:
        # File names are expected to look like <prefix>_<YYYYMMDD>_<HHMMSS>_<belt>.csv
        filename = lognames[0].split('/')[-1]
        dt = datetime.strptime(f"{filename.split('_')[1]} {filename.split('_')[2]}", "%Y%m%d %H%M%S")
        title_line2 = dt.strftime('%x %X')
    except Exception:
        # Narrowed from a bare "except:" so KeyboardInterrupt/SystemExit are not
        # swallowed; fall back to showing the raw file names in the title
        print("Warning: CSV filenames look to be different than expected (%s , %s)" % (lognames[0], lognames[1]))
        title_line2 = lognames[0].split('/')[-1] + " / " + lognames[1].split('/')[-1]
    fig.text(0.12, 0.957, title_line2, ha='left', va='top', fontsize=16, color=KLIPPAIN_COLORS['dark_purple'])

    # Plot the graphs
    similarity_factor, _ = plot_compare_frequency(ax1, lognames, signal1, signal2, max_freq)
    plot_difference_spectrogram(ax2, datas[0], datas[1], signal1, signal2, similarity_factor, max_freq)

    fig.set_size_inches(8.3, 11.6)
    fig.tight_layout()
    fig.subplots_adjust(top=0.89)

    # Adding a small Klippain logo to the top left corner of the figure
    ax_logo = fig.add_axes([0.001, 0.899, 0.1, 0.1], anchor='NW', zorder=-1)
    ax_logo.imshow(matplotlib.pyplot.imread(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'klippain.png')))
    ax_logo.axis('off')

    return fig
|
|
||||||
|
|
||||||
|
|
||||||
def main():
    """Command-line entry point: parse options, run the belts calibration and save the figure."""
    # Parse command-line arguments
    # NOTE(review): optparse is deprecated in favor of argparse; kept as-is here
    usage = "%prog [options] <raw logs>"
    opts = optparse.OptionParser(usage)
    opts.add_option("-o", "--output", type="string", dest="output",
                    default=None, help="filename of output graph")
    opts.add_option("-f", "--max_freq", type="float", default=200.,
                    help="maximum frequency to graph")
    opts.add_option("-k", "--klipper_dir", type="string", dest="klipperdir",
                    default="~/klipper", help="main klipper directory")
    options, args = opts.parse_args()
    if len(args) < 1:
        opts.error("Incorrect number of arguments")
    if options.output is None:
        opts.error("You must specify an output file.png to use the script (option -o)")

    # All positional arguments are passed as input CSV files (two are expected downstream)
    fig = belts_calibration(args, options.klipperdir, options.max_freq)
    fig.savefig(options.output)
|
|
||||||
|
|
||||||
|
|
||||||
# Run the command-line interface only when executed as a script (not on import)
if __name__ == '__main__':
    main()
|
|
||||||
@@ -1,395 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
|
|
||||||
#################################################
|
|
||||||
######## INPUT SHAPER CALIBRATION SCRIPT ########
|
|
||||||
#################################################
|
|
||||||
# Derived from the calibrate_shaper.py official Klipper script
|
|
||||||
# Copyright (C) 2020 Dmitry Butyugin <dmbutyugin@google.com>
|
|
||||||
# Copyright (C) 2020 Kevin O'Connor <kevin@koconnor.net>
|
|
||||||
#
|
|
||||||
# Written by Frix_x#0161 #
|
|
||||||
# @version: 2.0
|
|
||||||
|
|
||||||
# CHANGELOG:
|
|
||||||
# v2.0: updated the script to align it to the new K-Shake&Tune module
|
|
||||||
# v1.1: - improved the damping ratio computation with linear approximation for more precision
|
|
||||||
# - reworked the top graph to add more information to it with colored zones,
|
|
||||||
# automated peak detection, etc...
|
|
||||||
# - added a full spectrogram of the signal on the bottom to allow deeper analysis
|
|
||||||
# v1.0: first version of this script inspired from the official Klipper
|
|
||||||
# shaper calibration script to add an automatic damping ratio estimation to it
|
|
||||||
|
|
||||||
|
|
||||||
# Be sure to make this script executable using SSH: type 'chmod +x ./graph_shaper.py' when in the folder!
|
|
||||||
|
|
||||||
#####################################################################
|
|
||||||
################ !!! DO NOT EDIT BELOW THIS LINE !!! ################
|
|
||||||
#####################################################################
|
|
||||||
|
|
||||||
import optparse, matplotlib, sys, importlib, os, math
|
|
||||||
from textwrap import wrap
|
|
||||||
import numpy as np
|
|
||||||
import matplotlib.pyplot, matplotlib.dates, matplotlib.font_manager
|
|
||||||
import matplotlib.ticker, matplotlib.gridspec
|
|
||||||
import locale
|
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
matplotlib.use('Agg')
|
|
||||||
|
|
||||||
|
|
||||||
PEAKS_DETECTION_THRESHOLD = 0.05
|
|
||||||
PEAKS_EFFECT_THRESHOLD = 0.12
|
|
||||||
SPECTROGRAM_LOW_PERCENTILE_FILTER = 5
|
|
||||||
MAX_SMOOTHING = 0.1
|
|
||||||
|
|
||||||
KLIPPAIN_COLORS = {
|
|
||||||
"purple": "#70088C",
|
|
||||||
"dark_purple": "#150140",
|
|
||||||
"dark_orange": "#F24130"
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
# Set the best locale for time and date formating (generation of the titles)
try:
    # An empty string asks the C library to pick the user's locale from the
    # environment (LC_ALL / LC_TIME / LANG). This replaces the deprecated
    # locale.getdefaultlocale(), which could also return (None, None) and make
    # setlocale() raise a TypeError that the handler below did not catch.
    locale.setlocale(locale.LC_TIME, '')
except locale.Error:
    # Fall back to the portable "C" locale when the environment locale is unsupported
    locale.setlocale(locale.LC_TIME, 'C')
|
|
||||||
|
|
||||||
# Override the built-in print function to avoid problem in Klipper due to locale settings
original_print = print
def print_with_c_locale(*args, **kwargs):
    """Print with the locale temporarily forced to "C", then restore it.

    Keeps the script's output formatting locale-independent while leaving the
    user's locale untouched for the rest of the program.
    """
    original_locale = locale.setlocale(locale.LC_ALL, None)
    locale.setlocale(locale.LC_ALL, 'C')
    try:
        original_print(*args, **kwargs)
    finally:
        # Bugfix: always restore the caller's locale, even if printing raised
        # (previously an exception left the process stuck in the "C" locale)
        locale.setlocale(locale.LC_ALL, original_locale)
print = print_with_c_locale
|
|
||||||
|
|
||||||
|
|
||||||
######################################################################
|
|
||||||
# Computation
|
|
||||||
######################################################################
|
|
||||||
|
|
||||||
# Find the best shaper parameters using Klipper's official algorithm selection
def calibrate_shaper_with_damping(datas, max_smoothing):
    """Run Klipper's shaper auto-selection on the measurements and estimate the axis damping ratio.

    Returns (best_shaper_name, all_shapers, calibration_data, fr, zeta).
    """
    helper = shaper_calibrate.ShaperCalibrate(printer=None)

    # Merge every measurement into a single averaged calibration dataset
    calibration_data = helper.process_accelerometer_data(datas[0])
    for extra_data in datas[1:]:
        calibration_data.add_data(helper.process_accelerometer_data(extra_data))
    calibration_data.normalize_to_frequencies()

    shaper, all_shapers = helper.find_best_shaper(calibration_data, max_smoothing, print)

    # Estimate the main resonance and its damping ratio from the summed PSD
    fr, zeta = compute_damping_ratio(calibration_data.psd_sum, calibration_data.freq_bins)

    print("Recommended shaper is %s @ %.1f Hz" % (shaper.name, shaper.freq))
    print("Axis has a main resonant frequency at %.1fHz with an estimated damping ratio of %.3f" % (fr, zeta))

    return shaper.name, all_shapers, calibration_data, fr, zeta
|
|
||||||
|
|
||||||
|
|
||||||
# Compute damping ratio by using the half power bandwidth method with interpolated frequencies
def compute_damping_ratio(psd, freqs):
    """Estimate the resonant frequency and damping ratio of the dominant PSD peak.

    Uses the half-power bandwidth method: locate the two crossings of the
    max/sqrt(2) level around the highest peak (with linear interpolation
    between bins) and derive zeta = bandwidth / (2 * fr).
    """
    peak_idx = np.argmax(psd)
    fr = freqs[peak_idx]
    half_power = psd[peak_idx] / math.sqrt(2)

    # Nearest samples at or below the half-power level on each side of the peak
    idx_below = np.where(psd[:peak_idx] <= half_power)[0][-1]
    idx_above = np.where(psd[peak_idx:] <= half_power)[0][0] + peak_idx

    def _interp_crossing(i_low, i_high):
        # Linearly interpolate the frequency where the PSD crosses half_power
        return freqs[i_low] + (half_power - psd[i_low]) * (freqs[i_high] - freqs[i_low]) / (psd[i_high] - psd[i_low])

    freq_below_half_power = _interp_crossing(idx_below, idx_below + 1)
    freq_above_half_power = _interp_crossing(idx_above - 1, idx_above)

    zeta = (freq_above_half_power - freq_below_half_power) / (2 * fr)
    return fr, zeta
|
|
||||||
|
|
||||||
|
|
||||||
def compute_spectrogram(data):
    """Compute a combined X+Y+Z power spectral density spectrogram from raw accelerometer data.

    *data* is the raw Klipper accelerometer array with columns (time, accel_x, accel_y, accel_z).
    Returns (pdata, bins, t): the summed PSD matrix, its frequency bins and time bins.
    """
    # Bugfix: import mlab explicitly — it is not guaranteed to be loaded as a
    # side effect of the other matplotlib imports at the top of the file
    import matplotlib.mlab

    N = data.shape[0]
    Fs = N / (data[-1,0] - data[0,0])  # effective sample rate of the recording
    # Round up to a power of 2 for faster FFT
    M = 1 << int(.5 * Fs - 1).bit_length()
    window = np.kaiser(M, 6.)
    def _specgram(x):
        return matplotlib.mlab.specgram(
                x, Fs=Fs, NFFT=M, noverlap=M//2, window=window,
                mode='psd', detrend='mean', scale_by_freq=False)

    # Sum the PSD of the three accelerometer axes into a single spectrogram
    d = {'x': data[:,1], 'y': data[:,2], 'z': data[:,3]}
    pdata, bins, t = _specgram(d['x'])
    for ax in 'yz':
        pdata += _specgram(d[ax])[0]
    return pdata, bins, t
|
|
||||||
|
|
||||||
|
|
||||||
# This find all the peaks in a curve by looking at when the derivative term goes from positive to negative
# Then only the peaks found above a threshold are kept to avoid capturing peaks in the low amplitude noise of a signal
# An added "virtual" threshold allow me to quantify in an opiniated way the peaks that "could have" effect on the printer
# behavior and are likely known to produce or contribute to the ringing/ghosting in printed parts
def detect_peaks(psd, freqs, window_size=5, vicinity=3):
    """Detect, filter and refine the significant peaks of a PSD curve.

    Parameters:
      psd, freqs: the curve and its frequency axis (same length).
      window_size: moving-average width used to pre-smooth the curve.
      vicinity: half-width of the window used to refine each peak on the raw curve.
    Returns (refined_peak_indices, num_peaks, num_peaks_above_effect_threshold).
    """
    # Smooth the curve using a moving average to avoid catching peaks everywhere in noisy signals
    kernel = np.ones(window_size) / window_size
    smoothed_psd = np.convolve(psd, kernel, mode='valid')
    # Pad the start so smoothed indices roughly line up with the raw curve
    mean_pad = [np.mean(psd[:window_size])] * (window_size // 2)
    smoothed_psd = np.concatenate((mean_pad, smoothed_psd))

    # Find peaks on the smoothed curve (strict local maxima), then keep only those
    # above the detection threshold to ignore the low-amplitude noise floor
    smoothed_peaks = np.where((smoothed_psd[:-2] < smoothed_psd[1:-1]) & (smoothed_psd[1:-1] > smoothed_psd[2:]))[0] + 1
    detection_threshold = PEAKS_DETECTION_THRESHOLD * psd.max()
    effect_threshold = PEAKS_EFFECT_THRESHOLD * psd.max()
    smoothed_peaks = smoothed_peaks[smoothed_psd[smoothed_peaks] > detection_threshold]

    # Refine peak positions on the original curve by taking the true local maximum
    # in a small vicinity around each smoothed peak
    refined_peaks = []
    for peak in smoothed_peaks:
        win_start = max(0, peak - vicinity)
        win_end = min(len(psd), peak + vicinity + 1)
        # Bugfix: offset argmax from the clamped window start. The previous
        # "peak + argmax - vicinity" form produced a wrong (possibly negative)
        # index whenever the peak sat closer than `vicinity` to the array start.
        refined_peaks.append(win_start + np.argmax(psd[win_start:win_end]))

    peak_freqs = ["{:.1f}".format(f) for f in freqs[refined_peaks]]

    num_peaks = len(refined_peaks)
    num_peaks_above_effect_threshold = np.sum(psd[refined_peaks] > effect_threshold)

    # peak_freqs entries are already strings, so no extra conversion is needed here
    print("Peaks detected on the graph: %d @ %s Hz (%d above effect threshold)" % (num_peaks, ", ".join(peak_freqs), num_peaks_above_effect_threshold))

    return np.array(refined_peaks), num_peaks, num_peaks_above_effect_threshold
|
|
||||||
|
|
||||||
|
|
||||||
######################################################################
|
|
||||||
# Graphing
|
|
||||||
######################################################################
|
|
||||||
|
|
||||||
def plot_freq_response_with_damping(ax, calibration_data, shapers, performance_shaper, fr, zeta, max_freq):
    """Plot the axis frequency profile with shaper curves, recommendations and peaks on *ax*.

    calibration_data exposes freq_bins and per-axis PSDs; shapers is the list of
    candidate shapers from Klipper's find_best_shaper; performance_shaper is the
    name of Klipper's own recommendation; fr/zeta are the estimated resonance
    frequency and damping ratio. Returns the detected peak frequencies.
    """
    # Restrict every curve to the plotted frequency band
    freqs = calibration_data.freq_bins
    psd = calibration_data.psd_sum[freqs <= max_freq]
    px = calibration_data.psd_x[freqs <= max_freq]
    py = calibration_data.psd_y[freqs <= max_freq]
    pz = calibration_data.psd_z[freqs <= max_freq]
    freqs = freqs[freqs <= max_freq]

    fontP = matplotlib.font_manager.FontProperties()
    fontP.set_size('x-small')

    ax.set_xlabel('Frequency (Hz)')
    ax.set_xlim([0, max_freq])
    ax.set_ylabel('Power spectral density')
    ax.set_ylim([0, psd.max() + psd.max() * 0.05])

    ax.plot(freqs, psd, label='X+Y+Z', color='purple')
    ax.plot(freqs, px, label='X', color='red')
    ax.plot(freqs, py, label='Y', color='green')
    ax.plot(freqs, pz, label='Z', color='blue')

    ax.xaxis.set_minor_locator(matplotlib.ticker.MultipleLocator(5))
    ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
    ax.ticklabel_format(axis='y', style='scientific', scilimits=(0,0))
    ax.grid(which='major', color='grey')
    ax.grid(which='minor', color='lightgrey')

    # Second (invisible) y-axis used only to hold a separate legend box
    ax2 = ax.twinx()
    ax2.yaxis.set_visible(False)

    # Trackers for the best "low vibration" shaper candidate
    lowvib_shaper_vibrs = float('inf')
    lowvib_shaper = None
    lowvib_shaper_freq = None
    lowvib_shaper_accel = 0

    # Draw the shappers curves and add their specific parameters in the legend
    # This adds also a way to find the best shaper with a low level of vibrations (with a resonable level of smoothing)
    for shaper in shapers:
        shaper_max_accel = round(shaper.max_accel / 100.) * 100.
        label = "%s (%.1f Hz, vibr=%.1f%%, sm~=%.2f, accel<=%.f)" % (
                shaper.name.upper(), shaper.freq,
                shaper.vibrs * 100., shaper.smoothing,
                shaper_max_accel)
        ax2.plot(freqs, shaper.vals, label=label, linestyle='dotted')

        # Get the performance shaper
        # NOTE(review): if *performance_shaper* matches none of *shapers*, the
        # performance_shaper_* locals stay unassigned and the legend code below
        # raises NameError — presumably the name always comes from find_best_shaper; confirm.
        if shaper.name == performance_shaper:
            performance_shaper_freq = shaper.freq
            performance_shaper_vibr = shaper.vibrs * 100.
            performance_shaper_vals = shaper.vals

        # Get the low vibration shaper (lowest vibrations; ties broken by higher accel; smoothing capped)
        if (shaper.vibrs * 100 < lowvib_shaper_vibrs or (shaper.vibrs * 100 == lowvib_shaper_vibrs and shaper_max_accel > lowvib_shaper_accel)) and shaper.smoothing < MAX_SMOOTHING:
            lowvib_shaper_accel = shaper_max_accel
            lowvib_shaper = shaper.name
            lowvib_shaper_freq = shaper.freq
            lowvib_shaper_vibrs = shaper.vibrs * 100
            lowvib_shaper_vals = shaper.vals

    # User recommendations are added to the legend: one is Klipper's original suggestion that is usually good for performances
    # and the other one is the custom "low vibration" recommendation that looks for a suitable shaper that doesn't have excessive
    # smoothing and that have a lower vibration level. If both recommendation are the same shaper, or if no suitable "low
    # vibration" shaper is found, then only a single line as the "best shaper" recommendation is added to the legend
    if lowvib_shaper != None and lowvib_shaper != performance_shaper and lowvib_shaper_vibrs <= performance_shaper_vibr:
        ax2.plot([], [], ' ', label="Recommended performance shaper: %s @ %.1f Hz" % (performance_shaper.upper(), performance_shaper_freq))
        ax.plot(freqs, psd * performance_shaper_vals, label='With %s applied' % (performance_shaper.upper()), color='cyan')
        ax2.plot([], [], ' ', label="Recommended low vibrations shaper: %s @ %.1f Hz" % (lowvib_shaper.upper(), lowvib_shaper_freq))
        ax.plot(freqs, psd * lowvib_shaper_vals, label='With %s applied' % (lowvib_shaper.upper()), color='lime')
    else:
        ax2.plot([], [], ' ', label="Recommended best shaper: %s @ %.1f Hz" % (performance_shaper.upper(), performance_shaper_freq))
        ax.plot(freqs, psd * performance_shaper_vals, label='With %s applied' % (performance_shaper.upper()), color='cyan')

    # And the estimated damping ratio is finally added at the end of the legend
    ax2.plot([], [], ' ', label="Estimated damping ratio (ζ): %.3f" % (zeta))

    # Draw the detected peaks and name them
    # This also draw the detection threshold and warning threshold (aka "effect zone")
    peaks, _, _ = detect_peaks(psd, freqs)
    peaks_warning_threshold = PEAKS_DETECTION_THRESHOLD * psd.max()
    peaks_effect_threshold = PEAKS_EFFECT_THRESHOLD * psd.max()

    ax.plot(freqs[peaks], psd[peaks], "x", color='black', markersize=8)
    for idx, peak in enumerate(peaks):
        # Peaks above the effect threshold are emphasized in bold red
        if psd[peak] > peaks_effect_threshold:
            fontcolor = 'red'
            fontweight = 'bold'
        else:
            fontcolor = 'black'
            fontweight = 'normal'
        ax.annotate(f"{idx+1}", (freqs[peak], psd[peak]),
                    textcoords="offset points", xytext=(8, 5),
                    ha='left', fontsize=13, color=fontcolor, weight=fontweight)
    ax.axhline(y=peaks_warning_threshold, color='black', linestyle='--', linewidth=0.5)
    ax.axhline(y=peaks_effect_threshold, color='black', linestyle='--', linewidth=0.5)
    ax.fill_between(freqs, 0, peaks_warning_threshold, color='green', alpha=0.15, label='Relax Region')
    ax.fill_between(freqs, peaks_warning_threshold, peaks_effect_threshold, color='orange', alpha=0.2, label='Warning Region')

    # Add the main resonant frequency and damping ratio of the axis to the graph title
    ax.set_title("Axis Frequency Profile (ω0=%.1fHz, ζ=%.3f)" % (fr, zeta), fontsize=14, color=KLIPPAIN_COLORS['dark_orange'], weight='bold')
    ax.legend(loc='upper left', prop=fontP)
    ax2.legend(loc='upper right', prop=fontP)

    return freqs[peaks]
|
|
||||||
|
|
||||||
|
|
||||||
# Plot a time-frequency spectrogram to see how the system respond over time during the
# resonnance test. This can highlight hidden spots from the standard PSD graph from other harmonics
def plot_spectrogram(ax, data, peaks, max_freq):
    """Draw the time-frequency spectrogram of the raw accelerometer *data* on *ax*.

    *peaks* is an optional iterable of peak frequencies (as returned by
    plot_freq_response_with_damping) to mark as vertical cyan lines.
    """
    pdata, bins, t = compute_spectrogram(data)

    # We need to normalize the data to get a proper signal on the spectrogram
    # However, while using "LogNorm" provide too much background noise, using
    # "Normalize" make only the resonnance appearing and hide interesting elements
    # So we need to filter out the lower part of the data (ie. find the proper vmin for LogNorm)
    vmin_value = np.percentile(pdata, SPECTROGRAM_LOW_PERCENTILE_FILTER)

    ax.set_title("Time-Frequency Spectrogram", fontsize=14, color=KLIPPAIN_COLORS['dark_orange'], weight='bold')
    ax.pcolormesh(bins, t, pdata.T, norm=matplotlib.colors.LogNorm(vmin=vmin_value),
                  cmap='inferno', shading='gouraud')

    # Add peaks lines in the spectrogram to get hint from peaks found in the first graph
    if peaks is not None:
        for idx, peak in enumerate(peaks):
            # Each entry is a frequency value, plotted directly on the frequency (x) axis
            ax.axvline(peak, color='cyan', linestyle='dotted', linewidth=0.75)
            ax.annotate(f"Peak {idx+1}", (peak, t[-1]*0.9),
                        textcoords="data", color='cyan', rotation=90, fontsize=10,
                        verticalalignment='top', horizontalalignment='right')

    ax.set_xlim([0., max_freq])
    ax.set_ylabel('Time (s)')
    ax.set_xlabel('Frequency (Hz)')

    return
|
|
||||||
|
|
||||||
|
|
||||||
######################################################################
|
|
||||||
# Startup and main routines
|
|
||||||
######################################################################
|
|
||||||
|
|
||||||
def parse_log(logname):
    """Load raw accelerometer samples from a Klipper CSV log.

    logname -- path to a CSV file produced by TEST_RESONANCES OUTPUT=raw_data

    Returns a numpy array of the raw samples.
    Raises ValueError if the file contains already-processed PSD data (its
    first data line starts with the PSD header) or has no data lines at all.
    """
    with open(logname) as f:
        # Fix: previously `header` was unbound for an empty file, raising a
        # confusing NameError instead of the informative ValueError below
        header = None
        # Skip the leading '#' comment lines; stop at the first data line
        for header in f:
            if not header.startswith('#'):
                break
        if header is not None and not header.startswith('freq,psd_x,psd_y,psd_z,psd_xyz'):
            # Raw accelerometer data
            return np.loadtxt(logname, comments='#', delimiter=',')
    # Power spectral density data or shaper calibration data (or an empty file)
    raise ValueError("File %s does not contain raw accelerometer data and therefore "
                     "is not supported by this script. Please use the official Klipper "
                     "calibrate_shaper.py script to process it instead." % (logname,))
|
|
||||||
|
|
||||||
|
|
||||||
def setup_klipper_import(kdir):
    """Make Klipper's shaper_calibrate module importable and bind it globally.

    kdir -- Klipper installation directory (``~`` is expanded)
    """
    global shaper_calibrate
    kdir = os.path.expanduser(kdir)
    sys.path.append(os.path.join(kdir, 'klippy'))
    # Import as a submodule of the 'extras' package so the relative imports
    # inside Klipper's own code keep working
    shaper_calibrate = importlib.import_module('.shaper_calibrate', 'extras')
|
|
||||||
|
|
||||||
|
|
||||||
def shaper_calibration(lognames, klipperdir="~/klipper", max_smoothing=None, max_freq=200.):
    """Process raw resonance CSV logs and build the input shaper calibration figure.

    lognames      -- list of raw accelerometer CSV paths (the first is used for the spectrogram)
    klipperdir    -- Klipper installation folder (needed to import shaper_calibrate)
    max_smoothing -- optional cap on shaper smoothing passed to the calibration
    max_freq      -- upper frequency bound of the plotted graphs

    Returns the matplotlib figure; the caller is responsible for saving it.
    """
    setup_klipper_import(klipperdir)

    # Parse data
    datas = [parse_log(fn) for fn in lognames]

    # Calibrate shaper and generate outputs
    performance_shaper, shapers, calibration_data, fr, zeta = calibrate_shaper_with_damping(datas, max_smoothing)

    fig = matplotlib.pyplot.figure()
    gs = matplotlib.gridspec.GridSpec(2, 1, height_ratios=[4, 3])
    ax1 = fig.add_subplot(gs[0])
    ax2 = fig.add_subplot(gs[1])

    # Add title
    title_line1 = "INPUT SHAPER CALIBRATION TOOL"
    fig.text(0.12, 0.965, title_line1, ha='left', va='bottom', fontsize=20, color=KLIPPAIN_COLORS['purple'], weight='bold')
    try:
        # Filenames are expected to look like "<prefix>_<date>_<time>_<axis>.csv"
        filename_parts = (lognames[0].split('/')[-1]).split('_')
        dt = datetime.strptime(f"{filename_parts[1]} {filename_parts[2]}", "%Y%m%d %H%M%S")
        title_line2 = dt.strftime('%x %X') + ' -- ' + filename_parts[3].upper().split('.')[0] + ' axis'
    except Exception:
        # Was a bare "except:" that also swallowed SystemExit/KeyboardInterrupt;
        # only real parsing failures should fall back to the plain filename
        print("Warning: CSV filename look to be different than expected (%s)" % (lognames[0]))
        title_line2 = lognames[0].split('/')[-1]
    fig.text(0.12, 0.957, title_line2, ha='left', va='top', fontsize=16, color=KLIPPAIN_COLORS['dark_purple'])

    # Plot the graphs
    peaks = plot_freq_response_with_damping(ax1, calibration_data, shapers, performance_shaper, fr, zeta, max_freq)
    plot_spectrogram(ax2, datas[0], peaks, max_freq)

    fig.set_size_inches(8.3, 11.6)
    fig.tight_layout()
    fig.subplots_adjust(top=0.89)

    # Adding a small Klippain logo to the top left corner of the figure
    ax_logo = fig.add_axes([0.001, 0.899, 0.1, 0.1], anchor='NW', zorder=-1)
    ax_logo.imshow(matplotlib.pyplot.imread(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'klippain.png')))
    ax_logo.axis('off')

    return fig
|
|
||||||
|
|
||||||
|
|
||||||
def main():
    """Command-line entry point: validate options, build the figure, save the PNG."""
    parser = optparse.OptionParser("%prog [options] <logs>")
    parser.add_option("-o", "--output", type="string", dest="output",
                      default=None, help="filename of output graph")
    parser.add_option("-f", "--max_freq", type="float", default=200.,
                      help="maximum frequency to graph")
    parser.add_option("-s", "--max_smoothing", type="float", default=None,
                      help="maximum shaper smoothing to allow")
    parser.add_option("-k", "--klipper_dir", type="string", dest="klipperdir",
                      default="~/klipper", help="main klipper directory")
    options, args = parser.parse_args()

    # Guard clauses: refuse to run on an invalid command line
    if not args:
        parser.error("Incorrect number of arguments")
    if options.output is None:
        parser.error("You must specify an output file.png to use the script (option -o)")
    if options.max_smoothing is not None and options.max_smoothing < 0.05:
        parser.error("Too small max_smoothing specified (must be at least 0.05)")

    figure = shaper_calibration(args, options.klipperdir, options.max_smoothing, options.max_freq)
    figure.savefig(options.output)
|
|
||||||
@@ -1,439 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
|
|
||||||
##################################################
|
|
||||||
###### SPEED AND VIBRATIONS PLOTTING SCRIPT ######
|
|
||||||
##################################################
|
|
||||||
# Written by Frix_x#0161 #
|
|
||||||
# @version: 2.0
|
|
||||||
|
|
||||||
# CHANGELOG:
|
|
||||||
# v2.0: - updated the script to align it to the new K-Shake&Tune module
|
|
||||||
# - new features for peaks detection and advised speed zones
|
|
||||||
# v1.2: fixed a bug that could happen when username is not "pi" (thanks @spikeygg)
|
|
||||||
# v1.1: better graph formatting
|
|
||||||
# v1.0: first version of the script
|
|
||||||
|
|
||||||
|
|
||||||
# Be sure to make this script executable using SSH: type 'chmod +x ./graph_vibrations.py' when in the folder !
|
|
||||||
|
|
||||||
#####################################################################
|
|
||||||
################ !!! DO NOT EDIT BELOW THIS LINE !!! ################
|
|
||||||
#####################################################################
|
|
||||||
|
|
||||||
import optparse, matplotlib, re, sys, importlib, os, operator
|
|
||||||
from collections import OrderedDict
|
|
||||||
import numpy as np
|
|
||||||
import matplotlib.pyplot, matplotlib.dates, matplotlib.font_manager
|
|
||||||
import matplotlib.ticker, matplotlib.gridspec
|
|
||||||
import locale
|
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
# Force the non-interactive Agg backend: the script runs headless on the
# printer host and only ever writes PNG files
matplotlib.use('Agg')


# A peak is kept only if it rises above this fraction of the signal maximum
PEAKS_DETECTION_THRESHOLD = 0.05
# ...and above its local surrounding minimum by this fraction of its own height
PEAKS_RELATIVE_HEIGHT_THRESHOLD = 0.04
VALLEY_DETECTION_THRESHOLD = 0.1 # Lower is more sensitive

# Klippain brand colors reused across all generated graphs
KLIPPAIN_COLORS = {
    "purple": "#70088C",
    "dark_purple": "#150140",
    "dark_orange": "#F24130"
}
|
|
||||||
|
|
||||||
|
|
||||||
# Set the best locale for time and date formating (generation of the titles)
try:
    locale.setlocale(locale.LC_TIME, locale.getdefaultlocale())
except locale.Error:
    # Fall back to the portable "C" locale when the system default is unusable
    locale.setlocale(locale.LC_TIME, 'C')

# Override the built-in print function to avoid problem in Klipper due to locale settings
original_print = print
def print_with_c_locale(*args, **kwargs):
    """Print wrapper that temporarily forces the "C" locale for the duration
    of the call, then restores the locale that was active before.

    NOTE(review): not thread-safe — the locale switch is process-wide; confirm
    this is only ever called from a single thread.
    """
    original_locale = locale.setlocale(locale.LC_ALL, None)
    locale.setlocale(locale.LC_ALL, 'C')
    original_print(*args, **kwargs)
    locale.setlocale(locale.LC_ALL, original_locale)
print = print_with_c_locale
|
|
||||||
|
|
||||||
|
|
||||||
######################################################################
|
|
||||||
# Computation
|
|
||||||
######################################################################
|
|
||||||
|
|
||||||
def calc_freq_response(data):
    """Compute the frequency response of one raw accelerometer log.

    Delegates to Klipper's own ShaperCalibrate helper (bound globally by
    setup_klipper_import) so the results match the official tooling.
    """
    # Use Klipper standard input shaper objects to do the computation
    helper = shaper_calibrate.ShaperCalibrate(printer=None)
    return helper.process_accelerometer_data(data)
|
|
||||||
|
|
||||||
|
|
||||||
def _pad_data_for_fft(data):
    """Zero-pad raw samples so Klipper's FFT gets enough lines to work with.

    The target length M is the power of two derived from half the sample rate
    (N/T). When the log is too short to reach it, zeros are appended to avoid
    entering a blocking state from Klipper shaper_calibrate.py.
    """
    N = data.shape[0]
    T = data[-1,0] - data[0,0]
    M = 1 << int((N/T) * 0.5 - 1).bit_length()
    if N <= M:
        data = np.pad(data, [(0, (M-N)+1), (0, 0)], mode='constant', constant_values=0)
    return data


def calc_psd(datas, group, max_freq):
    """Compute per-axis PSDs for each group of `group` consecutive logs.

    datas    -- list of raw accelerometer arrays (mutated in place: padded)
    group    -- number of consecutive logs merged into one frequency response
    max_freq -- frequencies above this bound are dropped from the results

    Returns (freqs, psd_list) where freqs are the bins of the FIRST group and
    psd_list holds one [sum, x, y, z] entry per group, all interpolated onto
    those first bins. The padding logic was previously duplicated inline; it
    now lives in _pad_data_for_fft.
    """
    psd_list = []
    first_freqs = None
    signal_axes = ['x', 'y', 'z', 'all']

    for i in range(0, len(datas), group):
        # The first file of the group seeds the frequency response...
        datas[i] = _pad_data_for_fft(datas[i])
        freqrsp = calc_freq_response(datas[i])

        # ...and the remaining files of the group are accumulated into it
        for n in range(group - 1):
            data = _pad_data_for_fft(datas[i + n + 1])
            freqrsp.add_data(calc_freq_response(data))

        if not psd_list:
            # First group, just put it in the result list
            first_freqs = freqrsp.freq_bins
            psd = freqrsp.psd_sum[first_freqs <= max_freq]
            px = freqrsp.psd_x[first_freqs <= max_freq]
            py = freqrsp.psd_y[first_freqs <= max_freq]
            pz = freqrsp.psd_z[first_freqs <= max_freq]
            psd_list.append([psd, px, py, pz])
        else:
            # Not the first group, we need to interpolate every new signals
            # to the first one to equalize the frequency_bins between them
            signal_normalized = dict()
            freqs = freqrsp.freq_bins
            for axe in signal_axes:
                signal = freqrsp.get_psd(axe)
                signal_normalized[axe] = np.interp(first_freqs, freqs, signal)

            # Remove data above max_freq on all axes and add to the result list
            psd = signal_normalized['all'][first_freqs <= max_freq]
            px = signal_normalized['x'][first_freqs <= max_freq]
            py = signal_normalized['y'][first_freqs <= max_freq]
            pz = signal_normalized['z'][first_freqs <= max_freq]
            psd_list.append([psd, px, py, pz])

    return first_freqs[first_freqs <= max_freq], psd_list
|
|
||||||
|
|
||||||
|
|
||||||
def calc_powertot(psd_list, freqs):
    """Integrate each PSD over frequency to get total vibration energy.

    psd_list -- one [sum, x, y, z] entry per speed/group
    freqs    -- common frequency bins of every PSD

    Returns [totals_sum, totals_x, totals_y, totals_z], each a list with one
    integrated value per psd_list entry.
    """
    totals = [[], [], [], []]

    for psd_entry in psd_list:
        # psd_entry holds [sum, x, y, z] in the same order as `totals`
        for bucket, component in zip(totals, psd_entry):
            bucket.append(np.trapz(component, freqs))

    return totals
|
|
||||||
|
|
||||||
|
|
||||||
# This find all the peaks in a curve by looking at when the derivative term goes from positive to negative
# Then only the peaks found above a threshold are kept to avoid capturing peaks in the low amplitude noise of a signal
# Additionaly, we validate that a peak is a real peak based of its neighbors as we can have pretty flat zones in vibration
# graphs with a lot of false positive due to small "noise" in these flat zones
def detect_peaks(power_total, speeds, window_size=10, vicinity=10,
                 detection_threshold_ratio=None, relative_height_ratio=None):
    """Detect significant vibration peaks in the total-power curve.

    power_total               -- 1D numpy array of total energy per speed sample
    speeds                    -- speed value (mm/s) for each sample
    window_size               -- moving-average window used for smoothing
    vicinity                  -- half-width (in samples) of the local windows
    detection_threshold_ratio -- min peak height as a fraction of the global max
                                 (defaults to module PEAKS_DETECTION_THRESHOLD)
    relative_height_ratio     -- min height above the local minimum as a fraction
                                 of the peak's own value (defaults to module
                                 PEAKS_RELATIVE_HEIGHT_THRESHOLD)

    Returns (refined_peak_indices as np.array, number_of_peaks).
    """
    if detection_threshold_ratio is None:
        detection_threshold_ratio = PEAKS_DETECTION_THRESHOLD
    if relative_height_ratio is None:
        relative_height_ratio = PEAKS_RELATIVE_HEIGHT_THRESHOLD

    # Smooth the curve using a moving average to avoid catching peaks everywhere in noisy signals
    kernel = np.ones(window_size) / window_size
    smoothed_psd = np.convolve(power_total, kernel, mode='valid')
    mean_pad = [np.mean(power_total[:window_size])] * (window_size // 2)
    smoothed_psd = np.concatenate((mean_pad, smoothed_psd))

    # Find peaks on the smoothed curve (and excluding the last value of the serie often detected when in a flat zone)
    smoothed_peaks = np.where((smoothed_psd[:-3] < smoothed_psd[1:-2]) & (smoothed_psd[1:-2] > smoothed_psd[2:-1]))[0] + 1
    detection_threshold = detection_threshold_ratio * power_total.max()

    # Keep only peaks that clear both the absolute and the local-relative thresholds
    valid_peaks = []
    for peak in smoothed_peaks:
        peak_height = smoothed_psd[peak] - np.min(smoothed_psd[max(0, peak-vicinity):min(len(smoothed_psd), peak+vicinity+1)])
        if peak_height > relative_height_ratio * smoothed_psd[peak] and smoothed_psd[peak] > detection_threshold:
            valid_peaks.append(peak)

    # Refine peak positions on the original curve
    refined_peaks = []
    for peak in valid_peaks:
        # Bug fix: the previous "peak + argmax(...) - vicinity" assumed the search
        # window always started at peak - vicinity, which shifted the refined index
        # for peaks closer than `vicinity` to the start of the signal
        window_start = max(0, peak - vicinity)
        window = power_total[window_start:min(len(power_total), peak + vicinity + 1)]
        refined_peaks.append(window_start + np.argmax(window))

    peak_speeds = ["{:.1f}".format(speeds[i]) for i in refined_peaks]
    num_peaks = len(refined_peaks)
    print("Vibrations peaks detected: %d @ %s mm/s (avoid running these speeds in your slicer profile)" % (num_peaks, ", ".join(map(str, peak_speeds))))

    return np.array(refined_peaks), num_peaks
|
|
||||||
|
|
||||||
|
|
||||||
# The goal is to find zone outside of peaks (flat low energy zones) to advise them as good speeds range to use in the slicer
def identify_low_energy_zones(power_total, detection_threshold=None):
    """Find contiguous low-vibration-energy index ranges in power_total.

    power_total         -- 1D array of total energy per (resampled) speed sample
    detection_threshold -- number of standard deviations below the mean used as
                           the valley cutoff; lower is more sensitive. Defaults
                           to the module-level VALLEY_DETECTION_THRESHOLD.

    Returns a list of (start_index, end_index, mean_energy_percent) tuples
    sorted by increasing mean energy (best zones first).
    """
    if detection_threshold is None:
        detection_threshold = VALLEY_DETECTION_THRESHOLD

    valleys = []

    # Calculate the mean and standard deviation of the entire power_total
    mean_energy = np.mean(power_total)
    std_energy = np.std(power_total)

    # Define a threshold value as mean minus a certain number of standard deviations
    threshold_value = mean_energy - detection_threshold * std_energy

    # Find valleys in power_total based on the threshold
    in_valley = False
    start_idx = 0
    for i, value in enumerate(power_total):
        if not in_valley and value < threshold_value:
            in_valley = True
            start_idx = i
        elif in_valley and value >= threshold_value:
            in_valley = False
            valleys.append((start_idx, i))

    # If the last point is still in a valley, close the valley
    if in_valley:
        valleys.append((start_idx, len(power_total) - 1))

    max_signal = np.max(power_total)

    # Calculate mean energy for each valley as a percentage of the maximum of the signal
    valley_means_percentage = []
    for start, end in valleys:
        if not np.isnan(np.mean(power_total[start:end])):
            valley_means_percentage.append((start, end, (np.mean(power_total[start:end]) / max_signal) * 100))

    # Sort valleys based on mean percentage values
    sorted_valleys = sorted(valley_means_percentage, key=lambda x: x[2])

    return sorted_valleys
|
|
||||||
|
|
||||||
|
|
||||||
# Resample the signal to achieve denser data points in order to get more precise valley placing and
# avoid having to use the original sampling of the signal (that is equal to the speed increment used for the test)
def resample_signal(speeds, power_total, new_spacing=0.1):
    """Linearly interpolate power_total onto a denser, evenly-spaced speed grid.

    Returns (dense_speeds, dense_power_total) covering [speeds[0], speeds[-1]]
    with a step of new_spacing mm/s.
    """
    dense_speeds = np.arange(speeds[0], speeds[-1] + new_spacing, new_spacing)
    dense_power_total = np.interp(dense_speeds, speeds, power_total)
    return dense_speeds, dense_power_total
|
|
||||||
|
|
||||||
|
|
||||||
######################################################################
|
|
||||||
# Graphing
|
|
||||||
######################################################################
|
|
||||||
|
|
||||||
def plot_total_power(ax, speeds, power_total):
    """Plot the vibration energy decomposition (sum + per-axis) versus speed.

    ax          -- matplotlib Axes to draw on
    speeds      -- measured speed values (mm/s)
    power_total -- [sum, x, y, z] energy lists as produced by calc_powertot

    Annotates detected peaks and shades advised low-energy speed zones.
    Returns the peak speeds (mm/s) as an array, or None when no peak is found.
    """
    # Densify the summed curve so peaks/valleys land more precisely than the
    # raw speed increment of the test allows
    resampled_speeds, resampled_power_total = resample_signal(speeds, power_total[0])

    ax.set_title("Vibrations decomposition", fontsize=14, color=KLIPPAIN_COLORS['dark_orange'], weight='bold')
    ax.set_xlabel('Speed (mm/s)')
    ax.set_ylabel('Energy')

    # Twin axis only used to host the second legend (zones / peak count)
    ax2 = ax.twinx()
    ax2.yaxis.set_visible(False)

    power_total_sum = np.array(resampled_power_total)
    speed_array = np.array(resampled_speeds)
    # 5% headroom above the highest point
    max_y = power_total_sum.max() + power_total_sum.max() * 0.05
    ax.set_xlim([speed_array.min(), speed_array.max()])
    ax.set_ylim([0, max_y])
    ax2.set_ylim([0, max_y])

    # Summed curve uses the dense resampled grid; per-axis curves keep the raw grid
    ax.plot(resampled_speeds, resampled_power_total, label="X+Y+Z", color='purple')
    ax.plot(speeds, power_total[1], label="X", color='red')
    ax.plot(speeds, power_total[2], label="Y", color='green')
    ax.plot(speeds, power_total[3], label="Z", color='blue')

    peaks, num_peaks = detect_peaks(resampled_power_total, resampled_speeds)
    low_energy_zones = identify_low_energy_zones(resampled_power_total)

    # Mark and number each detected peak on the summed curve
    if peaks.size:
        ax.plot(speed_array[peaks], power_total_sum[peaks], "x", color='black', markersize=8)
        for idx, peak in enumerate(peaks):
            fontcolor = 'red'
            fontweight = 'bold'
            ax.annotate(f"{idx+1}", (speed_array[peak], power_total_sum[peak]),
                        textcoords="offset points", xytext=(8, 5),
                        ha='left', fontsize=13, color=fontcolor, weight=fontweight)
        ax2.plot([], [], ' ', label=f'Number of peaks: {num_peaks}')
    else:
        ax2.plot([], [], ' ', label=f'No peaks detected')

    # Shade each advised low-energy speed zone and describe it in the legend
    for idx, (start, end, energy) in enumerate(low_energy_zones):
        ax.axvline(speed_array[start], color='red', linestyle='dotted', linewidth=1.5)
        ax.axvline(speed_array[end], color='red', linestyle='dotted', linewidth=1.5)
        ax2.fill_between(speed_array[start:end], 0, power_total_sum[start:end], color='green', alpha=0.2, label=f'Zone {idx+1}: {speed_array[start]:.1f} to {speed_array[end]:.1f} mm/s (mean energy: {energy:.2f}%)')

    ax.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
    ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
    ax.grid(which='major', color='grey')
    ax.grid(which='minor', color='lightgrey')
    fontP = matplotlib.font_manager.FontProperties()
    fontP.set_size('small')
    ax.legend(loc='upper left', prop=fontP)
    ax2.legend(loc='upper right', prop=fontP)

    if peaks.size:
        return speed_array[peaks]
    else:
        return None
|
|
||||||
|
|
||||||
|
|
||||||
def plot_spectrogram(ax, speeds, freqs, power_spectral_densities, peaks, max_freq):
    """Plot the frequency-vs-speed spectrogram of the summed PSDs on *ax*.

    speeds                   -- one speed value per PSD entry
    freqs                    -- common frequency bins
    power_spectral_densities -- list of [sum, x, y, z] PSD arrays per speed
    peaks                    -- peak speeds (mm/s) to mark, or None
    max_freq                 -- upper bound of the frequency axis
    """
    # Build the (freqs x speeds) matrix in one vectorized step: column i is the
    # summed PSD measured at speeds[i]. This replaces the original pure-Python
    # O(len(speeds) * len(freqs)) double loop with a single numpy transpose.
    spectrum = np.array([psd[0] for psd in power_spectral_densities]).T

    ax.set_title("Vibrations spectrogram", fontsize=14, color=KLIPPAIN_COLORS['dark_orange'], weight='bold')
    ax.pcolormesh(speeds, freqs, spectrum, norm=matplotlib.colors.LogNorm(),
                  cmap='inferno', shading='gouraud')

    # Add peaks lines in the spectrogram to get hint from peaks found in the first graph
    if peaks is not None:
        for idx, peak in enumerate(peaks):
            ax.axvline(peak, color='cyan', linestyle='dotted', linewidth=0.75)
            ax.annotate(f"Peak {idx+1}", (peak, freqs[-1]*0.9),
                        textcoords="data", color='cyan', rotation=90, fontsize=10,
                        verticalalignment='top', horizontalalignment='right')

    ax.set_ylim([0., max_freq])
    ax.set_ylabel('Frequency (hz)')
    ax.set_xlabel('Speed (mm/s)')

    return
|
|
||||||
|
|
||||||
|
|
||||||
######################################################################
|
|
||||||
# Startup and main routines
|
|
||||||
######################################################################
|
|
||||||
|
|
||||||
def parse_log(logname):
    """Load raw accelerometer samples from a Klipper CSV log.

    Returns a numpy array of the raw samples, or raises ValueError when the
    file contains already-processed PSD/shaper data instead of raw samples.

    NOTE(review): an empty file would leave `header` unbound and raise a
    NameError here rather than the ValueError below — confirm whether empty
    logs can reach this point.
    """
    with open(logname) as f:
        # Skip leading '#' comment lines; stop at the first data line
        for header in f:
            if not header.startswith('#'):
                break
        if not header.startswith('freq,psd_x,psd_y,psd_z,psd_xyz'):
            # Raw accelerometer data
            return np.loadtxt(logname, comments='#', delimiter=',')
    # Power spectral density data or shaper calibration data
    raise ValueError("File %s does not contain raw accelerometer data and therefore "
                     "is not supported by graph_vibrations.py script. Please use "
                     "calibrate_shaper.py script to process it instead." % (logname,))
|
|
||||||
|
|
||||||
|
|
||||||
def extract_speed(logname):
    """Extract the test speed (mm/s) encoded in a vibration log filename.

    The filename is expected to contain a "sp<value>n" token where the decimal
    separator of <value> is an underscore (e.g. "sp120_5n" -> 120.5).
    Raises ValueError when no such token is present.
    """
    try:
        match = re.search('sp(.+?)n', os.path.basename(logname))
        speed = match.group(1).replace('_','.')
    except AttributeError:
        # re.search returned None: the filename carries no speed token
        raise ValueError("File %s does not contain speed in its name and therefore "
                         "is not supported by graph_vibrations.py script." % (logname,))
    return float(speed)
|
|
||||||
|
|
||||||
|
|
||||||
def sort_and_slice(raw_speeds, raw_datas, remove):
    """Sort logs by speed and trim the acceleration/deceleration phases.

    raw_speeds -- speed extracted from each log filename
    raw_datas  -- matching raw sample arrays
    remove     -- total percentage of samples to drop, split evenly between the
                  start and the end of each log

    Returns (sorted_speeds_tuple, list_of_trimmed_datas).
    """
    # Sort the (speed, data) pairs together so they stay aligned, ascending by speed
    ordered = sorted(zip(raw_speeds, raw_datas), key=operator.itemgetter(0))
    speeds, ordered_datas = zip(*ordered)

    # Drop remove/2 percent of samples at each end of every log so only the
    # constant-speed portion of the movement remains
    trimmed_datas = []
    for data in ordered_datas:
        trim = round((len(data) * remove / 100) / 2)
        trimmed_datas.append(data[trim:len(data) - trim])

    return speeds, trimmed_datas
|
|
||||||
|
|
||||||
|
|
||||||
def setup_klipper_import(kdir):
    """Make Klipper's shaper_calibrate module importable and bind it globally.

    kdir -- Klipper installation directory (``~`` is expanded)
    """
    global shaper_calibrate
    kdir = os.path.expanduser(kdir)
    sys.path.append(os.path.join(kdir, 'klippy'))
    # Import as a submodule of the 'extras' package so the relative imports
    # inside Klipper's own code keep working
    shaper_calibrate = importlib.import_module('.shaper_calibrate', 'extras')
|
|
||||||
|
|
||||||
|
|
||||||
def vibrations_calibration(lognames, klipperdir="~/klipper", axisname=None, max_freq=1000., remove=0):
    """Process the vibration test CSV logs and build the vibrations figure.

    lognames   -- CSV files of the vibration test (one or more per speed)
    klipperdir -- Klipper installation folder (to import shaper_calibrate)
    axisname   -- axis label printed in the figure title (may be None)
    max_freq   -- upper frequency bound of the analysis/graphs
    remove     -- percentage of samples trimmed at start/end of each log

    Returns the matplotlib figure; the caller is responsible for saving it.
    """
    setup_klipper_import(klipperdir)

    # Parse the raw data and get them ready for analysis
    raw_datas = [parse_log(filename) for filename in lognames]
    raw_speeds = [extract_speed(filename) for filename in lognames]
    speeds, datas = sort_and_slice(raw_speeds, raw_datas, remove)

    # As we assume that we have the same number of file for each speeds. We can group
    # the PSD results by this number (to combine vibrations at given speed on all movements)
    group_by = speeds.count(speeds[0])
    # Compute psd and total power of the signal
    freqs, power_spectral_densities = calc_psd(datas, group_by, max_freq)
    power_total = calc_powertot(power_spectral_densities, freqs)

    fig = matplotlib.pyplot.figure()
    gs = matplotlib.gridspec.GridSpec(2, 1, height_ratios=[4, 3])
    ax1 = fig.add_subplot(gs[0])
    ax2 = fig.add_subplot(gs[1])

    title_line1 = "VIBRATIONS MEASUREMENT TOOL"
    fig.text(0.12, 0.965, title_line1, ha='left', va='bottom', fontsize=20, color=KLIPPAIN_COLORS['purple'], weight='bold')
    try:
        # Filenames are expected to look like "<prefix>_<date>_<time>-...": also
        # falls back when axisname is None (the .upper() call raises)
        filename_parts = (lognames[0].split('/')[-1]).split('_')
        dt = datetime.strptime(f"{filename_parts[1]} {filename_parts[2].split('-')[0]}", "%Y%m%d %H%M%S")
        title_line2 = dt.strftime('%x %X') + ' -- ' + axisname.upper() + ' axis'
    except Exception:
        # Was a bare "except:" that also swallowed SystemExit/KeyboardInterrupt
        print("Warning: CSV filename look to be different than expected (%s)" % (lognames[0]))
        title_line2 = lognames[0].split('/')[-1]
    fig.text(0.12, 0.957, title_line2, ha='left', va='top', fontsize=16, color=KLIPPAIN_COLORS['dark_purple'])

    # Remove speeds duplicates and graph the processed datas
    speeds = list(OrderedDict((x, True) for x in speeds).keys())

    peaks = plot_total_power(ax1, speeds, power_total)
    plot_spectrogram(ax2, speeds, freqs, power_spectral_densities, peaks, max_freq)

    fig.set_size_inches(8.3, 11.6)
    fig.tight_layout()
    fig.subplots_adjust(top=0.89)

    # Adding a small Klippain logo to the top left corner of the figure
    ax_logo = fig.add_axes([0.001, 0.899, 0.1, 0.1], anchor='NW', zorder=-1)
    ax_logo.imshow(matplotlib.pyplot.imread(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'klippain.png')))
    ax_logo.axis('off')

    return fig
|
|
||||||
|
|
||||||
|
|
||||||
def main():
    """Command-line entry point: validate options, build the figure, save the PNG."""
    parser = optparse.OptionParser("%prog [options] <raw logs>")
    parser.add_option("-o", "--output", type="string", dest="output",
                      default=None, help="filename of output graph")
    parser.add_option("-a", "--axis", type="string", dest="axisname",
                      default=None, help="axis name to be shown on the side of the graph")
    parser.add_option("-f", "--max_freq", type="float", default=1000.,
                      help="maximum frequency to graph")
    parser.add_option("-r", "--remove", type="int", default=0,
                      help="percentage of data removed at start/end of each files")
    parser.add_option("-k", "--klipper_dir", type="string", dest="klipperdir",
                      default="~/klipper", help="main klipper directory")
    options, args = parser.parse_args()

    # Guard clauses: refuse to run on an invalid command line
    if not args:
        parser.error("No CSV file(s) to analyse")
    if options.output is None:
        parser.error("You must specify an output file.png to use the script (option -o)")
    if not (0 <= options.remove <= 50):
        parser.error("You must specify a correct percentage (option -r) in the 0-50 range")

    figure = vibrations_calibration(args, options.klipperdir, options.axisname, options.max_freq, options.remove)
    figure.savefig(options.output)
|
|
||||||
@@ -1,231 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
############################################
|
|
||||||
###### INPUT SHAPER KLIPPAIN WORKFLOW ######
|
|
||||||
############################################
|
|
||||||
# Written by Frix_x#0161 #
|
|
||||||
# @version: 2.0
|
|
||||||
|
|
||||||
# CHANGELOG:
|
|
||||||
# v2.0: new version of this as a Python script (to replace the old bash script) and implement the newer and improved shaper plotting scripts
|
|
||||||
# v1.7: updated the handling of shaper files to account for the new analysis scripts as we are now using raw data directly
|
|
||||||
# v1.6: - updated the handling of shaper graph files to be able to optionnaly account for added positions in the filenames and remove them
|
|
||||||
# - fixed a bug in the belt graph on slow SD card or Pi clones (Klipper was still writing in the file while we were already reading it)
|
|
||||||
# v1.5: fixed klipper unnexpected fail at the end of the execution, even if graphs were correctly generated (unicode decode error fixed)
|
|
||||||
# v1.4: added the ~/klipper dir parameter to the call of graph_vibrations.py for a better user handling (in case user is not "pi")
|
|
||||||
# v1.3: some documentation improvement regarding the line endings that needs to be LF for this file
|
|
||||||
# v1.2: added the movement name to be transfered to the Python script in vibration calibration (to print it on the result graphs)
|
|
||||||
# v1.1: multiple fixes and tweaks (mainly to avoid having empty files read by the python scripts after the mv command)
|
|
||||||
# v1.0: first version of the script based on a Zellneralex script
|
|
||||||
|
|
||||||
# Usage:
|
|
||||||
# This script was designed to be used with gcode_shell_commands directly from Klipper
|
|
||||||
# Parameters availables:
|
|
||||||
# BELTS - To generate belts diagrams after calling the Klipper TEST_RESONANCES AXIS=1,(-)1 OUTPUT=raw_data
|
|
||||||
# SHAPER - To generate input shaper diagrams after calling the Klipper TEST_RESONANCES AXIS=X/Y OUTPUT=raw_data
|
|
||||||
# VIBRATIONS - To generate vibration diagram after calling the custom (Frix_x#0161) VIBRATIONS_CALIBRATION macro
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
import os
|
|
||||||
import time
|
|
||||||
import glob
|
|
||||||
import sys
|
|
||||||
import shutil
|
|
||||||
import tarfile
|
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
#################################################################################################################
# User-tunable settings: where results are written and where Klipper lives
RESULTS_FOLDER = os.path.expanduser('~/printer_data/config/K-ShakeTune_results')
KLIPPER_FOLDER = os.path.expanduser('~/klipper')
# NOTE(review): presumably the number of old result sets to keep — it is used
# outside this view, confirm against the cleanup code before relying on it
STORE_RESULTS = 3
#################################################################################################################

# Local graphing scripts shipped alongside this workflow
from graph_belts import belts_calibration
from graph_shaper import shaper_calibration
from graph_vibrations import vibrations_calibration

# Subfolder (under RESULTS_FOLDER) used by each graph type, indexed as
# [0]=belts, [1]=input shaper, [2]=vibrations
RESULTS_SUBFOLDERS = ['belts', 'inputshaper', 'vibrations']
|
|
||||||
def is_file_open(filepath):
    """Return True when any running process currently holds *filepath* open.

    Scans every /proc/<pid>/fd/* symlink (Linux only) and compares it against
    filepath. Used to wait until Klipper has finished writing a CSV.
    """
    for proc in os.listdir('/proc'):
        if not proc.isdigit():
            continue
        for fd in glob.glob(f'/proc/{proc}/fd/*'):
            try:
                if os.path.samefile(fd, filepath):
                    return True
            except OSError:
                # Fix: previously only FileNotFoundError was caught, but
                # os.path.samefile can also raise PermissionError when
                # inspecting another user's process. Either way the fd is
                # unreadable or already gone, so just skip it.
                pass
    return False
|
|
||||||
|
|
||||||
|
|
||||||
def get_belts_graph():
    """Collect the two newest raw belt resonance CSVs from /tmp, move them to
    the results folder and build the belts comparison figure.

    Returns (figure, png_path). Exits the process when fewer than two CSVs
    are available.
    """
    current_date = datetime.now().strftime('%Y%m%d_%H%M%S')
    lognames = []

    globbed_files = glob.glob('/tmp/raw_data_axis*.csv')
    if not globbed_files:
        print("No CSV files found in the /tmp folder to create the belt graphs!")
        sys.exit(1)
    if len(globbed_files) < 2:
        print("Not enough CSV files found in the /tmp folder. Two files are required for the belt graphs!")
        sys.exit(1)
    # Newest first: the two most recent files belong to the latest test
    sorted_files = sorted(globbed_files, key=os.path.getmtime, reverse=True)

    for filename in sorted_files[:2]:
        # Wait for the file handler to be released by Klipper
        while is_file_open(filename):
            time.sleep(3)

        # Extract the tested belt from the filename and rename/move the CSV file to the result folder
        belt = os.path.basename(filename).split('_')[3].split('.')[0].upper()
        new_file = os.path.join(RESULTS_FOLDER, RESULTS_SUBFOLDERS[0], f'belt_{current_date}_{belt}.csv')
        shutil.move(filename, new_file)

        # Save the file path for later
        lognames.append(new_file)

    # Generate the belts graph and its name
    fig = belts_calibration(lognames, KLIPPER_FOLDER)
    png_filename = os.path.join(RESULTS_FOLDER, RESULTS_SUBFOLDERS[0], f'belts_{current_date}.png')

    return fig, png_filename
|
|
||||||
|
|
||||||
|
|
||||||
def get_shaper_graph():
    """Pick the newest raw resonance CSV from /tmp, move it to the results
    folder and build the input shaper figure.

    Returns (figure, png_path). Exits the process when no CSV is available.
    """
    current_date = datetime.now().strftime('%Y%m%d_%H%M%S')

    # Get all the files and sort them based on last modified time to select the most recent one
    globbed_files = glob.glob('/tmp/raw_data*.csv')
    if not globbed_files:
        print("No CSV files found in the /tmp folder to create the input shaper graphs!")
        sys.exit(1)
    sorted_files = sorted(globbed_files, key=os.path.getmtime, reverse=True)
    filename = sorted_files[0]

    # Wait for the file handler to be released by Klipper
    while is_file_open(filename):
        time.sleep(3)

    # Extract the tested axis from the filename and rename/move the CSV file to the result folder
    axis = os.path.basename(filename).split('_')[3].split('.')[0].upper()
    new_file = os.path.join(RESULTS_FOLDER, RESULTS_SUBFOLDERS[1], f'resonances_{current_date}_{axis}.csv')
    shutil.move(filename, new_file)

    # Generate the shaper graph and its name
    fig = shaper_calibration([new_file], KLIPPER_FOLDER)
    png_filename = os.path.join(RESULTS_FOLDER, RESULTS_SUBFOLDERS[1], f'resonances_{current_date}_{axis}.png')

    return fig, png_filename
|
|
||||||
|
|
||||||
|
|
||||||
def get_vibrations_graph(axis_name):
    """Collect the vibration CSVs from /tmp and build the vibrations graph.

    Moves every ``/tmp/adxl345-*.csv`` file into the results folder with a
    dated name, renders the vibrations graph from them, then bundles the CSVs
    into a ``.tar.gz`` archive and deletes the loose files.

    Args:
        axis_name: name of the tested axis, used in the output file names and
            forwarded to ``vibrations_calibration()``.

    Returns:
        tuple: (fig, png_filename) where ``fig`` is the figure produced by
        ``vibrations_calibration()`` and ``png_filename`` is the path where
        the caller should save it.

    Exits the process with status 1 when fewer than 3 CSV files are found.
    """
    current_date = datetime.now().strftime('%Y%m%d_%H%M%S')
    lognames = []

    globbed_files = glob.glob('/tmp/adxl345-*.csv')
    if not globbed_files:
        print("No CSV files found in the /tmp folder to create the vibration graphs!")
        sys.exit(1)
    if len(globbed_files) < 3:
        print("Not enough CSV files found in the /tmp folder. At least 3 files are required for the vibration graphs!")
        sys.exit(1)

    for filename in globbed_files:
        # Wait for the file handler to be released by Klipper
        while is_file_open(filename):
            time.sleep(3)

        # Cleanup of the filename and moving it in the result folder
        cleanfilename = os.path.basename(filename).replace('adxl345', f'vibr_{current_date}')
        new_file = os.path.join(RESULTS_FOLDER, RESULTS_SUBFOLDERS[2], cleanfilename)
        shutil.move(filename, new_file)

        # Save the file path for later
        lognames.append(new_file)

    # Sync filesystem to avoid problems as there is a lot of file copied
    os.sync()

    # Generate the vibration graph and its name
    fig = vibrations_calibration(lognames, KLIPPER_FOLDER, axis_name)
    png_filename = os.path.join(RESULTS_FOLDER, RESULTS_SUBFOLDERS[2], f'vibrations_{current_date}_{axis_name}.png')

    # Archive exactly the CSV files moved above (lognames) in a tarball and
    # remove them to clean up the results folder. Using lognames instead of
    # re-globbing the folder avoids a second directory scan and guarantees we
    # never pick up an unrelated file that happens to match the pattern.
    tarball = os.path.join(RESULTS_FOLDER, RESULTS_SUBFOLDERS[2], f'vibrations_{current_date}_{axis_name}.tar.gz')
    with tarfile.open(tarball, 'w:gz') as tar:
        for csv_file in lognames:
            tar.add(csv_file, recursive=False)
            os.remove(csv_file)

    return fig, png_filename
||||||
# Utility function to get old files based on their modification time
|
|
||||||
def get_old_files(folder, extension, limit):
    """Return the paths of the files in *folder* ending with *extension*,
    excluding the *limit* most recently modified ones.

    Files are ordered newest-first by modification time, so the returned list
    contains the oldest entries (those beyond the first *limit*).
    """
    matching = [
        os.path.join(folder, entry)
        for entry in os.listdir(folder)
        if entry.endswith(extension)
    ]
    newest_first = sorted(matching, key=os.path.getmtime, reverse=True)
    return newest_first[limit:]
||||||
|
|
||||||
def clean_files():
    """Prune old graph results, keeping only the most recent ones.

    Belts and vibrations runs produce one PNG each, while a shaper run
    produces two (one per axis), hence the doubled retention limit for the
    shaper folder. Companion CSV/tarball files are removed along with each
    old PNG when present.
    """
    belts_dir = os.path.join(RESULTS_FOLDER, RESULTS_SUBFOLDERS[0])
    shaper_dir = os.path.join(RESULTS_FOLDER, RESULTS_SUBFOLDERS[1])
    vibrations_dir = os.path.join(RESULTS_FOLDER, RESULTS_SUBFOLDERS[2])

    # Retention limits derived from STORE_RESULTS (shaper runs emit 2 PNGs)
    single_limit = STORE_RESULTS + 1
    double_limit = 2 * STORE_RESULTS + 1

    # Old belt PNGs: also drop the per-belt CSVs sharing the same date stamp
    for old_png in get_old_files(belts_dir, '.png', single_limit):
        stem = os.path.splitext(os.path.basename(old_png))[0]
        file_date = "_".join(stem.split('_')[1:3])
        for belt in ('A', 'B'):
            companion_csv = os.path.join(belts_dir, f'belt_{file_date}_{belt}.csv')
            if os.path.exists(companion_csv):
                os.remove(companion_csv)
        os.remove(old_png)

    # Old shaper PNGs: the companion CSV shares the PNG's base name
    for old_png in get_old_files(shaper_dir, '.png', double_limit):
        stem = os.path.splitext(os.path.basename(old_png))[0]
        companion_csv = os.path.join(shaper_dir, stem + ".csv")
        if os.path.exists(companion_csv):
            os.remove(companion_csv)
        os.remove(old_png)

    # Old vibrations PNGs: the companion tarball shares the PNG's base name
    for old_png in get_old_files(vibrations_dir, '.png', single_limit):
        os.remove(old_png)
        stem = os.path.splitext(os.path.basename(old_png))[0]
        companion_tar = os.path.join(vibrations_dir, stem + ".tar.gz")
        if os.path.exists(companion_tar):
            os.remove(companion_tar)
|
||||||
|
|
||||||
def main():
    """Command-line entry point.

    Usage: plot_graphs.py [SHAPER|BELTS|VIBRATIONS] (VIBRATIONS also requires
    an axis name as second argument). Generates the requested graph, saves it
    as a PNG in the results folder and prunes old results.
    """
    # Check if results folders are there or create them.
    # exist_ok avoids the check-then-create race of os.path.exists + makedirs.
    for result_subfolder in RESULTS_SUBFOLDERS:
        os.makedirs(os.path.join(RESULTS_FOLDER, result_subfolder), exist_ok=True)

    if len(sys.argv) < 2:
        print("Usage: plot_graphs.py [SHAPER|BELTS|VIBRATIONS]")
        sys.exit(1)

    graph_type = sys.argv[1].lower()
    if graph_type == 'belts':
        fig, png_filename = get_belts_graph()
    elif graph_type == 'shaper':
        fig, png_filename = get_shaper_graph()
    elif graph_type == 'vibrations':
        # The vibrations graph needs the tested axis name as a second
        # argument; fail with a usage message instead of an IndexError.
        if len(sys.argv) < 3:
            print("Usage: plot_graphs.py VIBRATIONS <axis_name>")
            sys.exit(1)
        fig, png_filename = get_vibrations_graph(axis_name=sys.argv[2])
    else:
        print("Usage: plot_graphs.py [SHAPER|BELTS|VIBRATIONS]")
        sys.exit(1)

    fig.savefig(png_filename)

    clean_files()
    print(f"Graphs created. You will find the results in {RESULTS_FOLDER}")
|
||||||
|
|
||||||
# Standard script entry point: run main() only when executed directly,
# not when this file is imported as a module.
if __name__ == '__main__':
    main()
|
|
||||||
10
K-ShakeTune/shaketune.sh
Executable file
@@ -0,0 +1,10 @@
|
|||||||
|
#!/usr/bin/env bash

# This script is used to run the Shake&Tune Python scripts as a module
# from the project root directory using its virtual environment.
# Usage: ./shaketune.sh <args>
# All command-line arguments are forwarded unchanged to the Python module.

# Activate the dedicated virtualenv, run the workflow module from the
# project root, then deactivate to restore the caller's environment.
source ~/klippain_shaketune-env/bin/activate
cd ~/klippain_shaketune
python -m src.is_workflow "$@"
deactivate
|
||||||
6
K-ShakeTune/shaketune_cmd.cfg
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
[gcode_shell_command shaketune]
|
||||||
|
command: ~/printer_data/config/K-ShakeTune/shaketune.sh
|
||||||
|
timeout: 600.0
|
||||||
|
verbose: True
|
||||||
|
|
||||||
|
[respond]
|
||||||
47
README.md
@@ -1,6 +1,6 @@
|
|||||||
# Klippain Shake&Tune Module
|
# Klipper Shake&Tune Module
|
||||||
|
|
||||||
This Klippain "Shake&Tune" repository is a standalone module from the [Klippain](https://github.com/Frix-x/klippain) ecosystem, designed to automate and calibrate the input shaper system on your Klipper 3D printer with a streamlined workflow and insightful vizualisations.
|
This "Shake&Tune" repository is a standalone module from the [Klippain](https://github.com/Frix-x/klippain) ecosystem, designed to automate and calibrate the input shaper system on your Klipper 3D printer with a streamlined workflow and insightful visualizations. This can be installed on any Klipper machine. It is not limited to those using Klippain.
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
@@ -11,45 +11,36 @@ It operates in two steps:
|
|||||||
2. Relocates the graphs and associated CSV files to your Klipper config folder for easy access via Mainsail/Fluidd to eliminate the need for SSH.
|
2. Relocates the graphs and associated CSV files to your Klipper config folder for easy access via Mainsail/Fluidd to eliminate the need for SSH.
|
||||||
3. Manages the folder by retaining only the most recent results (default setting of keeping the latest three sets).
|
3. Manages the folder by retaining only the most recent results (default setting of keeping the latest three sets).
|
||||||
|
|
||||||
The [detailed documentation is here](./docs/README.md).
|
Check out the **[detailed documentation of the Shake&Tune module here](./docs/README.md)**. You can also look at the documentation for each type of graph by directly clicking on them below to better understand your results and tune your machine!
|
||||||
|
|
||||||
| Belts graphs | Axis graphs | Vibrations measurement |
|
| [Belts graph](./docs/macros/belts_tuning.md) | [Axis input shaper graphs](./docs/macros/axis_tuning.md) | [Vibrations graph](./docs/macros/vibrations_profile.md) |
|
||||||
|:----------------:|:------------:|:---------------------:|
|
|:----------------:|:------------:|:---------------------:|
|
||||||
|  |  |  |
|
| [<img src="./docs/images/belts_example.png">](./docs/macros/belts_tuning.md) | [<img src="./docs/images/axis_example.png">](./docs/macros/axis_tuning.md) | [<img src="./docs/images/vibrations_example.png">](./docs/macros/vibrations_profile.md) |
|
||||||
|
|
||||||
|
> **Note**:
|
||||||
|
>
|
||||||
|
> Be aware that Shake&Tune uses the [Gcode shell command plugin](https://github.com/dw-0/kiauh/blob/master/docs/gcode_shell_command.md) under the hood to call the Python scripts that generate the graphs. While my scripts should be safe, the Gcode shell command plugin also has great potential for abuse if not used carefully for other purposes, since it opens shell access from Klipper.
|
||||||
|
|
||||||
## Installation
|
## Installation
|
||||||
|
|
||||||
For those not using the full [Klippain](https://github.com/Frix-x/klippain), follow these steps to integrate this Shake&Tune module in your setup:
|
Follow these steps to install the Shake&Tune module in your printer:
|
||||||
1. Run the install script over SSH on your printer:
|
1. Be sure to have a working accelerometer on your machine and a `[resonance_tester]` section defined. You can follow the official [Measuring Resonances Klipper documentation](https://www.klipper3d.org/Measuring_Resonances.html) to configure it.
|
||||||
|
1. Install the Shake&Tune package by running over SSH on your printer:
|
||||||
```bash
|
```bash
|
||||||
wget -O - https://raw.githubusercontent.com/Frix-x/klippain-shaketune/main/install.sh | bash
|
wget -O - https://raw.githubusercontent.com/Frix-x/klippain-shaketune/main/install.sh | bash
|
||||||
```
|
```
|
||||||
2. Append the following to your `printer.cfg` file:
|
1. Then, append the following to your `printer.cfg` file and restart Klipper (if preferred, you can include only the needed macros: using `*.cfg` is a convenient way to include them all at once):
|
||||||
```
|
```
|
||||||
[include K-ShakeTune/*.cfg]
|
[include K-ShakeTune/*.cfg]
|
||||||
```
|
```
|
||||||
3. Optionally, if you want to get automatic updates, add the following to your `moonraker.cfg` file:
|
|
||||||
```
|
|
||||||
[update_manager Klippain-ShakeTune]
|
|
||||||
type: git_repo
|
|
||||||
path: ~/klippain_shaketune
|
|
||||||
channel: beta
|
|
||||||
origin: https://github.com/Frix-x/klippain-shaketune.git
|
|
||||||
primary_branch: main
|
|
||||||
managed_services: klipper
|
|
||||||
install_script: install.sh
|
|
||||||
```
|
|
||||||
|
|
||||||
> **Note**:
|
|
||||||
>
|
|
||||||
> If already using my old IS workflow scripts, please remove everything before installing this new module. This include the macros, the Python scripts, the `plot_graph.sh` and the `[gcode_shell_command plot_graph]` section.
|
|
||||||
|
|
||||||
## Usage
|
## Usage
|
||||||
|
|
||||||
Ensure your machine is homed, then invoke one of the following macros as needed:
|
Ensure your machine is homed, then invoke one of the following macros as needed:
|
||||||
- `BELTS_SHAPER_CALIBRATION` for belt resonance graphs, useful for verifying belt tension and differential belt paths behavior.
|
- `AXES_MAP_CALIBRATION` to automatically find Klipper's `axes_map` parameter for your accelerometer orientation (be careful, this is experimental for now and known to give bad results).
|
||||||
- `AXES_SHAPER_CALIBRATION` for input shaper graphs to mitigate ringing/ghosting by tuning Klipper's input shaper system.
|
- `COMPARE_BELTS_RESPONSES` for a differential belt resonance graph, useful for checking relative belt tensions and belt path behaviors on a CoreXY printer.
|
||||||
- `VIBRATIONS_CALIBRATION` for machine vibration graphs to optimize your slicer speed profiles.
|
- `AXES_SHAPER_CALIBRATION` for standard input shaper graphs, used to mitigate ringing/ghosting by tuning Klipper's input shaper filters.
|
||||||
- `EXCITATE_AXIS_AT_FREQ` to sustain a specific excitation frequency, useful to let you inspect and find out what is resonating.
|
- `CREATE_VIBRATIONS_PROFILE` for vibrations graphs as a function of toolhead direction and speed, used to find problematic ranges where the printer could be exposed to more VFAs and optimize your slicer speed profiles and TMC driver parameters.
|
||||||
|
- `EXCITATE_AXIS_AT_FREQ` to maintain a specific excitation frequency, useful to inspect and find out what is resonating.
|
||||||
|
|
||||||
For further insights on the usage of the macros and the generated graphs, refer to the [K-Shake&Tune module documentation](./docs/README.md).
|
For further insights on the usage of these macros and the generated graphs, refer to the [K-Shake&Tune module documentation](./docs/README.md).
|
||||||
|
|||||||
@@ -1,14 +1,59 @@
|
|||||||
# Klippain Shake&Tune module documentation
|
# Klippain Shake&Tune module documentation
|
||||||
|
|
||||||
### Detailed documentation
|

|
||||||
|
|
||||||
1. [Input Shaping and tuning generalities](./is_tuning_generalities.md)
|
## Resonance testing
|
||||||
1. [Belt graphs](./macros/belts_tuning.md)
|
|
||||||
1. [Axis Input Shaper graphs](./macros/axis_tuning.md)
|
|
||||||
1. [Klippain vibrations graphs](./macros/vibrations_tuning.md)
|
|
||||||
|
|
||||||

|
First, check out the **[input shaping and tuning generalities](./is_tuning_generalities.md)** documentation to understand how it all works and what to look for when taking these measurements.
|
||||||
|
|
||||||
### Complementary ressources
|
Then look at the documentation for each type of graph by clicking on them below to run the tests and better understand your results to tune your machine!
|
||||||
|
|
||||||
|
| [Belt response comparison](./macros/belts_tuning.md) | [Axis input shaper graphs](./macros/axis_tuning.md) | [Vibrations profile](./macros/vibrations_profile.md) |
|
||||||
|
|:----------------:|:------------:|:---------------------:|
|
||||||
|
| [<img src="./images/belts_example.png">](./macros/belts_tuning.md) | [<img src="./images/axis_example.png">](./macros/axis_tuning.md) | [<img src="./images/vibrations_example.png">](./macros/vibrations_profile.md) |
|
||||||
|
|
||||||
|
|
||||||
|
## Additional macros
|
||||||
|
|
||||||
|
### AXES_MAP_CALIBRATION (experimental)
|
||||||
|
|
||||||
|
All graphs generated by this package show plots based on accelerometer measurements, typically labeled with the X, Y, and Z axes. It's important to note that if the accelerometer is rotated, its axes may not align correctly with the machine axes, making the plots more difficult to interpret, analyze, and understand. The `AXES_MAP_CALIBRATION` macro is designed to automatically measure the alignment of the accelerometer in order to set it correctly.
|
||||||
|
|
||||||
|
> **Note**:
|
||||||
|
>
|
||||||
|
> This misalignment doesn't affect the measurements because the total sum across all axes is used to set the input shaper filters. It's just an optional but convenient way to configure Klipper's `[adxl345]` (or whichever accelerometer you have) "axes_map" parameter.
|
||||||
|
|
||||||
|
Here are the parameters available when calling this macro:
|
||||||
|
|
||||||
|
| parameters | default value | description |
|
||||||
|
|-----------:|---------------|-------------|
|
||||||
|
|Z_HEIGHT|20|z height to put the toolhead before starting the movements. Be careful, if your accelerometer is mounted under the nozzle, increase it to avoid crashing it on the bed of the machine|
|
||||||
|
|SPEED|80|speed of the toolhead in mm/s for the movements|
|
||||||
|
|ACCEL|1500 (or max printer accel)|accel in mm/s^2 used for all the moves|
|
||||||
|
|TRAVEL_SPEED|120|speed in mm/s used for all the travels moves|
|
||||||
|
|ACCEL_CHIP|"adxl345"|accelerometer chip name in the config|
|
||||||
|
|
||||||
|
The machine will move slightly in +X, +Y, and +Z, and output in the console: `Detected axes_map: -z,y,x`.
|
||||||
|
|
||||||
|
Use this value in your `printer.cfg` config file:
|
||||||
|
```
|
||||||
|
[adxl345] # replace "adxl345" by your correct accelerometer name
|
||||||
|
axes_map: -z,y,x
|
||||||
|
```
|
||||||
|
|
||||||
|
### EXCITATE_AXIS_AT_FREQ
|
||||||
|
|
||||||
|
The `EXCITATE_AXIS_AT_FREQ` macro is particularly useful for troubleshooting mechanical vibrations or resonance issues. This macro allows you to maintain a specific excitation frequency for a set duration, enabling hands-on diagnostics. By touching different components during the excitation, you can identify the source of the vibration, as contact usually stops it.
|
||||||
|
|
||||||
|
Here are the parameters available when calling this macro:
|
||||||
|
|
||||||
|
| parameters | default value | description |
|
||||||
|
|-----------:|---------------|-------------|
|
||||||
|
|FREQUENCY|25|excitation frequency (in Hz) that you want to maintain. Usually, it's the frequency of a peak on one of the graphs|
|
||||||
|
|TIME|10|time in seconds to maintain this excitation|
|
||||||
|
|AXIS|x|axis you want to excitate. Can be set to either "x", "y", "a", "b"|
|
||||||
|
|
||||||
|
|
||||||
|
## Complementary ressources
|
||||||
|
|
||||||
- [Sineos post](https://klipper.discourse.group/t/interpreting-the-input-shaper-graphs/9879) in the Klipper knowledge base
|
- [Sineos post](https://klipper.discourse.group/t/interpreting-the-input-shaper-graphs/9879) in the Klipper knowledge base
|
||||||
|
|||||||
BIN
docs/banner_long.png
Normal file
|
After Width: | Height: | Size: 740 KiB |
|
Before Width: | Height: | Size: 204 KiB After Width: | Height: | Size: 247 KiB |
BIN
docs/images/belt_graphs/chipcomp_adxl.png
Normal file
|
After Width: | Height: | Size: 365 KiB |
BIN
docs/images/belt_graphs/chipcomp_s2dw.png
Normal file
|
After Width: | Height: | Size: 465 KiB |
|
Before Width: | Height: | Size: 152 KiB After Width: | Height: | Size: 641 KiB |
|
Before Width: | Height: | Size: 196 KiB After Width: | Height: | Size: 637 KiB |
BIN
docs/images/shaper_graphs/chipcomp_adxl.png
Normal file
|
After Width: | Height: | Size: 756 KiB |
BIN
docs/images/shaper_graphs/chipcomp_s2dw.png
Normal file
|
After Width: | Height: | Size: 824 KiB |
BIN
docs/images/shaper_graphs/chipcomp_s2dw_2.png
Normal file
|
After Width: | Height: | Size: 757 KiB |
BIN
docs/images/shaper_graphs/fan_maybeproblematic.png
Normal file
|
After Width: | Height: | Size: 660 KiB |
BIN
docs/images/shaper_graphs/fan_notproblematic.png
Normal file
|
After Width: | Height: | Size: 627 KiB |
BIN
docs/images/shaper_graphs/fan_problematic.png
Normal file
|
After Width: | Height: | Size: 684 KiB |
BIN
docs/images/shaper_graphs/good_x.png
Normal file
|
After Width: | Height: | Size: 756 KiB |
BIN
docs/images/shaper_graphs/good_y.png
Normal file
|
After Width: | Height: | Size: 725 KiB |
|
Before Width: | Height: | Size: 149 KiB |
|
Before Width: | Height: | Size: 2.1 MiB |
|
Before Width: | Height: | Size: 2.2 MiB |
|
Before Width: | Height: | Size: 496 KiB After Width: | Height: | Size: 1.3 MiB |
BIN
docs/images/vibrations_graphs/angular_speed_energy_profile.png
Normal file
|
After Width: | Height: | Size: 98 KiB |
BIN
docs/images/vibrations_graphs/global_speed_energy_profile.png
Normal file
|
After Width: | Height: | Size: 157 KiB |
BIN
docs/images/vibrations_graphs/motor_frequency_profile.png
Normal file
|
After Width: | Height: | Size: 71 KiB |
BIN
docs/images/vibrations_graphs/polar_angle_energy_profile.png
Normal file
|
After Width: | Height: | Size: 230 KiB |
|
Before Width: | Height: | Size: 389 KiB |
BIN
docs/images/vibrations_graphs/vibrations_heatmaps.png
Normal file
|
After Width: | Height: | Size: 656 KiB |
@@ -13,25 +13,29 @@ When a 3D printer moves, the motors apply some force to move the toolhead along
|
|||||||
## Generalities on the graphs
|
## Generalities on the graphs
|
||||||
|
|
||||||
When tuning Input Shaper, keep the following in mind:
|
When tuning Input Shaper, keep the following in mind:
|
||||||
1. **Focus on the shape of the graphs, not the exact numbers**. There could be differences between ADXL boards or even printers, so there is no specific "target" value. This means that you shouldn't expect to get the same graphs between different printers, even if they are similar in term of brand, parts, size and assembly.
|
1. **Focus on the shape of the graphs, not the exact numbers**. There could be differences between accelerometer boards or even printers, so there is no specific "target" value. This means that you shouldn't expect to get the same graphs between different printers, even if they are similar in term of brand, parts, size and assembly.
|
||||||
1. Small differences between consecutive test runs are normal, as ADXL quality and sensitivity is quite variable between boards.
|
1. Small differences between consecutive test runs are normal, as accelerometer quality and sensitivity is quite variable between boards.
|
||||||
1. Perform the tests when the machine is heat-soaked and close to printing conditions, as the temperature will impact the machine components such as belt tension or even the frame that is known to expand a little bit.
|
1. Perform the tests when the machine is heat-soaked and close to printing conditions, as the temperature will impact the machine components such as belt tension or even the frame that is known to expand a little bit.
|
||||||
1. Avoid running the toolhead fans during the tests, as they introduce unnecessary noise to the graphs, making them harder to interpret. This means that even if you should heatsoak the printer, you should also refrain from activating the hotend heater during the test, as it will also trigger the hotend fan. However, as a bad fan usually introduce some vibrations, you can use the test to diagnose an unbalanced fan as seen in the [Examples of Input Shaper graphs](./macros/axis_tuning.md) section.
|
1. Avoid running the toolhead fans during the tests, as they introduce unnecessary noise to the graphs, making them harder to interpret. This means that even if you should heatsoak the printer, you should also refrain from activating the hotend heater during the test, as it will also trigger the hotend fan. However, as a bad fan usually introduce some vibrations, you can use the test to diagnose an unbalanced fan as seen in the [Examples of Input Shaper graphs](./macros/axis_tuning.md) section.
|
||||||
1. Ensure the accuracy of your ADXL measurements by running a `MEASURE_AXES_NOISE` test and checking that the result is below 100 for all axes. If it's not, check your ADXL board and wiring before continuing.
|
1. Ensure the accuracy of your accelerometer measurements by running a `MEASURE_AXES_NOISE` test and checking that the result is below 100 for all axes. If it's not, check your accelerometer board and wiring before continuing.
|
||||||
1. The graphs can only show symptoms of possible problems and in different ways. Those symptoms can sometimes suggest causes, but they rarely pinpoint the exact issues. For example, while you may be able to diagnose that some screws are not tightened properly, you will unlikely find which exact screw is problematic using only these tests. You will most always need to tinker and experiment.
|
1. The graphs can only show symptoms of possible problems and in different ways. Those symptoms can sometimes suggest causes, but they rarely pinpoint the exact issues. For example, while you may be able to diagnose that some screws are not tightened properly, you will unlikely find which exact screw is problematic using only these tests. You will most always need to tinker and experiment.
|
||||||
1. Finally, remember why you're running these tests: to get clean prints. Don't become too obsessive over perfect graphs, as the last bits of optimization will probably have the least impact on the printed parts in terms of ringing and ghosting.
|
1. Finally, remember why you're running these tests: to get clean prints. Don't become too obsessive over perfect graphs, as the last bits of optimization will probably have the least impact on the printed parts in terms of ringing and ghosting.
|
||||||
|
|
||||||
|
|
||||||
### Special note on accelerometer (ADXL) mounting point
|
### Note on accelerometer mounting point
|
||||||
Input Shaping algorithms work by suppressing a single resonant frequency (or a range around a single resonant frequency). When setting the filter, **the primary goal is to target the resonant frequency of the toolhead and belts system** (see the [theory behind it](#theory-behind-it)), as this has the most significant impact on print quality and is the root cause of ringing.
|
Input Shaping algorithms are designed to mitigate resonances by targeting a specific resonant frequency or a range around it. When setting the filter, **the primary goal is to target the resonant frequency of the toolhead and belts system** (see the [theory behind it](#theory-behind-it)), as this has the most significant impact on print quality and is the root cause of ringing.
|
||||||
|
|
||||||
When setting up Input Shaper, it is important to consider the accelerometer mounting point. There are mainly two possibilities, each with its pros and cons:
|
Choosing the accelerometer's mounting point is important. There are currently three mounting strategies, each offering distinct advantages:
|
||||||
|
|
||||||
| Directly at the nozzle tip | Near the toolhead's center of gravity |
|
| Mounting Point | Advantages | Considerations |
|
||||||
| --- | --- |
|
| --- | --- | --- |
|
||||||
| This method provides a more accurate and comprehensive measurement of everything in your machine. It captures the main resonant frequency along with other vibrations and movements, such as toolhead wobbling and printer frame movements. This approach is excellent for diagnosing your machine's kinematics and troubleshooting problems. However, it also leads to noisier graphs, making it harder for the algorithm to select the correct filter for input shaping. Graphs may appear worse, but this is due to the different "point of view" of the printer's behavior. | I personally recommend mounting the accelerometer in this way, as it provides a clear view of the main resonant frequency you want to target, allowing for accurate input shaper filter settings. This approach results in cleaner graphs with less visible noise from other subsystem vibrations, making interpretation easier for both automatic algorithms and users. However, this method provides less detail in the graphs and may be slightly less effective for troubleshooting printer problems. |
|
| **Directly at the nozzle tip** | Provides a comprehensive view of all machine vibrations, including the main resonance, but also toolhead wobbling and global frame movements. Ideal for diagnosing kinematic issues and troubleshooting. | Results in noisier data, which may complicate the final Input Shaping filter selection on machines that are not perfect and/or not fully rigid. |
|
||||||
|
| **Near the toolhead's center of gravity** | Provides a view of mostly only the primary resonant frequencies of the toolhead and belts, allowing precise filter selection for Input Shaping. The data is often cleaner, with only severe mechanical issues or very problematic toolhead wobble visible on the graphs. | May provide less detail on secondary vibrations (which have a fairly minor effect on ringing) and may be less effective in diagnosing unrelated mechanical problems. |
|
||||||
|
| **Integrated accelerometer on a CANBus Board** | Simple and effective, requires no additional installation and always available. Can help for diagnosing issues like those caused by bowden tubes, umbilical cords and cable chains. If the toolhead is very rigid, measurements are close enough to those of the center of gravity. | Not accurate for a detailed analysis or diagnosing mechanical issues due to distance from the nozzle tip and potential noise from attached components. |
|
||||||
|
|
||||||
A suggested workflow is to first use the nozzle mount to diagnose mechanical issues, such as loose screws or a bad X carriage. Once the mechanics are in good condition, switch to a mounting point closer to the toolhead's center of gravity for setting the input shaper filter settings by using cleaner graphs that highlights the most impactful frequency.
|
While you should usually try to focus on the toolhead/belts mechanical subsystem for resonance mitigation (since it has the most impact on ringing and print quality), you don't want to overlook the importance of nozzle tip measurements for other sources of vibration. Indeed, if resonance analysis results vary a lot between mounting points, reinforcing the toolhead's rigidity to minimize wobbling and vibrations is recommended. Here is a strategy that attempts to methodically address mechanical issues and then allow for the day-to-day selection of input shaping filters as needed:
|
||||||
|
1. **Diagnosis phase**: Begin with the nozzle tip mount to identify and troubleshoot mechanical issues to ensure the printer components are healthy and the assembly is well done and optimized.
|
||||||
|
1. **Filter selection phase**: If the graphs are mostly clean, you can transition to a mounting point near the toolhead's center of gravity for cleaner data on the main resonance, facilitating accurate Input Shaping filter settings. You can also consider the CANBus integrated accelerometer for its simplicity, especially if the toolhead is particularly rigid and minimally affected by wobble.
|
||||||
|
|
||||||
|
|
||||||
## Theory behind it
|
## Theory behind it
|
||||||
|
|||||||
@@ -11,11 +11,14 @@ Then, call the `AXES_SHAPER_CALIBRATION` macro and look for the graphs in the re
|
|||||||
|
|
||||||
| parameters | default value | description |
|
| parameters | default value | description |
|
||||||
|-----------:|---------------|-------------|
|
|-----------:|---------------|-------------|
|
||||||
|VERBOSE|1|Whether to log things in the console|
|
|
||||||
|FREQ_START|5|Starting excitation frequency|
|
|FREQ_START|5|Starting excitation frequency|
|
||||||
|FREQ_END|133|Maximum excitation frequency|
|
|FREQ_END|133|Maximum excitation frequency|
|
||||||
|HZ_PER_SEC|1|Number of Hz per seconds for the test|
|
|HZ_PER_SEC|1|Number of Hz per seconds for the test|
|
||||||
|AXIS|"all"|Axis you want to test in the list of "all", "X" or "Y"|
|
|AXIS|"all"|Axis you want to test in the list of "all", "X" or "Y"|
|
||||||
|
|SCV|printer square corner velocity|Square corner velocity you want to use to calculate shaper recommendations. Using higher SCV values usually results in more smoothing and lower maximum accelerations|
|
||||||
|
|MAX_SMOOTHING|None|Max smoothing allowed when calculating shaper recommendations|
|
||||||
|
|KEEP_N_RESULTS|3|Total number of results to keep in the result folder after running the test. The older results are automatically cleaned up|
|
||||||
|
|KEEP_CSV|0|Whether or not to keep the CSV data file alongside the PNG graphs|
|
||||||
|
|
||||||
|
|
||||||
## Graphs description
|
## Graphs description
|
||||||
@@ -38,13 +41,13 @@ For setting your Input Shaping filters, rely on the auto-computed values display
|
|||||||
* `MZV` is usually the top pick for well-adjusted machines. It's a good compromise for low remaining vibrations while still allowing pretty good acceleration values. Keep in mind, `MZV` is only recommended by Klipper on good graphs.
|
* `MZV` is usually the top pick for well-adjusted machines. It's a good compromise for low remaining vibrations while still allowing pretty good acceleration values. Keep in mind, `MZV` is only recommended by Klipper on good graphs.
|
||||||
* `EI` can be used as a fallback for challenging graphs. But first, try to fix your mechanical issues before using it: almost every printer should be able to run `MZV` instead.
|
* `EI` can be used as a fallback for challenging graphs. But first, try to fix your mechanical issues before using it: almost every printer should be able to run `MZV` instead.
|
||||||
* `2HUMP_EI` and `3HUMP_EI` are last-resort choices. Usually, they lead to a high level of smoothing in order to suppress the ringing while also using relatively low acceleration values. If they pop up as suggestions, it's likely your machine has underlying mechanical issues (that lead to pretty bad or "wide" graphs).
|
* `2HUMP_EI` and `3HUMP_EI` are last-resort choices. Usually, they lead to a high level of smoothing in order to suppress the ringing while also using relatively low acceleration values. If they pop up as suggestions, it's likely your machine has underlying mechanical issues (that lead to pretty bad or "wide" graphs).
|
||||||
- **Recommended Acceleration** (`accel<=...`): This isn't a standalone figure. It's essential to also consider the `vibr` and `sm` values as it's a compromise between the three. They will give you the percentage of remaining vibrations and the smoothing after Input Shaping, when using the recommended acceleration. Nothing will prevent you from using higher acceleration values; they are not a limit. However, when doing so, Input Shaping may not be able to suppress all the ringing on your parts. Finally, keep in mind that high acceleration values are not useful at all if there is still a high level of remaining vibrations: you should address any mechanical issues first.
|
- **Recommended Acceleration** (`accel<=...`): This isn't a standalone figure. It's essential to also consider the `vibr` and `sm` values as it's a compromise between the three. They will give you the percentage of remaining vibrations and the smoothing after Input Shaping, when using the recommended acceleration. Nothing will prevent you from using higher acceleration values; they are not a limit. However, in this case, Input Shaping may not be able to suppress all the ringing on your parts, and more smoothing will occur. Finally, keep in mind that high acceleration values are not useful at all if there is still a high level of remaining vibrations: you should address any mechanical issues first.
|
||||||
- **The remaining vibrations** (`vibr`): This directly correlates with ringing. It corresponds to the total value of the blue "after shaper" signal. Ideally, you want a filter with minimal or zero vibrations.
|
- **The remaining vibrations** (`vibr`): This directly correlates with ringing. It corresponds to the total value of the "after shaper" signal. Ideally, you want a filter with minimal remaining vibrations.
|
||||||
- **Shaper recommendations**: This script will give you some tailored recommendations based on your graphs. Pick the one that suits your needs:
|
- **Shaper recommendations**: This script will give you some tailored recommendations based on your graphs. Pick the one that suits your needs:
|
||||||
* The "performance" shaper is Klipper's original suggestion that is good for high acceleration while also sometimes allowing a little bit of remaining vibrations. Use it if your goal is speed printing and you don't care much about some remaining ringing.
|
* The "performance" shaper is Klipper's original suggestion, which is good for high acceleration, but sometimes allows a little residual vibration while minimizing smoothing. Use it if your goal is speed printing and you don't care much about some remaining ringing.
|
||||||
* The "low vibration" shaper aims for the lowest level of remaining vibration to ensure the best print quality with minimal ringing. This should be the best bet for most users.
|
* The "low vibration" shaper aims for the lowest level of remaining vibration to ensure the best print quality with minimal ringing. This should be the best bet for most users.
|
||||||
* Sometimes, only a single recommendation called "best" shaper is presented. This means that either no suitable "low vibration" shaper was found (due to a high level of vibration or with too much smoothing) or because the "performance" shaper is also the one with the lowest vibration level.
|
* Sometimes only a single recommendation is given as the "best" shaper. This means that either no suitable "low vibration" shaper was found (due to a high level of residual vibration or too much smoothing), or that the "performance" shaper is also the one with the lowest vibration level.
|
||||||
- **Damping Ratio**: Displayed at the end, this estimation is only reliable when the graph shows a distinct, standalone and clean peak. On a well tuned machine, setting the damping ratio (instead of Klipper's 0.1 default value) can further reduce the ringing at high accelerations and with higher square corner velocities.
|
- **Damping Ratio**: Displayed at the end, this is an estimate based on your data that is used to improve the shaper recommendations for your machine. Defining it in the `[input_shaper]` section (instead of Klipper's default value of 0.1) can further reduce ringing at high accelerations and higher square corner velocities.
|
||||||
|
|
||||||
Then, add to your configuration:
|
Then, add to your configuration:
|
||||||
```
|
```
|
||||||
@@ -74,23 +77,23 @@ That said, interpreting Input Shaper graphs isn't an exact science. While we can
|
|||||||
|
|
||||||
### Good graphs
|
### Good graphs
|
||||||
|
|
||||||
These two graphs are considered good and are what you're aiming for. They each display a single, distinct peak that stands out clearly against the background noise. Note that the main frequencies of the X and Y graph peaks differ. This variance is expected and normal, as explained in the last point of the [useful facts and myths debunking](#useful-facts-and-myths-debunking) section.
|
These two graphs are considered good and are what you're aiming for. They each display a single, distinct peak that stands out clearly against the background noise. Note that the main frequencies of the X and Y graph peaks differ. This variance is expected and normal, as explained in the last point of the [useful facts and myths debunking](#useful-facts-and-myths-debunking) section. The spectrogram is clean with only the resonance diagonals. Note that a fan was running during the test, as shown by the purple vertical line (see section [fan behavior](#fan-behavior)).
|
||||||
|
|
||||||
| Good X graph | Good Y graph |
|
| Good X graph | Good Y graph |
|
||||||
| --- | --- |
|
| --- | --- |
|
||||||
|  |  |
|
|  |  |
|
||||||
|
|
||||||
### Low frequency energy
|
### Low frequency energy
|
||||||
|
|
||||||
These graphs have some low frequency energy (signal near 0 Hz) on a rather low maximum amplitude (around 1e2 or 1e3). This means that there is some binding, rubbing or grinding during movements: basically, something isn't moving freely. Minor low frequency energy in the graphs might be due to a lot of issues such as a faulty idler/bearing or an overly tightened carriage screw that prevents it from moving freely on its linear rail, ... However, major low frequency energy suggests more important problems like improper belt routing (the most common), or a defective motor, ...
|
These graphs have low frequency (near 0 Hz) at a rather low maximum amplitude (around 1e2 or 1e3) signal. This means that there is some binding, rubbing, or grinding during motion: basically, something isn't moving freely. Minor low frequency energy in the graphs can be due to many problems, such as a faulty idler/bearing or an over-tightened carriage screw that prevents it from moving freely on its linear rail, a belt running on a bearing flange, ... However, large amounts of low frequency energy indicate more important problems such as improper belt routing (the most common), or a defective motor, ...
|
||||||
|
|
||||||
Here's how to troubleshoot the issue:
|
Here's how to troubleshoot the issue:
|
||||||
1. **Belts Examination**:
|
1. **Belts Examination**:
|
||||||
- Ensure your belts are properly routed.
|
- Ensure your belts are properly routed.
|
||||||
- Check for correct alignment of the belts on all bearing flanges during movement (check them during a print).
|
- Check for correct alignment of the belts on all bearing flanges during movement (check them during a print).
|
||||||
- Belt dust is often a sign of misalignment or wear.
|
- Belt dust is often a sign of misalignment or wear.
|
||||||
2. **Toolhead behavior on CoreXY printers**: With motors off and the toolhead centered, gently push the Y-axis front-to-back. The toolhead shouldn't move left or right. If it does, one of the belts might be obstructed and requires inspection to find out the problem.
|
1. **Toolhead behavior on CoreXY printers**: With motors off and the toolhead centered, gently push the Y-axis front-to-back. The toolhead shouldn't move left or right. If it does, one of the belts might be obstructed and requires inspection to find out the problem.
|
||||||
3. **Gantry Squareness**:
|
1. **Gantry Squareness**:
|
||||||
- Ensure your gantry is perfectly parallel and square. You can refer to [Nero3D's de-racking video](https://youtu.be/cOn6u9kXvy0?si=ZCSdWU6br3Y9rGsy) for guidance.
|
- Ensure your gantry is perfectly parallel and square. You can refer to [Nero3D's de-racking video](https://youtu.be/cOn6u9kXvy0?si=ZCSdWU6br3Y9rGsy) for guidance.
|
||||||
- After removing the belts, test the toolhead's movement by hand across all positions. Movement should be smooth with no hard points or areas of resistance.
|
- After removing the belts, test the toolhead's movement by hand across all positions. Movement should be smooth with no hard points or areas of resistance.
|
||||||
|
|
||||||
@@ -102,9 +105,9 @@ Here's how to troubleshoot the issue:
|
|||||||
|
|
||||||
Such graph patterns can arise from various factors, and there isn't a one-size-fits-all solution. To address them:
|
Such graph patterns can arise from various factors, and there isn't a one-size-fits-all solution. To address them:
|
||||||
1. A wobbly table can be the cause. So first thing to do is to try with the printer directly on the floor.
|
1. A wobbly table can be the cause. So first thing to do is to try with the printer directly on the floor.
|
||||||
2. Ensure optimal belt tension using the [`BELTS_SHAPER_CALIBRATION` macro](./belts_tuning.md).
|
1. Ensure optimal belt tension using the [`COMPARE_BELTS_RESPONSES` macro](./belts_tuning.md).
|
||||||
3. If problems persist, it might be due to an improperly squared gantry. For correction, refer to [Nero3D's de-racking video](https://youtu.be/cOn6u9kXvy0?si=ZCSdWU6br3Y9rGsy).
|
1. If problems persist, it might be due to an improperly squared gantry. For correction, refer to [Nero3D's de-racking video](https://youtu.be/cOn6u9kXvy0?si=ZCSdWU6br3Y9rGsy).
|
||||||
4. If it's still there... you will need to find out what is resonating to fix it. You can use the `EXCITATE_AXIS_AT_FREQ` macro to help you find it.
|
1. If it's still there... you will need to find out what is resonating to fix it. You can use the `EXCITATE_AXIS_AT_FREQ` macro to help you find it.
|
||||||
|
|
||||||
| Two peaks | Single wide peak |
|
| Two peaks | Single wide peak |
|
||||||
| --- | --- |
|
| --- | --- |
|
||||||
@@ -112,7 +115,7 @@ Such graph patterns can arise from various factors, and there isn't a one-size-f
|
|||||||
|
|
||||||
### Problematic CANBUS speed
|
### Problematic CANBUS speed
|
||||||
|
|
||||||
Using CANBUS toolheads with an integrated ADXL chip can sometimes pose challenges if the CANBUS speed is set too low. While users might lower the bus speed to fix Klipper's timing errors, this change will also affect input shaping measurements. An example outcome of a low bus speed is the following graph that, though generally well-shaped, appears jagged and spiky throughout. Additional low-frequency energy might also be present. For optimal ADXL board operation on your CANBUS toolhead, a speed setting of 500k is the minimum, but 1M is advisable.
|
Using CANBUS toolheads with an integrated accelerometer chip can sometimes pose challenges if the CANBUS speed is set too low. While users might lower the bus speed to fix Klipper's timing errors, this change will also affect input shaping measurements. An example outcome of a low bus speed is the following graph that, though generally well-shaped, appears jagged and spiky throughout. Additional low-frequency energy might also be present. For optimal accelerometer board operation on your CANBUS toolhead, a speed setting of 500k is the minimum, but 1M is advisable. You might want to look at [this excellent guide by Esoterical](https://github.com/Esoterical/voron_canbus/tree/main).
|
||||||
|
|
||||||
| CANBUS problem present | CANBUS problem solved |
|
| CANBUS problem present | CANBUS problem solved |
|
||||||
| --- | --- |
|
| --- | --- |
|
||||||
@@ -120,29 +123,50 @@ Using CANBUS toolheads with an integrated ADXL chip can sometimes pose challenge
|
|||||||
|
|
||||||
### Toolhead or TAP wobble
|
### Toolhead or TAP wobble
|
||||||
|
|
||||||
The [Voron TAP](https://github.com/VoronDesign/Voron-Tap) can introduce anomalies to input shaper graphs, notably on the X graph. Its design with an internal MGN rail introduces a separate and decoupled mass, leading to its own resonance, typically around 125Hz. Combatting this can be pretty challenging, but using premium components and a careful assembly can help mitigate the issue. Ensure you employ a good quality and well-preloaded TAP MGN rail for optimal assembly stiffness, coupled with genuine and strong N52 magnets (avoid lower-quality N35 or N45 substitutes often found on chinese marketplaces). Prioritize careful assembly and consider using the TAP Rev8 version or above.
|
The [Voron TAP](https://github.com/VoronDesign/Voron-Tap) can introduce anomalies to input shaper graphs, notably on the X graph. Its design with an internal MGN rail introduces a separate and decoupled mass, leading to its own resonance, typically around 125Hz.
|
||||||
|
|
||||||
Additionally, without a Voron TAP, small 125hz peaks can sometimes tie back to the toolhead itself. Common culprits include loosely fitted screws or a bad quality X linear MGN axis that can have some play in the carriage, leading to slight toolhead wobbling. This is often represented as a Z component in the graphs.
|
Small 125Hz peaks are also most often due to the toolhead itself, since most toolheads are about the same mass. Common culprits include loose screws or a bad quality X linear MGN axis that can have some play in the carriage, causing the toolhead to wobble slightly. This is often shown as a Z component in the graphs and can be amplified by the bowden tube or an umbilical that applies some forces on top of the toolhead.
|
||||||
|
|
||||||
If your graph shows this kind of anomaly, begin by disassembling the toolhead up to the X carriage. Check for any looseness, then reassemble, ensuring everything is tightened properly for a rigid assembly. Also, don't forget to check your extruder and validate its assembly as well. Finally, ensure you have some filament loaded during measurements to prevent extruder gear vibrations.
|
If your graph shows this kind of anomaly:
|
||||||
|
1. Start by looking at the bowden tube and umbilical to make sure they are not exerting excessive force on the toolhead. You want them to create no drag or as little drag as possible.
|
||||||
|
1. If that's not enough, continue disassembling the toolhead down to the X carriage. Check for any loose or cracked parts, then reassemble, making sure everything is tightened properly for a rigid assembly.
|
||||||
|
1. When using TAP, this can be quite a challenge to combat, but using quality components and careful assembly can help mitigate the problem. In particular, be sure to use a well-preloaded TAP MGN rail for maximum rigidity, coupled with genuine and strong N52 magnets that are properly seated and not loose.
|
||||||
|
1. Don't forget to check your extruder and make sure you have some filament loaded during the measurements to avoid extruder gear vibration.
|
||||||
|
|
||||||
| TAP wobble problem | TAP wobble problem partially mitigated<br/>Or toolhead wobbling |
|
| TAP wobble problem | TAP wobble problem mitigated<br/>Or toolhead wobbling |
|
||||||
| --- | --- |
|
| --- | --- |
|
||||||
|  |  |
|
|  |  |
|
||||||
|
|
||||||
### Unbalanced fan
|
### Fan behavior
|
||||||
|
|
||||||
The presence of an unbalanced or badly running fan can be directly observed in the graphs. While you should leave the toolhead fans off during the final IS tuning, you can use this test to validate their correct behavior: an unbalanced fan usually adds a very thin peak around 100-150Hz that disappears when the fan is off. Also please note that an unbalanced fan's constant frequency is manifested as a vertical line on the bottom spectrogram.
|
The presence of an unbalanced or poorly running fan can be directly observed in the spectrogram:
|
||||||
|
1. A properly running fan can be seen as a vertical purple line on the spectrogram that doesn't shine too much. This is perfectly normal because it's running at a constant speed (i.e. constant frequency) throughout the test. The purple color means that its vibration energy is quite low and should not cause any problems. There are no corresponding peaks on the top graph.
|
||||||
|
1. When the vertical line on the spectrogram starts to become yellowish, pay special attention to the top graph to see if there is a corresponding peak. In the example from the middle below, the fan is at the limit with a very small bump corresponding to it. So it may or may not cause trouble... Do some test prints and look for VFAs; if you find some, you may want to replace the fan.
|
||||||
|
1. If the vertical line is bright orange/yellow, there will most likely be a corresponding thin but high peak on the top graph. This fan is out of balance, producing bad vibrations and needs to be replaced.
|
||||||
|
|
||||||
| Unbalanced fan running | Unbalanced fan off |
|
| Healthy fan running | Fan starting to be problematic | Fan needs to be changed |
|
||||||
| --- | --- |
|
| --- | --- | --- |
|
||||||
|  |  |
|
|  |  |  |
|
||||||
|
|
||||||
|
### Spectrogram lightshow (LIS2DW)
|
||||||
|
|
||||||
|
The integration of LIS2DW as a resonance measuring device in Klipper is becoming more and more common, especially because some manufacturers are promoting its superiority over the established ADXL345. It's indeed a new generation chip that should be better at measuring traditional "accelerations". However, a detailed comparison of their datasheets and practical measurements paints a more complex picture: the LIS2DW boasts greater sensitivity, but it has a lower sampling rate and produces significant aliasing that results in a "lightshow" effect on the spectrogram, characterized by multiple spurious resonance lines parallel to the main resonance, accompanied by intersecting interference lines that distort the harmonic profile.
|
||||||
|
|
||||||
|
While in most cases the overall shape of the upper resonance curve, including resonant frequency and damping ratio, should be close to reality with fairly similar input shaping filter recommendations, this aliasing makes it difficult to identify subtle details and complicates the diagnosis of mechanical problems. In particular, it introduces a potential misinterpretation of "[binding](#low-frequency-energy)" due to a global offset of the curve. In the worst cases (see the last example below), the aliasing is too severe and adds too much noise to the graph, making it unusable.
|
||||||
|
|
||||||
|
> **Note**:
|
||||||
|
>
|
||||||
|
> It seems that some LIS2DW chips are better than others: in some cases aliasing is not a problem, but it can also be very problematic and lead to bad graphs, as seen in the "Extreme Aliasing" example below.
|
||||||
|
|
||||||
|
| ADXL345 measurement | LIS2DW measurement | LIS2DW extreme aliasing |
|
||||||
|
| --- | --- | --- |
|
||||||
|
|  |  |  |
|
||||||
|
|
||||||
### Crazy graphs and miscs
|
### Crazy graphs and miscs
|
||||||
|
|
||||||
The depicted graphs are challenging to analyze due to the overwhelming noise across the spectrum. Such patterns are often associated with an improperly assembled and non-squared mechanical structure. To address this:
|
The depicted graphs are challenging to analyze due to the overwhelming noise across the spectrum. Such patterns are often associated with an improperly assembled and non-squared mechanical structure. To address this:
|
||||||
1. Refer to the [Low frequency energy](#low-frequency-energy) section for troubleshooting steps.
|
1. Refer to the [Low frequency energy](#low-frequency-energy) section for troubleshooting steps.
|
||||||
2. If unresolved, consider disassembling the entire gantry, inspect the printed and mechanical components, and ensure meticulous reassembly. A thorough and careful assembly should help alleviate the issue. Measure again post-assembly for changes.
|
1. If unresolved, consider disassembling the entire gantry, inspect the printed and mechanical components, and ensure meticulous reassembly. A thorough and careful assembly should help alleviate the issue. Measure again post-assembly for changes.
|
||||||
|
|
||||||
Also please note that for this kind of graph, as they mainly consist of noise, Klipper's algorithm recommendations must not be used and will not help with ringing. You will need to fix your mechanical issues instead!
|
Also please note that for this kind of graph, as they mainly consist of noise, Klipper's algorithm recommendations must not be used and will not help with ringing. You will need to fix your mechanical issues instead!
|
||||||
|
|
||||||
|
|||||||
@@ -1,20 +1,21 @@
|
|||||||
# Belt relative difference measurements
|
# Belt relative difference measurements
|
||||||
|
|
||||||
The `BELTS_SHAPER_CALIBRATION` macro is dedicated for CoreXY machines where it can help you to diagnose belt path problems by measuring and plotting the differences between their behavior. It will also help you tension your belts at the same tension.
|
The `COMPARE_BELTS_RESPONSES` macro is dedicated for CoreXY machines where it can help you to diagnose belt path problems by measuring and plotting the differences between their behavior. It will also help you tension your belts at the same tension.
|
||||||
|
|
||||||
|
|
||||||
## Usage
|
## Usage
|
||||||
|
|
||||||
**Before starting, ensure that the belts are properly tensioned**. For example, you can follow the [Voron belt tensioning documentation](https://docs.vorondesign.com/tuning/secondary_printer_tuning.html#belt-tension). This is crucial: you need a good starting point to then iterate from it!
|
**Before starting, ensure that the belts are properly tensioned**. For example, you can follow the [Voron belt tensioning documentation](https://docs.vorondesign.com/tuning/secondary_printer_tuning.html#belt-tension). This is crucial: you need a good starting point to then iterate from it!
|
||||||
|
|
||||||
Then, call the `BELTS_SHAPER_CALIBRATION` macro and look for the graphs in the results folder. Here are the parameters available:
|
Then, call the `COMPARE_BELTS_RESPONSES` macro and look for the graphs in the results folder. Here are the parameters available:
|
||||||
|
|
||||||
| parameters | default value | description |
|
| parameters | default value | description |
|
||||||
|-----------:|---------------|-------------|
|
|-----------:|---------------|-------------|
|
||||||
|VERBOSE|1|Whether to log things in the console|
|
|
||||||
|FREQ_START|5|Starting excitation frequency|
|
|FREQ_START|5|Starting excitation frequency|
|
||||||
|FREQ_END|133|Maximum excitation frequency|
|
|FREQ_END|133|Maximum excitation frequency|
|
||||||
|HZ_PER_SEC|1|Number of Hz per seconds for the test|
|
|HZ_PER_SEC|1|Number of Hz per seconds for the test|
|
||||||
|
|KEEP_N_RESULTS|3|Total number of results to keep in the result folder after running the test. The older results are automatically cleaned up|
|
||||||
|
|KEEP_CSV|0|Whether or not to keep the CSV data files alongside the PNG graphs|
|
||||||
|
|
||||||
|
|
||||||
## Graphs description
|
## Graphs description
|
||||||
@@ -59,7 +60,6 @@ The following graphs show the effect of incorrect or uneven belt tension. Rememb
|
|||||||
| The A belt tension is slightly lower than the B belt tension. This can be quickly remedied by tightening the screw only about one-half to one full turn. |  |
|
| The A belt tension is slightly lower than the B belt tension. This can be quickly remedied by tightening the screw only about one-half to one full turn. |  |
|
||||||
| B belt tension is significantly lower than the A belt. If you encounter this graph, I recommend going back to the [Voron belt tensioning documentation](https://docs.vorondesign.com/tuning/secondary_printer_tuning.html#belt-tension) for a more solid base. However, you could slightly increase the B tension and decrease the A tension, but exercise caution to avoid diverging from the recommended 110Hz base. |  |
|
| B belt tension is significantly lower than the A belt. If you encounter this graph, I recommend going back to the [Voron belt tensioning documentation](https://docs.vorondesign.com/tuning/secondary_printer_tuning.html#belt-tension) for a more solid base. However, you could slightly increase the B tension and decrease the A tension, but exercise caution to avoid diverging from the recommended 110Hz base. |  |
|
||||||
|
|
||||||
|
|
||||||
### Belt path problem
|
### Belt path problem
|
||||||
|
|
||||||
If there's an issue within the belt path, aligning and overlaying the curve might be unachievable even with proper belt tension. Begin by verifying that each belt has **the exact same number of teeth**. Then, inspect the belt paths, bearings, any signs of wear (like belt dust), and ensure the belt aligns correctly on all bearing flanges during motion.
|
If there's an issue within the belt path, aligning and overlaying the curve might be unachievable even with proper belt tension. Begin by verifying that each belt has **the exact same number of teeth**. Then, inspect the belt paths, bearings, any signs of wear (like belt dust), and ensure the belt aligns correctly on all bearing flanges during motion.
|
||||||
@@ -69,3 +69,13 @@ If there's an issue within the belt path, aligning and overlaying the curve migh
|
|||||||
| On this chart, there are two peaks. The first pair of peaks seems nearly aligned, but the second peak appears solely on the B belt, significantly deviating from the A belt. This suggests an issue with the belt path, likely with the B belt. |  |
|
| On this chart, there are two peaks. The first pair of peaks seems nearly aligned, but the second peak appears solely on the B belt, significantly deviating from the A belt. This suggests an issue with the belt path, likely with the B belt. |  |
|
||||||
| This chart is quite complex, displaying 3 peaks. While all the pairs seem well-aligned and tension ok, there are more than just two total peaks because `[1]` is split in two smaller peaks. This could be an issue, but it's not certain. It's recommended to generate the [Axis Input Shaper Graphs](./axis_tuning.md) to determine its impact. |  |
|
| This chart is quite complex, displaying 3 peaks. While all the pairs seem well-aligned and tension ok, there are more than just two total peaks because `[1]` is split in two smaller peaks. This could be an issue, but it's not certain. It's recommended to generate the [Axis Input Shaper Graphs](./axis_tuning.md) to determine its impact. |  |
|
||||||
| This graph might indicate too low belt tension, but also potential binding, friction or something impeding the toolhead's smooth movement. Indeed, the signal strength is considerably low (with a peak around 300k, compared to the typical ~1M) and is primarily filled with noise. Start by going back [here](https://docs.vorondesign.com/tuning/secondary_printer_tuning.html#belt-tension) to establish a robust tension foundation. Next, produce the [Axis Input Shaper Graphs](./axis_tuning.md) to identify any binding and address the issue. |  |
|
| This graph might indicate too low belt tension, but also potential binding, friction or something impeding the toolhead's smooth movement. Indeed, the signal strength is considerably low (with a peak around 300k, compared to the typical ~1M) and is primarily filled with noise. Start by going back [here](https://docs.vorondesign.com/tuning/secondary_printer_tuning.html#belt-tension) to establish a robust tension foundation. Next, produce the [Axis Input Shaper Graphs](./axis_tuning.md) to identify any binding and address the issue. |  |
|
||||||
|
|
||||||
|
### Spectrogram lightshow (LIS2DW)
|
||||||
|
|
||||||
|
The integration of LIS2DW as a resonance measuring device in Klipper is becoming more and more common, especially because some manufacturers are promoting its superiority over the established ADXL345. It's indeed a new generation chip that should be better at measuring traditional "accelerations". However, a detailed comparison of their datasheets and practical measurements paints a more complex picture: the LIS2DW boasts greater sensitivity, but it has a lower sampling rate and produces significant aliasing that results in a "lightshow" effect on the spectrogram, characterized by multiple spurious resonance lines parallel to the main resonance, accompanied by intersecting interference lines that distort the harmonic profile.
|
||||||
|
|
||||||
|
For the belt graph, this can be problematic because it can introduce a lot of noise into the results and make them difficult to interpret, and it will probably tell you that there is a mechanical problem when there isn't.
|
||||||
|
|
||||||
|
| ADXL345 measurement | LIS2DW measurement |
|
||||||
|
| --- | --- |
|
||||||
|
|  |  |
|
||||||
|
|||||||
99
docs/macros/vibrations_profile.md
Normal file
@@ -0,0 +1,99 @@
|
|||||||
|
# Machine vibrations profiles
|
||||||
|
|
||||||
|
The `CREATE_VIBRATIONS_PROFILE` macro analyzes accelerometer data to plot the vibration profile of your 3D printer. The resulting graphs highlight optimal print speeds and angles that produce the least amount of vibration. It provides a technical basis for adjustments in your slicer profiles, but also in hardware setup and TMC driver parameters to improve print quality and reduce VFAs (vertical fines artifacts).
|
||||||
|
|
||||||
|
> **Warning**
|
||||||
|
>
|
||||||
|
> You will need to calibrate the standard input shaper algorithms of Klipper using the other macros first! This test should be used as a last step to calibrate your printer with Shake&Tune.
|
||||||
|
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
Call the `CREATE_VIBRATIONS_PROFILE` macro with the speed range you want to measure. Here are the parameters available:
|
||||||
|
|
||||||
|
| parameters | default value | description |
|
||||||
|
|-----------:|---------------|-------------|
|
||||||
|
|SIZE|100|maximum size in mm of the circle in which the recorded movements take place|
|
||||||
|
|Z_HEIGHT|20|z height to put the toolhead before starting the movements. Be careful, if your accelerometer is mounted under the nozzle, increase it to avoid crashing it on the bed of the machine|
|
||||||
|
|ACCEL|3000 (or max printer accel)|accel in mm/s^2 used for all moves. Try to keep it relatively low to avoid dynamic effects that alter the measurements, but high enough to achieve a constant speed for >~70% of the segments. 3000 is a reasonable default for most printers, unless you want to record at very high speed, in which case you will want to increase SIZE and decrease ACCEL a bit.|
|
||||||
|
|MAX_SPEED|200|maximum speed of the toolhead in mm/s to record for analysis|
|
||||||
|
|SPEED_INCREMENT|2|toolhead speed increments in mm/s between each movement|
|
||||||
|
|TRAVEL_SPEED|200|speed in mm/s used for all the travels moves|
|
||||||
|
|ACCEL_CHIP|"adxl345"|accelerometer chip name in the config|
|
||||||
|
|KEEP_N_RESULTS|3|Total number of results to keep in the result folder after running the test. The older results are automatically cleaned up|
|
||||||
|
|KEEP_CSV|0|Whether or not to keep the CSV data files alongside the PNG graphs (archived in a tarball)|
|
||||||
|
|
||||||
|
|
||||||
|
## Graphs description
|
||||||
|
|
||||||
|
The `CREATE_VIBRATIONS_PROFILE` macro results are constituted of a set of 6 plots. At the top of the figure you can also see all the detected motor, current and TMC driver parameters. These notes are just for reference in case you want to tinker with them and don't forget what you changed between each run of the macro.
|
||||||
|
|
||||||
|

|
||||||
|
|
||||||
|
### Global Speed Energy Profile
|
||||||
|
|
||||||
|
| Example | description |
|
||||||
|
|:-----|-------------|
|
||||||
|
||This plot shows the relationship between toolhead speed (mm/s) and vibrational energy, providing a global view of how speed impacts vibration across all movements. By using speeds from the green zones, your printer will run more smoothly and you will minimize vibrations and related fine artifacts in prints|
|
||||||
|
|
||||||
|
This graph is the most important one of this tool. You want to use it to adapt your slicer profile, especially by looking at the "vibration metric" curve, which will help you find which speeds can be problematic for your printer. Here's the magic behind it, broken down into two key parts:
|
||||||
|
1. **Spectrum Variance**: This is like the mood ring of your printer, showing how the vibes (a.k.a vibrations) change when printing from different angles. If the "vibration metric" is low, it means your printer is keeping its cool, staying consistent no matter the angle. But if it spikes, it's a sign that some angles are making your printer jitter more than a caffeinated squirrel. *Imagine it like this: You're looking for a chill party vibe where the music's good at every angle, not one where you turn a corner and suddenly it's too loud or too soft.*
|
||||||
|
2. **Spectrum Max**: This one's about the max volume of the party, or how loud the strongest vibration is across all angles at any speed. We're aiming to avoid the speeds that crank up the volume too high, causing a resonance rave in the motors. *Think of it this way: You don't want the bass so high that it feels like your heart's going to beat out of your chest. We're looking for a nice background level where everyone can chat and have a good time.*
|
||||||
|
|
||||||
|
And why do we care so much about finding these speeds? Because during a print, the toolhead will move in all directions depending on the geometry, and we want a speed that's like a good friend, reliable no matter what the situation. Fortunately, since the motors in our printers share their vibes without non-linear mixing and just add up (think of it as each doing its own dance without bumping into each other), we can find those happy green zones on the graph: these are the speeds that keep the vibe cool and the energy just right, making them perfect for all your print jobs.
|
||||||
|
|
||||||
|
### Polar Angle Energy Profile
|
||||||
|
|
||||||
|
| Example | description |
|
||||||
|
|:-----|-------------|
|
||||||
|
||Shows how vibrational energy varies with the direction where the toolhead is running. It helps in identifying angles that produce less vibration, and potentially in detecting asymmetries in the belt paths for a CoreXY printer|
|
||||||
|
|
||||||
|
This plot is like your go-to playlist for finding those angles where the vibe is just right. But here's the thing: when printing, your toolhead will groove in all directions and angles, depending on the geometry of your parts, so sticking to just one angle isn't possible. My tip to make the most of this chart for your prints: if you're working on something rectangular, try to align it so that most of the edges match the angles that's least likely to make your printer jitter. For those sleek CoreXY printers, aiming for 45/135 degrees is usually a hit, while the trusty Cartesian printers groove best at 0/90 degrees. And for everything else? Well, there's not much more to do here except rely on the [Global Speed Energy Profile chart](#global-speed-energy-profile) to tune your slicer profile speeds instead.
|
||||||
|
|
||||||
|
Now, onto the symmetry indicator. Think of this tool as the dance coach for your printer, especially designed for those with a symmetrical setup like CoreXY models. It's all about using some pretty neat math (cross-correlation, to be exact) to check out the vibes from both sides of the dance floor. Picture it as a top-notch party dancer, scanning the room at every angle, judging each dancer, and only giving top marks when everyone is perfectly in sync. This tool is ace at catching any sneakiness in your motor control or belt path, highlighting any "butterfly" shapes or even the slightest variations in the motors' resonance patterns. It's like having a magnifying glass that points out exactly where the party fouls are, helping you to fix them and keep your prints rolling out smooth and stunning.
|
||||||
|
|
||||||
|
### Angular Speed Energy Profiles
|
||||||
|
|
||||||
|
| Example | description |
|
||||||
|
|:-----|-------------|
|
||||||
|
||Provides a detailed view of how energy distribution changes with speed for specific angles. It's useful for fine-tuning speeds for different directions of motion, or for tracking and diagnosing your printer's behavior across the major axes|
|
||||||
|
|
||||||
|
This chart is like a snapshot, capturing the vibe at certain angles of your printing party. But remember, it's just a glimpse into a few specific angles and doesn't fully reveal the whole dance floor where the toolhead moves in every direction, vibing with the unique geometry of your parts. So, think of it as a way to peek into how everyone's grooving in each corner of the party. It's great for a quick check-up to see how the vibe is holding up, but when it comes to setting the rhythm of your slicer speeds, you're going to want to use the [Global Speed Energy Profile chart](#global-speed-energy-profile) instead.
|
||||||
|
|
||||||
|
### Vibrations Heatmaps
|
||||||
|
|
||||||
|
| Example | description |
|
||||||
|
|:-----|-------------|
|
||||||
|
||Both plots provide a comprehensive overview of vibrational energy across speeds and angles. They visually identify zones of high and low energy, aiding in the comprehensive understanding of the printer motors' behavior. It's what is captured by the accelerometer and the basis of all the other plots|
|
||||||
|
|
||||||
|
Both heatmaps lay down the vibe of vibrational energy across all speeds and angles, painting a picture of how the beat spreads throughout your printer's dance floor. The polar heatmap gives you a 360-degree whirl of the action, while the regular one lays it out in a classic 2D groove, yet both are vibing to the same tune and showing you where the energy's hot and popping and where it's cool and mellow across your printer's operational range. Think of it as the unique fingerprint of your motor's behavior captured by the accelerometer, it's the raw rhythm of your printer in action.
|
||||||
|
|
||||||
|
Because the scale is both normalized and logarithmic, you're looking for a heatmap (or spectrogram) that has a cool, consistent "orangish" vibe throughout, signaling not so much change over the spectrum with fairly low motor resonances. See areas in your heatmap that swing from deep purple/black to bright white/yellow? That's a sign that your printer motors are hitting high resonances at certain angles and speed combinations that are above the baseline vibrations outside of those areas. But remember, this is just the lay of the land, a snapshot of the scene: tweaking this vibe directly may not be easy, but you can still [play around with the TMC driver parameters](#improving-the-results) to adjust the beats and find a smoother rhythm.
|
||||||
|
|
||||||
|
### Motor Frequency Profile
|
||||||
|
|
||||||
|
| Example | description |
|
||||||
|
|:-----|-------------|
|
||||||
|
||Identifies the resonant frequencies of the motors and their damping ratios. Informative for now, but will be used later|
|
||||||
|
|
||||||
|
For now, this graph is purely informational and is a measurement of the motor's natural resonance profile. Think of this plot as a sneak peek at the inner workings of your printer's dance floor. It's not quite ready to hit the main stage for practical use, but just you wait... Keep an eye on this chart as it hints at future remixes where you'll get to play DJ and tweak and tune your printer's performance like never before.
|
||||||
|
|
||||||
|
|
||||||
|
## Improving the results
|
||||||
|
|
||||||
|
These graphs essentially depict the behavior of the motor control on your machine. While there isn't much room for easy adjustments to enhance them, most of you should only utilize them to configure your slicer profile to avoid problematic speeds.
|
||||||
|
|
||||||
|
However, if you want to go down the rabbit hole, as the data in these graphs largely hinges on the type of motors, their physical characteristics and the way they are controlled by the TMC drivers black magic, there are opportunities for optimization. Tweaking TMC parameters allows you to adjust the peaks, enhance machine performance, or diminish overall machine noise. For this process, I recommend directly using the [Klipper TMC Autotune](https://github.com/andrewmcgr/klipper_tmc_autotune) plugin, which should simplify everything considerably. But keep in mind that it's still an experimental plugin and it's not perfect.
|
||||||
|
|
||||||
|
For individuals inclined to reach the bottom of the rabbit hole and that want to handle this manually, the use of an oscilloscope is mandatory. Majority of the necessary resources are available directly on the Trinamics TMC website:
|
||||||
|
1. You should first consult the datasheet specific to your TMC model for guidance on parameter names and their respective uses.
|
||||||
|
2. Then to tune the parameters, have a look at the application notes available on their platform, especially [AN001](https://www.trinamic.com/fileadmin/assets/Support/AppNotes/AN001-SpreadCycle.pdf), [AN002](https://www.trinamic.com/fileadmin/assets/Support/AppNotes/AN002-StallGuard2.pdf), [AN003](https://www.trinamic.com/fileadmin/assets/Support/AppNotes/AN003_-_DcStep_Basics_and_Wizard.pdf) and [AN009](https://www.trinamic.com/fileadmin/assets/Support/AppNotes/AN009_Tuning_coolStep.pdf).
|
||||||
|
3. For a more comprehensive understanding, you might also want to explore [AN015](https://www.trinamic.com/fileadmin/assets/Support/AppNotes/AN015-StealthChop_Performance.pdf) and [AN021](https://www.trinamic.com/fileadmin/assets/Support/AppNotes/AN021-StealthChop_Performance_comparison_V1.12.pdf ), although they are more geared towards enhancing comprehension than calibration, akin to the TMC datasheet.
|
||||||
|
|
||||||
|
For reference, the default settings used in Klipper are:
|
||||||
|
```
|
||||||
|
#driver_TBL: 2
|
||||||
|
#driver_TOFF: 3
|
||||||
|
#driver_HEND: 0
|
||||||
|
#driver_HSTRT: 5
|
||||||
|
```
|
||||||
@@ -1,48 +0,0 @@
|
|||||||
# Vibrations measurements
|
|
||||||
|
|
||||||
The `VIBRATIONS_CALIBRATION` macro helps you to identify the speed settings that exacerbate the vibrations of the machine (ie. where the frame and motors resonate badly). This will help you to find the clean speed ranges where the machine is more silent and less prone to vertical fine artifacts on the prints.
|
|
||||||
|
|
||||||
> **Warning**
|
|
||||||
>
|
|
||||||
> You will first need to calibrate the standard input shaper algorithm of Klipper using the other macros! This test should not be used before as it would be useless and the results invalid.
|
|
||||||
|
|
||||||
|
|
||||||
## Usage
|
|
||||||
|
|
||||||
Call the `VIBRATIONS_CALIBRATION` macro with the direction and speed range you want to measure. Here are the parameters available:
|
|
||||||
|
|
||||||
| parameters | default value | description |
|
|
||||||
|-----------:|---------------|-------------|
|
|
||||||
|SIZE|60|size in mm of the area where the movements are done|
|
|
||||||
|DIRECTION|"XY"|direction vector where you want to do the measurements. Can be set to either "XY", "AB", "ABXY", "A", "B", "X", "Y", "Z", "E"|
|
|
||||||
|Z_HEIGHT|20|z height to put the toolhead before starting the movements. Be careful, if your ADXL is under the nozzle, increase it to avoid a crash of the ADXL on the bed of the machine|
|
|
||||||
|VERBOSE|1|Wether to log the current speed in the console|
|
|
||||||
|MIN_SPEED|20|minimum speed of the toolhead in mm/s for the movements|
|
|
||||||
|MAX_SPEED|200|maximum speed of the toolhead in mm/s for the movements|
|
|
||||||
|SPEED_INCREMENT|2|speed increments of the toolhead in mm/s between every movements|
|
|
||||||
|TRAVEL_SPEED|200|speed in mm/s used for all the travels moves|
|
|
||||||
|ACCEL_CHIP|"adxl345"|accelerometer chip name in the config|
|
|
||||||
|
|
||||||
|
|
||||||
## Graphs description
|
|
||||||
|
|
||||||

|
|
||||||
|
|
||||||
## Improving the results
|
|
||||||
|
|
||||||
These graphs essentially depict the behavior of the motor control on your machine. While there isn't much room for easy adjustments to enhance them, most of you should only utilize them to configure your slicer profile to avoid problematic speeds.
|
|
||||||
|
|
||||||
However, if you want to go the rabbit hole, as the data in these graphs largely hinges on the type of motors and their physical characteristic and their control by the TMC black magic, there are opportunities for optimization. Tweaking TMC parameters allow to adjust the peaks, enhance machine performance, or diminish overall machine noise. For this process, I recommend to directly use the [Klipper TMC Autotune](https://github.com/andrewmcgr/klipper_tmc_autotune) plugin, which should simplify everything considerably. But keep in mind that it's still an experimental plugin and it's not perfect.
|
|
||||||
|
|
||||||
For individuals inclined to reach the bottom of the rabbit hole and that want to handle this manually, the use of an oscilloscope is mandatory. Majority of the necessary resources are available directly on the Trinamics TMC website:
|
|
||||||
1. You should first consult the datasheet specific to your TMC model for guidance on parameter names and their respective uses.
|
|
||||||
2. Then to tune the parameters, have a look at the application notes available on their platform, especially [AN001](https://www.trinamic.com/fileadmin/assets/Support/AppNotes/AN001-SpreadCycle.pdf), [AN002](https://www.trinamic.com/fileadmin/assets/Support/AppNotes/AN002-StallGuard2.pdf), [AN003](https://www.trinamic.com/fileadmin/assets/Support/AppNotes/AN003_-_DcStep_Basics_and_Wizard.pdf) and [AN009](https://www.trinamic.com/fileadmin/assets/Support/AppNotes/AN009_Tuning_coolStep.pdf).
|
|
||||||
3. For a more comprehensive understanding, you might also want to explore [AN015](https://www.trinamic.com/fileadmin/assets/Support/AppNotes/AN015-StealthChop_Performance.pdf) and [AN021](https://www.trinamic.com/fileadmin/assets/Support/AppNotes/AN021-StealthChop_Performance_comparison_V1.12.pdf ), although they are more geared towards enhancing comprehension than calibration, akin to the TMC datasheet.
|
|
||||||
|
|
||||||
For reference, the default settings used in Klipper are:
|
|
||||||
```
|
|
||||||
#driver_TBL: 2
|
|
||||||
#driver_TOFF: 3
|
|
||||||
#driver_HEND: 0
|
|
||||||
#driver_HSTRT: 5
|
|
||||||
```
|
|
||||||
75
install.sh
@@ -1,8 +1,11 @@
|
|||||||
#!/bin/bash
|
#!/bin/bash
|
||||||
|
|
||||||
USER_CONFIG_PATH="${HOME}/printer_data/config"
|
USER_CONFIG_PATH="${HOME}/printer_data/config"
|
||||||
|
MOONRAKER_CONFIG="${HOME}/printer_data/config/moonraker.conf"
|
||||||
KLIPPER_PATH="${HOME}/klipper"
|
KLIPPER_PATH="${HOME}/klipper"
|
||||||
|
|
||||||
K_SHAKETUNE_PATH="${HOME}/klippain_shaketune"
|
K_SHAKETUNE_PATH="${HOME}/klippain_shaketune"
|
||||||
|
K_SHAKETUNE_VENV_PATH="${HOME}/klippain_shaketune-env"
|
||||||
|
|
||||||
set -eu
|
set -eu
|
||||||
export LC_ALL=C
|
export LC_ALL=C
|
||||||
@@ -14,6 +17,11 @@ function preflight_checks {
|
|||||||
exit -1
|
exit -1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
if ! command -v python3 &> /dev/null; then
|
||||||
|
echo "[ERROR] Python 3 is not installed. Please install Python 3 to use the Shake&Tune module!"
|
||||||
|
exit -1
|
||||||
|
fi
|
||||||
|
|
||||||
if [ "$(sudo systemctl list-units --full -all -t service --no-legend | grep -F 'klipper.service')" ]; then
|
if [ "$(sudo systemctl list-units --full -all -t service --no-legend | grep -F 'klipper.service')" ]; then
|
||||||
printf "[PRE-CHECK] Klipper service found! Continuing...\n\n"
|
printf "[PRE-CHECK] Klipper service found! Continuing...\n\n"
|
||||||
else
|
else
|
||||||
@@ -21,11 +29,30 @@ function preflight_checks {
|
|||||||
exit -1
|
exit -1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if [ -d "${HOME}/klippain_config" ]; then
|
install_package_requirements
|
||||||
if [ -f "${USER_CONFIG_PATH}/.VERSION" ]; then
|
}
|
||||||
echo "[ERROR] Klippain full installation found! Nothing is needed in order to use the K-Shake&Tune module!"
|
|
||||||
exit -1
|
# Function to check if a package is installed
|
||||||
|
function is_package_installed {
|
||||||
|
dpkg -s "$1" &> /dev/null
|
||||||
|
return $?
|
||||||
|
}
|
||||||
|
|
||||||
|
function install_package_requirements {
|
||||||
|
packages=("python3-venv" "libopenblas-dev" "libatlas-base-dev")
|
||||||
|
packages_to_install=""
|
||||||
|
|
||||||
|
for package in "${packages[@]}"; do
|
||||||
|
if is_package_installed "$package"; then
|
||||||
|
echo "$package is already installed"
|
||||||
|
else
|
||||||
|
packages_to_install="$packages_to_install $package"
|
||||||
fi
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
if [ -n "$packages_to_install" ]; then
|
||||||
|
echo "Installing missing packages: $packages_to_install"
|
||||||
|
sudo apt update && sudo apt install -y $packages_to_install
|
||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -48,9 +75,31 @@ function check_download {
|
|||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function setup_venv {
|
||||||
|
if [ ! -d "${K_SHAKETUNE_VENV_PATH}" ]; then
|
||||||
|
echo "[SETUP] Creating Python virtual environment..."
|
||||||
|
python3 -m venv "${K_SHAKETUNE_VENV_PATH}"
|
||||||
|
else
|
||||||
|
echo "[SETUP] Virtual environment already exists. Continuing..."
|
||||||
|
fi
|
||||||
|
|
||||||
|
source "${K_SHAKETUNE_VENV_PATH}/bin/activate"
|
||||||
|
echo "[SETUP] Installing/Updating K-Shake&Tune dependencies..."
|
||||||
|
pip install --upgrade pip
|
||||||
|
pip install -r "${K_SHAKETUNE_PATH}/requirements.txt"
|
||||||
|
deactivate
|
||||||
|
printf "\n"
|
||||||
|
}
|
||||||
|
|
||||||
function link_extension {
|
function link_extension {
|
||||||
echo "[INSTALL] Linking scripts to your config directory..."
|
echo "[INSTALL] Linking scripts to your config directory..."
|
||||||
ln -frsn ${K_SHAKETUNE_PATH}/K-ShakeTune ${USER_CONFIG_PATH}/K-ShakeTune
|
|
||||||
|
if [ -d "${HOME}/klippain_config" ] && [ -f "${USER_CONFIG_PATH}/.VERSION" ]; then
|
||||||
|
echo "[INSTALL] Klippain full installation found! Linking module to the script folder of Klippain"
|
||||||
|
ln -frsn ${K_SHAKETUNE_PATH}/K-ShakeTune ${USER_CONFIG_PATH}/scripts/K-ShakeTune
|
||||||
|
else
|
||||||
|
ln -frsn ${K_SHAKETUNE_PATH}/K-ShakeTune ${USER_CONFIG_PATH}/K-ShakeTune
|
||||||
|
fi
|
||||||
}
|
}
|
||||||
|
|
||||||
function link_gcodeshellcommandpy {
|
function link_gcodeshellcommandpy {
|
||||||
@@ -62,11 +111,24 @@ function link_gcodeshellcommandpy {
|
|||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function add_updater {
|
||||||
|
update_section=$(grep -c '\[update_manager[a-z ]* Klippain-ShakeTune\]' $MOONRAKER_CONFIG || true)
|
||||||
|
if [ "$update_section" -eq 0 ]; then
|
||||||
|
echo -n "[INSTALL] Adding update manager to moonraker.conf..."
|
||||||
|
cat ${K_SHAKETUNE_PATH}/moonraker.conf >> $MOONRAKER_CONFIG
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
function restart_klipper {
|
function restart_klipper {
|
||||||
echo "[POST-INSTALL] Restarting Klipper..."
|
echo "[POST-INSTALL] Restarting Klipper..."
|
||||||
sudo systemctl restart klipper
|
sudo systemctl restart klipper
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function restart_moonraker {
|
||||||
|
echo "[POST-INSTALL] Restarting Moonraker..."
|
||||||
|
sudo systemctl restart moonraker
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
printf "\n=============================================\n"
|
printf "\n=============================================\n"
|
||||||
echo "- Klippain Shake&Tune module install script -"
|
echo "- Klippain Shake&Tune module install script -"
|
||||||
@@ -76,6 +138,9 @@ printf "=============================================\n\n"
|
|||||||
# Run steps
|
# Run steps
|
||||||
preflight_checks
|
preflight_checks
|
||||||
check_download
|
check_download
|
||||||
|
setup_venv
|
||||||
link_extension
|
link_extension
|
||||||
|
add_updater
|
||||||
link_gcodeshellcommandpy
|
link_gcodeshellcommandpy
|
||||||
restart_klipper
|
restart_klipper
|
||||||
|
restart_moonraker
|
||||||
|
|||||||
11
moonraker.conf
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
|
||||||
|
## Klippain Shake&Tune automatic update management
|
||||||
|
[update_manager Klippain-ShakeTune]
|
||||||
|
type: git_repo
|
||||||
|
origin: https://github.com/Frix-x/klippain-shaketune.git
|
||||||
|
path: ~/klippain_shaketune
|
||||||
|
virtualenv: ~/klippain_shaketune-env
|
||||||
|
requirements: requirements.txt
|
||||||
|
system_dependencies: system-dependencies.json
|
||||||
|
primary_branch: main
|
||||||
|
managed_services: klipper
|
||||||
29
pyproject.toml
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
[project]
|
||||||
|
name = "Shake&Tune"
|
||||||
|
description = "Klipper streamlined input shaper workflow and calibration tools"
|
||||||
|
readme = "README.md"
|
||||||
|
requires-python = ">= 3.9"
|
||||||
|
authors = [
|
||||||
|
{name = "Félix Boisselier", email = "felix@fboisselier.fr"}
|
||||||
|
]
|
||||||
|
keywords = ["klipper", "input shaper", "calibration", "3d printer"]
|
||||||
|
license = {file = "LICENSE"}
|
||||||
|
|
||||||
|
[project.urls]
|
||||||
|
Repository = "https://github.com/Frix-x/klippain-shaketune"
|
||||||
|
Documentation = "https://github.com/Frix-x/klippain-shaketune/tree/main/docs"
|
||||||
|
Issues = "https://github.com/Frix-x/klippain-shaketune/issues"
|
||||||
|
Changelog = "https://github.com/Frix-x/klippain-shaketune/releases"
|
||||||
|
|
||||||
|
[tool.ruff]
|
||||||
|
line-length = 120 # We all have modern screens now and I believe this should be brought in line with current technology
|
||||||
|
indent-width = 4
|
||||||
|
target-version = "py39"
|
||||||
|
|
||||||
|
[tool.ruff.lint]
|
||||||
|
select = ["E4", "E7", "E9", "F", "B"]
|
||||||
|
unfixable = ["B"]
|
||||||
|
|
||||||
|
[tool.ruff.format]
|
||||||
|
quote-style = "single"
|
||||||
|
skip-magic-trailing-comma = false
|
||||||
4
requirements.txt
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
GitPython==3.1.40
|
||||||
|
matplotlib==3.8.2
|
||||||
|
numpy==1.26.2
|
||||||
|
scipy==1.11.4
|
||||||
0
src/graph_creators/__init.py__
Normal file
154
src/graph_creators/analyze_axesmap.py
Normal file
@@ -0,0 +1,154 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
######################################
|
||||||
|
###### AXE_MAP DETECTION SCRIPT ######
|
||||||
|
######################################
|
||||||
|
# Written by Frix_x#0161 #
|
||||||
|
|
||||||
|
import optparse
|
||||||
|
|
||||||
|
import numpy as np
|
||||||
|
from scipy.signal import butter, filtfilt
|
||||||
|
|
||||||
|
from ..helpers.locale_utils import print_with_c_locale
|
||||||
|
|
||||||
|
NUM_POINTS = 500
|
||||||
|
|
||||||
|
|
||||||
|
######################################################################
|
||||||
|
# Computation
|
||||||
|
######################################################################
|
||||||
|
|
||||||
|
|
||||||
|
def accel_signal_filter(data, cutoff=2, fs=100, order=5):
|
||||||
|
nyq = 0.5 * fs
|
||||||
|
normal_cutoff = cutoff / nyq
|
||||||
|
b, a = butter(order, normal_cutoff, btype='low', analog=False)
|
||||||
|
filtered_data = filtfilt(b, a, data)
|
||||||
|
filtered_data -= np.mean(filtered_data)
|
||||||
|
return filtered_data
|
||||||
|
|
||||||
|
|
||||||
|
def find_first_spike(data):
|
||||||
|
min_index, max_index = np.argmin(data), np.argmax(data)
|
||||||
|
return ('-', min_index) if min_index < max_index else ('', max_index)
|
||||||
|
|
||||||
|
|
||||||
|
def get_movement_vector(data, start_idx, end_idx):
|
||||||
|
if start_idx < end_idx:
|
||||||
|
vector = []
|
||||||
|
for i in range(3):
|
||||||
|
vector.append(np.mean(data[i][start_idx:end_idx], axis=0))
|
||||||
|
return vector
|
||||||
|
else:
|
||||||
|
return np.zeros(3)
|
||||||
|
|
||||||
|
|
||||||
|
def angle_between(v1, v2):
|
||||||
|
v1_u = v1 / np.linalg.norm(v1)
|
||||||
|
v2_u = v2 / np.linalg.norm(v2)
|
||||||
|
return np.arccos(np.clip(np.dot(v1_u, v2_u), -1.0, 1.0))
|
||||||
|
|
||||||
|
|
||||||
|
def compute_errors(filtered_data, spikes_sorted, accel_value, num_points):
|
||||||
|
# Get the movement start points in the correct order from the sorted bag of spikes
|
||||||
|
movement_starts = [spike[0][1] for spike in spikes_sorted]
|
||||||
|
|
||||||
|
# Theoretical unit vectors for X, Y, Z printer axes
|
||||||
|
printer_axes = {'x': np.array([1, 0, 0]), 'y': np.array([0, 1, 0]), 'z': np.array([0, 0, 1])}
|
||||||
|
|
||||||
|
alignment_errors = {}
|
||||||
|
sensitivity_errors = {}
|
||||||
|
for i, axis in enumerate(['x', 'y', 'z']):
|
||||||
|
movement_start = movement_starts[i]
|
||||||
|
movement_end = movement_start + num_points
|
||||||
|
movement_vector = get_movement_vector(filtered_data, movement_start, movement_end)
|
||||||
|
alignment_errors[axis] = angle_between(movement_vector, printer_axes[axis])
|
||||||
|
|
||||||
|
measured_accel_magnitude = np.linalg.norm(movement_vector)
|
||||||
|
if accel_value != 0:
|
||||||
|
sensitivity_errors[axis] = abs(measured_accel_magnitude - accel_value) / accel_value * 100
|
||||||
|
else:
|
||||||
|
sensitivity_errors[axis] = None
|
||||||
|
|
||||||
|
return alignment_errors, sensitivity_errors
|
||||||
|
|
||||||
|
|
||||||
|
######################################################################
|
||||||
|
# Startup and main routines
|
||||||
|
######################################################################
|
||||||
|
|
||||||
|
|
||||||
|
def parse_log(logname):
|
||||||
|
with open(logname) as f:
|
||||||
|
for header in f:
|
||||||
|
if not header.startswith('#'):
|
||||||
|
break
|
||||||
|
if not header.startswith('freq,psd_x,psd_y,psd_z,psd_xyz'):
|
||||||
|
# Raw accelerometer data
|
||||||
|
return np.loadtxt(logname, comments='#', delimiter=',')
|
||||||
|
# Power spectral density data or shaper calibration data
|
||||||
|
raise ValueError(
|
||||||
|
'File %s does not contain raw accelerometer data and therefore '
|
||||||
|
'is not supported by this script. Please use the official Klipper '
|
||||||
|
'calibrate_shaper.py script to process it instead.' % (logname,)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def axesmap_calibration(lognames, accel=None):
|
||||||
|
# Parse the raw data and get them ready for analysis
|
||||||
|
raw_datas = [parse_log(filename) for filename in lognames]
|
||||||
|
if len(raw_datas) > 1:
|
||||||
|
raise ValueError('Analysis of multiple CSV files at once is not possible with this script')
|
||||||
|
|
||||||
|
filtered_data = [accel_signal_filter(raw_datas[0][:, i + 1]) for i in range(3)]
|
||||||
|
spikes = [find_first_spike(filtered_data[i]) for i in range(3)]
|
||||||
|
spikes_sorted = sorted([(spikes[0], 'x'), (spikes[1], 'y'), (spikes[2], 'z')], key=lambda x: x[0][1])
|
||||||
|
|
||||||
|
# Using the previous variables to get the axes_map and errors
|
||||||
|
axes_map = ','.join([f'{spike[0][0]}{spike[1]}' for spike in spikes_sorted])
|
||||||
|
# alignment_error, sensitivity_error = compute_errors(filtered_data, spikes_sorted, accel, NUM_POINTS)
|
||||||
|
|
||||||
|
results = f'Detected axes_map:\n {axes_map}\n'
|
||||||
|
|
||||||
|
# TODO: work on this function that is currently not giving good results...
|
||||||
|
# results += "Accelerometer angle deviation:\n"
|
||||||
|
# for axis, angle in alignment_error.items():
|
||||||
|
# angle_degrees = np.degrees(angle) # Convert radians to degrees
|
||||||
|
# results += f" {axis.upper()} axis: {angle_degrees:.2f} degrees\n"
|
||||||
|
|
||||||
|
# results += "Accelerometer sensitivity error:\n"
|
||||||
|
# for axis, error in sensitivity_error.items():
|
||||||
|
# results += f" {axis.upper()} axis: {error:.2f}%\n"
|
||||||
|
|
||||||
|
return results
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
# Parse command-line arguments
|
||||||
|
usage = '%prog [options] <raw logs>'
|
||||||
|
opts = optparse.OptionParser(usage)
|
||||||
|
opts.add_option('-o', '--output', type='string', dest='output', default=None, help='filename of output graph')
|
||||||
|
opts.add_option(
|
||||||
|
'-a', '--accel', type='string', dest='accel', default=None, help='acceleration value used to do the movements'
|
||||||
|
)
|
||||||
|
options, args = opts.parse_args()
|
||||||
|
if len(args) < 1:
|
||||||
|
opts.error('No CSV file(s) to analyse')
|
||||||
|
if options.accel is None:
|
||||||
|
opts.error('You must specify the acceleration value used when generating the CSV file (option -a)')
|
||||||
|
try:
|
||||||
|
accel_value = float(options.accel)
|
||||||
|
except ValueError:
|
||||||
|
opts.error('Invalid acceleration value. It should be a numeric value.')
|
||||||
|
|
||||||
|
results = axesmap_calibration(args, accel_value)
|
||||||
|
print_with_c_locale(results)
|
||||||
|
|
||||||
|
if options.output is not None:
|
||||||
|
with open(options.output, 'w') as f:
|
||||||
|
f.write(results)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
main()
|
||||||
558
src/graph_creators/graph_belts.py
Normal file
@@ -0,0 +1,558 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
#################################################
|
||||||
|
######## CoreXY BELTS CALIBRATION SCRIPT ########
|
||||||
|
#################################################
|
||||||
|
# Written by Frix_x#0161 #
|
||||||
|
|
||||||
|
import optparse
|
||||||
|
import os
|
||||||
|
from collections import namedtuple
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
import matplotlib
|
||||||
|
import matplotlib.colors
|
||||||
|
import matplotlib.font_manager
|
||||||
|
import matplotlib.pyplot as plt
|
||||||
|
import matplotlib.ticker
|
||||||
|
import numpy as np
|
||||||
|
from scipy.interpolate import griddata
|
||||||
|
|
||||||
|
matplotlib.use('Agg')
|
||||||
|
|
||||||
|
from ..helpers.common_func import (
|
||||||
|
compute_curve_similarity_factor,
|
||||||
|
compute_spectrogram,
|
||||||
|
detect_peaks,
|
||||||
|
parse_log,
|
||||||
|
setup_klipper_import,
|
||||||
|
)
|
||||||
|
from ..helpers.locale_utils import print_with_c_locale, set_locale
|
||||||
|
|
||||||
|
ALPHABET = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'  # For paired peaks names (A, B, C, ...)

# Minimum PSD amplitude, as a fraction of the curve maximum, for a peak to be detected
PEAKS_DETECTION_THRESHOLD = 0.20
# K parameter passed to compute_curve_similarity_factor() (presumably the sigmoid
# steepness used when mapping the cross-correlation to a percentage — confirm in helper)
CURVE_SIMILARITY_SIGMOID_K = 0.6
# Weight of the similarity factor used to damp the MHI base percentage in compute_mhi()
DC_GRAIN_OF_SALT_FACTOR = 0.75
# Empirical scale used to normalize the spectrogram variability metric to a percentage
DC_THRESHOLD_METRIC = 1.5e9
# Number of unpaired peaks at which the MHI is pushed all the way to 100%
DC_MAX_UNPAIRED_PEAKS_ALLOWED = 4
|
||||||
|
|
||||||
|
# Define the SignalData namedtuple: one belt's frequency-domain measurement (PSD over
# freqs plus detected peaks; paired_peaks/unpaired_peaks are filled in later by pair_peaks()).
# The typename now matches the bound name (was 'CalibrationData', a copy-paste slip that
# produced a misleading repr).
SignalData = namedtuple('SignalData', ['freqs', 'psd', 'peaks', 'paired_peaks', 'unpaired_peaks'])
|
||||||
|
|
||||||
|
# Klippain brand palette used for all the curves, titles and colormaps in this module
KLIPPAIN_COLORS = {
    'purple': '#70088C',
    'orange': '#FF8D32',
    'dark_purple': '#150140',
    'dark_orange': '#F24130',
    'red_pink': '#F2055C',
}
|
||||||
|
|
||||||
|
|
||||||
|
######################################################################
|
||||||
|
# Computation of the PSD graph
|
||||||
|
######################################################################
|
||||||
|
|
||||||
|
|
||||||
|
# This function creates pairs of peaks that are close in frequency on two curves (that are known
# to be resonance points and must be similar on both belts on a CoreXY kinematic)
def pair_peaks(peaks1, freqs1, psd1, peaks2, freqs2, psd2):
    """Greedily pair peaks of two PSD curves by closest frequency.

    Args:
        peaks1, peaks2: indices of the detected peaks in each curve.
        freqs1, freqs2: frequency bins of each curve.
        psd1, psd2: PSD values of each curve.

    Returns:
        (paired_peaks, unpaired_peaks1, unpaired_peaks2) where each paired peak is
        a tuple ((idx1, freq1, psd1[idx1]), (idx2, freq2, psd2[idx2])).
    """
    # Compute a dynamic detection threshold to filter and pair peaks efficiently
    # even if the signal is very noisy (this gets clipped to a maximum of 10Hz diff)
    distances = []
    for p1 in peaks1:
        for p2 in peaks2:
            distances.append(abs(freqs1[p1] - freqs2[p2]))

    # Edge case: at least one curve has no detected peak, so nothing can be paired.
    # This also avoids calling np.median()/np.percentile() on an empty array, which
    # would emit a RuntimeWarning and produce a NaN threshold.
    if not distances:
        return [], list(peaks1), list(peaks2)

    distances = np.array(distances)

    # Robust outlier-style cutoff: median + 1.5 * IQR, capped at 10Hz
    median_distance = np.median(distances)
    iqr = np.percentile(distances, 75) - np.percentile(distances, 25)

    threshold = median_distance + 1.5 * iqr
    threshold = min(threshold, 10)

    # Pair the peaks using the dynamic threshold (greedy: closest pair first)
    paired_peaks = []
    unpaired_peaks1 = list(peaks1)
    unpaired_peaks2 = list(peaks2)

    while unpaired_peaks1 and unpaired_peaks2:
        # NOTE(review): starting at threshold + 1 accepts distances up to 1Hz above the
        # computed threshold — confirm this extra slack is intended
        min_distance = threshold + 1
        pair = None

        for p1 in unpaired_peaks1:
            for p2 in unpaired_peaks2:
                distance = abs(freqs1[p1] - freqs2[p2])
                if distance < min_distance:
                    min_distance = distance
                    pair = (p1, p2)

        if pair is None:  # No more pairs below the threshold
            break

        p1, p2 = pair
        paired_peaks.append(((p1, freqs1[p1], psd1[p1]), (p2, freqs2[p2], psd2[p2])))
        unpaired_peaks1.remove(p1)
        unpaired_peaks2.remove(p2)

    return paired_peaks, unpaired_peaks1, unpaired_peaks2
|
||||||
|
|
||||||
|
|
||||||
|
######################################################################
|
||||||
|
# Computation of the differential spectrogram
|
||||||
|
######################################################################
|
||||||
|
|
||||||
|
|
||||||
|
# Resample source_data (2D) onto the (target_x, target_y) grid so both spectrograms
# share the same time and frequency dimensions for the differential computation
def interpolate_2d(target_x, target_y, source_x, source_y, source_data):
    """Nearest-neighbour resampling of a 2D array from one rectilinear grid to another.

    Returns an array of shape (len(target_y), len(target_x)) with NaN replaced by 0.
    """
    # Build the flattened (x, y) coordinate lists for both grids (y-major order,
    # matching the row-major reshape below)
    src_xx, src_yy = np.meshgrid(source_x, source_y)
    source_points = np.column_stack((src_xx.ravel(), src_yy.ravel()))
    tgt_xx, tgt_yy = np.meshgrid(target_x, target_y)
    target_points = np.column_stack((tgt_xx.ravel(), tgt_yy.ravel()))

    # Nearest-neighbour interpolation, reshaped back to the target grid; any NaN
    # produced by griddata is zeroed out
    resampled = griddata(source_points, source_data.ravel(), target_points, method='nearest')
    resampled = resampled.reshape((len(target_y), len(target_x)))
    return np.nan_to_num(resampled)
|
||||||
|
|
||||||
|
|
||||||
|
# Combine two similar spectrograms - i.e. from both belts paths - by subtracting the signals
# to create a new composite spectrogram. The result is a divergent but mostly centered
# spectrogram (center will be white) with colored zones highlighting differences in the
# belts paths. The summed (absolute) version is used for the MHI calculation.
def compute_combined_spectrogram(data1, data2):
    """Return (combined_sum, combined_divergent, bins, t) for the two raw datasets."""
    pdata1, bins1, t1 = compute_spectrogram(data1)
    pdata2, bins2, t2 = compute_spectrogram(data2)

    # Resample the second spectrogram onto the first one's time/frequency grid
    aligned_pdata2 = interpolate_2d(bins1, t1, bins2, t2, pdata2)

    # Signed difference feeds the diverging colormap; its absolute value feeds the MHI
    divergent = pdata1 - aligned_pdata2
    summed = np.abs(divergent)

    return summed, divergent, bins1, t1
|
||||||
|
|
||||||
|
|
||||||
|
# Compute a composite and highly subjective value indicating the "mechanical health of the
# printer (0 to 100%)": the likelihood of mechanical issues. It is based on the differential
# spectrogram sum of gradient, salted with the estimated similarity cross-correlation from
# compute_curve_similarity_factor() and with the number of unpaired peaks. The result is a
# percentage quantifying the machine behavior around the main resonances (above 50% should
# be considered as probably problematic).
def compute_mhi(combined_data, similarity_coefficient, num_unpaired_peaks):
    """Return (mhi_percentage, textual_message) for the given differential spectrogram."""
    magnitude = np.abs(combined_data)

    # "Total variability metric": sum of gradient magnitudes, emphasizing regions of the
    # spectrogram with rapid magnitude changes (like the edges of resonance peaks)
    variability = np.sum(np.abs(np.gradient(magnitude)))

    # Scale to a percentage using the empirical threshold constant
    base_pct = (np.log1p(variability) / np.log1p(DC_THRESHOLD_METRIC)) * 100

    # First grain of salt: damp the percentage by the belts similarity coefficient
    salted_pct = base_pct * (1 - DC_GRAIN_OF_SALT_FACTOR * (similarity_coefficient / 100))

    # Second grain of salt: blend towards 100% as unpaired peaks accumulate
    confidence = num_unpaired_peaks / DC_MAX_UNPAIRED_PEAKS_ALLOWED
    mhi = (1 - confidence) * salted_pct + confidence * 100

    # Clip the final result into the [0, 100] range
    mhi = np.clip(mhi, 0, 100)

    return mhi, mhi_lut(mhi)
|
||||||
|
|
||||||
|
|
||||||
|
# LUT to transform the MHI into a textual value easy to understand for the users of the script
def mhi_lut(mhi):
    """Map a numeric MHI percentage (expected 0-100) to a human readable health message."""
    ranges = [
        (0, 30, 'Excellent mechanical health'),
        (30, 45, 'Good mechanical health'),
        (45, 55, 'Acceptable mechanical health'),
        (55, 70, 'Potential signs of a mechanical issue'),
        (70, 85, 'Likely a mechanical issue'),
        (85, 100, 'Mechanical issue detected'),
    ]
    for lower, upper, message in ranges:
        # The lower bound is exclusive, except for exactly 0: compute_mhi() clips its
        # result to [0, 100], so an MHI of 0 is a valid value and must not fall through
        # to the error message (it previously did)
        if lower < mhi <= upper or (mhi == 0 and lower == 0):
            return message

    # Only reachable for out-of-range input (should not happen with a clipped MHI)
    return 'Error computing MHI value'
|
||||||
|
|
||||||
|
|
||||||
|
######################################################################
|
||||||
|
# Graphing
|
||||||
|
######################################################################
|
||||||
|
|
||||||
|
|
||||||
|
def plot_compare_frequency(ax, lognames, signal1, signal2, similarity_factor, max_freq):
    """Draw the two belts' PSD curves on *ax*, with peak markers and an offsets table.

    Args:
        ax: matplotlib Axes to draw on.
        lognames: the two CSV file paths; the belt letter (A/B) is extracted from them.
        signal1, signal2: SignalData tuples with freqs/psd and paired/unpaired peaks filled in.
        similarity_factor: estimated similarity percentage shown in the title and legend.
        max_freq: upper bound of the X axis in Hz.
    """
    # Get the belt name for the legend to avoid putting the full file name
    # (first character of the last underscore-separated token of the file name)
    signal1_belt = (lognames[0].split('/')[-1]).split('_')[-1][0]
    signal2_belt = (lognames[1].split('/')[-1]).split('_')[-1][0]

    # Annotate each belt letter with its CoreXY axis direction; warn when the files
    # are not named with the expected A/B suffixes
    if signal1_belt == 'A' and signal2_belt == 'B':
        signal1_belt += ' (axis 1,-1)'
        signal2_belt += ' (axis 1, 1)'
    elif signal1_belt == 'B' and signal2_belt == 'A':
        signal1_belt += ' (axis 1, 1)'
        signal2_belt += ' (axis 1,-1)'
    else:
        print_with_c_locale(
            "Warning: belts doesn't seem to have the correct name A and B (extracted from the filename.csv)"
        )

    # Plot the two belts PSD signals
    ax.plot(signal1.freqs, signal1.psd, label='Belt ' + signal1_belt, color=KLIPPAIN_COLORS['purple'])
    ax.plot(signal2.freqs, signal2.psd, label='Belt ' + signal2_belt, color=KLIPPAIN_COLORS['orange'])

    # Trace the "relax region" (also used as a threshold to filter and detect the peaks):
    # everything below PEAKS_DETECTION_THRESHOLD of the lower of the two curve maxima
    psd_lowest_max = min(signal1.psd.max(), signal2.psd.max())
    peaks_warning_threshold = PEAKS_DETECTION_THRESHOLD * psd_lowest_max
    ax.axhline(y=peaks_warning_threshold, color='black', linestyle='--', linewidth=0.5)
    ax.fill_between(signal1.freqs, 0, peaks_warning_threshold, color='green', alpha=0.15, label='Relax Region')

    # Trace and annotate the peaks on the graph
    paired_peak_count = 0
    unpaired_peak_count = 0
    offsets_table_data = []

    # Paired peaks: black 'x' markers on both curves, a dotted connector, labels A1/A2,
    # B1/B2, ... and a row in the offsets table with the frequency/amplitude deltas
    for _, (peak1, peak2) in enumerate(signal1.paired_peaks):
        label = ALPHABET[paired_peak_count]
        # Relative amplitude difference in %, normalized by the larger of the two peaks
        amplitude_offset = abs(
            ((signal2.psd[peak2[0]] - signal1.psd[peak1[0]]) / max(signal1.psd[peak1[0]], signal2.psd[peak2[0]])) * 100
        )
        frequency_offset = abs(signal2.freqs[peak2[0]] - signal1.freqs[peak1[0]])
        offsets_table_data.append([f'Peaks {label}', f'{frequency_offset:.1f} Hz', f'{amplitude_offset:.1f} %'])

        ax.plot(signal1.freqs[peak1[0]], signal1.psd[peak1[0]], 'x', color='black')
        ax.plot(signal2.freqs[peak2[0]], signal2.psd[peak2[0]], 'x', color='black')
        ax.plot(
            [signal1.freqs[peak1[0]], signal2.freqs[peak2[0]]],
            [signal1.psd[peak1[0]], signal2.psd[peak2[0]]],
            ':',
            color='gray',
        )

        ax.annotate(
            label + '1',
            (signal1.freqs[peak1[0]], signal1.psd[peak1[0]]),
            textcoords='offset points',
            xytext=(8, 5),
            ha='left',
            fontsize=13,
            color='black',
        )
        ax.annotate(
            label + '2',
            (signal2.freqs[peak2[0]], signal2.psd[peak2[0]]),
            textcoords='offset points',
            xytext=(8, 5),
            ha='left',
            fontsize=13,
            color='black',
        )
        paired_peak_count += 1

    # Unpaired peaks on belt 1: numbered bold red annotations
    for peak in signal1.unpaired_peaks:
        ax.plot(signal1.freqs[peak], signal1.psd[peak], 'x', color='black')
        ax.annotate(
            str(unpaired_peak_count + 1),
            (signal1.freqs[peak], signal1.psd[peak]),
            textcoords='offset points',
            xytext=(8, 5),
            ha='left',
            fontsize=13,
            color='red',
            weight='bold',
        )
        unpaired_peak_count += 1

    # Unpaired peaks on belt 2: numbering continues from belt 1
    for peak in signal2.unpaired_peaks:
        ax.plot(signal2.freqs[peak], signal2.psd[peak], 'x', color='black')
        ax.annotate(
            str(unpaired_peak_count + 1),
            (signal2.freqs[peak], signal2.psd[peak]),
            textcoords='offset points',
            xytext=(8, 5),
            ha='left',
            fontsize=13,
            color='red',
            weight='bold',
        )
        unpaired_peak_count += 1

    # Add estimated similarity to the graph
    ax2 = ax.twinx()  # To split the legends in two boxes
    ax2.yaxis.set_visible(False)
    ax2.plot([], [], ' ', label=f'Estimated similarity: {similarity_factor:.1f}%')
    ax2.plot([], [], ' ', label=f'Number of unpaired peaks: {unpaired_peak_count}')

    # Setting axis parameters, grid and graph title
    ax.set_xlabel('Frequency (Hz)')
    ax.set_xlim([0, max_freq])
    ax.set_ylabel('Power spectral density')
    psd_highest_max = max(signal1.psd.max(), signal2.psd.max())
    ax.set_ylim([0, psd_highest_max + psd_highest_max * 0.05])

    ax.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
    ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
    ax.ticklabel_format(axis='y', style='scientific', scilimits=(0, 0))
    ax.grid(which='major', color='grey')
    ax.grid(which='minor', color='lightgrey')
    fontP = matplotlib.font_manager.FontProperties()
    fontP.set_size('small')
    ax.set_title(
        'Belts Frequency Profiles (estimated similarity: {:.1f}%)'.format(similarity_factor),
        fontsize=14,
        color=KLIPPAIN_COLORS['dark_orange'],
        weight='bold',
    )

    # Print the table of offsets on top of the graph below the original legend (upper right)
    if len(offsets_table_data) > 0:
        columns = [
            '',
            'Frequency delta',
            'Amplitude delta',
        ]
        offset_table = ax.table(
            cellText=offsets_table_data,
            colLabels=columns,
            bbox=[0.66, 0.75, 0.33, 0.15],
            loc='upper right',
            cellLoc='center',
        )
        offset_table.auto_set_font_size(False)
        offset_table.set_fontsize(8)
        offset_table.auto_set_column_width([0, 1, 2])
        offset_table.set_zorder(100)
        # Semi-transparent white background so the table stays readable over the curves
        cells = [key for key in offset_table.get_celld().keys()]
        for cell in cells:
            offset_table[cell].set_facecolor('white')
            offset_table[cell].set_alpha(0.6)

    ax.legend(loc='upper left', prop=fontP)
    ax2.legend(loc='upper right', prop=fontP)

    return
|
||||||
|
|
||||||
|
|
||||||
|
def plot_difference_spectrogram(ax, signal1, signal2, t, bins, combined_divergent, textual_mhi, max_freq):
    """Draw the differential (belt1 - belt2) spectrogram on *ax* with peak markers.

    Args:
        ax: matplotlib Axes to draw on.
        signal1, signal2: SignalData tuples (used here for their peak lists and freqs).
        t, bins: time and frequency axes of the combined spectrogram.
        combined_divergent: signed differential spectrogram (belt1 - belt2).
        textual_mhi: human readable MHI message shown in the legend.
        max_freq: upper bound of the X axis in Hz.
    """
    ax.set_title('Differential Spectrogram', fontsize=14, color=KLIPPAIN_COLORS['dark_orange'], weight='bold')
    # Empty plot used only to get the MHI text into the legend box
    ax.plot([], [], ' ', label=f'{textual_mhi} (experimental)')

    # Draw the differential spectrogram with a specific custom norm to get orange or purple
    # values where there is signal, and white near zero. imshow is better suited here than
    # pcolormesh since its result is already rasterized and we don't need to keep vector
    # graphics when saving to a final .png file. Using it also allows saving ~150-200MB of
    # RAM during the "fig.savefig" operation.
    colors = [
        KLIPPAIN_COLORS['dark_orange'],
        KLIPPAIN_COLORS['orange'],
        'white',
        KLIPPAIN_COLORS['purple'],
        KLIPPAIN_COLORS['dark_purple'],
    ]
    cm = matplotlib.colors.LinearSegmentedColormap.from_list(
        'klippain_divergent', list(zip([0, 0.25, 0.5, 0.75, 1], colors))
    )
    # TwoSlopeNorm keeps zero at the colormap center (white) regardless of the data range
    norm = matplotlib.colors.TwoSlopeNorm(vmin=np.min(combined_divergent), vcenter=0, vmax=np.max(combined_divergent))
    ax.imshow(
        combined_divergent.T,
        cmap=cm,
        norm=norm,
        aspect='auto',
        extent=[t[0], t[-1], bins[0], bins[-1]],
        interpolation='bilinear',
        origin='lower',
    )

    ax.set_xlabel('Frequency (hz)')
    ax.set_xlim([0.0, max_freq])
    ax.set_ylabel('Time (s)')
    ax.set_ylim([0, bins[-1]])

    fontP = matplotlib.font_manager.FontProperties()
    fontP.set_size('medium')
    ax.legend(loc='best', prop=fontP)

    # Plot vertical lines for unpaired peaks (numbered, red_pink); numbering continues
    # from signal1's peaks into signal2's peaks, matching the PSD graph above
    unpaired_peak_count = 0
    for _, peak in enumerate(signal1.unpaired_peaks):
        ax.axvline(signal1.freqs[peak], color=KLIPPAIN_COLORS['red_pink'], linestyle='dotted', linewidth=1.5)
        ax.annotate(
            f'Peak {unpaired_peak_count + 1}',
            (signal1.freqs[peak], t[-1] * 0.05),
            textcoords='data',
            color=KLIPPAIN_COLORS['red_pink'],
            rotation=90,
            fontsize=10,
            verticalalignment='bottom',
            horizontalalignment='right',
        )
        unpaired_peak_count += 1

    for _, peak in enumerate(signal2.unpaired_peaks):
        ax.axvline(signal2.freqs[peak], color=KLIPPAIN_COLORS['red_pink'], linestyle='dotted', linewidth=1.5)
        ax.annotate(
            f'Peak {unpaired_peak_count + 1}',
            (signal2.freqs[peak], t[-1] * 0.05),
            textcoords='data',
            color=KLIPPAIN_COLORS['red_pink'],
            rotation=90,
            fontsize=10,
            verticalalignment='bottom',
            horizontalalignment='right',
        )
        unpaired_peak_count += 1

    # Plot vertical lines and shaded zones for paired peaks (lettered, dark_purple);
    # each paired peak entry is ((idx, freq, psd), (idx, freq, psd)), so [1] is the frequency
    for idx, (peak1, peak2) in enumerate(signal1.paired_peaks):
        label = ALPHABET[idx]
        x_min = min(peak1[1], peak2[1])
        x_max = max(peak1[1], peak2[1])
        ax.axvline(x_min, color=KLIPPAIN_COLORS['dark_purple'], linestyle='dotted', linewidth=1.5)
        ax.axvline(x_max, color=KLIPPAIN_COLORS['dark_purple'], linestyle='dotted', linewidth=1.5)
        ax.fill_between([x_min, x_max], 0, np.max(combined_divergent), color=KLIPPAIN_COLORS['dark_purple'], alpha=0.3)
        ax.annotate(
            f'Peaks {label}',
            (x_min, t[-1] * 0.05),
            textcoords='data',
            color=KLIPPAIN_COLORS['dark_purple'],
            rotation=90,
            fontsize=10,
            verticalalignment='bottom',
            horizontalalignment='right',
        )

    return
|
||||||
|
|
||||||
|
|
||||||
|
######################################################################
|
||||||
|
# Custom tools
|
||||||
|
######################################################################
|
||||||
|
|
||||||
|
|
||||||
|
# Uses Klipper's official helper to get the PSD data of a raw accelerometer signal
def compute_signal_data(data, max_freq):
    """Build a SignalData (freqs, psd, detected peaks) from raw accelerometer samples.

    The paired/unpaired peak fields are left as None; they are filled in later by
    pair_peaks() via SignalData._replace().
    """
    helper = shaper_calibrate.ShaperCalibrate(printer=None)
    calibration_data = helper.process_accelerometer_data(data)

    # Keep only the bins up to max_freq (same mask for frequencies and PSD)
    freq_mask = calibration_data.freq_bins <= max_freq
    freqs = calibration_data.freq_bins[freq_mask]
    psd = calibration_data.get_psd('all')[freq_mask]

    # Detect resonance peaks above the relative detection threshold
    _, peaks, _ = detect_peaks(psd, freqs, PEAKS_DETECTION_THRESHOLD * psd.max())

    return SignalData(freqs=freqs, psd=psd, peaks=peaks, paired_peaks=None, unpaired_peaks=None)
|
||||||
|
|
||||||
|
|
||||||
|
######################################################################
|
||||||
|
# Startup and main routines
|
||||||
|
######################################################################
|
||||||
|
|
||||||
|
|
||||||
|
def belts_calibration(lognames, klipperdir='~/klipper', max_freq=200.0, st_version=None):
    """Compare two belt accelerometer CSV logs and build the belts calibration figure.

    Args:
        lognames: exactly two CSV file paths (one per belt).
        klipperdir: Klipper installation directory used to import shaper_calibrate.
        max_freq: upper frequency bound for the graphs, in Hz.
        st_version: Shake&Tune version string printed on the figure.
            NOTE(review): the default None is only skipped when equal to 'unknown',
            so None would be passed to fig.text() — confirm callers always provide it.

    Returns:
        The assembled matplotlib Figure.

    Raises:
        ValueError: when the number of CSV files is not exactly two.
    """
    set_locale()
    global shaper_calibrate
    shaper_calibrate = setup_klipper_import(klipperdir)

    # Parse data
    datas = [parse_log(fn) for fn in lognames]
    # This tool needs exactly two measurements (one per belt). The original check only
    # rejected MORE than two files, so a single file slipped through and crashed later
    # with an obscure IndexError on datas[1].
    if len(datas) != 2:
        raise ValueError('Incorrect number of .csv files used (this function needs exactly two files to compare them)!')

    # Compute calibration data for the two datasets with automatic peaks detection
    signal1 = compute_signal_data(datas[0], max_freq)
    signal2 = compute_signal_data(datas[1], max_freq)
    combined_sum, combined_divergent, bins, t = compute_combined_spectrogram(datas[0], datas[1])
    del datas  # free the raw samples early to keep peak memory down

    # Pair the peaks across the two datasets
    paired_peaks, unpaired_peaks1, unpaired_peaks2 = pair_peaks(
        signal1.peaks, signal1.freqs, signal1.psd, signal2.peaks, signal2.freqs, signal2.psd
    )
    signal1 = signal1._replace(paired_peaks=paired_peaks, unpaired_peaks=unpaired_peaks1)
    signal2 = signal2._replace(paired_peaks=paired_peaks, unpaired_peaks=unpaired_peaks2)

    # Compute the similarity (using cross-correlation of the PSD signals)
    similarity_factor = compute_curve_similarity_factor(
        signal1.freqs, signal1.psd, signal2.freqs, signal2.psd, CURVE_SIMILARITY_SIGMOID_K
    )
    print_with_c_locale(f'Belts estimated similarity: {similarity_factor:.1f}%')

    # Compute the MHI value from the differential spectrogram sum of gradient, salted with
    # the similarity factor and the number of unpaired peaks from the belts frequency
    # profile. Be careful, this value is highly opinionated and pretty experimental!
    mhi, textual_mhi = compute_mhi(
        combined_sum, similarity_factor, len(signal1.unpaired_peaks) + len(signal2.unpaired_peaks)
    )
    print_with_c_locale(f'[experimental] Mechanical Health Indicator: {textual_mhi.lower()} ({mhi:.1f}%)')

    # Create graph layout: PSD comparison on top, differential spectrogram below
    fig, (ax1, ax2) = plt.subplots(
        2,
        1,
        gridspec_kw={
            'height_ratios': [4, 3],
            'bottom': 0.050,
            'top': 0.890,
            'left': 0.085,
            'right': 0.966,
            'hspace': 0.169,
            'wspace': 0.200,
        },
    )
    fig.set_size_inches(8.3, 11.6)

    # Add title
    title_line1 = 'RELATIVE BELTS CALIBRATION TOOL'
    fig.text(
        0.12, 0.965, title_line1, ha='left', va='bottom', fontsize=20, color=KLIPPAIN_COLORS['purple'], weight='bold'
    )
    # Second title line: the measurement date parsed from the file name, with a fallback
    # to the raw file names when they don't follow the expected *_YYYYmmdd_HHMMSS pattern
    try:
        filename = lognames[0].split('/')[-1]
        dt = datetime.strptime(f"{filename.split('_')[1]} {filename.split('_')[2]}", '%Y%m%d %H%M%S')
        title_line2 = dt.strftime('%x %X')
    except Exception:
        print_with_c_locale(
            'Warning: CSV filenames look to be different than expected (%s , %s)' % (lognames[0], lognames[1])
        )
        title_line2 = lognames[0].split('/')[-1] + ' / ' + lognames[1].split('/')[-1]
    fig.text(0.12, 0.957, title_line2, ha='left', va='top', fontsize=16, color=KLIPPAIN_COLORS['dark_purple'])

    # Plot the graphs
    plot_compare_frequency(ax1, lognames, signal1, signal2, similarity_factor, max_freq)
    plot_difference_spectrogram(ax2, signal1, signal2, t, bins, combined_divergent, textual_mhi, max_freq)

    # Adding a small Klippain logo to the top left corner of the figure
    ax_logo = fig.add_axes([0.001, 0.8995, 0.1, 0.1], anchor='NW')
    ax_logo.imshow(plt.imread(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'klippain.png')))
    ax_logo.axis('off')

    # Adding Shake&Tune version in the top right corner
    if st_version != 'unknown':
        fig.text(0.995, 0.985, st_version, ha='right', va='bottom', fontsize=8, color=KLIPPAIN_COLORS['purple'])

    return fig
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Command-line entry point: parse options, run the belts calibration and save the figure."""
    usage = '%prog [options] <raw logs>'
    parser = optparse.OptionParser(usage)
    parser.add_option('-o', '--output', type='string', dest='output', default=None, help='filename of output graph')
    parser.add_option('-f', '--max_freq', type='float', default=200.0, help='maximum frequency to graph')
    parser.add_option(
        '-k', '--klipper_dir', type='string', dest='klipperdir', default='~/klipper', help='main klipper directory'
    )
    cli_options, csv_files = parser.parse_args()

    # Validate the inputs before doing any work
    if not csv_files:
        parser.error('Incorrect number of arguments')
    if cli_options.output is None:
        parser.error('You must specify an output file.png to use the script (option -o)')

    # Build the figure and write it to disk
    figure = belts_calibration(csv_files, cli_options.klipperdir, cli_options.max_freq)
    figure.savefig(cli_options.output, dpi=150)


if __name__ == '__main__':
    main()
|
||||||
421
src/graph_creators/graph_shaper.py
Normal file
@@ -0,0 +1,421 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
#################################################
|
||||||
|
######## INPUT SHAPER CALIBRATION SCRIPT ########
|
||||||
|
#################################################
|
||||||
|
# Derived from the calibrate_shaper.py official Klipper script
|
||||||
|
# Copyright (C) 2020 Dmitry Butyugin <dmbutyugin@google.com>
|
||||||
|
# Copyright (C) 2020 Kevin O'Connor <kevin@koconnor.net>
|
||||||
|
# Highly modified and improved by Frix_x#0161 #
|
||||||
|
|
||||||
|
import optparse
|
||||||
|
import os
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
import matplotlib
|
||||||
|
import matplotlib.font_manager
|
||||||
|
import matplotlib.pyplot as plt
|
||||||
|
import matplotlib.ticker
|
||||||
|
import numpy as np
|
||||||
|
|
||||||
|
matplotlib.use('Agg')
|
||||||
|
|
||||||
|
from ..helpers.common_func import (
|
||||||
|
compute_mechanical_parameters,
|
||||||
|
compute_spectrogram,
|
||||||
|
detect_peaks,
|
||||||
|
parse_log,
|
||||||
|
setup_klipper_import,
|
||||||
|
)
|
||||||
|
from ..helpers.locale_utils import print_with_c_locale, set_locale
|
||||||
|
|
||||||
|
# Minimum PSD amplitude (fraction of the curve maximum) for a peak to be detected
PEAKS_DETECTION_THRESHOLD = 0.05
# Threshold above which detected peaks are flagged as impactful — used later in this
# file (not visible in this chunk); confirm against the rest of graph_shaper.py
PEAKS_EFFECT_THRESHOLD = 0.12
# Percentile used as the spectrogram noise floor filter — used later in this file;
# confirm against the rest of graph_shaper.py
SPECTROGRAM_LOW_PERCENTILE_FILTER = 5
# Presumably the default max_smoothing bound for the shaper search — TODO confirm usage
MAX_SMOOTHING = 0.1

# Klippain brand palette used for all the curves and titles in this module
KLIPPAIN_COLORS = {
    'purple': '#70088C',
    'orange': '#FF8D32',
    'dark_purple': '#150140',
    'dark_orange': '#F24130',
    'red_pink': '#F2055C',
}
|
||||||
|
|
||||||
|
|
||||||
|
######################################################################
|
||||||
|
# Computation
|
||||||
|
######################################################################
|
||||||
|
|
||||||
|
|
||||||
|
# Find the best shaper parameters using Klipper's official algorithm selection with
# a proper precomputed damping ratio (zeta) and using the configured printer SQV value
def calibrate_shaper(datas, max_smoothing, scv, max_freq):
    """Run Klipper's input shaper selection on raw accelerometer data.

    Args:
        datas: raw accelerometer samples as consumed by ShaperCalibrate.
        max_smoothing: maximum allowed shaper smoothing (or None for unbounded).
        scv: square corner velocity configured on the printer.
        max_freq: maximum frequency considered for the shaper search.

    Returns:
        (shaper_name, all_shapers, calibration_data, fr, zeta, compat) where compat is
        True when the legacy Klipper find_best_shaper() signature had to be used.
    """
    helper = shaper_calibrate.ShaperCalibrate(printer=None)
    calibration_data = helper.process_accelerometer_data(datas)
    calibration_data.normalize_to_frequencies()

    # Estimate the resonance frequency and damping ratio from the summed PSD
    fr, zeta, _, _ = compute_mechanical_parameters(calibration_data.psd_sum, calibration_data.freq_bins)

    # If the damping ratio computation fails, we use Klipper's default value instead
    if zeta is None:
        zeta = 0.1

    compat = False
    try:
        # Modern Klipper API: keyword arguments including damping_ratio and scv
        shaper, all_shapers = helper.find_best_shaper(
            calibration_data,
            shapers=None,
            damping_ratio=zeta,
            scv=scv,
            shaper_freqs=None,
            max_smoothing=max_smoothing,
            test_damping_ratios=None,
            max_freq=max_freq,
            logger=print_with_c_locale,
        )
    except TypeError:
        # Older Klipper only accepts the positional 3-argument form: fall back to it
        # (the computed damping ratio cannot be used in that case)
        print_with_c_locale(
            '[WARNING] You seem to be using an older version of Klipper that is not compatible with all the latest Shake&Tune features!'
        )
        print_with_c_locale(
            'Shake&Tune now runs in compatibility mode: be aware that the results may be slightly off, since the real damping ratio cannot be used to create the filter recommendations'
        )
        compat = True
        shaper, all_shapers = helper.find_best_shaper(calibration_data, max_smoothing, print_with_c_locale)

    print_with_c_locale(
        '\n-> Recommended shaper is %s @ %.1f Hz (when using a square corner velocity of %.1f and a damping ratio of %.3f)'
        % (shaper.name.upper(), shaper.freq, scv, zeta)
    )

    return shaper.name, all_shapers, calibration_data, fr, zeta, compat
|
||||||
|
|
||||||
|
|
||||||
|
######################################################################
|
||||||
|
# Graphing
|
||||||
|
######################################################################
|
||||||
|
|
||||||
|
|
||||||
|
def plot_freq_response(
    ax, calibration_data, shapers, performance_shaper, peaks, peaks_freqs, peaks_threshold, fr, zeta, max_freq
):
    """Draw the PSD frequency response with shaper overlays and recommendations on *ax*.

    Parameters:
        ax: target matplotlib axes (a twin axes is created internally for the shaper curves).
        calibration_data: Klipper calibration result exposing .freqs and .psd_sum/x/y/z arrays
            (already truncated to max_freq by the caller).
        shapers: fitted shaper results, each with .name, .freq, .vibrs, .smoothing, .max_accel, .vals.
        performance_shaper: name of Klipper's recommended shaper.
            NOTE(review): assumed to match one of the names in *shapers*; if it does not, the
            performance_shaper_* locals in the loop below are never bound and a NameError follows.
        peaks: indices of detected resonance peaks into the PSD arrays.
        peaks_freqs: frequencies (Hz) of those peaks.
        peaks_threshold: [detection_threshold, effect_threshold] PSD levels.
        fr: main resonant frequency (Hz), shown in the title only.
        zeta: estimated damping ratio, shown in the title and legend only.
        max_freq: x-axis upper bound (Hz).
    """
    freqs = calibration_data.freqs
    psd = calibration_data.psd_sum
    px = calibration_data.psd_x
    py = calibration_data.psd_y
    pz = calibration_data.psd_z

    # Small font so the long per-shaper legend entries fit inside the axes
    fontP = matplotlib.font_manager.FontProperties()
    fontP.set_size('x-small')

    ax.set_xlabel('Frequency (Hz)')
    ax.set_xlim([0, max_freq])
    ax.set_ylabel('Power spectral density')
    # 5% headroom above the highest PSD value
    ax.set_ylim([0, psd.max() + psd.max() * 0.05])

    ax.plot(freqs, psd, label='X+Y+Z', color='purple', zorder=5)
    ax.plot(freqs, px, label='X', color='red')
    ax.plot(freqs, py, label='Y', color='green')
    ax.plot(freqs, pz, label='Z', color='blue')

    ax.xaxis.set_minor_locator(matplotlib.ticker.MultipleLocator(5))
    ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
    ax.ticklabel_format(axis='y', style='scientific', scilimits=(0, 0))
    ax.grid(which='major', color='grey')
    ax.grid(which='minor', color='lightgrey')

    # Second y-axis (hidden) for the dimensionless shaper transfer curves
    ax2 = ax.twinx()
    ax2.yaxis.set_visible(False)

    # Running "best low-vibration shaper" candidate, refined in the loop below
    lowvib_shaper_vibrs = float('inf')
    lowvib_shaper = None
    lowvib_shaper_freq = None
    lowvib_shaper_accel = 0

    # Draw the shappers curves and add their specific parameters in the legend
    # This adds also a way to find the best shaper with a low level of vibrations (with a resonable level of smoothing)
    for shaper in shapers:
        # Round the max accel to the nearest hundred for a cleaner legend
        shaper_max_accel = round(shaper.max_accel / 100.0) * 100.0
        label = '%s (%.1f Hz, vibr=%.1f%%, sm~=%.2f, accel<=%.f)' % (
            shaper.name.upper(),
            shaper.freq,
            shaper.vibrs * 100.0,
            shaper.smoothing,
            shaper_max_accel,
        )
        ax2.plot(freqs, shaper.vals, label=label, linestyle='dotted')

        # Get the performance shaper
        if shaper.name == performance_shaper:
            performance_shaper_freq = shaper.freq
            performance_shaper_vibr = shaper.vibrs * 100.0
            performance_shaper_vals = shaper.vals

        # Get the low vibration shaper: lowest vibration wins, ties broken by the
        # higher max accel, and candidates must stay under the smoothing cap
        if (
            shaper.vibrs * 100 < lowvib_shaper_vibrs
            or (shaper.vibrs * 100 == lowvib_shaper_vibrs and shaper_max_accel > lowvib_shaper_accel)
        ) and shaper.smoothing < MAX_SMOOTHING:
            lowvib_shaper_accel = shaper_max_accel
            lowvib_shaper = shaper.name
            lowvib_shaper_freq = shaper.freq
            lowvib_shaper_vibrs = shaper.vibrs * 100
            lowvib_shaper_vals = shaper.vals

    # User recommendations are added to the legend: one is Klipper's original suggestion that is usually good for performances
    # and the other one is the custom "low vibration" recommendation that looks for a suitable shaper that doesn't have excessive
    # smoothing and that have a lower vibration level. If both recommendation are the same shaper, or if no suitable "low
    # vibration" shaper is found, then only a single line as the "best shaper" recommendation is added to the legend
    if (
        lowvib_shaper is not None
        and lowvib_shaper != performance_shaper
        and lowvib_shaper_vibrs <= performance_shaper_vibr
    ):
        # Empty plots with a ' ' marker are a trick to add text-only legend entries
        ax2.plot(
            [],
            [],
            ' ',
            label='Recommended performance shaper: %s @ %.1f Hz'
            % (performance_shaper.upper(), performance_shaper_freq),
        )
        ax.plot(
            freqs, psd * performance_shaper_vals, label='With %s applied' % (performance_shaper.upper()), color='cyan'
        )
        ax2.plot(
            [],
            [],
            ' ',
            label='Recommended low vibrations shaper: %s @ %.1f Hz' % (lowvib_shaper.upper(), lowvib_shaper_freq),
        )
        ax.plot(freqs, psd * lowvib_shaper_vals, label='With %s applied' % (lowvib_shaper.upper()), color='lime')
    else:
        ax2.plot(
            [],
            [],
            ' ',
            label='Recommended best shaper: %s @ %.1f Hz' % (performance_shaper.upper(), performance_shaper_freq),
        )
        ax.plot(
            freqs, psd * performance_shaper_vals, label='With %s applied' % (performance_shaper.upper()), color='cyan'
        )

    # And the estimated damping ratio is finally added at the end of the legend
    ax2.plot([], [], ' ', label='Estimated damping ratio (ζ): %.3f' % (zeta))

    # Draw the detected peaks and name them
    # This also draw the detection threshold and warning threshold (aka "effect zone")
    ax.plot(peaks_freqs, psd[peaks], 'x', color='black', markersize=8)
    for idx, peak in enumerate(peaks):
        # Peaks above the effect threshold are emphasized in bold red
        if psd[peak] > peaks_threshold[1]:
            fontcolor = 'red'
            fontweight = 'bold'
        else:
            fontcolor = 'black'
            fontweight = 'normal'
        ax.annotate(
            f'{idx+1}',
            (freqs[peak], psd[peak]),
            textcoords='offset points',
            xytext=(8, 5),
            ha='left',
            fontsize=13,
            color=fontcolor,
            weight=fontweight,
        )
    ax.axhline(y=peaks_threshold[0], color='black', linestyle='--', linewidth=0.5)
    ax.axhline(y=peaks_threshold[1], color='black', linestyle='--', linewidth=0.5)
    ax.fill_between(freqs, 0, peaks_threshold[0], color='green', alpha=0.15, label='Relax Region')
    ax.fill_between(freqs, peaks_threshold[0], peaks_threshold[1], color='orange', alpha=0.2, label='Warning Region')

    # Add the main resonant frequency and damping ratio of the axis to the graph title
    ax.set_title(
        'Axis Frequency Profile (ω0=%.1fHz, ζ=%.3f)' % (fr, zeta),
        fontsize=14,
        color=KLIPPAIN_COLORS['dark_orange'],
        weight='bold',
    )
    ax.legend(loc='upper left', prop=fontP)
    ax2.legend(loc='upper right', prop=fontP)

    return
|
||||||
|
|
||||||
|
|
||||||
|
# Plot a time-frequency spectrogram to see how the system responds over time during the
# resonance test. This can highlight hidden spots from the standard PSD graph from other harmonics
|
||||||
|
def plot_spectrogram(ax, t, bins, pdata, peaks, max_freq):
    """Render the time-frequency spectrogram of the resonance test on *ax*.

    Parameters:
        ax: target matplotlib axes.
        t: sample values for the horizontal extent of the image.
        bins: sample values for the vertical extent of the image.
        pdata: 2D spectrogram power data (transposed before display).
        peaks: optional iterable of peak frequencies to mark as vertical hints.
        max_freq: x-axis upper bound.
    """
    ax.set_title('Time-Frequency Spectrogram', fontsize=14, color=KLIPPAIN_COLORS['dark_orange'], weight='bold')

    # A plain LogNorm lets too much background noise through, while a linear Normalize
    # shows only the main resonance and hides interesting detail. Filtering out the
    # lowest percentile of the data gives a suitable vmin for LogNorm instead.
    noise_floor = np.percentile(pdata, SPECTROGRAM_LOW_PERCENTILE_FILTER)

    # imshow is preferred over pcolormesh here: its output is already rasterized (the
    # figure is saved as a .png anyway, vector graphics are not needed) and it saves
    # ~150-200MB of RAM during the "fig.savefig" operation.
    ax.imshow(
        pdata.T,
        norm=matplotlib.colors.LogNorm(vmin=noise_floor),
        cmap='inferno',
        aspect='auto',
        extent=[t[0], t[-1], bins[0], bins[-1]],
        origin='lower',
        interpolation='antialiased',
    )

    ax.set_xlim([0.0, max_freq])
    ax.set_ylabel('Time (s)')
    ax.set_xlabel('Frequency (Hz)')

    # Mirror the peaks found on the PSD graph as labelled vertical hints here
    if peaks is not None:
        for peak_number, peak_freq in enumerate(peaks, start=1):
            ax.axvline(peak_freq, color='cyan', linestyle='dotted', linewidth=1)
            ax.annotate(
                f'Peak {peak_number}',
                (peak_freq, bins[-1] * 0.9),
                textcoords='data',
                color='cyan',
                rotation=90,
                fontsize=10,
                verticalalignment='top',
                horizontalalignment='right',
            )

    return
|
||||||
|
|
||||||
|
|
||||||
|
######################################################################
|
||||||
|
# Startup and main routines
|
||||||
|
######################################################################
|
||||||
|
|
||||||
|
|
||||||
|
def shaper_calibration(lognames, klipperdir='~/klipper', max_smoothing=None, scv=5.0, max_freq=200.0, st_version=None):
    """Build the full input shaper calibration figure from an accelerometer .csv log.

    Parameters:
        lognames: list of paths to accelerometer .csv files; only the first is used.
        klipperdir: Klipper installation path, used to import its shaper_calibrate module.
        max_smoothing: optional cap on shaper smoothing, forwarded to the shaper fitting.
        scv: square corner velocity used for the recommendations.
        max_freq: upper bound (Hz) of the analyzed/plotted frequency range.
        st_version: Shake&Tune version string drawn in the top-right corner.
            NOTE(review): the default None is not equal to 'unknown', so the version text
            is still drawn when the caller omits it — confirm callers always pass a value.

    Returns:
        The matplotlib Figure with the frequency profile (top) and spectrogram (bottom).
    """
    set_locale()
    # shaper_calibrate is loaded at runtime from the user's Klipper checkout and kept
    # as a module global so the other helpers in this file can reuse it
    global shaper_calibrate
    shaper_calibrate = setup_klipper_import(klipperdir)

    # Parse data
    datas = [parse_log(fn) for fn in lognames]
    if len(datas) > 1:
        print_with_c_locale('Warning: incorrect number of .csv files detected. Only the first one will be used!')

    # Compute shapers, PSD outputs and spectrogram
    performance_shaper, shapers, calibration_data, fr, zeta, compat = calibrate_shaper(
        datas[0], max_smoothing, scv, max_freq
    )
    pdata, bins, t = compute_spectrogram(datas[0])
    del datas  # release the raw samples early, they can be large

    # Select only the relevant part of the PSD data
    freqs = calibration_data.freq_bins
    calibration_data.psd_sum = calibration_data.psd_sum[freqs <= max_freq]
    calibration_data.psd_x = calibration_data.psd_x[freqs <= max_freq]
    calibration_data.psd_y = calibration_data.psd_y[freqs <= max_freq]
    calibration_data.psd_z = calibration_data.psd_z[freqs <= max_freq]
    calibration_data.freqs = freqs[freqs <= max_freq]

    # Peak detection algorithm: [detection_threshold, effect_threshold] relative to the PSD max
    peaks_threshold = [
        PEAKS_DETECTION_THRESHOLD * calibration_data.psd_sum.max(),
        PEAKS_EFFECT_THRESHOLD * calibration_data.psd_sum.max(),
    ]
    num_peaks, peaks, peaks_freqs = detect_peaks(calibration_data.psd_sum, calibration_data.freqs, peaks_threshold[0])

    # Print the peaks info in the console
    peak_freqs_formated = ['{:.1f}'.format(f) for f in peaks_freqs]
    num_peaks_above_effect_threshold = np.sum(calibration_data.psd_sum[peaks] > peaks_threshold[1])
    print_with_c_locale(
        '\nPeaks detected on the graph: %d @ %s Hz (%d above effect threshold)'
        % (num_peaks, ', '.join(map(str, peak_freqs_formated)), num_peaks_above_effect_threshold)
    )

    # Create graph layout (A4-ish portrait page, frequency profile above spectrogram)
    fig, (ax1, ax2) = plt.subplots(
        2,
        1,
        gridspec_kw={
            'height_ratios': [4, 3],
            'bottom': 0.050,
            'top': 0.890,
            'left': 0.085,
            'right': 0.966,
            'hspace': 0.169,
            'wspace': 0.200,
        },
    )
    fig.set_size_inches(8.3, 11.6)

    # Add a title with some test info
    title_line1 = 'INPUT SHAPER CALIBRATION TOOL'
    fig.text(
        0.12, 0.965, title_line1, ha='left', va='bottom', fontsize=20, color=KLIPPAIN_COLORS['purple'], weight='bold'
    )
    try:
        # The measurement date/time and axis name are recovered from the .csv filename,
        # which is expected to look like <prefix>_<YYYYmmdd>_<HHMMSS>_<axis>.csv
        filename_parts = (lognames[0].split('/')[-1]).split('_')
        dt = datetime.strptime(f'{filename_parts[1]} {filename_parts[2]}', '%Y%m%d %H%M%S')
        title_line2 = dt.strftime('%x %X') + ' -- ' + filename_parts[3].upper().split('.')[0] + ' axis'
        if compat:
            title_line3 = '| Compatibility mode with older Klipper,'
            title_line4 = '| and no custom S&T parameters are used!'
        else:
            title_line3 = '| Square corner velocity: ' + str(scv) + 'mm/s'
            title_line4 = '| Max allowed smoothing: ' + str(max_smoothing)
    except Exception:
        # Fall back to the raw filename when it doesn't follow the expected pattern
        print_with_c_locale('Warning: CSV filename look to be different than expected (%s)' % (lognames[0]))
        title_line2 = lognames[0].split('/')[-1]
        title_line3 = ''
        title_line4 = ''
    fig.text(0.12, 0.957, title_line2, ha='left', va='top', fontsize=16, color=KLIPPAIN_COLORS['dark_purple'])
    fig.text(0.58, 0.960, title_line3, ha='left', va='top', fontsize=10, color=KLIPPAIN_COLORS['dark_purple'])
    fig.text(0.58, 0.946, title_line4, ha='left', va='top', fontsize=10, color=KLIPPAIN_COLORS['dark_purple'])

    # Plot the graphs
    plot_freq_response(
        ax1, calibration_data, shapers, performance_shaper, peaks, peaks_freqs, peaks_threshold, fr, zeta, max_freq
    )
    plot_spectrogram(ax2, t, bins, pdata, peaks_freqs, max_freq)

    # Adding a small Klippain logo to the top left corner of the figure
    ax_logo = fig.add_axes([0.001, 0.8995, 0.1, 0.1], anchor='NW')
    ax_logo.imshow(plt.imread(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'klippain.png')))
    ax_logo.axis('off')

    # Adding Shake&Tune version in the top right corner
    if st_version != 'unknown':
        fig.text(0.995, 0.985, st_version, ha='right', va='bottom', fontsize=8, color=KLIPPAIN_COLORS['purple'])

    return fig
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Command-line entry point: parse arguments, build the calibration graph, save it."""
    parser = optparse.OptionParser('%prog [options] <logs>')
    parser.add_option('-o', '--output', type='string', dest='output', default=None, help='filename of output graph')
    parser.add_option('-f', '--max_freq', type='float', default=200.0, help='maximum frequency to graph')
    parser.add_option('-s', '--max_smoothing', type='float', default=None, help='maximum shaper smoothing to allow')
    parser.add_option(
        '--scv', '--square_corner_velocity', type='float', dest='scv', default=5.0, help='square corner velocity'
    )
    parser.add_option(
        '-k', '--klipper_dir', type='string', dest='klipperdir', default='~/klipper', help='main klipper directory'
    )
    options, args = parser.parse_args()

    # Validate the command line before doing any heavy work
    if not args:
        parser.error('Incorrect number of arguments')
    if options.output is None:
        parser.error('You must specify an output file.png to use the script (option -o)')
    if options.max_smoothing is not None and options.max_smoothing < 0.05:
        parser.error('Too small max_smoothing specified (must be at least 0.05)')

    figure = shaper_calibration(args, options.klipperdir, options.max_smoothing, options.scv, options.max_freq)
    figure.savefig(options.output, dpi=150)
|
||||||
|
|
||||||
|
|
||||||
|
# Standard entry-point guard: run the CLI only when executed directly as a script
if __name__ == '__main__':
    main()
|
||||||
840
src/graph_creators/graph_vibrations.py
Normal file
@@ -0,0 +1,840 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
##################################################
|
||||||
|
#### DIRECTIONAL VIBRATIONS PLOTTING SCRIPT ######
|
||||||
|
##################################################
|
||||||
|
# Written by Frix_x#0161 #
|
||||||
|
|
||||||
|
import math
|
||||||
|
import optparse
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
from collections import defaultdict
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
import matplotlib
|
||||||
|
import matplotlib.font_manager
|
||||||
|
import matplotlib.gridspec
|
||||||
|
import matplotlib.pyplot as plt
|
||||||
|
import matplotlib.ticker
|
||||||
|
import numpy as np
|
||||||
|
|
||||||
|
matplotlib.use('Agg')
|
||||||
|
|
||||||
|
from ..helpers.common_func import (
|
||||||
|
compute_mechanical_parameters,
|
||||||
|
detect_peaks,
|
||||||
|
identify_low_energy_zones,
|
||||||
|
parse_log,
|
||||||
|
setup_klipper_import,
|
||||||
|
)
|
||||||
|
from ..helpers.locale_utils import print_with_c_locale, set_locale
|
||||||
|
|
||||||
|
PEAKS_DETECTION_THRESHOLD = 0.05
|
||||||
|
PEAKS_RELATIVE_HEIGHT_THRESHOLD = 0.04
|
||||||
|
CURVE_SIMILARITY_SIGMOID_K = 0.5
|
||||||
|
SPEEDS_VALLEY_DETECTION_THRESHOLD = 0.7 # Lower is more sensitive
|
||||||
|
SPEEDS_AROUND_PEAK_DELETION = 3 # to delete +-3mm/s around a peak
|
||||||
|
ANGLES_VALLEY_DETECTION_THRESHOLD = 1.1 # Lower is more sensitive
|
||||||
|
|
||||||
|
KLIPPAIN_COLORS = {
|
||||||
|
'purple': '#70088C',
|
||||||
|
'orange': '#FF8D32',
|
||||||
|
'dark_purple': '#150140',
|
||||||
|
'dark_orange': '#F24130',
|
||||||
|
'red_pink': '#F2055C',
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
######################################################################
|
||||||
|
# Computation
|
||||||
|
######################################################################
|
||||||
|
|
||||||
|
|
||||||
|
# Call to the official Klipper input shaper object to do the PSD computation
|
||||||
|
def calc_freq_response(data):
    """Compute the frequency response (PSD) of raw accelerometer samples.

    Delegates to Klipper's official ShaperCalibrate helper (imported at runtime into
    the module-global ``shaper_calibrate``) so results match Klipper's own calibration.
    """
    # printer=None: the helper runs standalone, outside of a live Klipper instance
    return shaper_calibrate.ShaperCalibrate(printer=None).process_accelerometer_data(data)
|
||||||
|
|
||||||
|
|
||||||
|
# Calculate motor frequency profiles based on the measured Power Spectral Density (PSD) measurements for the machine kinematics
|
||||||
|
# main angles and then create a global motor profile as a weighted average (from their own vibrations) of all calculated profiles
|
||||||
|
def compute_motor_profiles(freqs, psds, all_angles_energy, measured_angles=None, energy_amplification_factor=2):
    """Compute per-angle motor frequency profiles and their weighted global average.

    For each measured angle, the PSDs of all tested speeds are summed, averaged and
    smoothed into a "motor profile". A global profile is then built as a weighted
    average of all per-angle profiles, where each angle is weighted by both its total
    vibration energy and the area under its own profile (each raised to
    ``energy_amplification_factor`` to emphasize the dominant angles).

    Parameters:
        freqs: 1D array of frequency bins shared by all PSDs.
        psds: mapping angle -> {speed -> PSD array over freqs}.
        all_angles_energy: mapping angle -> total measured vibration energy.
        measured_angles: angles to process (defaults to [0, 90]).
        energy_amplification_factor: exponent applied to both weighting factors.

    Returns:
        (motor_profiles, global_motor_profile) where motor_profiles maps each angle to
        its smoothed profile and global_motor_profile is the weighted average array.
    """
    if measured_angles is None:
        measured_angles = [0, 90]

    # np.trapz was renamed to np.trapezoid and removed in NumPy 2.0: resolve whichever
    # is available so this works on both old and new NumPy versions
    _trapezoid = getattr(np, 'trapezoid', getattr(np, 'trapz', None))

    motor_profiles = {}
    weighted_sum_profiles = np.zeros_like(freqs)
    total_weight = 0
    # 20-point moving average kernel used to smooth each profile
    conv_filter = np.ones(20) / 20

    # Creating the PSD motor profiles for each angles
    for angle in measured_angles:
        # Calculate the sum of PSDs for the current angle and then convolve
        sum_curve = np.sum(np.array([psds[angle][speed] for speed in psds[angle]]), axis=0)
        motor_profiles[angle] = np.convolve(sum_curve / len(psds[angle]), conv_filter, mode='same')

        # First weighting factor is based on the total vibrations of the machine at the specified angle
        angle_energy = all_angles_energy[angle] ** energy_amplification_factor
        # Additional weighting factor is based on the area under the current motor profile at this specified angle
        curve_area = _trapezoid(motor_profiles[angle], freqs) ** energy_amplification_factor
        total_angle_weight = angle_energy * curve_area

        # Update weighted sum profiles to get the global motor profile
        weighted_sum_profiles += motor_profiles[angle] * total_angle_weight
        total_weight += total_angle_weight

    # Creating a global average motor profile that is the weighted average of all the PSD motor profiles
    # (if every weight is zero, fall back to the unweighted sum to avoid dividing by zero)
    global_motor_profile = weighted_sum_profiles / total_weight if total_weight != 0 else weighted_sum_profiles

    return motor_profiles, global_motor_profile
|
||||||
|
|
||||||
|
|
||||||
|
# Since it was discovered that there is no non-linear mixing in the stepper "steps" vibrations, instead of measuring
|
||||||
|
# the effects of each speeds at each angles, this function simplify it by using only the main motors axes (X/Y for Cartesian
|
||||||
|
# printers and A/B for CoreXY) measurements and project each points on the [0,360] degrees range using trigonometry
|
||||||
|
# to "sum" the vibration impact of each axis at every points of the generated spectrogram. The result is very similar at the end.
|
||||||
|
# Since it was discovered that there is no non-linear mixing in the stepper "steps" vibrations, instead of measuring
# the effects of each speeds at each angles, this function simplify it by using only the main motors axes (X/Y for Cartesian
# printers and A/B for CoreXY) measurements and project each points on the [0,360] degrees range using trigonometry
# to "sum" the vibration impact of each axis at every points of the generated spectrogram. The result is very similar at the end.
def compute_dir_speed_spectrogram(measured_speeds, data, kinematics='cartesian', measured_angles=None):
    """Project per-axis vibration measurements onto a full [0, 360]° x speed spectrogram.

    Parameters:
        measured_speeds: speeds (mm/s) at which vibrations were measured.
        data: mapping angle -> {speed -> vibration energy} for the two measured axes.
        kinematics: 'cartesian' or 'corexy'; determines how a toolhead motion vector
            is decomposed into the two motor speeds.
        measured_angles: the two angles actually measured (defaults to [0, 90]).

    Returns:
        (spectrum_angles, spectrum_speeds, spectrum_vibrations) where the last is a
        2D array indexed by [angle, speed].

    Raises:
        ValueError: if *kinematics* is not one of the supported values.
    """
    if measured_angles is None:
        measured_angles = [0, 90]

    # Fail fast with a clear message: the original code left speed_1/speed_2 unbound
    # for unknown kinematics, producing a confusing NameError deep inside the loop
    if kinematics not in ('cartesian', 'corexy'):
        raise ValueError("kinematics must be 'cartesian' or 'corexy', got %r" % (kinematics,))

    # We want to project the motor vibrations measured on their own axes on the [0, 360] range
    spectrum_angles = np.linspace(0, 360, 720)  # One point every 0.5 degrees
    spectrum_speeds = np.linspace(min(measured_speeds), max(measured_speeds), len(measured_speeds) * 6)
    spectrum_vibrations = np.zeros((len(spectrum_angles), len(spectrum_speeds)))

    def get_interpolated_vibrations(data, speed, speeds):
        # Linear interpolation between the two measured speeds bracketing *speed*
        # (speeds outside the measured range are linearly extrapolated)
        idx = np.clip(np.searchsorted(speeds, speed, side='left'), 1, len(speeds) - 1)
        lower_speed = speeds[idx - 1]
        upper_speed = speeds[idx]
        lower_vibrations = data.get(lower_speed, 0)
        upper_vibrations = data.get(upper_speed, 0)
        return lower_vibrations + (speed - lower_speed) * (upper_vibrations - lower_vibrations) / (
            upper_speed - lower_speed
        )

    # Precompute trigonometric values and constant before the loop
    angle_radians = np.deg2rad(spectrum_angles)
    cos_vals = np.cos(angle_radians)
    sin_vals = np.sin(angle_radians)
    sqrt_2_inv = 1 / math.sqrt(2)

    # Compute the spectrum vibrations for each angle and speed combination
    for target_angle_idx, (cos_val, sin_val) in enumerate(zip(cos_vals, sin_vals)):
        for target_speed_idx, target_speed in enumerate(spectrum_speeds):
            # Decompose the toolhead motion into the two motor speeds
            if kinematics == 'cartesian':
                speed_1 = np.abs(target_speed * cos_val)
                speed_2 = np.abs(target_speed * sin_val)
            else:  # 'corexy' (validated above)
                speed_1 = np.abs(target_speed * (cos_val + sin_val) * sqrt_2_inv)
                speed_2 = np.abs(target_speed * (cos_val - sin_val) * sqrt_2_inv)

            # Sum the vibration contribution of each motor at its projected speed
            vibrations_1 = get_interpolated_vibrations(data[measured_angles[0]], speed_1, measured_speeds)
            vibrations_2 = get_interpolated_vibrations(data[measured_angles[1]], speed_2, measured_speeds)
            spectrum_vibrations[target_angle_idx, target_speed_idx] = vibrations_1 + vibrations_2

    return spectrum_angles, spectrum_speeds, spectrum_vibrations
|
||||||
|
|
||||||
|
|
||||||
|
def compute_angle_powers(spectrogram_data):
    """Integrate the spectrogram over speed to get total vibration energy per angle.

    Parameters:
        spectrogram_data: 2D array indexed by [angle, speed].

    Returns:
        1D array of smoothed energy per angle, same length as the angle axis.
    """
    # np.trapz was renamed to np.trapezoid and removed in NumPy 2.0: resolve whichever
    # is available so this works on both old and new NumPy versions
    _trapezoid = getattr(np, 'trapezoid', getattr(np, 'trapz', None))
    angles_powers = _trapezoid(spectrogram_data, axis=1)

    # Since we want to plot it on a continuous polar plot later on, we need to append parts of
    # the array to start and end of it to smooth transitions when doing the convolution
    # and get the same value at modulo 360. Then we return the array without the extras
    extended_angles_powers = np.concatenate([angles_powers[-9:], angles_powers, angles_powers[:9]])
    convolved_extended = np.convolve(extended_angles_powers, np.ones(15) / 15, mode='same')

    return convolved_extended[9:-9]
|
||||||
|
|
||||||
|
|
||||||
|
def compute_speed_powers(spectrogram_data, smoothing_window=15):
    """Aggregate the spectrogram over angles into per-speed energy curves.

    Parameters:
        spectrogram_data: 2D array indexed by [angle, speed].
        smoothing_window: moving-average window applied to every output curve.

    Returns:
        Array of shape (4, num_speeds): smoothed minimum, maximum, rescaled
        variance and vibration metric curves, in that order.
    """
    kernel = np.ones(smoothing_window) / smoothing_window
    half_window = int(smoothing_window / 2)

    def _smooth(curve):
        # Edge-padding before the convolution avoids artifacts at both ends
        return np.convolve(np.pad(curve, (half_window,), mode='edge'), kernel, mode='valid')

    lowest = np.amin(spectrogram_data, axis=0)
    highest = np.amax(spectrogram_data, axis=0)
    spread = np.var(spectrogram_data, axis=0)

    # Rescale the variance to the max curve's range so both fit on the same graph
    spread = spread / spread.max() * highest.max()

    # A vibration metric combining max energy and angular variance: the best speeds
    # have both a low global energy level and consistent behavior at every angle
    badness = highest * spread

    return np.array([_smooth(curve) for curve in (lowest, highest, spread, badness)])
|
||||||
|
|
||||||
|
|
||||||
|
# Function that filter and split the good_speed ranges. The goal is to remove some zones around
|
||||||
|
# additional detected small peaks in order to suppress them if there is a peak, even if it's low,
|
||||||
|
# that's probably due to a crossing in the motor resonance pattern that still need to be removed
|
||||||
|
# Function that filter and split the good_speed ranges. The goal is to remove some zones around
# additional detected small peaks in order to suppress them if there is a peak, even if it's low,
# that's probably due to a crossing in the motor resonance pattern that still need to be removed
def filter_and_split_ranges(all_speeds, good_speeds, peak_speed_indices, deletion_range):
    """Remove a deletion window around each peak from the good-speed ranges, then merge.

    Parameters:
        all_speeds: 1D sequence mapping index -> speed value.
        good_speeds: list of (start_idx, end_idx, energy) candidate ranges.
        peak_speed_indices: mapping peak speed value -> index into all_speeds.
        deletion_range: number of indices to drop on each side of a peak.

    Returns:
        List of (start_idx, end_idx, energy) ranges with peak neighborhoods removed
        and overlapping results merged (keeping the lower energy of merged ranges).
        Returns [] when nothing survives the filtering.
    """
    # Process each range to filter out and split based on peak indices
    filtered_good_speeds = []
    for start, end, energy in good_speeds:
        start_speed, end_speed = all_speeds[start], all_speeds[end]
        # Identify peaks that intersect with the current speed range
        intersecting_peaks_indices = [
            idx for speed, idx in peak_speed_indices.items() if start_speed <= speed <= end_speed
        ]

        if not intersecting_peaks_indices:
            filtered_good_speeds.append((start, end, energy))
        else:
            # Split the range into the sub-ranges that lie between peak deletion windows
            intersecting_peaks_indices.sort()
            current_start = start

            for peak_index in intersecting_peaks_indices:
                before_peak_end = max(current_start, peak_index - deletion_range)
                if current_start < before_peak_end:
                    filtered_good_speeds.append((current_start, before_peak_end, energy))
                current_start = peak_index + deletion_range + 1

            if current_start < end:
                filtered_good_speeds.append((current_start, end, energy))

    # Bug fix: the original indexed sorted_ranges[0] unconditionally and raised an
    # IndexError when good_speeds was empty or every range was consumed by peaks
    if not filtered_good_speeds:
        return []

    # Sorting by start point once and then merge overlapping ranges
    sorted_ranges = sorted(filtered_good_speeds, key=lambda x: x[0])
    merged_ranges = [sorted_ranges[0]]

    for current in sorted_ranges[1:]:
        last_merged_start, last_merged_end, last_merged_energy = merged_ranges[-1]
        if current[0] <= last_merged_end:
            # Overlap: extend the previous range and keep the lower (safer) energy
            new_end = max(last_merged_end, current[1])
            new_energy = min(last_merged_energy, current[2])
            merged_ranges[-1] = (last_merged_start, new_end, new_energy)
        else:
            merged_ranges.append(current)

    return merged_ranges
|
||||||
|
|
||||||
|
|
||||||
|
# This function allow the computation of a symmetry score that reflect the spectrogram apparent symmetry between
|
||||||
|
# measured axes on both the shape of the signal and the energy level consistency across both side of the signal
|
||||||
|
# This function allow the computation of a symmetry score that reflect the spectrogram apparent symmetry between
# measured axes on both the shape of the signal and the energy level consistency across both side of the signal
def compute_symmetry_analysis(all_angles, spectrogram_data, measured_angles=None):
    """Score (0-100) how symmetric the spectrogram is around the measured axes' midpoint.

    Parameters:
        all_angles: the full angle axis of the spectrogram (only its length is used).
        spectrogram_data: 2D array indexed by [angle, speed].
        measured_angles: the two measured angles (defaults to [0, 90]); their mean
            defines the symmetry axis.

    Returns:
        A percentage in [0, 100] derived from the correlation of the two spectrogram
        segments on either side of the symmetry axis.
    """
    if measured_angles is None:
        measured_angles = [0, 90]

    total_spectrogram_angles = len(all_angles)
    half_spectrogram_angles = total_spectrogram_angles // 2

    # Extend the spectrogram by prepending its second half so the slices below can
    # never run out of bounds when the split index sits near the array start
    extended_spectrogram = np.concatenate((spectrogram_data[-half_spectrogram_angles:], spectrogram_data), axis=0)

    # Calculate the split index directly within the slicing
    midpoint_angle = np.mean(measured_angles)
    split_index = int(midpoint_angle * (total_spectrogram_angles / 360) + half_spectrogram_angles)
    half_segment_length = half_spectrogram_angles // 2

    # Slice out the two segments of the spectrogram and flatten them for comparison
    segment_1_flattened = extended_spectrogram[split_index - half_segment_length : split_index].flatten()
    segment_2_flattened = extended_spectrogram[split_index : split_index + half_segment_length].flatten()

    # Compute the correlation coefficient between the two segments of spectrogram
    # NOTE(review): a negative correlation makes np.power return NaN below — presumably
    # real measurements always correlate positively; confirm that assumption holds
    correlation = np.corrcoef(segment_1_flattened, segment_2_flattened)[0, 1]
    percentage_correlation_biased = (100 * np.power(correlation, 0.75)) + 10

    # Bug fix: np.clip's signature is (value, min, max). The original call
    # np.clip(0, 100, x) clipped the constant 0 instead of the score, which never
    # enforced the lower bound; clamp the score itself into [0, 100]
    return np.clip(percentage_correlation_biased, 0, 100)
|
||||||
|
|
||||||
|
|
||||||
|
######################################################################
|
||||||
|
# Graphing
|
||||||
|
######################################################################
|
||||||
|
|
||||||
|
|
||||||
|
def plot_angle_profile_polar(ax, angles, angles_powers, low_energy_zones, symmetry_factor):
    """Draw the per-angle vibration energy profile on a polar axes.

    Parameters:
        ax: target matplotlib polar axes.
        angles: angle values in degrees (converted to radians for plotting).
        angles_powers: vibration energy per angle (same length as *angles*).
        low_energy_zones: list of (start_idx, end_idx, _) index ranges highlighted
            as good (low vibration) angular regions.
        symmetry_factor: symmetry percentage printed at the plot center.
    """
    angles_radians = np.deg2rad(angles)

    ax.set_title('Polar angle energy profile', fontsize=14, color=KLIPPAIN_COLORS['dark_orange'], weight='bold')
    # 0° points East and angles increase counter-clockwise, matching machine convention
    ax.set_theta_zero_location('E')
    ax.set_theta_direction(1)

    ax.plot(angles_radians, angles_powers, color=KLIPPAIN_COLORS['purple'], zorder=5)
    ax.fill(angles_radians, angles_powers, color=KLIPPAIN_COLORS['purple'], alpha=0.3)
    ax.set_xlim([0, np.deg2rad(360)])
    # 5% radial headroom above the highest energy value
    ymax = angles_powers.max() * 1.05
    ax.set_ylim([0, ymax])
    # Angular gridlines every 15 degrees
    ax.set_thetagrids([theta * 15 for theta in range(360 // 15)])

    # Symmetry score printed at the center of the polar plot
    ax.text(
        0,
        0,
        f'Symmetry: {symmetry_factor:.1f}%',
        ha='center',
        va='center',
        color=KLIPPAIN_COLORS['red_pink'],
        fontsize=12,
        fontweight='bold',
        zorder=6,
    )

    # Mark each low-energy zone with dotted radial boundaries and a green fill
    for _, (start, end, _) in enumerate(low_energy_zones):
        # axvline's second positional argument is ymin as a fraction of the axis,
        # so the dotted line only spans from the curve outward
        ax.axvline(
            angles_radians[start],
            angles_powers[start] / ymax,
            color=KLIPPAIN_COLORS['red_pink'],
            linestyle='dotted',
            linewidth=1.5,
        )
        ax.axvline(
            angles_radians[end],
            angles_powers[end] / ymax,
            color=KLIPPAIN_COLORS['red_pink'],
            linestyle='dotted',
            linewidth=1.5,
        )
        ax.fill_between(
            angles_radians[start:end], angles_powers[start:end], angles_powers.max() * 1.05, color='green', alpha=0.2
        )

    ax.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
    ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
    ax.grid(which='major', color='grey')
    ax.grid(which='minor', color='lightgrey')

    # Polar plot doesn't follow the gridspec margin, so we adjust it manually here
    pos = ax.get_position()
    new_pos = [pos.x0 - 0.01, pos.y0 - 0.01, pos.width, pos.height]
    ax.set_position(new_pos)

    return
|
||||||
|
|
||||||
|
|
||||||
|
def plot_global_speed_profile(
    ax,
    all_speeds,
    sp_min_energy,
    sp_max_energy,
    sp_variance_energy,
    vibration_metric,
    num_peaks,
    peaks,
    low_energy_zones,
):
    # Draw the energy envelope (min/max/variance) over speed on the main axis and the
    # vibration metric on a hidden twin axis, marking bad peaks and shading good zones.
    ax.set_title('Global speed energy profile', fontsize=14, color=KLIPPAIN_COLORS['dark_orange'], weight='bold')
    ax.set_xlabel('Speed (mm/s)')
    ax.set_ylabel('Energy')

    # The vibration metric lives on its own scale; its tick labels are hidden
    metric_ax = ax.twinx()
    metric_ax.yaxis.set_visible(False)

    ax.plot(all_speeds, sp_min_energy, label='Minimum', color=KLIPPAIN_COLORS['dark_purple'], zorder=5)
    ax.plot(all_speeds, sp_max_energy, label='Maximum', color=KLIPPAIN_COLORS['purple'], zorder=5)
    ax.plot(all_speeds, sp_variance_energy, label='Variance', color=KLIPPAIN_COLORS['orange'], zorder=5, linestyle='--')
    metric_ax.plot(
        all_speeds,
        vibration_metric,
        label=f'Vibration metric ({num_peaks} bad peaks)',
        color=KLIPPAIN_COLORS['red_pink'],
        zorder=5,
    )

    ax.set_xlim([all_speeds.min(), all_speeds.max()])
    ax.set_ylim([0, sp_max_energy.max() * 1.15])

    # Leave a small negative margin so the shaded zones visually reach below the curve
    y2min = -(vibration_metric.max() * 0.025)
    y2max = vibration_metric.max() * 1.07
    metric_ax.set_ylim([y2min, y2max])

    # Mark and number the resonance peaks found on the vibration metric curve
    if peaks is not None and len(peaks) > 0:
        metric_ax.plot(all_speeds[peaks], vibration_metric[peaks], 'x', color='black', markersize=8, zorder=10)
        for idx, peak in enumerate(peaks):
            metric_ax.annotate(
                f'{idx+1}',
                (all_speeds[peak], vibration_metric[peak]),
                textcoords='offset points',
                xytext=(5, 5),
                fontweight='bold',
                ha='left',
                fontsize=13,
                color=KLIPPAIN_COLORS['red_pink'],
                zorder=10,
            )

    # Shade the low-energy (recommended) speed zones in green
    for idx, (start, end, _) in enumerate(low_energy_zones):
        metric_ax.fill_between(
            all_speeds[start:end],
            y2min,
            vibration_metric[start:end],
            color='green',
            alpha=0.2,
            label=f'Zone {idx+1}: {all_speeds[start]:.1f} to {all_speeds[end]:.1f} mm/s',
        )

    ax.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
    ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
    ax.grid(which='major', color='grey')
    ax.grid(which='minor', color='lightgrey')

    legend_font = matplotlib.font_manager.FontProperties()
    legend_font.set_size('small')
    ax.legend(loc='upper left', prop=legend_font)
    metric_ax.legend(loc='upper right', prop=legend_font)

    return
|
||||||
|
|
||||||
|
|
||||||
|
def plot_angular_speed_profiles(ax, speeds, angles, spectrogram_data, kinematics='cartesian'):
    # Plot the energy vs. speed profile of the four main motion directions
    # (X/Y and, for CoreXY, the A/B belt directions at 45/135 deg).
    ax.set_title('Angular speed energy profiles', fontsize=14, color=KLIPPAIN_COLORS['dark_orange'], weight='bold')
    ax.set_xlabel('Speed (mm/s)')
    ax.set_ylabel('Energy')

    # Define mappings for labels and colors to simplify plotting commands
    angle_settings = {
        0: ('X (0 deg)', 'purple', 10),
        90: ('Y (90 deg)', 'dark_purple', 5),
        45: ('A (45 deg)' if kinematics == 'corexy' else '45 deg', 'orange', 10),
        135: ('B (135 deg)' if kinematics == 'corexy' else '135 deg', 'dark_orange', 5),
    }

    # Plot each angle using settings from the dictionary. The y-axis scale is computed
    # from the very same rows that are plotted: indexing spectrogram_data directly with
    # the angle *value* (as before) is only correct when the measured angles happen to
    # be a 1-degree grid starting at 0, so the searchsorted index is reused instead.
    max_value = 0.0
    for angle, (label, color, zorder) in angle_settings.items():
        idx = np.searchsorted(angles, angle, side='left')
        max_value = max(max_value, spectrogram_data[idx].max())
        ax.plot(speeds, spectrogram_data[idx], label=label, color=KLIPPAIN_COLORS[color], zorder=zorder)

    ax.set_xlim([speeds.min(), speeds.max()])
    ax.set_ylim([0, max_value * 1.1])

    ax.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
    ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
    ax.grid(which='major', color='grey')
    ax.grid(which='minor', color='lightgrey')

    fontP = matplotlib.font_manager.FontProperties()
    fontP.set_size('small')
    ax.legend(loc='upper right', prop=fontP)

    return
|
||||||
|
|
||||||
|
|
||||||
|
def plot_motor_profiles(ax, freqs, main_angles, motor_profiles, global_motor_profile, max_freq):
    # Plot the combined motor frequency profile plus one dashed profile per measured
    # angle, then annotate the detected motor resonance and print its parameters.
    ax.set_title('Motor frequency profile', fontsize=14, color=KLIPPAIN_COLORS['dark_orange'], weight='bold')
    ax.set_ylabel('Energy')
    ax.set_xlabel('Frequency (Hz)')

    # Invisible twin axis only used to host the textual legend entries
    legend_ax = ax.twinx()
    legend_ax.yaxis.set_visible(False)

    # Global weighted average motor profile
    ax.plot(freqs, global_motor_profile, label='Combined', color=KLIPPAIN_COLORS['purple'], zorder=5)

    # Mapping of angles to axis names
    axis_names = {0: 'X', 90: 'Y', 45: 'A', 135: 'B'}

    # And then plot the motor profiles at each measured angles, tracking the
    # overall maximum for the y-axis scaling
    max_value = global_motor_profile.max()
    for angle in main_angles:
        max_value = max(max_value, motor_profiles[angle].max())
        label = f'{axis_names[angle]} ({angle} deg)' if angle in axis_names else f'{angle} deg'
        ax.plot(freqs, motor_profiles[angle], linestyle='--', label=label, zorder=2)

    ax.set_xlim([0, max_freq])
    ax.set_ylim([0, max_value * 1.1])
    ax.ticklabel_format(axis='y', style='scientific', scilimits=(0, 0))

    # Then add the motor resonance peak to the graph and print some infos about it
    motor_fr, motor_zeta, motor_res_idx, lowfreq_max = compute_mechanical_parameters(global_motor_profile, freqs, 30)
    if lowfreq_max:
        print_with_c_locale(
            '[WARNING] There are a lot of low frequency vibrations that can alter the readings. This is probably due to the test being performed at too high an acceleration!'
        )
        print_with_c_locale(
            'Try lowering the ACCEL value and/or increasing the SIZE value before restarting the macro to ensure that only constant speeds are being recorded and that the dynamic behavior of the machine is not affecting the measurements'
        )
    if motor_zeta is not None:
        print_with_c_locale(
            'Motors have a main resonant frequency at %.1fHz with an estimated damping ratio of %.3f'
            % (motor_fr, motor_zeta)
        )
    else:
        print_with_c_locale(
            'Motors have a main resonant frequency at %.1fHz but it was impossible to estimate a damping ratio.'
            % (motor_fr)
        )

    ax.plot(freqs[motor_res_idx], global_motor_profile[motor_res_idx], 'x', color='black', markersize=10)
    ax.annotate(
        'R',
        (freqs[motor_res_idx], global_motor_profile[motor_res_idx]),
        textcoords='offset points',
        xytext=(15, 5),
        ha='right',
        fontsize=14,
        color=KLIPPAIN_COLORS['red_pink'],
        weight='bold',
    )

    # Empty plots carry the resonance numbers into the right-hand legend
    legend_ax.plot([], [], ' ', label='Motor resonant frequency (ω0): %.1fHz' % (motor_fr))
    if motor_zeta is not None:
        legend_ax.plot([], [], ' ', label='Motor damping ratio (ζ): %.3f' % (motor_zeta))
    else:
        legend_ax.plot([], [], ' ', label='No damping ratio computed')

    ax.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
    ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
    ax.grid(which='major', color='grey')
    ax.grid(which='minor', color='lightgrey')

    legend_font = matplotlib.font_manager.FontProperties()
    legend_font.set_size('small')
    ax.legend(loc='upper left', prop=legend_font)
    legend_ax.legend(loc='upper right', prop=legend_font)

    return
|
||||||
|
|
||||||
|
|
||||||
|
def plot_vibration_spectrogram_polar(ax, angles, speeds, spectrogram_data):
    # Draw the vibrations spectrogram as a polar heatmap (angle vs. speed).
    angles_radians = np.radians(angles)

    # Speeds define the radial distance from the center: build the polar meshgrid
    # needed to map the spectrogram data onto the plot correctly
    radius_grid, theta_grid = np.meshgrid(speeds, angles_radians)

    ax.set_title(
        'Polar vibrations heatmap', fontsize=14, color=KLIPPAIN_COLORS['dark_orange'], weight='bold', va='bottom'
    )
    ax.set_theta_zero_location('E')
    ax.set_theta_direction(1)

    ax.pcolormesh(
        theta_grid, radius_grid, spectrogram_data, norm=matplotlib.colors.LogNorm(), cmap='inferno', shading='auto'
    )
    # One angular gridline every 15 degrees
    ax.set_thetagrids([15 * step for step in range(360 // 15)])
    ax.tick_params(axis='y', which='both', colors='white', labelsize='medium')
    ax.set_ylim([0, max(speeds)])

    # Polar plot doesn't follow the gridspec margin, so we adjust it manually here
    pos = ax.get_position()
    ax.set_position([pos.x0 - 0.01, pos.y0 - 0.01, pos.width, pos.height])

    return
|
||||||
|
|
||||||
|
|
||||||
|
def plot_vibration_spectrogram(ax, angles, speeds, spectrogram_data, peaks):
    # Draw the vibrations spectrogram as a rectangular heatmap (speed vs. angle)
    # and overlay vertical markers for the peaks found on the speed profile graph.
    ax.set_title('Vibrations heatmap', fontsize=14, color=KLIPPAIN_COLORS['dark_orange'], weight='bold')
    ax.set_xlabel('Speed (mm/s)')
    ax.set_ylabel('Angle (deg)')

    ax.imshow(
        spectrogram_data,
        norm=matplotlib.colors.LogNorm(),
        cmap='inferno',
        aspect='auto',
        extent=[speeds[0], speeds[-1], angles[0], angles[-1]],
        origin='lower',
        interpolation='antialiased',
    )

    # Add peaks lines in the spectrogram to get hint from peaks found in the first graph
    if peaks is None or len(peaks) == 0:
        return

    for peak_number, peak in enumerate(peaks, start=1):
        ax.axvline(speeds[peak], color='cyan', linewidth=0.75)
        ax.annotate(
            f'Peak {peak_number}',
            (speeds[peak], angles[-1] * 0.9),
            textcoords='data',
            color='cyan',
            rotation=90,
            fontsize=10,
            verticalalignment='top',
            horizontalalignment='right',
        )

    return
|
||||||
|
|
||||||
|
|
||||||
|
def plot_motor_config_txt(fig, motors, differences):
    # Print the motors configuration (model, TMC driver, currents and registers)
    # as text blocks at the top of the figure. `differences` is the textual diff
    # between the two motors' TMC configuration, or None when they match.
    # NOTE(review): reconstructed the autotune if/else structure, which was
    # flattened (orphaned `else`) in the rendered source.
    motor_details = [(motors[0], 'X motor'), (motors[1], 'Y motor')]

    # The TMC registers column is shifted further right when the longer
    # autotune-enabled configuration lines are displayed
    distance = 0.12
    if motors[0].get_property('autotune_enabled'):
        distance = 0.24
        config_blocks = [
            f"| {lbl}: {mot.get_property('motor').upper()} on {mot.get_property('tmc').upper()} @ {mot.get_property('voltage')}V {mot.get_property('run_current')}A"
            for mot, lbl in motor_details
        ]
        config_blocks.append('| TMC Autotune enabled')
    else:
        config_blocks = [
            f"| {lbl}: {mot.get_property('tmc').upper()} @ {mot.get_property('run_current')}A"
            for mot, lbl in motor_details
        ]
        config_blocks.append('| TMC Autotune not detected')

    for idx, block in enumerate(config_blocks):
        fig.text(
            0.40, 0.990 - 0.015 * idx, block, ha='left', va='top', fontsize=10, color=KLIPPAIN_COLORS['dark_purple']
        )

    # One line per TMC register of the X motor
    tmc_registers = motors[0].get_registers()
    idx = -1  # keeps the diff line placement valid even with zero registers
    for idx, (register, settings) in enumerate(tmc_registers.items()):
        settings_str = ' '.join(f'{k}={v}' for k, v in settings.items())
        tmc_block = f'| {register.upper()}: {settings_str}'
        fig.text(
            0.40 + distance,
            0.990 - 0.015 * idx,
            tmc_block,
            ha='left',
            va='top',
            fontsize=10,
            color=KLIPPAIN_COLORS['dark_purple'],
        )

    # If the Y motor TMC config differs from the X one, print the diff below the registers
    if differences is not None:
        differences_text = f'| Y motor diff: {differences}'
        fig.text(
            0.40 + distance,
            0.990 - 0.015 * (idx + 1),
            differences_text,
            ha='left',
            va='top',
            fontsize=10,
            color=KLIPPAIN_COLORS['dark_purple'],
        )
|
||||||
|
|
||||||
|
|
||||||
|
######################################################################
|
||||||
|
# Startup and main routines
|
||||||
|
######################################################################
|
||||||
|
|
||||||
|
|
||||||
|
def extract_angle_and_speed(logname):
    """Extract the angle (deg) and speed (mm/s) encoded in a raw log filename.

    Filenames are expected to contain a token like 'an<angle>_<i>sp<speed>_<j>'
    (as produced by the vibrations measurement macro).

    Raises ValueError when the filename does not match the expected pattern.
    """
    # The previous try/except AttributeError was dead code: `match` is None-checked
    # before any .group() access, so AttributeError can never be raised here.
    match = re.search(r'an(\d+)_\d+sp(\d+)_\d+', os.path.basename(logname))
    if match is None:
        raise ValueError(f'File {logname} does not match expected format. Clean your /tmp folder and start again!')
    return float(match.group(1)), float(match.group(2))
|
||||||
|
|
||||||
|
|
||||||
|
def vibrations_profile(
    lognames, klipperdir='~/klipper', kinematics='cartesian', accel=None, max_freq=1000.0, st_version=None, motors=None
):
    """Build the full machine vibrations analysis figure from a set of raw
    accelerometer CSV files (one per angle/speed pair) and return it."""
    set_locale()
    global shaper_calibrate
    shaper_calibrate = setup_klipper_import(klipperdir)

    # The two main motion directions depend on the machine kinematics
    kinematics_main_angles = {'cartesian': [0, 90], 'corexy': [45, 135]}
    if kinematics not in kinematics_main_angles:
        raise ValueError('Only Cartesian and CoreXY kinematics are supported by this tool at the moment!')
    main_angles = kinematics_main_angles[kinematics]

    psds = defaultdict(lambda: defaultdict(list))
    psds_sum = defaultdict(lambda: defaultdict(list))
    target_freqs_initialized = False

    # Compute the PSD of every measurement file and index it by (angle, speed)
    for logname in lognames:
        data = parse_log(logname)
        angle, speed = extract_angle_and_speed(logname)
        freq_response = calc_freq_response(data)
        first_freqs = freq_response.freq_bins
        psd_sum = freq_response.psd_sum

        # The first file seen defines the common frequency bins used for interpolation
        if not target_freqs_initialized:
            target_freqs = first_freqs[first_freqs <= max_freq]
            target_freqs_initialized = True

        psd_sum = psd_sum[first_freqs <= max_freq]
        first_freqs = first_freqs[first_freqs <= max_freq]

        # Store the interpolated PSD and integral values
        psds[angle][speed] = np.interp(target_freqs, first_freqs, psd_sum)
        psds_sum[angle][speed] = np.trapz(psd_sum, first_freqs)

    measured_angles = sorted(psds_sum.keys())
    measured_speeds = sorted({speed for angle_speeds in psds_sum.values() for speed in angle_speeds.keys()})

    if any(required_angle not in measured_angles for required_angle in main_angles):
        raise ValueError('Measurements not taken at the correct angles for the specified kinematics!')

    # Precompute the variables used in plot functions
    all_angles, all_speeds, spectrogram_data = compute_dir_speed_spectrogram(
        measured_speeds, psds_sum, kinematics, main_angles
    )
    all_angles_energy = compute_angle_powers(spectrogram_data)
    sp_min_energy, sp_max_energy, sp_variance_energy, vibration_metric = compute_speed_powers(spectrogram_data)
    motor_profiles, global_motor_profile = compute_motor_profiles(target_freqs, psds, all_angles_energy, main_angles)

    symmetry_factor = compute_symmetry_analysis(all_angles, spectrogram_data, main_angles)
    print_with_c_locale(f'Machine estimated vibration symmetry: {symmetry_factor:.1f}%')

    # Analyze low variance ranges of vibration energy across all angles for each speed to identify clean speeds
    # and highlight them. Also find the peaks to identify speeds to avoid due to high resonances
    num_peaks, vibration_peaks, peaks_speeds = detect_peaks(
        vibration_metric,
        all_speeds,
        PEAKS_DETECTION_THRESHOLD * vibration_metric.max(),
        PEAKS_RELATIVE_HEIGHT_THRESHOLD,
        10,
        10,
    )
    formated_peaks_speeds = ['{:.1f}'.format(pspeed) for pspeed in peaks_speeds]
    print_with_c_locale(
        'Vibrations peaks detected: %d @ %s mm/s (avoid setting a speed near these values in your slicer print profile)'
        % (num_peaks, ', '.join(map(str, formated_peaks_speeds)))
    )

    good_speeds = identify_low_energy_zones(vibration_metric, SPEEDS_VALLEY_DETECTION_THRESHOLD)
    if good_speeds is not None:
        deletion_range = int(SPEEDS_AROUND_PEAK_DELETION / (all_speeds[1] - all_speeds[0]))
        peak_speed_indices = {pspeed: np.where(all_speeds == pspeed)[0][0] for pspeed in set(peaks_speeds)}

        # Filter and split ranges based on peak indices, avoiding overlaps
        good_speeds = filter_and_split_ranges(all_speeds, good_speeds, peak_speed_indices, deletion_range)

        # Add some logging about the good speeds found
        print_with_c_locale(f'Lowest vibrations speeds ({len(good_speeds)} ranges sorted from best to worse):')
        for idx, (start, end, _) in enumerate(good_speeds):
            print_with_c_locale(f'{idx+1}: {all_speeds[start]:.1f} to {all_speeds[end]:.1f} mm/s')

    # Angle low energy valleys identification (good angles ranges) and print them to the console
    good_angles = identify_low_energy_zones(all_angles_energy, ANGLES_VALLEY_DETECTION_THRESHOLD)
    if good_angles is not None:
        print_with_c_locale(f'Lowest vibrations angles ({len(good_angles)} ranges sorted from best to worse):')
        for idx, (start, end, energy) in enumerate(good_angles):
            print_with_c_locale(
                f'{idx+1}: {all_angles[start]:.1f}° to {all_angles[end]:.1f}° (mean vibrations energy: {energy:.2f}% of max)'
            )

    # Create graph layout
    fig, ((ax1, ax2, ax3), (ax4, ax5, ax6)) = plt.subplots(
        2,
        3,
        gridspec_kw={
            'height_ratios': [1, 1],
            'width_ratios': [4, 8, 6],
            'bottom': 0.050,
            'top': 0.890,
            'left': 0.040,
            'right': 0.985,
            'hspace': 0.166,
            'wspace': 0.138,
        },
    )

    # Transform ax1 and ax4 to polar plots
    ax1.remove()
    ax1 = fig.add_subplot(2, 3, 1, projection='polar')
    ax4.remove()
    ax4 = fig.add_subplot(2, 3, 4, projection='polar')

    # Set the global .png figure size
    fig.set_size_inches(20, 11.5)

    # Add title
    title_line1 = 'MACHINE VIBRATIONS ANALYSIS TOOL'
    fig.text(
        0.060, 0.965, title_line1, ha='left', va='bottom', fontsize=20, color=KLIPPAIN_COLORS['purple'], weight='bold'
    )
    try:
        # The second title line is the measurement date/time parsed from the filename
        filename_parts = (lognames[0].split('/')[-1]).split('_')
        dt = datetime.strptime(f"{filename_parts[1]} {filename_parts[2].split('-')[0]}", '%Y%m%d %H%M%S')
        title_line2 = dt.strftime('%x %X')
        if accel is not None:
            title_line2 += ' at ' + str(accel) + ' mm/s² -- ' + kinematics.upper() + ' kinematics'
    except Exception:
        print_with_c_locale('Warning: CSV filenames appear to be different than expected (%s)' % (lognames[0]))
        title_line2 = lognames[0].split('/')[-1]
    fig.text(0.060, 0.957, title_line2, ha='left', va='top', fontsize=16, color=KLIPPAIN_COLORS['dark_purple'])

    # Add the motors infos to the top of the graph
    if motors is not None and len(motors) == 2:
        differences = motors[0].compare_to(motors[1])
        plot_motor_config_txt(fig, motors, differences)
        if differences is not None and kinematics == 'corexy':
            print_with_c_locale(f'Warning: motors have different TMC configurations!\n{differences}')

    # Plot the graphs
    plot_angle_profile_polar(ax1, all_angles, all_angles_energy, good_angles, symmetry_factor)
    plot_vibration_spectrogram_polar(ax4, all_angles, all_speeds, spectrogram_data)
    plot_global_speed_profile(
        ax2,
        all_speeds,
        sp_min_energy,
        sp_max_energy,
        sp_variance_energy,
        vibration_metric,
        num_peaks,
        vibration_peaks,
        good_speeds,
    )
    plot_angular_speed_profiles(ax3, all_speeds, all_angles, spectrogram_data, kinematics)
    plot_vibration_spectrogram(ax5, all_angles, all_speeds, spectrogram_data, vibration_peaks)
    plot_motor_profiles(ax6, target_freqs, main_angles, motor_profiles, global_motor_profile, max_freq)

    # Adding a small Klippain logo to the top left corner of the figure
    ax_logo = fig.add_axes([0.001, 0.924, 0.075, 0.075], anchor='NW')
    ax_logo.imshow(plt.imread(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'klippain.png')))
    ax_logo.axis('off')

    # Adding Shake&Tune version in the top right corner
    if st_version != 'unknown':
        fig.text(0.995, 0.985, st_version, ha='right', va='bottom', fontsize=8, color=KLIPPAIN_COLORS['purple'])

    return fig
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Command-line entry point: parse options, build the vibrations graph and save it as a PNG."""
    usage = '%prog [options] <raw logs>'
    opts = optparse.OptionParser(usage)
    opts.add_option('-o', '--output', type='string', dest='output', default=None, help='filename of output graph')
    opts.add_option(
        '-c', '--accel', type='int', dest='accel', default=None, help='accel value to be printed on the graph'
    )
    opts.add_option('-f', '--max_freq', type='float', default=1000.0, help='maximum frequency to graph')
    opts.add_option(
        '-k', '--klipper_dir', type='string', dest='klipperdir', default='~/klipper', help='main klipper directory'
    )
    opts.add_option(
        '-m',
        '--kinematics',
        type='string',
        dest='kinematics',
        default='cartesian',
        help='machine kinematics configuration',
    )
    options, args = opts.parse_args()

    # Validate the command line before doing any heavy work
    if not args:
        opts.error('No CSV file(s) to analyse')
    if options.output is None:
        opts.error('You must specify an output file.png to use the script (option -o)')
    if options.kinematics not in ('cartesian', 'corexy'):
        opts.error('Only cartesian and corexy kinematics are supported by this tool at the moment!')

    fig = vibrations_profile(args, options.klipperdir, options.kinematics, options.accel, options.max_freq)
    fig.savefig(options.output, dpi=150)
|
||||||
|
|
||||||
|
|
||||||
|
# Run the CLI entry point only when executed as a script (not when imported)
if __name__ == '__main__':
    main()
|
||||||
|
Before Width: | Height: | Size: 607 KiB After Width: | Height: | Size: 607 KiB |
0
src/helpers/__init__.py
Normal file
228
src/helpers/common_func.py
Normal file
@@ -0,0 +1,228 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
# Common functions for the Shake&Tune package
|
||||||
|
# Written by Frix_x#0161 #
|
||||||
|
|
||||||
|
import math
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
from importlib import import_module
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import numpy as np
|
||||||
|
from git import GitCommandError, Repo
|
||||||
|
from scipy.signal import spectrogram
|
||||||
|
|
||||||
|
|
||||||
|
def parse_log(logname):
    """Load a raw accelerometer CSV and return it as a numpy array.

    The file is accepted only when its first non-comment line is NOT a Klipper
    PSD/shaper-calibration header ('freq,psd_x,...'); otherwise a ValueError is
    raised asking the user to process it with the official Klipper script.
    An empty file also raises ValueError (previously this crashed with a
    NameError because `header` was never bound).
    """
    header = None
    with open(logname) as f:
        # Skip leading '#' comment lines; `header` ends up holding the first
        # data/header line (or the last line when everything is commented)
        for header in f:
            if not header.startswith('#'):
                break
    if header is not None and not header.startswith('freq,psd_x,psd_y,psd_z,psd_xyz'):
        # Raw accelerometer data
        return np.loadtxt(logname, comments='#', delimiter=',')
    # Power spectral density data or shaper calibration data (or an empty file)
    raise ValueError(
        'File %s does not contain raw accelerometer data and therefore '
        'is not supported by Shake&Tune. Please use the official Klipper '
        'script to process it instead.' % (logname,)
    )
|
||||||
|
|
||||||
|
|
||||||
|
def setup_klipper_import(kdir):
    """Make Klipper's shaper_calibrate module importable and return it.

    kdir: path to the Klipper installation (supports '~' expansion).
    """
    klippy_dir = os.path.join(os.path.expanduser(kdir), 'klippy')
    sys.path.append(klippy_dir)
    return import_module('.shaper_calibrate', 'extras')
|
||||||
|
|
||||||
|
|
||||||
|
# This is used to print the current S&T version on top of the png graph file
|
||||||
|
def get_git_version():
    """Best-effort detection of the Shake&Tune version from the local git repo.

    Returns the latest tag when one exists, the abbreviated commit SHA otherwise,
    or None when the version cannot be determined at all (e.g. not a git checkout).
    """
    try:
        # Resolve symlinks so the repo is found even when the script is linked
        # elsewhere, then go two levels up to reach the git root folder
        repo_root = Path(__file__).resolve().parents[1]
        repo = Repo(repo_root)
        try:
            return repo.git.describe('--tags')
        except GitCommandError:
            # If no tag is found, use the simplified commit SHA instead
            return repo.head.commit.hexsha[:7]
    except Exception:
        return None
|
||||||
|
|
||||||
|
|
||||||
|
# This is Klipper's spectrogram generation function adapted to use Scipy
|
||||||
|
def compute_spectrogram(data):
    """Klipper's spectrogram generation function adapted to use Scipy.

    data: (N, 4) raw accelerometer samples [time, x, y, z].
    Returns (pdata, t, f): the PSD summed over the three axes, the segment
    times and the frequency bins.
    """
    n_samples = data.shape[0]
    sample_rate = n_samples / (data[-1, 0] - data[0, 0])
    # Round up to a power of 2 for faster FFT
    segment_size = 1 << int(0.5 * sample_rate - 1).bit_length()
    window = np.kaiser(segment_size, 6.0)

    def _specgram(x):
        return spectrogram(
            x,
            fs=sample_rate,
            window=window,
            nperseg=segment_size,
            noverlap=segment_size // 2,
            detrend='constant',
            scaling='density',
            mode='psd',
        )

    # Accumulate the PSD of the three axes (x first, then y and z) into one array
    f, t, pdata = _specgram(data[:, 1])
    for axis_column in (2, 3):
        pdata += _specgram(data[:, axis_column])[2]
    return pdata, t, f
|
||||||
|
|
||||||
|
|
||||||
|
# Compute natural resonant frequency and damping ratio by using the half power bandwidth method with interpolated frequencies
|
||||||
|
def compute_mechanical_parameters(psd, freqs, min_freq=None):
    """Estimate the natural resonant frequency and damping ratio of a PSD peak
    using the half power bandwidth method with interpolated frequencies.

    Returns (fr, zeta, max_power_index, max_under_min_freq); fr/zeta/index are
    None when they cannot be computed, and max_under_min_freq flags a global
    maximum located below min_freq.
    """
    max_under_min_freq = False

    if min_freq is not None:
        min_freq_index = np.searchsorted(freqs, min_freq, side='left')
        if min_freq_index >= len(freqs):
            return None, None, None, max_under_min_freq
        if np.argmax(psd) < min_freq_index:
            # The strongest component is below min_freq: report it to the caller
            max_under_min_freq = True
    else:
        min_freq_index = 0

    # Consider only the part of the signal above min_freq
    psd_above_min_freq = psd[min_freq_index:]
    if len(psd_above_min_freq) == 0:
        return None, None, None, max_under_min_freq

    max_power_index = int(np.argmax(psd_above_min_freq)) + min_freq_index
    fr = freqs[max_power_index]
    max_power = psd[max_power_index]

    # Half-power (-3 dB) level and its crossings on each side of the peak
    half_power = max_power / math.sqrt(2)
    indices_below = np.where(psd[:max_power_index] <= half_power)[0]
    indices_above = np.where(psd[max_power_index:] <= half_power)[0]

    # If we are not able to find points around the half power, we can't compute the damping ratio
    if len(indices_below) == 0 or len(indices_above) == 0:
        return fr, None, max_power_index, max_under_min_freq

    idx_below = indices_below[-1]
    idx_above = indices_above[0] + max_power_index
    # Linear interpolation of the exact half-power crossing frequency on each side
    freq_below_half_power = freqs[idx_below] + (half_power - psd[idx_below]) * (
        freqs[idx_below + 1] - freqs[idx_below]
    ) / (psd[idx_below + 1] - psd[idx_below])
    freq_above_half_power = freqs[idx_above - 1] + (half_power - psd[idx_above - 1]) * (
        freqs[idx_above] - freqs[idx_above - 1]
    ) / (psd[idx_above] - psd[idx_above - 1])

    bandwidth = freq_above_half_power - freq_below_half_power
    bw_ratio_sq = math.pow(bandwidth / fr, 2)
    bw_ratio_quad = math.pow(bandwidth / fr, 4)

    try:
        zeta = math.sqrt(0.5 - math.sqrt(1 / (4 + 4 * bw_ratio_sq - bw_ratio_quad)))
    except ValueError:
        # A math problem such as a negative sqrt term: no damping ratio available
        return fr, None, max_power_index, max_under_min_freq

    return fr, zeta, max_power_index, max_under_min_freq
|
||||||
|
|
||||||
|
|
||||||
|
# This find all the peaks in a curve by looking at when the derivative term goes from positive to negative
|
||||||
|
# Then only the peaks found above a threshold are kept to avoid capturing peaks in the low amplitude noise of a signal
|
||||||
|
def detect_peaks(data, indices, detection_threshold, relative_height_threshold=None, window_size=5, vicinity=3):
    """Find the peaks of a curve by locating sign changes of its discrete slope.

    The curve is smoothed first so that low-amplitude noise does not produce
    spurious peaks; only peaks above detection_threshold (and optionally above a
    relative height threshold vs. their local surroundings) are kept, then each
    peak position is refined back onto the original (unsmoothed) curve.

    Returns (num_peaks, peak_positions, indices_at_peaks).
    """
    # Smooth the curve using a moving average to avoid catching peaks everywhere in noisy signals
    kernel = np.ones(window_size) / window_size
    smoothed_data = np.convolve(data, kernel, mode='valid')
    mean_pad = [np.mean(data[:window_size])] * (window_size // 2)
    smoothed_data = np.concatenate((mean_pad, smoothed_data))

    # Find local maxima (strict rise then strict fall) on the smoothed curve
    smoothed_peaks = (
        np.where((smoothed_data[:-2] < smoothed_data[1:-1]) & (smoothed_data[1:-1] > smoothed_data[2:]))[0] + 1
    )
    smoothed_peaks = smoothed_peaks[smoothed_data[smoothed_peaks] > detection_threshold]

    # Additional validation for peaks based on relative height over their vicinity
    valid_peaks = smoothed_peaks
    if relative_height_threshold is not None:
        valid_peaks = []
        for peak in smoothed_peaks:
            peak_height = smoothed_data[peak] - np.min(
                smoothed_data[max(0, peak - vicinity) : min(len(smoothed_data), peak + vicinity + 1)]
            )
            if peak_height > relative_height_threshold * smoothed_data[peak]:
                valid_peaks.append(peak)

    # Refine peak positions on the original curve
    refined_peaks = []
    for peak in valid_peaks:
        local_max = peak + np.argmax(data[max(0, peak - vicinity) : min(len(data), peak + vicinity + 1)]) - vicinity
        refined_peaks.append(local_max)

    # Use an explicit integer dtype: np.array([]) defaults to float64, which made
    # the returned peak array unusable as an index array downstream when no peak
    # was found (fancy indexing with a float array raises IndexError)
    refined_peaks = np.array(refined_peaks, dtype=int)

    return len(refined_peaks), refined_peaks, indices[refined_peaks]
|
||||||
|
|
||||||
|
|
||||||
|
# The goal is to find zone outside of peaks (flat low energy zones) in a signal
|
||||||
|
def identify_low_energy_zones(power_total, detection_threshold=0.1):
    """Find flat low-energy zones (valleys) outside of peaks in a signal.

    A sample belongs to a valley when it falls below a threshold derived from
    the signal statistics. Returns a list of ``(start, end, mean_percentage)``
    tuples sorted from the quietest valley to the loudest, where
    ``mean_percentage`` is the valley mean expressed as a percentage of the
    signal maximum.
    """
    valleys = []

    # Calculate a "mean + 1/4 of the dynamic range" and the standard deviation of the entire signal
    mean_energy = np.mean(power_total) + (np.max(power_total) - np.min(power_total)) / 4
    std_energy = np.std(power_total)

    # Define a threshold value as "mean + 1/4" minus a certain number of standard deviations
    threshold_value = mean_energy - detection_threshold * std_energy

    # Scan the signal and record contiguous runs of samples below the threshold
    in_valley = False
    start_idx = 0
    for i, value in enumerate(power_total):
        if not in_valley and value < threshold_value:
            in_valley = True
            start_idx = i
        elif in_valley and value >= threshold_value:
            in_valley = False
            valleys.append((start_idx, i))

    # If the last point is still in a valley, close the valley
    if in_valley:
        valleys.append((start_idx, len(power_total) - 1))

    max_signal = np.max(power_total)

    # Mean energy of each valley as a percentage of the signal maximum.
    # The mean is computed once per valley (the original computed it twice);
    # NaN means (from empty slices) are skipped as before.
    valley_means_percentage = []
    for start, end in valleys:
        valley_mean = np.mean(power_total[start:end])
        if not np.isnan(valley_mean):
            valley_means_percentage.append((start, end, (valley_mean / max_signal) * 100))

    # Sort valleys based on mean percentage values (quietest first)
    sorted_valleys = sorted(valley_means_percentage, key=lambda x: x[2])

    return sorted_valleys
|
||||||
|
|
||||||
|
|
||||||
|
# Calculate or estimate a "similarity" factor between two PSD curves and scale it to a percentage. This is
|
||||||
|
# used here to quantify how close the two belts path behavior and responses are close together.
|
||||||
|
def compute_curve_similarity_factor(x1, y1, x2, y2, sim_sigmoid_k=0.6):
    """Estimate a "similarity" factor between two PSD curves, scaled to a percentage.

    Used to quantify how close the two belt paths behave and respond to each
    other: the second curve is resampled on the frequency bins of the first,
    the curves are cross-correlated, and the normalized correlation peak is
    pushed through a sigmoid to spread the values over a 0-100% range.
    """
    # Resample the second PSD onto the frequency bins of the first one
    resampled_y2 = np.interp(x1, x2, y2)

    # Cross-correlate and normalize the correlation peak by the signals energy
    correlation = np.correlate(y1, resampled_y2, mode='full')
    peak_value = np.max(correlation)
    similarity = peak_value / (np.sqrt(np.sum(y1**2) * np.sum(resampled_y2**2)))

    # Sigmoid scaling to get nicer numbers and a final percentage value
    scaled_similarity = sigmoid_scale(-np.log(1 - similarity), sim_sigmoid_k)

    return scaled_similarity
|
||||||
|
|
||||||
|
|
||||||
|
# Simple helper to compute a sigmoid scalling (from 0 to 100%)
|
||||||
|
def sigmoid_scale(x, k=1):
    """Map *x* through a logistic sigmoid of steepness *k* and express the result as a percentage (0 to 100%)."""
    logistic = 1 / (1 + np.exp(-k * x))
    return logistic * 100
|
||||||
38
src/helpers/filemanager.py
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
# Common file management functions for the Shake&Tune package
|
||||||
|
# Written by Frix_x#0161 #
|
||||||
|
|
||||||
|
import os
|
||||||
|
import time
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
|
||||||
|
def wait_file_ready(filepath: Path, timeout: int = 60) -> None:
    """Block until *filepath* is no longer held open for writing by Klipper.

    The file is probed by trying to open it in write-only mode: if the open
    succeeds, the file has been released and the function returns immediately.
    Failed probes are retried once per second; after *timeout* attempts a
    TimeoutError is raised.

    Fix over the original implementation: it used to execute `time.sleep(1)`
    even after a successful probe, so every call waited at least one second.
    """
    for _ in range(timeout):
        # Try to open the file in write-only mode to check if it is in use.
        # If we successfully open and close the file, it is not in use.
        try:
            fd = os.open(filepath, os.O_WRONLY)
            os.close(fd)
            return
        except OSError:
            # OSError indicates the file is still being used: retry after a pause
            pass
        except Exception:
            # Any other error is treated as "not ready yet"; just loop again
            pass
        time.sleep(1)

    raise TimeoutError(f'Klipper is taking too long to release the CSV file ({filepath})!')
|
||||||
|
|
||||||
|
|
||||||
|
def ensure_folders_exist(folders: list[Path]) -> None:
    """Create each folder of *folders* (with any missing parents), silently skipping ones that already exist."""
    for target in folders:
        target.mkdir(parents=True, exist_ok=True)
|
||||||
34
src/helpers/locale_utils.py
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
# Special utility functions to manage locale settings and printing
|
||||||
|
# Written by Frix_x#0161 #
|
||||||
|
|
||||||
|
|
||||||
|
import locale
|
||||||
|
|
||||||
|
|
||||||
|
# Set the best locale for time and date formating (generation of the titles)
|
||||||
|
def set_locale():
    """Ensure a usable LC_TIME locale for date/time formatting (graph titles).

    Falls back to the basic 'C' locale when no LC_TIME locale is configured
    or when querying the current one fails.
    """
    try:
        lc_time = locale.getlocale(locale.LC_TIME)
        # No locale configured at all: fall back to the portable 'C' locale
        if lc_time is None or lc_time[0] is None:
            locale.setlocale(locale.LC_TIME, 'C')
    except locale.Error:
        locale.setlocale(locale.LC_TIME, 'C')
|
||||||
|
|
||||||
|
|
||||||
|
# Print function to avoid problem in Klipper console (that doesn't support special characters) due to locale settings
|
||||||
|
def print_with_c_locale(*args, **kwargs):
    """Print under the basic 'C' locale, then restore the previous locale.

    The Klipper console does not support special characters produced by some
    locale settings, so the message is printed while 'C' is active. Printing
    happens even if switching the locale fails.

    Fix over the original: `original_locale` is pre-initialized so that a
    failure inside `locale.getlocale()` no longer leaves it unbound (which
    made the restore step in `finally` raise a NameError).
    """
    original_locale = None  # Stays None if the locale could not be read/switched
    try:
        original_locale = locale.getlocale()
        locale.setlocale(locale.LC_ALL, 'C')
    except locale.Error as e:
        print(
            'Warning: Failed to set a basic locale. Special characters may not display correctly in Klipper console:', e
        )
    finally:
        print(*args, **kwargs)  # Proceed with printing regardless of locale setting success
        if original_locale is not None:
            try:
                locale.setlocale(locale.LC_ALL, original_locale)
            except locale.Error as e:
                print('Warning: Failed to restore the original locale setting:', e)
|
||||||
205
src/helpers/motorlogparser.py
Normal file
@@ -0,0 +1,205 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
# Classes to parse the Klipper log and parse the TMC dump to extract the relevant information
|
||||||
|
# Written by Frix_x#0161 #
|
||||||
|
|
||||||
|
import re
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, Dict, List, Optional, Union
|
||||||
|
|
||||||
|
|
||||||
|
class Motor:
    """Holds the Klipper config properties and the parsed TMC register dump of one stepper motor."""

    def __init__(self, name: str):
        self._name: str = name
        # register name -> {field: value} parsed from the TMC dump
        self._registers: Dict[str, Dict[str, Any]] = {}
        # free-form Klipper config properties (run_current, motor reference, ...)
        self._properties: Dict[str, Any] = {}

    def set_register(self, register: str, value: Any) -> None:
        """Clean up a raw TMC register dump string and store its key=value fields under *register*."""
        # Special parsing for CHOPCONF to extract meaningful values
        if register == 'CHOPCONF':
            # Add intpol=0 if missing from the register dump
            if 'intpol=' not in value:
                value += ' intpol=0'
            # Simplify the microstep resolution format
            mres_match = re.search(r'mres=\d+\((\d+)usteps\)', value)
            if mres_match:
                value = re.sub(r'mres=\d+\(\d+usteps\)', f'mres={mres_match.group(1)}', value)

        # Special parsing for PWMCONF to strip the pwm_ prefix before each value
        if register == 'PWMCONF':
            parts = value.split()
            new_parts = []
            for part in parts:
                key, val = part.split('=', 1)
                if key.startswith('pwm_'):
                    key = key[4:]
                new_parts.append(f'{key}={val}')
            value = ' '.join(new_parts)

        # General cleaning to remove extraneous labels and colons and parse the whole into Motor _registers
        cleaned_values = re.sub(r'\b\w+:\s+\S+\s+', '', value)

        # Then fill the registers while merging all the thresholds into the same THRS virtual register
        if register in ['TPWMTHRS', 'TCOOLTHRS']:
            existing_thrs = self._registers.get('THRS', {})
            new_values = self._parse_register_values(cleaned_values)
            merged_values = {**existing_thrs, **new_values}
            self._registers['THRS'] = merged_values
        else:
            self._registers[register] = self._parse_register_values(cleaned_values)

    def _parse_register_values(self, register_string: str) -> Dict[str, Any]:
        """Split a 'k1=v1 k2=v2 ...' string into a dict; tokens without '=' are ignored."""
        parsed = {}
        parts = register_string.split()
        for part in parts:
            if '=' in part:
                k, v = part.split('=', 1)
                parsed[k] = v
        return parsed

    def get_register(self, register: str) -> Optional[Dict[str, Any]]:
        """Return the parsed fields of *register*, or None if it was never set."""
        return self._registers.get(register)

    def get_registers(self) -> Dict[str, Dict[str, Any]]:
        """Return all parsed registers."""
        return self._registers

    def set_property(self, property: str, value: Any) -> None:
        """Store a Klipper config property for this motor."""
        self._properties[property] = value

    def get_property(self, property: str) -> Optional[Any]:
        """Return a stored Klipper config property, or None if unset."""
        return self._properties.get(property)

    def __str__(self):
        return f'Stepper: {self._name}\nKlipper config: {self._properties}\nTMC Registers: {self._registers}'

    # Return the other motor properties and registers that are different from the current motor
    def compare_to(self, other: 'Motor') -> Optional[Dict[str, Dict[str, Any]]]:
        """Diff *other* against self; returns {'properties': ..., 'registers': ...} with the
        values taken from *other*, omitting empty sections, or None when identical."""
        differences = {'properties': {}, 'registers': {}}

        # Compare properties (union of keys so one-sided entries are caught too)
        all_keys = self._properties.keys() | other._properties.keys()
        for key in all_keys:
            val1 = self._properties.get(key)
            val2 = other._properties.get(key)
            if val1 != val2:
                differences['properties'][key] = val2

        # Compare registers field by field
        all_keys = self._registers.keys() | other._registers.keys()
        for key in all_keys:
            reg1 = self._registers.get(key, {})
            reg2 = other._registers.get(key, {})
            if reg1 != reg2:
                reg_diffs = {}
                sub_keys = reg1.keys() | reg2.keys()
                for sub_key in sub_keys:
                    reg_val1 = reg1.get(sub_key)
                    reg_val2 = reg2.get(sub_key)
                    if reg_val1 != reg_val2:
                        reg_diffs[sub_key] = reg_val2
                if reg_diffs:
                    differences['registers'][key] = reg_diffs

        # Clean up: remove empty sections if there are no differences
        if not differences['properties']:
            del differences['properties']
        if not differences['registers']:
            del differences['registers']

        if not differences:
            return None

        return differences
|
||||||
|
|
||||||
|
|
||||||
|
class MotorLogParser:
|
||||||
|
_section_pattern: str = r'DUMP_TMC stepper_(x|y)'
|
||||||
|
_register_patterns: Dict[str, str] = {
|
||||||
|
'CHOPCONF': r'CHOPCONF:\s+\S+\s+(.*)',
|
||||||
|
'PWMCONF': r'PWMCONF:\s+\S+\s+(.*)',
|
||||||
|
'COOLCONF': r'COOLCONF:\s+(.*)',
|
||||||
|
'TPWMTHRS': r'TPWMTHRS:\s+\S+\s+(.*)',
|
||||||
|
'TCOOLTHRS': r'TCOOLTHRS:\s+\S+\s+(.*)',
|
||||||
|
}
|
||||||
|
|
||||||
|
def __init__(self, filepath: Path, config_string: Optional[str] = None):
|
||||||
|
self._filepath = filepath
|
||||||
|
|
||||||
|
self._motors: List[Motor] = []
|
||||||
|
self._config = self._parse_config(config_string) if config_string else {}
|
||||||
|
|
||||||
|
self._parse_registers()
|
||||||
|
|
||||||
|
def _parse_config(self, config_string: str) -> Dict[str, Any]:
|
||||||
|
config = {}
|
||||||
|
entries = config_string.split('|')
|
||||||
|
for entry in entries:
|
||||||
|
if entry:
|
||||||
|
key, value = entry.split(':')
|
||||||
|
config[key.strip()] = self._convert_value(value.strip())
|
||||||
|
return config
|
||||||
|
|
||||||
|
def _convert_value(self, value: str) -> Union[int, float, bool, str]:
|
||||||
|
if value.isdigit():
|
||||||
|
return int(value)
|
||||||
|
try:
|
||||||
|
return float(value)
|
||||||
|
except ValueError:
|
||||||
|
if value.lower() in ['true', 'false']:
|
||||||
|
return value.lower() == 'true'
|
||||||
|
return value
|
||||||
|
|
||||||
|
def _parse_registers(self) -> None:
|
||||||
|
with open(self._filepath, 'r') as file:
|
||||||
|
log_content = file.read()
|
||||||
|
|
||||||
|
sections = re.split(self._section_pattern, log_content)
|
||||||
|
|
||||||
|
# Detect only the latest dumps from the log (to ignore potential previous and outdated dumps)
|
||||||
|
last_sections: Dict[str, int] = {}
|
||||||
|
for i in range(1, len(sections), 2):
|
||||||
|
stepper_name = 'stepper_' + sections[i].strip()
|
||||||
|
last_sections[stepper_name] = i
|
||||||
|
|
||||||
|
for stepper_name, index in last_sections.items():
|
||||||
|
content = sections[index + 1]
|
||||||
|
motor = Motor(stepper_name)
|
||||||
|
|
||||||
|
# Apply general properties from config string
|
||||||
|
for key, value in self._config.items():
|
||||||
|
if stepper_name in key:
|
||||||
|
prop_key = key.replace(stepper_name + '_', '')
|
||||||
|
motor.set_property(prop_key, value)
|
||||||
|
elif 'autotune' in key:
|
||||||
|
motor.set_property(key, value)
|
||||||
|
|
||||||
|
# Parse TMC registers
|
||||||
|
for key, pattern in self._register_patterns.items():
|
||||||
|
match = re.search(pattern, content)
|
||||||
|
if match:
|
||||||
|
values = match.group(1).strip()
|
||||||
|
motor.set_register(key, values)
|
||||||
|
|
||||||
|
self._motors.append(motor)
|
||||||
|
|
||||||
|
# Find and return the motor by its name
|
||||||
|
def get_motor(self, motor_name: str) -> Optional[Motor]:
|
||||||
|
for motor in self._motors:
|
||||||
|
if motor._name == motor_name:
|
||||||
|
return motor
|
||||||
|
return None
|
||||||
|
|
||||||
|
# Get all the motor list at once
|
||||||
|
def get_motors(self) -> List[Motor]:
|
||||||
|
return self._motors
|
||||||
|
|
||||||
|
|
||||||
|
# # Usage example:
|
||||||
|
# config_string = "stepper_x_tmc:tmc2240|stepper_x_run_current:0.9|stepper_x_hold_current:0.9|stepper_y_tmc:tmc2240|stepper_y_run_current:0.9|stepper_y_hold_current:0.9|autotune_enabled:True|stepper_x_motor:ldo-35sth48-1684ah|stepper_x_voltage:|stepper_y_motor:ldo-35sth48-1684ah|stepper_y_voltage:|"
|
||||||
|
# parser = MotorLogParser('/path/to/your/logfile.log', config_string)
|
||||||
|
|
||||||
|
# stepper_x = parser.get_motor('stepper_x')
|
||||||
|
# stepper_y = parser.get_motor('stepper_y')
|
||||||
|
|
||||||
|
# print(stepper_x)
|
||||||
|
# print(stepper_y)
|
||||||
424
src/is_workflow.py
Executable file
@@ -0,0 +1,424 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
############################################
|
||||||
|
###### INPUT SHAPER KLIPPAIN WORKFLOW ######
|
||||||
|
############################################
|
||||||
|
# Written by Frix_x#0161 #
|
||||||
|
|
||||||
|
# This script is designed to be used with gcode_shell_commands directly from Klipper
|
||||||
|
# Use the provided Shake&Tune macros instead!
|
||||||
|
|
||||||
|
|
||||||
|
import abc
|
||||||
|
import argparse
|
||||||
|
import tarfile
|
||||||
|
import traceback
|
||||||
|
from datetime import datetime
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Callable, Optional
|
||||||
|
|
||||||
|
from git import GitCommandError, Repo
|
||||||
|
from matplotlib.figure import Figure
|
||||||
|
|
||||||
|
import src.helpers.filemanager as fm
|
||||||
|
from src.graph_creators.analyze_axesmap import axesmap_calibration
|
||||||
|
from src.graph_creators.graph_belts import belts_calibration
|
||||||
|
from src.graph_creators.graph_shaper import shaper_calibration
|
||||||
|
from src.graph_creators.graph_vibrations import vibrations_profile
|
||||||
|
from src.helpers.locale_utils import print_with_c_locale
|
||||||
|
from src.helpers.motorlogparser import MotorLogParser
|
||||||
|
|
||||||
|
|
||||||
|
class Config:
    """Static configuration and CLI argument parsing for the Shake&Tune workflow."""

    # Klipper installation and result folder locations (fixed, relative to $HOME)
    KLIPPER_FOLDER = Path.home() / 'klipper'
    KLIPPER_LOG_FOLDER = Path.home() / 'printer_data/logs'
    RESULTS_BASE_FOLDER = Path.home() / 'printer_data/config/K-ShakeTune_results'
    # graph type -> subfolder name under RESULTS_BASE_FOLDER
    RESULTS_SUBFOLDERS = {'belts': 'belts', 'shaper': 'inputshaper', 'vibrations': 'vibrations'}

    @staticmethod
    def get_results_folder(type: str) -> Path:
        """Return the results folder for a graph type; raises KeyError for unknown types."""
        return Config.RESULTS_BASE_FOLDER / Config.RESULTS_SUBFOLDERS[type]

    @staticmethod
    def get_git_version() -> str:
        """Return the Shake&Tune version from git (tag, or short commit SHA), 'unknown' on failure."""
        try:
            # Get the absolute path of the script, resolving any symlinks
            # Then get 1 times to parent dir to be at the git root folder
            script_path = Path(__file__).resolve()
            repo_path = script_path.parents[1]
            repo = Repo(repo_path)
            try:
                version = repo.git.describe('--tags')
            except GitCommandError:
                version = repo.head.commit.hexsha[:7]  # If no tag is found, use the simplified commit SHA instead
            return version
        except Exception as e:
            # Broad catch is intentional: version lookup must never break the workflow
            print_with_c_locale(f'Warning: unable to retrieve Shake&Tune version number: {e}')
            return 'unknown'

    @staticmethod
    def parse_arguments() -> argparse.Namespace:
        """Build and run the command-line argument parser for the graph generation script."""
        parser = argparse.ArgumentParser(description='Shake&Tune graphs generation script')
        parser.add_argument(
            '-t',
            '--type',
            dest='type',
            choices=['belts', 'shaper', 'vibrations', 'axesmap'],
            required=True,
            help='Type of output graph to produce',
        )
        parser.add_argument(
            '--accel',
            type=int,
            default=None,
            dest='accel_used',
            help='Accelerometion used for vibrations profile creation or axes map calibration',
        )
        parser.add_argument(
            '--chip_name',
            type=str,
            default='adxl345',
            dest='chip_name',
            help='Accelerometer chip name used for vibrations profile creation or axes map calibration',
        )
        parser.add_argument(
            '--max_smoothing',
            type=float,
            default=None,
            dest='max_smoothing',
            help='Maximum smoothing to allow for input shaper filter recommendations',
        )
        parser.add_argument(
            '--scv',
            '--square_corner_velocity',
            type=float,
            default=5.0,
            dest='scv',
            help='Square corner velocity used to compute max accel for input shapers filter recommendations',
        )
        parser.add_argument(
            '-m',
            '--kinematics',
            dest='kinematics',
            default='cartesian',
            choices=['cartesian', 'corexy'],
            help='Machine kinematics configuration used for the vibrations profile creation',
        )
        parser.add_argument(
            '--metadata',
            type=str,
            default=None,
            dest='metadata',
            help='Motor configuration metadata printed on the vibrations profiles',
        )
        parser.add_argument(
            '-c',
            '--keep_csv',
            action='store_true',
            default=False,
            dest='keep_csv',
            help='Whether to keep the raw CSV files after processing in addition to the PNG graphs',
        )
        parser.add_argument(
            '-n',
            '--keep_results',
            type=int,
            default=3,
            dest='keep_results',
            help='Number of results to keep in the result folder after each run of the script',
        )
        parser.add_argument('--dpi', type=int, default=150, dest='dpi', help='DPI of the output PNG files')
        parser.add_argument('-v', '--version', action='version', version=f'Shake&Tune {Config.get_git_version()}')

        return parser.parse_args()
|
||||||
|
|
||||||
|
|
||||||
|
class GraphCreator(abc.ABC):
    """Base class for the graph creators: collects the raw CSV files written by Klipper
    in /tmp, renames them into the results folder, and saves the resulting figure."""

    def __init__(self, keep_csv: bool, dpi: int):
        self._keep_csv = keep_csv
        self._dpi = dpi

        # Timestamp shared by all files of this run (CSV, PNG, archives)
        self._graph_date = datetime.now().strftime('%Y%m%d_%H%M%S')
        self._version = Config.get_git_version()

        # Set by _setup_folder() in subclasses
        self._type = None
        self._folder = None

    def _setup_folder(self, graph_type: str) -> None:
        """Record the graph type and resolve its results folder."""
        self._type = graph_type
        self._folder = Config.get_results_folder(graph_type)

    def _move_and_prepare_files(
        self,
        glob_pattern: str,
        min_files_required: Optional[int] = None,
        custom_name_func: Optional[Callable[[Path], str]] = None,
    ) -> list[Path]:
        """Move the newest CSV files matching *glob_pattern* from /tmp to the results folder.

        Waits for Klipper to release each file before and after the rename.
        Returns the new paths. Raises FileNotFoundError when no file (or fewer
        than *min_files_required*) is found.
        """
        tmp_path = Path('/tmp')
        globbed_files = list(tmp_path.glob(glob_pattern))

        # If min_files_required is not set, use the number of globbed files as the minimum
        min_files_required = min_files_required or len(globbed_files)

        if not globbed_files:
            raise FileNotFoundError(f'no CSV files found in the /tmp folder to create the {self._type} graphs!')
        if len(globbed_files) < min_files_required:
            raise FileNotFoundError(f'{min_files_required} CSV files are needed to create the {self._type} graphs!')

        lognames = []
        # Newest files first; only the required number are taken
        for filename in sorted(globbed_files, key=lambda f: f.stat().st_mtime, reverse=True)[:min_files_required]:
            fm.wait_file_ready(filename)
            custom_name = custom_name_func(filename) if custom_name_func else filename.name
            new_file = self._folder / f'{self._type}_{self._graph_date}_{custom_name}.csv'
            filename.rename(new_file)
            fm.wait_file_ready(new_file)
            lognames.append(new_file)
        return lognames

    def _save_figure_and_cleanup(self, fig: Figure, lognames: list[Path], axis_label: Optional[str] = None) -> None:
        """Save *fig* as a PNG in the results folder, then archive or delete the CSV files."""
        axis_suffix = f'_{axis_label}' if axis_label else ''
        png_filename = self._folder / f'{self._type}_{self._graph_date}{axis_suffix}.png'
        fig.savefig(png_filename, dpi=self._dpi)

        if self._keep_csv:
            self._archive_files(lognames)
        else:
            self._remove_files(lognames)

    def _archive_files(self, _: list[Path]) -> None:
        # Default: keep the CSV files in place (VibrationsGraphCreator overrides this to tar them)
        return

    def _remove_files(self, lognames: list[Path]) -> None:
        """Delete the given CSV files, ignoring the ones already gone."""
        for csv in lognames:
            csv.unlink(missing_ok=True)

    @abc.abstractmethod
    def create_graph(self) -> None:
        # Subclasses collect their CSV files, run the calibration and save the figure
        pass

    @abc.abstractmethod
    def clean_old_files(self, keep_results: int) -> None:
        # Subclasses prune old PNG/CSV/archive files, keeping the newest keep_results runs
        pass
|
||||||
|
|
||||||
|
|
||||||
|
class BeltsGraphCreator(GraphCreator):
    """Creates the belts comparison graph from the two per-axis raw CSV files."""

    def __init__(self, keep_csv: bool = False, dpi: int = 150):
        super().__init__(keep_csv, dpi)

        self._setup_folder('belts')

    def create_graph(self) -> None:
        """Collect the two belt CSV files, run belts_calibration and save the figure."""
        lognames = self._move_and_prepare_files(
            glob_pattern='raw_data_axis*.csv',
            min_files_required=2,
            # Name each file after its axis token (4th '_' field of the stem, uppercased)
            custom_name_func=lambda f: f.stem.split('_')[3].upper(),
        )
        fig = belts_calibration(
            lognames=[str(path) for path in lognames],
            klipperdir=str(Config.KLIPPER_FOLDER),
            st_version=self._version,
        )
        self._save_figure_and_cleanup(fig, lognames)

    def clean_old_files(self, keep_results: int = 3) -> None:
        """Keep only the newest *keep_results* belt graphs (and their A/B CSV files)."""
        # Get all PNG files in the directory as a list of Path objects
        files = sorted(self._folder.glob('*.png'), key=lambda f: f.stat().st_mtime, reverse=True)

        if len(files) <= keep_results:
            return  # No need to delete any files

        # Delete the older files
        for old_file in files[keep_results:]:
            # Reconstruct the timestamp part of the filename to find the matching CSVs
            file_date = '_'.join(old_file.stem.split('_')[1:3])
            for suffix in ['A', 'B']:
                csv_file = self._folder / f'belts_{file_date}_{suffix}.csv'
                csv_file.unlink(missing_ok=True)
            old_file.unlink()
|
||||||
|
|
||||||
|
|
||||||
|
class ShaperGraphCreator(GraphCreator):
    """Creates the input shaper calibration graph from a single axis raw CSV file."""

    def __init__(self, keep_csv: bool = False, dpi: int = 150):
        super().__init__(keep_csv, dpi)

        # Must be set through configure() before create_graph() is called
        self._max_smoothing = None
        self._scv = None

        self._setup_folder('shaper')

    def configure(self, scv: float, max_smoothing: float = None) -> None:
        """Set the square corner velocity and (optionally) the maximum allowed smoothing."""
        self._scv = scv
        self._max_smoothing = max_smoothing

    def create_graph(self) -> None:
        """Collect the newest shaper CSV file, run shaper_calibration and save the figure."""
        if not self._scv:
            raise ValueError('scv must be set to create the input shaper graph!')

        lognames = self._move_and_prepare_files(
            glob_pattern='raw_data*.csv',
            min_files_required=1,
            # Name the file after its axis token (4th '_' field of the stem, uppercased)
            custom_name_func=lambda f: f.stem.split('_')[3].upper(),
        )
        fig = shaper_calibration(
            lognames=[str(path) for path in lognames],
            klipperdir=str(Config.KLIPPER_FOLDER),
            max_smoothing=self._max_smoothing,
            scv=self._scv,
            st_version=self._version,
        )
        # The axis label is appended to the PNG name (last '_' field of the CSV stem)
        self._save_figure_and_cleanup(fig, lognames, lognames[0].stem.split('_')[-1])

    def clean_old_files(self, keep_results: int = 3) -> None:
        """Keep the newest 2*keep_results shaper graphs (one per axis) and their CSV files."""
        # Get all PNG files in the directory as a list of Path objects
        files = sorted(self._folder.glob('*.png'), key=lambda f: f.stat().st_mtime, reverse=True)

        if len(files) <= 2 * keep_results:
            return  # No need to delete any files

        # Delete the older files
        for old_file in files[2 * keep_results :]:
            csv_file = old_file.with_suffix('.csv')
            csv_file.unlink(missing_ok=True)
            old_file.unlink()
|
||||||
|
|
||||||
|
|
||||||
|
class VibrationsGraphCreator(GraphCreator):
    """Creates the machine vibrations profile graph from the accelerometer CSV files."""

    def __init__(self, keep_csv: bool = False, dpi: int = 150):
        super().__init__(keep_csv, dpi)

        # Must be set through configure() before create_graph() is called
        self._kinematics = None
        self._accel = None
        self._chip_name = None
        self._motors = None

        self._setup_folder('vibrations')

    def configure(self, kinematics: str, accel: float, chip_name: str, metadata: str) -> None:
        """Set the machine parameters and extract the motor TMC configuration from the Klipper log."""
        self._kinematics = kinematics
        self._accel = accel
        self._chip_name = chip_name

        parser = MotorLogParser(Config.KLIPPER_LOG_FOLDER / 'klippy.log', metadata)
        self._motors = parser.get_motors()

    def _archive_files(self, lognames: list[Path]) -> None:
        """Bundle all the raw CSV files of this run into a single .tar.gz archive."""
        tar_path = self._folder / f'{self._type}_{self._graph_date}.tar.gz'
        with tarfile.open(tar_path, 'w:gz') as tar:
            for csv_file in lognames:
                tar.add(csv_file, arcname=csv_file.name, recursive=False)

    def create_graph(self) -> None:
        """Collect all accelerometer CSV files, run vibrations_profile and save the figure."""
        if not self._accel or not self._chip_name or not self._kinematics:
            raise ValueError('accel, chip_name and kinematics must be set to create the vibrations profile graph!')

        lognames = self._move_and_prepare_files(
            glob_pattern=f'{self._chip_name}-*.csv',
            min_files_required=None,  # take every matching file
            custom_name_func=lambda f: f.name.replace(self._chip_name, self._type),
        )
        fig = vibrations_profile(
            lognames=[str(path) for path in lognames],
            klipperdir=str(Config.KLIPPER_FOLDER),
            kinematics=self._kinematics,
            accel=self._accel,
            st_version=self._version,
            motors=self._motors,
        )
        self._save_figure_and_cleanup(fig, lognames)

    def clean_old_files(self, keep_results: int = 3) -> None:
        """Keep only the newest *keep_results* vibration graphs and their .tar.gz archives."""
        # Get all PNG files in the directory as a list of Path objects
        files = sorted(self._folder.glob('*.png'), key=lambda f: f.stat().st_mtime, reverse=True)

        if len(files) <= keep_results:
            return  # No need to delete any files

        # Delete the older files
        for old_file in files[keep_results:]:
            old_file.unlink()
            # The matching archive shares the PNG's stem (see _archive_files)
            tar_file = old_file.with_suffix('.tar.gz')
            tar_file.unlink(missing_ok=True)
|
||||||
|
|
||||||
|
|
||||||
|
class AxesMapFinder:
    """Runs the axes map calibration on the latest accelerometer CSV file and writes the result to a text file.

    Note: not a GraphCreator subclass — its constructor takes (accel, chip_name)
    and its entry point is find_axesmap(), not create_graph().
    """

    def __init__(self, accel: float, chip_name: str):
        self._accel = accel
        self._chip_name = chip_name

        self._graph_date = datetime.now().strftime('%Y%m%d_%H%M%S')

        self._type = 'axesmap'
        # Results go directly in the base folder (no dedicated subfolder for axesmap)
        self._folder = Config.RESULTS_BASE_FOLDER

    def find_axesmap(self) -> None:
        """Locate the newest accelerometer CSV in /tmp, run the calibration and save the result."""
        tmp_folder = Path('/tmp')
        globbed_files = list(tmp_folder.glob(f'{self._chip_name}-*.csv'))

        if not globbed_files:
            raise FileNotFoundError('no CSV files found in the /tmp folder to find the axes map!')

        # Find the CSV files with the latest timestamp and wait for it to be released by Klipper
        logname = sorted(globbed_files, key=lambda f: f.stat().st_mtime, reverse=True)[0]
        fm.wait_file_ready(logname)

        results = axesmap_calibration(
            lognames=[str(logname)],
            accel=self._accel,
        )

        # The calibration result is plain text, written next to the graph results
        result_filename = self._folder / f'{self._type}_{self._graph_date}.txt'
        with result_filename.open('w') as f:
            f.write(results)
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Entry point: parse the CLI options, run the requested graph creation and prune old results."""
    options = Config.parse_arguments()
    fm.ensure_folders_exist(
        folders=[Config.RESULTS_BASE_FOLDER / subfolder for subfolder in Config.RESULTS_SUBFOLDERS.values()]
    )

    print_with_c_locale(f'Shake&Tune version: {Config.get_git_version()}')

    # AxesMapFinder is not a GraphCreator: its constructor takes (accel, chip_name)
    # and it exposes find_axesmap() instead of create_graph()/clean_old_files().
    # The previous code routed it through the generic path below, instantiating it
    # with (keep_csv, dpi) and calling the non-existent create_graph(), which
    # always failed with an AttributeError. Handle it separately instead.
    if options.type == 'axesmap':
        finder = AxesMapFinder(options.accel_used, options.chip_name)
        try:
            finder.find_axesmap()
        except FileNotFoundError as e:
            print_with_c_locale(f'FileNotFound error: {e}')
            return
        except TimeoutError as e:
            print_with_c_locale(f'Timeout error: {e}')
            return
        except Exception as e:
            print_with_c_locale(f'Error while generating the graphs: {e}')
            traceback.print_exc()
            return
        print_with_c_locale(f'{options.type} graphs created successfully!')
        return

    graph_creators = {
        'belts': (BeltsGraphCreator, None),
        'shaper': (ShaperGraphCreator, lambda gc: gc.configure(options.scv, options.max_smoothing)),
        'vibrations': (
            VibrationsGraphCreator,
            lambda gc: gc.configure(options.kinematics, options.accel_used, options.chip_name, options.metadata),
        ),
    }

    creator_info = graph_creators.get(options.type)
    if not creator_info:
        print_with_c_locale('Error: invalid graph type specified!')
        return

    # Instantiate the graph creator
    graph_creator_class, configure_func = creator_info
    graph_creator = graph_creator_class(options.keep_csv, options.dpi)

    # Configure it if needed
    if configure_func:
        configure_func(graph_creator)

    # And then run it
    try:
        graph_creator.create_graph()
    except FileNotFoundError as e:
        print_with_c_locale(f'FileNotFound error: {e}')
        return
    except TimeoutError as e:
        print_with_c_locale(f'Timeout error: {e}')
        return
    except Exception as e:
        print_with_c_locale(f'Error while generating the graphs: {e}')
        traceback.print_exc()
        return

    print_with_c_locale(f'{options.type} graphs created successfully!')
    graph_creator.clean_old_files(options.keep_results)
    print_with_c_locale(f'Cleaned output folder to keep only the last {options.keep_results} results!')
|
||||||
9
system-dependencies.json
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
{
|
||||||
|
"debian": [
|
||||||
|
"python3-venv",
|
||||||
|
"python3-numpy",
|
||||||
|
"python3-matplotlib",
|
||||||
|
"libopenblas-dev",
|
||||||
|
"libatlas-base-dev"
|
||||||
|
]
|
||||||
|
}
|
||||||