Compare commits

...

357 Commits
v1.3.5 ... main

Author SHA1 Message Date
8a558c3121 refactor: remove unnecessary delay in MQTT setup and add delay before restart 2025-03-03 16:58:24 +01:00
5afb60df32 fix: correct typo in console log for total length 2025-03-02 20:21:27 +01:00
3394e6eb01 feat: add new 3D print file for Filaman scale 2025-03-02 08:06:59 +01:00
3818c2c059 refactor: remove redundant scale calibration checks and enhance task management 2025-03-01 18:50:20 +01:00
0afc543b5f refactor: enhance AMS data handling and streamline spool auto-setting logic 2025-03-01 18:44:35 +01:00
adee46e3fc refactor: adjust stack size and improve scale calibration logic 2025-03-01 18:44:29 +01:00
1db74867e6 refactor: update labels and input types for better clarity and functionality 2025-03-01 18:44:17 +01:00
0f24a63d32 added Discord Server 2025-03-01 15:33:39 +01:00
3640809502 update documentation for clarity and accuracy 2025-03-01 13:04:28 +01:00
289d5357be docs: update changelog and header for version v1.4.0
All checks were successful
Release Workflow / detect-provider (push) Successful in 5s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m45s
2025-03-01 12:46:18 +01:00
315530d1ea update NFC tag references to include NTAG213 and clarify storage capacity 2025-03-01 12:45:55 +01:00
f36773a4c4 bump version to 1.4.0 2025-03-01 12:37:50 +01:00
b35163936f add support for Spoolman Octoprint Plugin in README files 2025-03-01 12:33:26 +01:00
7a2c9d6d17 add OctoPrint integration with configurable fields and update functionality 2025-03-01 12:18:33 +01:00
eb2a8dc128 add version comparison function and check for outdated versions before updates 2025-03-01 12:18:21 +01:00
bec2c91331 remove unused version and protocol fields from JSON output; add error message for insufficient memory 2025-03-01 10:42:06 +01:00
c6e727de06 remove unused version and protocol fields from NFC data packet 2025-03-01 10:41:51 +01:00
3253e7d407 sort vendors alphabetically in the dropdown list 2025-03-01 10:41:44 +01:00
bce2ad2ed8 Merge pull request #10 from janecker/nfc-improvements
Improves NFC Tag handling
2025-03-01 10:03:46 +01:00
Jan Philipp Ecker 0eff29ef4a Improves NFC Tag handling
Fixes a memory underflow when reading tags. Reads tags with their actual data size and uses the actual size instead of a constant value for the tag size when writing a tag.
2025-02-28 22:35:34 +01:00
492bf6cdb8 docs: update changelog and header for version v1.3.99
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m39s
2025-02-28 18:35:16 +01:00
b0317f4001 docs: update platformio.ini for version v1.3.99 2025-02-28 18:35:16 +01:00
58ff6458b0 refactor: update workflows to build firmware with LittleFS instead of SPIFFS 2025-02-28 18:35:05 +01:00
d9c40f5124 docs: update changelog and header for version v1.3.98
Some checks failed
Release Workflow / detect-provider (push) Successful in 4s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m51s
2025-02-28 18:12:56 +01:00
68bc31e29a docs: update platformio.ini for version v1.3.98 2025-02-28 18:12:56 +01:00
9b23ac5fd2 refactor: migrate from SPIFFS to LittleFS for file handling 2025-02-28 18:12:42 +01:00
d31bff14c3 chore: remove unused VSCode settings file 2025-02-28 09:29:34 +01:00
150f92484a refactor: remove commented-out spoolman and filaman data from api.cpp 2025-02-28 09:26:09 +01:00
fa74832fb9 docs: update changelog and header for version v1.3.97
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m47s
2025-02-28 08:53:08 +01:00
2eab3db77d docs: update platformio.ini for version v1.3.97 2025-02-28 08:53:07 +01:00
0a1bf22f7e feat: add confirmation message for spool setting 2025-02-27 22:07:47 +01:00
d58244c1f8 fix: optimize memory usage 2025-02-27 21:56:31 +01:00
db626ea516 fix: fix duplicate http.end() calls in checkSpoolmanExtraFields 2025-02-27 21:54:47 +01:00
fd8f7685a1 fix: optimize delays and stack sizes in NFC task functions 2025-02-27 21:54:32 +01:00
944b156528 feat: improve WiFi configuration and add mDNS support 2025-02-27 21:53:48 +01:00
76100593cc refactor: remove unused libraries and debug output from main.cpp 2025-02-27 15:50:04 +01:00
732d590344 feat: update OLED display with version number and improve text alignment 2025-02-27 14:35:53 +01:00
46cd953b80 feat: add periodic WiFi connection check 2025-02-27 09:38:54 +01:00
c645035bbe feat: update schematic image 2025-02-26 18:29:37 +01:00
9e76620cd3 style: remove text-shadow from disabled buttons 2025-02-26 18:07:22 +01:00
faddda6201 feat: show version number on OLED display 2025-02-26 18:01:35 +01:00
de9c1706c0 docs: add link to the wiki for detailed usage information 2025-02-25 20:19:04 +01:00
9f7ee13e78 docs: update changelog and header for version v1.3.96
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 3m1s
2025-02-25 16:29:37 +01:00
cf3f6f6741 docs: update platformio.ini for version v1.3.96 2025-02-25 16:29:37 +01:00
b87d43c64e feat: add support for Spoolman settings and update the user interface 2025-02-25 16:24:22 +01:00
3d0411e3c1 feat: remove the sendAmsData function from the API interface 2025-02-25 14:52:47 +01:00
9c61b708aa fix: update conditions for AMS data updates and remove unnecessary calls 2025-02-25 14:52:27 +01:00
90f800d042 fix: update condition for OTA update progress message 2025-02-25 12:19:24 +01:00
a7b1721e1d feat: extend Bambu credentials with AutoSend time and update the user interface 2025-02-25 12:17:20 +01:00
e4825d2905 feat: extend Bambu credentials with AutoSend wait time and update the user interface 2025-02-25 11:32:57 +01:00
c1733848d3 feat: add espRestart function and replace delay with vTaskDelay for OTA update process 2025-02-25 11:02:54 +01:00
484c95523d feat: implement OTA update functionality with backup and restore for configurations 2025-02-25 10:57:49 +01:00
8499613215 fix: update auto set logic to check RFID tag before setting Bambu spool 2025-02-25 10:57:36 +01:00
08f37186b4 feat: add own_filaments.json and integrate custom filament loading in bambu.cpp 2025-02-25 09:02:11 +01:00
2948a35fa8 docs: update changelog and header for version v1.3.95
All checks were successful
Release Workflow / detect-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m45s
2025-02-24 19:56:19 +01:00
730724fe58 docs: update webpages for version v1.3.95 2025-02-24 19:56:18 +01:00
714b7065e7 fix: bind autoSendToBambu variable to checkbox in spoolman.html 2025-02-24 19:56:01 +01:00
2d8aec515d docs: update changelog and header for version v1.3.94
All checks were successful
Release Workflow / detect-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m41s
2025-02-24 19:47:24 +01:00
b245a206ce docs: update webpages for version v1.3.94 2025-02-24 19:47:24 +01:00
f1489e75cc fix: correct payload type check in NFC write event handling 2025-02-24 19:46:58 +01:00
d9ae829503 docs: update changelog and header for version v1.3.93
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 3m1s
2025-02-24 19:15:03 +01:00
2247b8ed6c docs: update webpages for version v1.3.93 2025-02-24 19:15:03 +01:00
d70b187bf9 feat: implement auto send feature for Bambu spool management and update related configurations 2025-02-24 19:14:51 +01:00
1ade007473 fix: remove debug output from splitTextIntoLines and update weight display logic in scanRfidTask 2025-02-24 19:14:45 +01:00
0af14e2f7d docs: add debug mode instructions for Spoolman in README 2025-02-24 19:14:28 +01:00
de67cdbff3 fix: enhance weight display logic for negative values 2025-02-24 12:28:18 +01:00
98fce15ccc refactor: simplify filament names in JSON configuration 2025-02-24 12:21:27 +01:00
ab417ba64b refactor: update findFilamentIdx to return structured result and improve type searching logic 2025-02-24 12:11:27 +01:00
320057bc49 docs: add wiring diagrams to README for PN532 I2C setup 2025-02-24 10:10:15 +01:00
9007a65fc2 docs: update README to reflect PN532 I2C configuration and remove SPI pin details 2025-02-24 09:36:28 +01:00
2214f5f5de fix: remove unnecessary CPU frequency configuration from setup function 2025-02-24 09:20:44 +01:00
5c5846c52c docs: update changelog and header for version v1.3.92
All checks were successful
Release Workflow / detect-provider (push) Successful in 4s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m49s
2025-02-24 07:47:58 +01:00
517fa37a3d docs: update webpages for version v1.3.92 2025-02-24 07:47:58 +01:00
aaa7a6ee9c fix: configure CPU frequency settings in setup function only for testing 2025-02-24 07:47:50 +01:00
a0b8639488 fix: update comment to clarify NVS reading process 2025-02-23 21:29:38 +01:00
a16c05287e fix: adjust weight display logic to handle cases for weight less than 2 2025-02-23 21:23:46 +01:00
ecb35a97bd fix: update weight display logic to handle negative and specific weight cases 2025-02-23 21:22:50 +01:00
ba968611ec refactor: remove commented-out code in setBambuSpool function 2025-02-23 21:17:55 +01:00
6bd11ddce3 docs: update installation instructions and formatting in README files 2025-02-23 20:35:46 +01:00
3eb313e61a docs: update changelog and header for version v1.3.91
All checks were successful
Release Workflow / detect-provider (push) Successful in 5s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m51s
2025-02-23 20:29:45 +01:00
aad35dc296 docs: update webpages for version v1.3.91 2025-02-23 20:29:45 +01:00
85ac636b1e feat: update GitHub Actions workflow for FTP firmware upload with improved credential checks 2025-02-23 20:29:40 +01:00
6f1804c3fe docs: update changelog and header for version v1.3.90
All checks were successful
Release Workflow / detect-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m52s
2025-02-23 20:28:25 +01:00
89716920dc docs: update webpages for version v1.3.90 2025-02-23 20:28:25 +01:00
78b5078651 feat: update index.html for improved content structure and additional links 2025-02-23 20:27:38 +01:00
6098c3b052 feat: improve UI for Spoolman and Bambu Lab printer credentials, enhancing layout and styling 2025-02-23 20:23:09 +01:00
e7537f94d4 docs: update README files with HSPI default PINs and add ESP32 pin diagram 2025-02-23 20:12:35 +01:00
37717392d0 feat: implement scale calibration checks and update start_scale function to return calibration status 2025-02-23 16:44:43 +01:00
c6da28ad6f feat: add FTP upload functionality to GitHub release workflow and update installation instructions in README 2025-02-23 16:13:42 +01:00
d6e38a4e73 fix: remove debug secrets check from Gitea release workflow 2025-02-23 16:01:42 +01:00
4e0d9353c8 docs: update changelog and header for version v1.3.89
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m59s
2025-02-23 15:57:14 +01:00
7059826659 docs: update webpages for version v1.3.89 2025-02-23 15:57:13 +01:00
41faa8bb1c fix: update Gitea release workflow to use vars for FTP credentials 2025-02-23 15:57:09 +01:00
b38e3fa5ef docs: update changelog and header for version v1.3.88
Some checks failed
Release Workflow / detect-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m48s
2025-02-23 15:49:37 +01:00
5280d7e341 docs: update webpages for version v1.3.88 2025-02-23 15:49:37 +01:00
2f95c66d39 fix: update Gitea release workflow to use secrets for FTP credentials 2025-02-23 15:49:33 +01:00
df1b87465c docs: update changelog and header for version v1.3.87
Some checks failed
Release Workflow / detect-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m43s
2025-02-23 15:39:09 +01:00
84f1420999 docs: update webpages for version v1.3.87 2025-02-23 15:39:09 +01:00
b14dd5475d fix: enhance FTP upload workflow with credential checks and version output 2025-02-23 15:38:59 +01:00
975845421b docs: update changelog and header for version v1.3.86
Some checks failed
Release Workflow / detect-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m55s
2025-02-23 15:29:14 +01:00
044ddbe0eb docs: update webpages for version v1.3.86 2025-02-23 15:29:14 +01:00
c385544d67 fix: streamline FTP credentials usage in Gitea release workflow 2025-02-23 15:29:10 +01:00
c6cfd85687 docs: update changelog and header for version v1.3.85
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m41s
2025-02-23 15:20:13 +01:00
84632322e2 docs: update webpages for version v1.3.85 2025-02-23 15:20:13 +01:00
86e55a8696 fix: add FTP_USER variable for Gitea release workflow 2025-02-23 15:20:09 +01:00
d2b40daaca docs: update changelog and header for version v1.3.84
Some checks failed
Release Workflow / detect-provider (push) Successful in 5s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m47s
2025-02-23 15:13:49 +01:00
9d58cbc31c docs: update webpages for version v1.3.84 2025-02-23 15:13:48 +01:00
d09aeaf47c fix: add FTP_HOST variable for firmware upload in Gitea release workflow 2025-02-23 15:13:45 +01:00
9fb82fe51e docs: update changelog and header for version v1.3.83
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m57s
2025-02-23 15:07:40 +01:00
5e0e2c5f6b docs: update webpages for version v1.3.83 2025-02-23 15:07:40 +01:00
a8460503ff fix: correct variable interpolation for FTP credentials in Gitea release workflow 2025-02-23 15:07:35 +01:00
6700a1761f docs: update changelog and header for version v1.3.82
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m51s
2025-02-23 14:59:03 +01:00
7207f36e06 docs: update webpages for version v1.3.82 2025-02-23 14:59:03 +01:00
e79bee3381 feat: update Gitea release workflow to use variable interpolation for FTP credentials 2025-02-23 14:58:57 +01:00
c3918f075b docs: update changelog and header for version v1.3.81
Some checks failed
Release Workflow / detect-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m41s
2025-02-23 14:53:03 +01:00
0c384219c5 docs: update webpages for version v1.3.81 2025-02-23 14:53:03 +01:00
42b9daf4be feat: update Gitea release workflow to use environment variables for FTP credentials and version 2025-02-23 14:53:00 +01:00
13a771682f docs: update changelog and header for version v1.3.80
Some checks failed
Release Workflow / detect-provider (push) Successful in 4s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m39s
2025-02-23 14:43:41 +01:00
f79f87bf09 docs: update webpages for version v1.3.80 2025-02-23 14:43:41 +01:00
9fe3f6c0ff feat: add FTP_USER and FTP_PASSWORD secrets for firmware upload in Gitea release workflow 2025-02-23 14:43:36 +01:00
55e89948bb docs: update changelog and header for version v1.3.79
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m47s
2025-02-23 14:36:19 +01:00
6c5e8c4d07 docs: update webpages for version v1.3.79 2025-02-23 14:36:19 +01:00
4f79700d74 feat: add FTP_USER input for firmware upload in Gitea release workflow 2025-02-23 14:36:14 +01:00
1b4fecf409 docs: update changelog and header for version v1.3.78
Some checks failed
Release Workflow / detect-provider (push) Successful in 4s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 3m7s
2025-02-23 12:35:14 +01:00
89a6101d97 docs: update webpages for version v1.3.78 2025-02-23 12:35:14 +01:00
ee45a74fee fix: change FTP protocol from FTPS to FTP for file upload in workflow 2025-02-23 12:35:09 +01:00
db365aba3c docs: update changelog and header for version v1.3.77
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m49s
2025-02-23 12:05:24 +01:00
63cdfaee6c docs: update webpages for version v1.3.77 2025-02-23 12:05:24 +01:00
eb2e360c35 fix: replace ncftp with lftp for secure firmware upload 2025-02-23 12:05:19 +01:00
7d578640e2 docs: update changelog and header for version v1.3.76
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m44s
2025-02-23 12:00:38 +01:00
b006533a91 docs: update webpages for version v1.3.76 2025-02-23 12:00:37 +01:00
9fa7526623 fix: replace FTP action with curl for secure firmware upload and install ncftp 2025-02-23 12:00:33 +01:00
dfbb2fbd9b docs: update changelog and header for version v1.3.75
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m40s
2025-02-23 11:55:20 +01:00
0302158449 docs: update webpages for version v1.3.75 2025-02-23 11:55:20 +01:00
68c385f9d7 fix: update FTP user and enhance SSL options in gitea-release workflow 2025-02-23 11:55:11 +01:00
9a8bd58cb3 docs: update changelog and header for version v1.3.74
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Has been cancelled
2025-02-23 11:39:55 +01:00
0d8b8918c1 docs: update webpages for version v1.3.74 2025-02-23 11:39:54 +01:00
a892b854b5 fix: update password syntax in gitea-release workflow 2025-02-23 11:39:51 +01:00
0f02f6c848 docs: update changelog and header for version v1.3.73
Some checks failed
Release Workflow / detect-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 3m18s
2025-02-23 11:34:36 +01:00
96c054827e docs: update webpages for version v1.3.73 2025-02-23 11:34:36 +01:00
f93eedf775 chore: update version to 1.3.72 in platformio.ini 2025-02-23 11:34:32 +01:00
68a10dfeb2 docs: update changelog and header for version v1.3.72
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m46s
2025-02-23 11:17:17 +01:00
632b7a089e docs: update webpages for version v1.3.72 2025-02-23 11:17:17 +01:00
c0e3650bf4 fix: update FTP options for Gitea release workflow 2025-02-23 11:17:13 +01:00
8e3dfc93f7 docs: update changelog and header for version v1.3.71
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m37s
2025-02-23 11:09:57 +01:00
5016285dce docs: update webpages for version v1.3.71 2025-02-23 11:09:57 +01:00
9b1a232fde feat: add FTP upload step for firmware in Gitea release workflow 2025-02-23 11:09:49 +01:00
37e79b7a49 docs: update changelog and header for version v1.3.70
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m41s
2025-02-23 09:58:18 +01:00
6bd23f31c1 docs: update webpages for version v1.3.70 2025-02-23 09:58:17 +01:00
3099e9ded9 docs: update changelog and header for version v1.3.69
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m37s
2025-02-23 09:54:00 +01:00
4952ad3150 docs: update webpages for version v1.3.69 2025-02-23 09:54:00 +01:00
2055da9962 fix: update release note generation to use the second latest tag 2025-02-23 09:53:55 +01:00
459a31cad3 docs: update changelog and header for version v1.3.68
All checks were successful
Release Workflow / detect-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 3m3s
2025-02-23 09:48:22 +01:00
4b1930209b docs: update webpages for version v1.3.68 2025-02-23 09:48:21 +01:00
7dde07b5ab fix: update release note generation to include commit hash and author 2025-02-23 09:48:15 +01:00
33a5406248 fix: remove commented test line from platformio.ini 2025-02-23 09:47:18 +01:00
b016a31ff0 docs: update changelog and header for version v1.3.67
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m37s
2025-02-23 09:18:57 +01:00
19bc4927e4 docs: update webpages for version v1.3.67 2025-02-23 09:18:57 +01:00
cd55cb86ba ci: update release note generation to use the latest tag 2025-02-23 09:18:52 +01:00
8ab16b351b docs: update changelog and header for version v1.3.66
All checks were successful
Release Workflow / detect-provider (push) Successful in 6s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m52s
2025-02-23 09:02:44 +01:00
400a37d3ac docs: update webpages for version v1.3.66 2025-02-23 09:02:44 +01:00
eb4f809435 ci: remove redundant git fetch for tags in release note generation 2025-02-23 09:02:40 +01:00
1148947b8e docs: update changelog and header for version v1.3.65
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 3m18s
2025-02-22 20:49:15 +01:00
3b01336999 docs: update webpages for version v1.3.65 2025-02-22 20:49:15 +01:00
44614b58dc ci: improve release note generation by fetching tags and sorting unique commits 2025-02-22 20:49:10 +01:00
ed8d618272 docs: update changelog and header for version v1.3.64
All checks were successful
Release Workflow / detect-provider (push) Successful in 4s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 3m6s
2025-02-22 20:37:49 +01:00
cd2ac54e98 docs: update webpages for version v1.3.64 2025-02-22 20:37:49 +01:00
92f675b24c style: remove unnecessary closing tags from header.html 2025-02-22 20:37:46 +01:00
c342877558 docs: update changelog and header for version v1.3.63
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m40s
2025-02-22 20:33:55 +01:00
f5743cbd7b docs: update webpages for version v1.3.63 2025-02-22 20:33:55 +01:00
8a62597705 style: update release note generation for initial release handling 2025-02-22 20:33:51 +01:00
374721d1e5 style: update update-form background and add glass border effect 2025-02-22 20:30:30 +01:00
ea6f708c6e docs: update changelog and header for version v1.3.62
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m37s
2025-02-22 20:25:08 +01:00
78169dfdb1 docs: update webpages for version v1.3.62 2025-02-22 20:25:08 +01:00
074bfb658d style: update background colors and improve layout for update sections 2025-02-22 20:25:00 +01:00
989076e794 docs: update changelog and header for version v1.3.61
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m47s
2025-02-22 20:14:35 +01:00
aa0d056d10 docs: update webpages for version v1.3.61 2025-02-22 20:14:35 +01:00
cd619b8f2a feat: update release notes generation to use previous tag for changes 2025-02-22 20:13:15 +01:00
6d8358cbb9 docs: update changelog and header for version v1.3.60
All checks were successful
Release Workflow / detect-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m48s
2025-02-22 20:06:25 +01:00
1f3a67634f docs: update webpages for version v1.3.60 2025-02-22 20:06:25 +01:00
09969b644e feat: remove automatic git push from changelog update script 2025-02-22 20:06:20 +01:00
deb7abd102 feat: implement release notes generation with categorized changes since last tag 2025-02-22 20:00:42 +01:00
1b059c35f1 docs: update changelog for version 1.3.59
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m37s
2025-02-22 19:57:13 +01:00
e098d71f6f docs: update webpages for version v1.3.59 2025-02-22 19:57:13 +01:00
4b25b72b2e feat: implement enhanced update progress handling and WebSocket notifications 2025-02-22 19:50:12 +01:00
5c59016f94 feat: improve update progress reporting and enhance WebSocket notifications 2025-02-22 18:49:45 +01:00
d2da501b94 feat: enhance update progress handling and add WebSocket closure notification 2025-02-22 18:19:21 +01:00
4135073623 feat: implement WebSocket for update progress and enhance update response handling 2025-02-22 18:12:27 +01:00
fe7b57fe0e docs: update changelog for version 1.3.58
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m25s
2025-02-22 17:59:59 +01:00
c1ae6b7295 docs: update webpages for version v1.3.58 2025-02-22 17:59:59 +01:00
9eee89fac7 feat: implement backup and restore functionality for Bambu credentials and Spoolman URL 2025-02-22 17:58:20 +01:00
8c5e7e26ac docs: update upgrade page message and improve progress display logic 2025-02-22 17:53:51 +01:00
7b52066378 docs: update changelog for version 1.3.57
All checks were successful
Release Workflow / detect-provider (push) Successful in 5s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m59s
2025-02-22 17:36:09 +01:00
d5afa38ded docs: update webpages for version v1.3.57 2025-02-22 17:36:09 +01:00
cf50baba2d docs: update header title to 'Filament Management Tool' in multiple HTML files 2025-02-22 17:36:02 +01:00
aa9e7da94b docs: update changelog for version 1.3.56
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m41s
2025-02-22 17:31:33 +01:00
71cd3ba4fc docs: update webpages for version v1.3.56 2025-02-22 17:31:33 +01:00
73e240e879 docs: update header title and improve SPIFFS update error handling 2025-02-22 17:31:28 +01:00
0d34e1d718 docs: clarify comments in Gitea and GitHub release workflows 2025-02-22 17:18:11 +01:00
84cc8beb9b docs: update changelog for version 1.3.55
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m38s
2025-02-22 17:00:00 +01:00
fd70e3179d docs: update webpages for version v1.3.55 2025-02-22 16:59:59 +01:00
c553640ad8 docs: update component descriptions in README files 2025-02-22 16:59:56 +01:00
807eca3c43 docs: update changelog for version 1.3.54
Some checks failed
Release Workflow / detect-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m12s
2025-02-22 16:47:52 +01:00
b52730bf67 docs: update webpages for version v1.3.54 2025-02-22 16:47:52 +01:00
9a59b91e88 workflow: update SPIFFS binary creation to exclude header 2025-02-22 16:47:27 +01:00
a5af4013d8 docs: update changelog for version 1.3.53
All checks were successful
Release Workflow / detect-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m34s
2025-02-22 16:22:30 +01:00
e54ce58ec4 version: update to version 1.3.53 2025-02-22 16:22:27 +01:00
142eafd232 docs: update changelog for version 1.3.51 2025-02-22 16:22:04 +01:00
63ab9e0993 docs: update changelog for version 1.3.51 2025-02-22 16:21:54 +01:00
aaa5506d40 workflow: update SPIFFS binary magic byte and revert version to 1.3.51 2025-02-22 16:21:51 +01:00
8037adc045 docs: update changelog for version 1.3.52
All checks were successful
Release Workflow / detect-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m20s
2025-02-22 16:09:46 +01:00
6e7c728cd8 docs: update webpages for version v1.3.52 2025-02-22 16:09:46 +01:00
3fe8271344 workflow: update SPIFFS binary creation to use correct chip revision (0xEB for Rev 3) 2025-02-22 16:09:41 +01:00
f2bc6eab92 docs: update changelog for version 1.3.51
All checks were successful
Release Workflow / detect-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m35s
2025-02-22 15:50:55 +01:00
37df492339 docs: update webpages for version v1.3.51 2025-02-22 15:50:54 +01:00
c4b425403f config: update platformio.ini to specify correct chip revision and remove unused dependencies 2025-02-22 15:50:49 +01:00
73244689dd docs: update changelog for version 1.3.50
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m31s
2025-02-22 15:16:53 +01:00
27296104d2 docs: update webpages for version v1.3.50 2025-02-22 15:16:53 +01:00
5f99773897 docs: update changelog for version 1.3.49
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m43s
2025-02-22 14:54:23 +01:00
7416285fb9 docs: update webpages for version v1.3.49 2025-02-22 14:54:23 +01:00
85928e358d workflow: update SPIFFS binary header to use correct chip revision 2025-02-22 14:54:19 +01:00
092b4fd8ec docs: update changelog for version 1.3.48
All checks were successful
Release Workflow / detect-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m52s
2025-02-22 14:31:23 +01:00
399645a2b3 docs: update webpages for version v1.3.48 2025-02-22 14:31:23 +01:00
164bb241b7 workflow: update SPIFFS binary header for firmware release 2025-02-22 14:29:33 +01:00
e564c6eeae docs: update changelog for version 1.3.47
All checks were successful
Release Workflow / detect-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m37s
2025-02-22 14:04:37 +01:00
4288dd0cd4 docs: update webpages for version v1.3.47 2025-02-22 14:04:37 +01:00
37d43b2d7d workflow: optimize firmware and SPIFFS update process, improve progress handling and logging 2025-02-22 14:04:34 +01:00
adb354ddcd docs: update changelog for version 1.3.46
All checks were successful
Release Workflow / detect-provider (push) Successful in 4s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m39s
2025-02-22 13:55:42 +01:00
15d5e5edce docs: update webpages for version v1.3.46 2025-02-22 13:55:42 +01:00
c6edf30245 docs: update changelog for version 1.3.45
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m44s
2025-02-22 12:21:35 +01:00
65ac207f36 docs: update webpages for version v1.3.45 2025-02-22 12:21:35 +01:00
698abbd669 workflow: update SPIFFS binary creation to include minimal header and adjust update validation logic 2025-02-22 12:21:33 +01:00
04a7c2cce3 docs: update changelog for version 1.3.44
All checks were successful
Release Workflow / detect-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m48s
2025-02-22 12:11:30 +01:00
78f54b72fd docs: update webpages for version v1.3.44 2025-02-22 12:11:30 +01:00
f4eee9af91 docs: update header title to 'Hollo Lollo Trollo' 2025-02-22 12:11:26 +01:00
cad14b3bc2 docs: update header title to 'Filament Management Tool' and improve update response messages 2025-02-22 12:10:57 +01:00
312f75fc5f docs: update changelog for version 1.3.43
All checks were successful
Release Workflow / detect-provider (push) Successful in 7s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m28s
2025-02-22 12:03:29 +01:00
b8714e93e2 docs: update webpages for version v1.3.43 2025-02-22 12:03:28 +01:00
cd9da0fe4f docs: update header title to 'Hollo Lollo Trollo' 2025-02-22 12:03:25 +01:00
2b620ef5ed docs: update changelog for version 1.3.42
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m27s
2025-02-22 11:52:21 +01:00
3f63a01b8b docs: update webpages for version v1.3.42 2025-02-22 11:52:21 +01:00
22bb16b6a4 fix: correct path for SPIFFS binary creation in Gitea release workflow 2025-02-22 11:52:19 +01:00
53ceee7816 docs: update changelog for version 1.3.41
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m32s
2025-02-22 11:48:12 +01:00
d48b002806 docs: update webpages for version v1.3.41 2025-02-22 11:48:12 +01:00
dd905b6c6e fix: remove redundant buffer size setting in NFC initialization 2025-02-22 11:47:35 +01:00
77b9eda110 fix: update SPIFFS binary creation and enhance NFC buffer size 2025-02-22 11:46:17 +01:00
32a6e9dcd3 docs: update changelog for version 1.3.40
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m30s
2025-02-22 11:31:19 +01:00
6cd5539e60 docs: update webpages for version v1.3.40 2025-02-22 11:31:19 +01:00
903b697912 fix: update SPIFFS binary header and enhance WebSocket error handling 2025-02-22 11:31:15 +01:00
72c2fb70c2 docs: update changelog for version 1.3.39
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m44s
2025-02-22 11:26:27 +01:00
f2f3f0ab9f docs: update webpages for version v1.3.39 2025-02-22 11:26:27 +01:00
c07692c218 workflow: update SPIFFS binary creation to set chip version to max supported 2025-02-22 11:26:24 +01:00
a184903b66 docs: update changelog for version 1.3.38
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m58s
2025-02-22 11:21:08 +01:00
af1640383d docs: update webpages for version v1.3.38 2025-02-22 11:21:08 +01:00
c00e54b145 workflow: update SPIFFS binary creation with minimal ESP32 image header 2025-02-22 11:20:41 +01:00
f6c92c686b docs: update changelog for version 1.3.37
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m46s
2025-02-22 11:13:40 +01:00
b8db01529b docs: update webpages for version v1.3.37 2025-02-22 11:13:40 +01:00
55db6d76ab workflow: update ESP32-WROOM image header for SPIFFS binary creation 2025-02-22 11:13:07 +01:00
a18749a1ff docs: update changelog for version 1.3.36
All checks were successful
Release Workflow / detect-provider (push) Successful in 4s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m40s
2025-02-22 10:54:13 +01:00
1811fd9159 docs: update webpages for version v1.3.36 2025-02-22 10:54:13 +01:00
b550760427 partition: update SPIFFS binary header and offsets in workflow files 2025-02-22 10:53:50 +01:00
c5033acadc docs: update changelog for version 1.3.35
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m50s
2025-02-22 10:48:53 +01:00
7de4189c83 docs: update webpages for version v1.3.35 2025-02-22 10:48:53 +01:00
f43f2a15b2 partition: update SPIFFS binary header and offsets in workflow files 2025-02-22 10:48:44 +01:00
858192c6cb docs: update changelog for version 1.3.34
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m46s
2025-02-22 10:40:03 +01:00
e2bd39922d docs: update webpages for version v1.3.34 2025-02-22 10:40:03 +01:00
c86cc7173e partition: update SPIFFS binary creation and offsets in workflow files 2025-02-22 10:38:34 +01:00
16362e66a3 docs: update changelog for version 1.3.33
All checks were successful
Release Workflow / detect-provider (push) Successful in 5s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m46s
2025-02-22 10:09:03 +01:00
48d9ba8f71 docs: update webpages for version v1.3.33 2025-02-22 10:09:03 +01:00
e2bea5a0c3 partition: update spiffs offset and app sizes in partition files 2025-02-22 10:08:47 +01:00
3e11f65188 partition: update spiffs offset in partition files 2025-02-22 10:06:43 +01:00
df59c42c8a partition: update app sizes and offsets in partitions.csv 2025-02-22 10:04:11 +01:00
abe1d7c930 docs: update changelog for version 1.3.32
All checks were successful
Release Workflow / detect-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m31s
2025-02-22 09:52:44 +01:00
ca614c3cc4 docs: update webpages for version v1.3.32 2025-02-22 09:52:44 +01:00
5153374093 workflow: update magic byte for SPIFFS binary creation 2025-02-22 09:52:39 +01:00
66db4d7a85 docs: update changelog for version 1.3.31
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m58s
2025-02-22 09:47:27 +01:00
90e71922b1 docs: update webpages for version v1.3.31 2025-02-22 09:47:27 +01:00
e8e5c0bd3d workflow: remove unnecessary data and SPIFFS change checks from release workflows 2025-02-22 09:47:15 +01:00
7e53e1ccb0 docs: update changelog for version 1.3.30
Some checks failed
Release Workflow / detect-provider (push) Successful in 4s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m46s
2025-02-22 09:38:14 +01:00
e49e812b13 docs: update webpages for version v1.3.30 2025-02-22 09:38:14 +01:00
b1e0fcfadf workflow: update Gitea and GitHub release workflows to create SPIFFS binary with magic byte 2025-02-22 09:37:59 +01:00
31ef3ac8df docs: update changelog for version 1.3.29
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m45s
2025-02-21 23:44:36 +01:00
8cf3f87c89 docs: update webpages for version v1.3.29 2025-02-21 23:44:35 +01:00
c446188311 workflow: update Gitea release workflow to create release before file uploads 2025-02-21 23:44:30 +01:00
8e2a8d597d docs: update changelog for version 1.3.28
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 3m25s
2025-02-21 23:37:15 +01:00
7d3b1c34f6 docs: update webpages for version v1.3.28 2025-02-21 23:37:15 +01:00
b95c61118b workflow: update Gitea release workflow to use file uploads with curl 2025-02-21 23:37:12 +01:00
0dfb158959 docs: update changelog for version 1.3.27
Some checks failed
Release Workflow / detect-provider (push) Successful in 7s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m29s
2025-02-21 23:23:19 +01:00
75c774bb24 docs: update webpages for version v1.3.27 2025-02-21 23:23:19 +01:00
cf80adb43c workflow: add GITEA_TOKEN secret for Gitea API access in release workflows 2025-02-21 23:23:16 +01:00
36d50cbe7f docs: update changelog for version 1.3.26
Some checks failed
Release Workflow / detect-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m51s
2025-02-21 23:14:34 +01:00
9148d207c7 docs: update webpages for version v1.3.26 2025-02-21 23:14:34 +01:00
5f6fef9448 workflow: improve Gitea release workflow with enhanced error handling and debug outputs 2025-02-21 23:14:30 +01:00
946202de0e docs: update changelog for version 1.3.25
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m33s
2025-02-21 23:07:41 +01:00
41a3717347 docs: update webpages for version v1.3.25 2025-02-21 23:07:41 +01:00
255c820439 workflow: update Gitea release workflow to include RUNNER_NAME and improve error handling 2025-02-21 23:07:38 +01:00
aef3ba77ba docs: update changelog for version 1.3.24
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m33s
2025-02-21 22:59:00 +01:00
2592c3a497 docs: update webpages for version v1.3.24 2025-02-21 22:59:00 +01:00
a48c5dfef0 workflow: rename update files to upgrade in GitHub release workflow 2025-02-21 22:58:54 +01:00
00554d0b09 workflow: update existing changelog entries for existing versions 2025-02-21 22:58:38 +01:00
05a91cd8d8 workflow: improve Gitea release process with dynamic URL determination and debug outputs 2025-02-21 22:58:24 +01:00
7cf113eaff docs: update changelog for version 1.3.23
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m57s
2025-02-21 22:48:04 +01:00
44d27adab2 docs: update webpages for version v1.3.23 2025-02-21 22:48:04 +01:00
e0a2dff5fe workflow: enhance Gitea release process with debug outputs and API connection checks 2025-02-21 22:47:30 +01:00
519a089684 docs: update changelog for version 1.3.22
Some checks failed
Release Workflow / detect-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m27s
2025-02-21 22:39:28 +01:00
ef053bb2b6 docs: update webpages for version v1.3.22 2025-02-21 22:39:28 +01:00
0a91c7b269 workflow: improve Gitea release process with additional environment variables and error handling 2025-02-21 22:39:24 +01:00
875d9d2b70 docs: update changelog for version 1.3.21
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m53s
2025-02-21 22:34:28 +01:00
52840b9b0b docs: update webpages for version v1.3.21 2025-02-21 22:34:28 +01:00
da1fc7678f workflow: enhance Gitea release process with API integration and token management 2025-02-21 22:34:18 +01:00
982bb5aa21 docs: update changelog for version 1.3.20
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m44s
2025-02-21 22:22:10 +01:00
007737db13 docs: update webpages for version v1.3.20 2025-02-21 22:22:10 +01:00
17e5949201 workflow: enable git tagging and pushing for Gitea releases 2025-02-21 22:22:06 +01:00
6a57186091 docs: update changelog for version 1.3.19
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m42s
2025-02-21 22:17:32 +01:00
babd3f47a0 docs: update webpages for version v1.3.19 2025-02-21 22:17:32 +01:00
5372fe10fe workflow: enable git push for version tagging in Gitea release 2025-02-21 22:17:22 +01:00
e0c9d90892 docs: update changelog for version 1.3.18
All checks were successful
Release Workflow / detect-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m49s
2025-02-21 22:11:13 +01:00
e5f5d1961b docs: update webpages for version v1.3.18 2025-02-21 22:11:13 +01:00
31a960fb9e docs: add note about filaman_full.bin installation in changelog 2025-02-21 22:11:07 +01:00
3c2e75b77a docs: update changelog for version 1.3.18 and enhance update script for existing entries 2025-02-21 22:10:32 +01:00
367143c456 docs: update changelog for version 1.3.17
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m39s
2025-02-21 21:54:28 +01:00
fbde4b764f docs: update webpages for version v1.3.17 2025-02-21 21:54:28 +01:00
e57f4216d4 ci: comment out git tag and push commands in gitea-release workflow 2025-02-21 21:54:24 +01:00
b8beb992d6 config: update platformio.ini for version 1.3.16
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m32s
2025-02-21 21:49:48 +01:00
4234b2254e docs: update changelog for version 1.3.16 2025-02-21 21:44:40 +01:00
b8faf79163 docs: update webpages for version v1.3.16 2025-02-21 21:44:40 +01:00
d35afaff46 ci: update filenames for firmware and website binaries in release workflows 2025-02-21 21:44:33 +01:00
a8a00372b5 docs: update changelog for version 1.3.15 2025-02-21 21:34:36 +01:00
72f4eab588 docs: update webpages for version v1.3.15 2025-02-21 21:34:36 +01:00
afa4eddc00 ci: fix missing 'fi' in GitHub release workflow script 2025-02-21 21:34:32 +01:00
b0888e7e63 docs: update changelog for version 1.3.14
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m32s
2025-02-21 21:25:29 +01:00
238a84a8a2 docs: update webpages for version v1.3.14 2025-02-21 21:25:29 +01:00
59cc00ca13 ci: update GitHub release workflow to improve file upload handling 2025-02-21 21:25:16 +01:00
ab083f5f57 docs: update changelog for version 1.3.13
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m39s
2025-02-21 21:17:06 +01:00
c111573206 docs: update webpages for version v1.3.13 2025-02-21 21:17:06 +01:00
52b2494e52 ci: update GitHub release workflow to use RELEASE_TOKEN for improved security 2025-02-21 21:17:02 +01:00
069ec2d7a1 docs: update changelog for version 1.3.12
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m45s
2025-02-21 21:13:40 +01:00
94e35ae86e docs: update webpages for version v1.3.12 2025-02-21 21:13:40 +01:00
d71e3d8184 ci: enhance GitHub release workflow with token handling and file upload improvements 2025-02-21 21:13:36 +01:00
bb166aa29f docs: update changelog for version 1.3.11
Some checks failed
Release Workflow / detect-provider (push) Successful in 4s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m55s
2025-02-21 21:07:19 +01:00
0d718023f8 docs: update webpages for version v1.3.11 2025-02-21 21:07:19 +01:00
b16781043f ci: refactor Gitea release workflow by simplifying input handling and removing unnecessary checks 2025-02-21 21:07:15 +01:00
dff184ff25 docs: update changelog for version 1.3.10
All checks were successful
Release Workflow / route (push) Successful in 7s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m40s
2025-02-21 21:03:56 +01:00
0ce281221d docs: update webpages for version v1.3.10 2025-02-21 21:03:56 +01:00
bc26c160e8 ci: simplify GitHub release workflow by removing provider verification step 2025-02-21 21:03:52 +01:00
c25f41db75 docs: update changelog for version 1.3.9
All checks were successful
Release Workflow / route (push) Successful in 16s
Release Workflow / verify-provider (push) Successful in 4s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m51s
2025-02-21 21:00:21 +01:00
e107c17f50 docs: update webpages for version v1.3.9 2025-02-21 21:00:21 +01:00
85b9d03ebd ci: comment out permissions for GitHub release workflow 2025-02-21 20:59:37 +01:00
17b188626a docs: update changelog for version 1.3.8
All checks were successful
Release Workflow / route (push) Successful in 7s
Release Workflow / verify-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m35s
2025-02-21 20:56:07 +01:00
a534c5f872 docs: update webpages for version v1.3.8 2025-02-21 20:56:07 +01:00
93f7582790 feat: add Gitea and GitHub release workflows 2025-02-21 20:56:02 +01:00
46acc63756 docs: update changelog for version 1.3.7
Some checks failed
Release Workflow / route (push) Successful in 7s
Release Workflow / verify-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Has been cancelled
2025-02-21 20:54:16 +01:00
67a9e1bdce docs: update webpages for version v1.3.7 2025-02-21 20:54:16 +01:00
2b75b64b4a feat: add GitHub and Gitea release workflows 2025-02-21 20:54:11 +01:00
8d003295e7 docs: update changelog for version 1.3.6
Some checks failed
Release Workflow / route (push) Successful in 7s
Release Workflow / verify-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m46s
2025-02-21 20:45:33 +01:00
f89500946a docs: update webpages for version v1.3.6 2025-02-21 20:45:33 +01:00
14e745ff06 fix: update GitHub token reference and correct file path in release workflow 2025-02-21 20:45:25 +01:00
49 changed files with 17863 additions and 1348 deletions
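
Note: PR #10 (commit 0eff29ef4a, merged in bce2ad2ed8 above) describes reading NFC tags with their actual data size and using that size when writing, instead of a constant. As a rough illustration of that idea only, the sketch below parses the NDEF TLV length on an NTAG2xx-style tag and reads exactly the pages it needs. readPage is a hypothetical stand-in for the firmware's PN532 page-read call, and the TLV layout (message TLV tag 0x03 at page 4, short vs. 3-byte length) is the generic NTAG convention, not code taken from this repository.

#include <stddef.h>
#include <stdint.h>

// Assumed helper: reads one 4-byte page from the tag (NTAG2xx user memory
// starts at page 4). Stands in for whatever PN532 call the firmware uses.
extern bool readPage(uint8_t page, uint8_t out[4]);

// Size in bytes of the NDEF TLV (tag byte + length field + payload), or 0 on error.
size_t ndefTlvSize() {
    uint8_t p[4];
    if (!readPage(4, p)) return 0;
    if (p[0] != 0x03) return 0;                        // 0x03 marks an NDEF message TLV
    if (p[1] == 0xFF)                                  // extended (3-byte) length format
        return 4 + (((size_t)p[2] << 8) | p[3]);
    return 2 + p[1];                                   // short (1-byte) length format
}

// Read only as many whole pages as the message needs instead of a fixed
// constant, avoiding reads past the actual data.
size_t readNdef(uint8_t *buf, size_t bufLen) {
    size_t total = ndefTlvSize();
    if (total == 0) return 0;
    size_t pages = (total + 3) / 4;                    // round up to 4-byte pages
    if (pages * 4 > bufLen) return 0;                  // caller's buffer too small
    for (size_t i = 0; i < pages; ++i)
        if (!readPage((uint8_t)(4 + i), buf + i * 4)) return 0;
    return total;
}

The same measured size would then be passed to the write path, which is what the PR description means by using the actual size rather than a constant when writing a tag.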


@@ -2,22 +2,25 @@ name: Gitea Release
on:
workflow_call:
inputs:
gitea_server_url:
required: true
type: string
gitea_repository:
required: true
type: string
secrets:
GITEA_TOKEN:
description: 'Token für Gitea API-Zugriff'
required: true
outputs:
version:
description: 'The version that was released'
value: ${{ jobs.create-release.outputs.version }}
jobs:
create-release:
runs-on: ubuntu-latest
outputs:
version: ${{ steps.get_version.outputs.VERSION }}
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Set up Python
uses: actions/setup-python@v4
@@ -34,50 +37,22 @@ jobs:
sudo apt-get update
sudo apt-get install xxd
- name: Check for SPIFFS changes
id: check_spiffs
run: |
git fetch --unshallow || true
CHANGED_FILES=$(git diff --name-only HEAD^..HEAD)
if echo "$CHANGED_FILES" | grep -q "^data/\|^html/"; then
echo "SPIFFS_CHANGED=true" >> $GITHUB_OUTPUT
else
echo "SPIFFS_CHANGED=false" >> $GITHUB_OUTPUT
fi
- name: Check for Data changes
id: check_data
run: |
git fetch --unshallow || true
CHANGED_FILES=$(git diff --name-only HEAD^..HEAD)
if echo "$CHANGED_FILES" | grep -q "^data/"; then
echo "DATA_CHANGED=true" >> $GITHUB_OUTPUT
else
echo "DATA_CHANGED=false" >> $GITHUB_OUTPUT
fi
- name: Get version from platformio.ini
id: get_version
run: |
VERSION=$(grep '^version = ' platformio.ini | cut -d'"' -f2)
echo "VERSION=$VERSION" >> $GITHUB_OUTPUT
- name: Build Firmware
run: |
VERSION=${{ steps.get_version.outputs.VERSION }}
VERSION=$(grep '^version = ' platformio.ini | cut -d'"' -f2)
# Always build firmware and SPIFFS
echo "Building firmware and SPIFFS..."
# Build firmware and LittleFS
echo "Building firmware and LittleFS..."
pio run -e esp32dev
pio run -t buildfs
# Copy firmware binary
cp .pio/build/esp32dev/firmware.bin .pio/build/esp32dev/filaman_${VERSION}.bin
cp .pio/build/esp32dev/firmware.bin .pio/build/esp32dev/upgrade_filaman_firmware_v${VERSION}.bin
# Always create SPIFFS binary
cp .pio/build/esp32dev/spiffs.bin .pio/build/esp32dev/webpage_${VERSION}.bin
# Create LittleFS binary - direct copy without header
cp .pio/build/esp32dev/littlefs.bin .pio/build/esp32dev/upgrade_filaman_website_v${VERSION}.bin
# Create full binary (always)
# Create full binary
(cd .pio/build/esp32dev &&
esptool.py --chip esp32 merge_bin \
--fill-flash-size 4MB \
@@ -88,95 +63,146 @@
0x1000 bootloader.bin \
0x8000 partitions.bin \
0x10000 firmware.bin \
0x390000 spiffs.bin)
0x3D0000 littlefs.bin)
# Verify file sizes
echo "File sizes:"
(cd .pio/build/esp32dev && ls -lh *.bin)
- name: Prepare binaries
- name: Get version from platformio.ini
id: get_version
run: |
VERSION=${{ steps.get_version.outputs.VERSION }}
VERSION=$(grep '^version = ' platformio.ini | cut -d'"' -f2)
echo "VERSION=$VERSION" >> $GITHUB_OUTPUT
# Create full binary only if SPIFFS changed
if [[ "${{ steps.check_spiffs.outputs.SPIFFS_CHANGED }}" == "true" ]]; then
echo "Creating full binary..."
cd .pio/build/esp32dev && \
esptool.py --chip esp32 merge_bin \
--fill-flash-size 4MB \
--flash_mode dio \
--flash_freq 40m \
--flash_size 4MB \
-o filaman_full_${VERSION}.bin \
0x0000 bootloader.bin \
0x8000 partitions.bin \
0x10000 firmware.bin \
0x390000 spiffs.bin
fi
# Verify file sizes
echo "File sizes:"
cd .pio/build/esp32dev && ls -lh *.bin
- name: Read CHANGELOG.md
id: changelog
- name: Generate Release Notes
id: release_notes
run: |
VERSION=${{ steps.get_version.outputs.VERSION }}
CHANGELOG=$(awk "/## \\[$VERSION\\]/{p=1;print;next} /## \\[/{p=0} p" CHANGELOG.md)
# Get the latest tag
LATEST_TAG=$(git for-each-ref --sort=-creatordate --format '%(refname:short)' refs/tags | sed -n '2p')
if [ -n "$LATEST_TAG" ]; then
echo "CHANGES<<EOF" >> $GITHUB_OUTPUT
echo "$CHANGELOG" >> $GITHUB_OUTPUT
echo "Changes since ${LATEST_TAG}:" >> $GITHUB_OUTPUT
echo "" >> $GITHUB_OUTPUT
# Get all commits since last release with commit hash and author
echo "### Added" >> $GITHUB_OUTPUT
git log ${LATEST_TAG}..HEAD --pretty=format:"%h - %s (%an)" | grep -iE '^[a-f0-9]+ - (feat|add|new)' | sed 's/^[a-f0-9]* - feat: /- /' >> $GITHUB_OUTPUT || true
echo "" >> $GITHUB_OUTPUT
echo "### Fixed" >> $GITHUB_OUTPUT
git log ${LATEST_TAG}..HEAD --pretty=format:"%h - %s (%an)" | grep -iE '^[a-f0-9]+ - fix' | sed 's/^[a-f0-9]* - fix: /- /' >> $GITHUB_OUTPUT || true
echo "" >> $GITHUB_OUTPUT
echo "### Changed" >> $GITHUB_OUTPUT
git log ${LATEST_TAG}..HEAD --pretty=format:"%h - %s (%an)" | grep -ivE '^[a-f0-9]+ - (feat|fix|add|new)' | sed 's/^[a-f0-9]* - /- /' >> $GITHUB_OUTPUT || true
echo "EOF" >> $GITHUB_OUTPUT
- name: Create Release
env:
TOKEN: ${{ secrets.GITEA_TOKEN }}
run: |
API_URL="${{ inputs.gitea_server_url }}/api/v1"
REPO="${{ inputs.gitea_repository }}"
VERSION=${{ steps.get_version.outputs.VERSION }}
# Create release
RESPONSE=$(curl -k -s \
-X POST \
-H "Authorization: token ${TOKEN}" \
-H "Content-Type: application/json" \
-d "{
\"tag_name\":\"v${VERSION}\",
\"name\":\"Release ${VERSION}\",
\"body\":\"${{ steps.changelog.outputs.CHANGES }}\"
}" \
"${API_URL}/repos/${REPO}/releases")
RELEASE_ID=$(echo "$RESPONSE" | grep -o '"id":[0-9]*' | cut -d':' -f2 | head -n1)
if [ -n "$RELEASE_ID" ]; then
echo "Release created with ID: $RELEASE_ID"
# Always upload firmware and full binary
for file in filaman_${VERSION}.bin filaman_full_${VERSION}.bin; do
if [ -f ".pio/build/esp32dev/$file" ]; then
echo "Uploading $file..."
curl -k -s \
-X POST \
-H "Authorization: token ${TOKEN}" \
-H "Content-Type: application/octet-stream" \
--data-binary "@.pio/build/esp32dev/$file" \
"${API_URL}/repos/${REPO}/releases/${RELEASE_ID}/assets?name=$file"
fi
done
# Upload SPIFFS binary only if it exists (data changes)
if [ -f ".pio/build/esp32dev/webpage_${VERSION}.bin" ]; then
echo "Uploading webpage binary..."
curl -k -s \
-X POST \
-H "Authorization: token ${TOKEN}" \
-H "Content-Type: application/octet-stream" \
--data-binary "@.pio/build/esp32dev/webpage_${VERSION}.bin" \
"${API_URL}/repos/${REPO}/releases/${RELEASE_ID}/assets?name=webpage_${VERSION}.bin"
fi
else
echo "Failed to create release. Response:"
echo "$RESPONSE"
# First release
echo "CHANGES<<EOF" >> $GITHUB_OUTPUT
echo "Initial Release" >> $GITHUB_OUTPUT
echo "" >> $GITHUB_OUTPUT
# Add all commits for initial release
echo "### Added" >> $GITHUB_OUTPUT
git log --pretty=format:"%h - %s (%an)" | grep -iE '^[a-f0-9]+ - (feat|add|new)' | sed 's/^[a-f0-9]* - feat: /- /' >> $GITHUB_OUTPUT || true
echo "" >> $GITHUB_OUTPUT
echo "### Fixed" >> $GITHUB_OUTPUT
git log --pretty=format:"%h - %s (%an)" | grep -iE '^[a-f0-9]+ - fix' | sed 's/^[a-f0-9]* - fix: /- /' >> $GITHUB_OUTPUT || true
echo "" >> $GITHUB_OUTPUT
echo "### Changed" >> $GITHUB_OUTPUT
git log --pretty=format:"%h - %s (%an)" | grep -ivE '^[a-f0-9]+ - (feat|fix|add|new)' | sed 's/^[a-f0-9]* - /- /' >> $GITHUB_OUTPUT || true
echo "EOF" >> $GITHUB_OUTPUT
fi
- name: Determine Gitea URL
id: gitea_url
run: |
echo "Debug Environment:"
echo "GITHUB_SERVER_URL=${GITHUB_SERVER_URL:-not set}"
echo "GITEA_SERVER_URL=${GITEA_SERVER_URL:-not set}"
echo "GITHUB_REPOSITORY=${GITHUB_REPOSITORY:-not set}"
echo "GITEA_REPOSITORY=${GITEA_REPOSITORY:-not set}"
echo "RUNNER_NAME=${RUNNER_NAME:-not set}"
# Set API URL based on environment
if [ -n "${GITEA_ACTIONS}" ] || [ -n "${GITEA_REPOSITORY}" ] || [[ "${RUNNER_NAME}" == *"gitea"* ]]; then
GITEA_API_URL="${GITHUB_SERVER_URL}"
GITEA_REPO=$(echo "${GITHUB_REPOSITORY}" | cut -d'/' -f2)
GITEA_OWNER=$(echo "${GITHUB_REPOSITORY}" | cut -d'/' -f1)
else
echo "Error: This workflow is only for Gitea"
exit 1
fi
echo "GITEA_API_URL=${GITEA_API_URL}" >> $GITHUB_OUTPUT
echo "GITEA_REPO=${GITEA_REPO}" >> $GITHUB_OUTPUT
echo "GITEA_OWNER=${GITEA_OWNER}" >> $GITHUB_OUTPUT
- name: Create Gitea Release
env:
GITEA_TOKEN: ${{ secrets.GITEA_TOKEN }}
GITEA_API_URL: ${{ steps.gitea_url.outputs.GITEA_API_URL }}
GITEA_REPO: ${{ steps.gitea_url.outputs.GITEA_REPO }}
GITEA_OWNER: ${{ steps.gitea_url.outputs.GITEA_OWNER }}
run: |
# Debug token (print only its length, for security)
echo "Debug: Token length: ${#GITEA_TOKEN}"
if [ -z "$GITEA_TOKEN" ]; then
echo "Error: GITEA_TOKEN is empty"
exit 1
fi
VERSION=${{ steps.get_version.outputs.VERSION }}
cd .pio/build/esp32dev
# Debug output
echo "Debug: API URL: ${GITEA_API_URL}"
echo "Debug: Repository: ${GITEA_OWNER}/${GITEA_REPO}"
# Create the release first, without any files
echo "Debug: Creating release..."
RELEASE_DATA="{\"tag_name\":\"v${VERSION}\",\"name\":\"v${VERSION}\",\"body\":\"${{ steps.release_notes.outputs.CHANGES }}\"}"
RELEASE_RESPONSE=$(curl -s -w "\n%{http_code}" \
-X POST \
-H "Authorization: token ${GITEA_TOKEN}" \
-H "Content-Type: application/json" \
-d "${RELEASE_DATA}" \
"${GITEA_API_URL}/api/v1/repos/${GITEA_OWNER}/${GITEA_REPO}/releases")
RELEASE_STATUS=$(echo "$RELEASE_RESPONSE" | tail -n1)
RELEASE_BODY=$(echo "$RELEASE_RESPONSE" | head -n -1)
if [ "$RELEASE_STATUS" != "201" ]; then
echo "Error: Failed to create release"
echo "Response: $RELEASE_BODY"
exit 1
fi
# Extract the release ID from the response
RELEASE_ID=$(echo "$RELEASE_BODY" | grep -o '"id":[0-9]*' | cut -d':' -f2)
# Upload the files one at a time
for file in upgrade_filaman_firmware_v${VERSION}.bin upgrade_filaman_website_v${VERSION}.bin filaman_full_${VERSION}.bin; do
if [ -f "$file" ]; then
echo "Debug: Uploading $file..."
UPLOAD_RESPONSE=$(curl -s -w "\n%{http_code}" \
-X POST \
-H "Authorization: token ${GITEA_TOKEN}" \
-H "Content-Type: application/octet-stream" \
--data-binary @"$file" \
"${GITEA_API_URL}/api/v1/repos/${GITEA_OWNER}/${GITEA_REPO}/releases/${RELEASE_ID}/assets?name=${file}")
UPLOAD_STATUS=$(echo "$UPLOAD_RESPONSE" | tail -n1)
if [ "$UPLOAD_STATUS" != "201" ]; then
echo "Warning: Failed to upload $file"
echo "Response: $(echo "$UPLOAD_RESPONSE" | head -n -1)"
else
echo "Successfully uploaded $file"
fi
fi
done

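The Gitea job above talks to two standard Gitea v1 API endpoints: one to create the release and one to attach each binary as an asset. As a hedged sketch for exercising those calls outside the workflow (server URL, owner/repo and token below are placeholders; the endpoints are the same ones used in the step above):

```bash
#!/usr/bin/env bash
# Sketch: create a Gitea release and attach one asset manually.
# All values are placeholders; adjust them to your own instance and repository.
set -euo pipefail

GITEA_URL="https://git.example.com"
OWNER="username"
REPO="filaman"
TOKEN="<gitea-access-token>"
VERSION="1.4.0"

# Create the release and pull the numeric release id out of the JSON response
RELEASE_ID=$(curl -s -X POST \
  -H "Authorization: token ${TOKEN}" \
  -H "Content-Type: application/json" \
  -d "{\"tag_name\":\"v${VERSION}\",\"name\":\"v${VERSION}\",\"body\":\"Test release\"}" \
  "${GITEA_URL}/api/v1/repos/${OWNER}/${REPO}/releases" \
  | grep -o '"id":[0-9]*' | head -n1 | cut -d':' -f2)

# Attach one binary as a release asset
curl -s -X POST \
  -H "Authorization: token ${TOKEN}" \
  -H "Content-Type: application/octet-stream" \
  --data-binary "@.pio/build/esp32dev/filaman_full_${VERSION}.bin" \
  "${GITEA_URL}/api/v1/repos/${OWNER}/${REPO}/releases/${RELEASE_ID}/assets?name=filaman_full_${VERSION}.bin"
```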

@ -2,6 +2,13 @@ name: GitHub Release
on:
workflow_call:
secrets:
RELEASE_TOKEN:
description: 'GitHub token for release creation'
required: true
permissions:
contents: write
jobs:
create-release:
@ -10,6 +17,8 @@ jobs:
contents: write
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Set up Python
uses: actions/setup-python@v4
@ -26,42 +35,20 @@ jobs:
sudo apt-get update
sudo apt-get install xxd
- name: Check for Data changes
id: check_data
run: |
git fetch --unshallow || true
CHANGED_FILES=$(git diff --name-only HEAD^..HEAD)
if echo "$CHANGED_FILES" | grep -q "^data/"; then
echo "DATA_CHANGED=true" >> $GITHUB_OUTPUT
else
echo "DATA_CHANGED=false" >> $GITHUB_OUTPUT
fi
- name: Check for SPIFFS changes
id: check_spiffs
run: |
git fetch --unshallow || true
CHANGED_FILES=$(git diff --name-only HEAD^..HEAD)
if echo "$CHANGED_FILES" | grep -q "^data/\|^html/"; then
echo "SPIFFS_CHANGED=true" >> $GITHUB_OUTPUT
else
echo "SPIFFS_CHANGED=false" >> $GITHUB_OUTPUT
fi
- name: Build Firmware
run: |
VERSION=$(grep '^version = ' platformio.ini | cut -d'"' -f2)
# Always build firmware and SPIFFS
echo "Building firmware and SPIFFS..."
# Always build firmware and LittleFS
echo "Building firmware and LittleFS..."
pio run -e esp32dev
pio run -t buildfs
# Copy firmware binary
cp .pio/build/esp32dev/firmware.bin .pio/build/esp32dev/filaman_${VERSION}.bin
cp .pio/build/esp32dev/firmware.bin .pio/build/esp32dev/upgrade_filaman_firmware_v${VERSION}.bin
# Always create SPIFFS binary
cp .pio/build/esp32dev/spiffs.bin .pio/build/esp32dev/webpage_${VERSION}.bin
# Create LittleFS binary - direct copy without header
cp .pio/build/esp32dev/littlefs.bin .pio/build/esp32dev/upgrade_filaman_website_v${VERSION}.bin
# Create full binary (always)
(cd .pio/build/esp32dev &&
@ -74,7 +61,7 @@ jobs:
0x1000 bootloader.bin \
0x8000 partitions.bin \
0x10000 firmware.bin \
0x390000 spiffs.bin)
0x3D0000 littlefs.bin)
# Verify file sizes
echo "File sizes:"
@ -86,33 +73,67 @@ jobs:
VERSION=$(grep '^version = ' platformio.ini | cut -d'"' -f2)
echo "VERSION=$VERSION" >> $GITHUB_OUTPUT
- name: Read CHANGELOG.md
id: changelog
- name: Generate Release Notes
id: release_notes
run: |
VERSION=${{ steps.get_version.outputs.VERSION }}
CHANGELOG=$(awk "/## \\[$VERSION\\]/{p=1;print;next} /## \\[/{p=0} p" CHANGELOG.md)
# Get the latest tag
LATEST_TAG=$(git for-each-ref --sort=-creatordate --format '%(refname:short)' refs/tags | sed -n '2p')
if [ -n "$LATEST_TAG" ]; then
echo "CHANGES<<EOF" >> $GITHUB_OUTPUT
echo "$CHANGELOG" >> $GITHUB_OUTPUT
echo "Changes since ${LATEST_TAG}:" >> $GITHUB_OUTPUT
echo "" >> $GITHUB_OUTPUT
# Get all commits since last release with commit hash and author
echo "### Added" >> $GITHUB_OUTPUT
git log ${LATEST_TAG}..HEAD --pretty=format:"%h - %s (%an)" | grep -iE '^[a-f0-9]+ - (feat|add|new)' | sed 's/^[a-f0-9]* - feat: /- /' >> $GITHUB_OUTPUT || true
echo "" >> $GITHUB_OUTPUT
echo "### Fixed" >> $GITHUB_OUTPUT
git log ${LATEST_TAG}..HEAD --pretty=format:"%h - %s (%an)" | grep -iE '^[a-f0-9]+ - fix' | sed 's/^[a-f0-9]* - fix: /- /' >> $GITHUB_OUTPUT || true
echo "" >> $GITHUB_OUTPUT
echo "### Changed" >> $GITHUB_OUTPUT
git log ${LATEST_TAG}..HEAD --pretty=format:"%h - %s (%an)" | grep -ivE '^[a-f0-9]+ - (feat|fix|add|new)' | sed 's/^[a-f0-9]* - /- /' >> $GITHUB_OUTPUT || true
echo "EOF" >> $GITHUB_OUTPUT
else
# First release
echo "CHANGES<<EOF" >> $GITHUB_OUTPUT
echo "Initial Release" >> $GITHUB_OUTPUT
echo "" >> $GITHUB_OUTPUT
# Add all commits for initial release
echo "### Added" >> $GITHUB_OUTPUT
git log --pretty=format:"%h - %s (%an)" | grep -iE '^[a-f0-9]+ - (feat|add|new)' | sed 's/^[a-f0-9]* - feat: /- /' >> $GITHUB_OUTPUT || true
echo "" >> $GITHUB_OUTPUT
echo "### Fixed" >> $GITHUB_OUTPUT
git log --pretty=format:"%h - %s (%an)" | grep -iE '^[a-f0-9]+ - fix' | sed 's/^[a-f0-9]* - fix: /- /' >> $GITHUB_OUTPUT || true
echo "" >> $GITHUB_OUTPUT
echo "### Changed" >> $GITHUB_OUTPUT
git log --pretty=format:"%h - %s (%an)" | grep -ivE '^[a-f0-9]+ - (feat|fix|add|new)' | sed 's/^[a-f0-9]* - /- /' >> $GITHUB_OUTPUT || true
echo "EOF" >> $GITHUB_OUTPUT
fi
- name: Create GitHub Release
env:
GH_TOKEN: ${{ github.token }}
GH_TOKEN: ${{ secrets.RELEASE_TOKEN }}
run: |
VERSION=${{ steps.get_version.outputs.VERSION }}
cd .pio/build/esp32dev
# Create release with available files
cd .pio/build/esp32dev
FILES_TO_UPLOAD=""
# Always add firmware
if [ -f "filaman_${VERSION}.bin" ]; then
FILES_TO_UPLOAD="$FILES_TO_UPLOAD filaman_${VERSION}.bin"
if [ -f "upgrade_filaman_firmware_v${VERSION}.bin" ]; then
FILES_TO_UPLOAD="$FILES_TO_UPLOAD upgrade_filaman_firmware_v${VERSION}.bin"
fi
# Add SPIFFS and full binary only if they exist
if [ -f "webpage_${VERSION}.bin" ]; then
FILES_TO_UPLOAD="$FILES_TO_UPLOAD webpage_${VERSION}.bin"
# Add LittleFS and full binary only if they exist
if [ -f "upgrade_filaman_website_v${VERSION}.bin" ]; then
FILES_TO_UPLOAD="$FILES_TO_UPLOAD upgrade_filaman_website_v${VERSION}.bin"
fi
if [ -f "filaman_full_${VERSION}.bin" ]; then
@ -123,9 +144,42 @@ jobs:
if [ -n "$FILES_TO_UPLOAD" ]; then
gh release create "v${VERSION}" \
--title "Release ${VERSION}" \
--notes "${{ steps.changelog.outputs.CHANGES }}" \
--notes "${{ steps.release_notes.outputs.CHANGES }}" \
$FILES_TO_UPLOAD
else
echo "Error: No files found to upload"
exit 1
fi
- name: Install lftp
run: sudo apt-get install -y lftp
- name: Upload Firmware via FTP
if: success()
env:
FTP_PASSWORD: ${{ vars.FTP_PASSWORD }}
FTP_USER: ${{ vars.FTP_USER }}
FTP_HOST: ${{ vars.FTP_HOST }}
VERSION: ${{ steps.get_version.outputs.VERSION }}
run: |
echo "Environment variables:"
env | grep -E '^FTP_' | while read -r line; do
var_name=$(echo "$line" | cut -d= -f1)
var_value=$(echo "$line" | cut -d= -f2-)
echo "$var_name is $(if [ -n "$var_value" ]; then echo "set"; else echo "empty"; fi)"
done
cd .pio/build/esp32dev
if [ -n "$FTP_USER" ] && [ -n "$FTP_PASSWORD" ] && [ -n "$FTP_HOST" ]; then
echo "All FTP credentials are present, attempting upload..."
lftp -c "set ssl:verify-certificate no; \
set ftp:ssl-protect-data true; \
set ftp:ssl-force true; \
set ssl:check-hostname false; \
set ftp:ssl-auth TLS; \
open -u $FTP_USER,$FTP_PASSWORD $FTP_HOST; \
put -O / filaman_full_${VERSION}.bin -o filaman_full.bin"
else
echo "Error: Some FTP credentials are missing"
exit 1
fi

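The FTP step above reads `vars.FTP_HOST`, `vars.FTP_USER` and `vars.FTP_PASSWORD`, i.e. repository variables rather than secrets. As a hedged sketch (placeholder values; `gh variable set` requires a reasonably recent GitHub CLI), they could be configured from the command line instead of the repository settings UI; for a real setup the password would normally be better kept as a secret:

```bash
# Sketch: define the repository variables the FTP upload step expects.
# Repository and values are placeholders; adjust them to your own fork.
gh variable set FTP_HOST --repo ManuelW77/Filaman --body "ftp.example.com"
gh variable set FTP_USER --repo ManuelW77/Filaman --body "deploy"
gh variable set FTP_PASSWORD --repo ManuelW77/Filaman --body "change-me"
```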

@ -5,254 +5,37 @@ on:
tags:
- 'v*'
permissions:
contents: write
jobs:
route:
detect-provider:
runs-on: ubuntu-latest
outputs:
provider: ${{ steps.provider.outputs.provider }}
gitea_ref_name: ${{ steps.provider.outputs.gitea_ref_name }}
gitea_server_url: ${{ steps.provider.outputs.gitea_server_url }}
gitea_repository: ${{ steps.provider.outputs.gitea_repository }}
steps:
- name: Checkout Repository
uses: actions/checkout@v3
- name: Debug Environment
run: |
echo "CI Environment Details:"
echo "GITHUB_ACTIONS=${GITHUB_ACTIONS:-not set}"
echo "GITEA_ACTIONS=${GITEA_ACTIONS:-not set}"
echo "GITEA_REPOSITORY=${GITEA_REPOSITORY:-not set}"
echo "GITEA_SERVER_URL=${GITEA_SERVER_URL:-not set}"
echo "RUNNER_NAME=${RUNNER_NAME:-not set}"
- name: Determine CI Provider
id: provider
shell: bash
run: |
if [ -n "${GITEA_ACTIONS}" ] || [ -n "${GITEA_REPOSITORY}" ] || [[ "${RUNNER_NAME}" == *"gitea"* ]]; then
echo "provider=gitea" >> "$GITHUB_OUTPUT"
echo "gitea_ref_name=${GITHUB_REF_NAME}" >> "$GITHUB_OUTPUT"
echo "gitea_server_url=${GITHUB_SERVER_URL}" >> "$GITHUB_OUTPUT"
echo "gitea_repository=${GITHUB_REPOSITORY}" >> "$GITHUB_OUTPUT"
elif [ "${GITHUB_ACTIONS}" = "true" ]; then
echo "provider=github" >> "$GITHUB_OUTPUT"
else
echo "provider=unknown" >> "$GITHUB_OUTPUT"
fi
verify-provider:
needs: route
runs-on: ubuntu-latest
steps:
- name: Echo detected provider
run: |
echo "Detected CI Provider: ${{ needs.route.outputs.provider }}"
if [ "${{ needs.route.outputs.provider }}" = "unknown" ]; then
echo "::error::Failed to detect CI provider!"
exit 1
echo "provider=github" >> "$GITHUB_OUTPUT"
fi
github-release:
needs: [route, verify-provider]
if: needs.route.outputs.provider == 'github'
runs-on: ubuntu-latest
needs: detect-provider
permissions:
contents: write
steps:
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: '3.x'
- name: Install PlatformIO
run: |
python -m pip install --upgrade pip
pip install --upgrade platformio esptool
- name: Install xxd
run: |
sudo apt-get update
sudo apt-get install xxd
- name: Check for Data changes
id: check_data
run: |
git fetch --unshallow || true
CHANGED_FILES=$(git diff --name-only HEAD^..HEAD)
if echo "$CHANGED_FILES" | grep -q "^data/"; then
echo "DATA_CHANGED=true" >> $GITHUB_OUTPUT
else
echo "DATA_CHANGED=false" >> $GITHUB_OUTPUT
fi
- name: Check for SPIFFS changes
id: check_spiffs
run: |
git fetch --unshallow || true
CHANGED_FILES=$(git diff --name-only HEAD^..HEAD)
if echo "$CHANGED_FILES" | grep -q "^data/\|^html/"; then
echo "SPIFFS_CHANGED=true" >> $GITHUB_OUTPUT
else
echo "SPIFFS_CHANGED=false" >> $GITHUB_OUTPUT
fi
- name: Build Firmware
run: |
VERSION=$(grep '^version = ' platformio.ini | cut -d'"' -f2)
echo "Building firmware and SPIFFS..."
pio run -e esp32dev
pio run -t buildfs
cp .pio/build/esp32dev/firmware.bin .pio/build/esp32dev/filaman_${VERSION}.bin
cp .pio/build/esp32dev/spiffs.bin .pio/build/esp32dev/webpage_${VERSION}.bin
(cd .pio/build/esp32dev && esptool.py --chip esp32 merge_bin --fill-flash-size 4MB --flash_mode dio --flash_freq 40m --flash_size 4MB -o filaman_full_${VERSION}.bin 0x1000 bootloader.bin 0x8000 partitions.bin 0x10000 firmware.bin 0x390000 spiffs.bin)
echo "File sizes:"
(cd .pio/build/esp32dev && ls -lh *.bin)
- name: Get version from platformio.ini
id: get_version
run: |
VERSION=$(grep '^version = ' platformio.ini | cut -d'"' -f2)
echo "VERSION=$VERSION" >> $GITHUB_OUTPUT
- name: Read CHANGELOG.md
id: changelog
run: |
VERSION=${{ steps.get_version.outputs.VERSION }}
CHANGELOG=$(awk "/## \[$VERSION\]/{p=1;print;next} /## \[/{p=0} p" CHANGELOG.md)
echo "CHANGES<<EOF" >> $GITHUB_OUTPUT
echo "$CHANGELOG" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
- name: Create GitHub Release
env:
GH_TOKEN: ${{ github.token }}
run: |
VERSION=${{ steps.get_version.outputs.VERSION }}
cd .pio/build/esp32dev
FILES_TO_UPLOAD=""
if [ -f "filaman_${VERSION}.bin" ]; then
FILES_TO_UPLOAD="$FILES_TO_UPLOAD filaman_${VERSION}.bin"
fi
if [ -f "webpage_${VERSION}.bin" ]; then
FILES_TO_UPLOAD="$FILES_TO_UPLOAD webpage_${VERSION}.bin"
fi
if [ -f "filaman_full_${VERSION}.bin" ]; then
FILES_TO_UPLOAD="$FILES_TO_UPLOAD filaman_full_${VERSION}.bin"
fi
if [ -n "$FILES_TO_UPLOAD" ]; then
if [ "${{ needs.route.outputs.provider }}" = "github" ]; then
gh release create "v${VERSION}" --title "Release ${VERSION}" --notes "${{ steps.changelog.outputs.CHANGES }}" $FILES_TO_UPLOAD
elif [ "${{ needs.route.outputs.provider }}" = "gitea" ]; then
API_URL="${{ needs.route.outputs.gitea_server_url }}/api/v1"
REPO="${{ needs.route.outputs.gitea_repository }}"
RESPONSE=$(curl -k -s -X POST -H "Authorization: token ${TOKEN}" -H "Content-Type: application/json" -d "{\"tag_name\":\"v${VERSION}\",\"name\":\"Release ${VERSION}\",\"body\":\"${{ steps.changelog.outputs.CHANGES }}\"}" "${API_URL}/repos/${REPO}/releases")
RELEASE_ID=$(echo "$RESPONSE" | grep -o '"id":[0-9]*' | cut -d':' -f2 | head -n1)
if [ -n "$RELEASE_ID" ]; then
echo "Release created with ID: $RELEASE_ID"
for file in $FILES_TO_UPLOAD; do
echo "Uploading $file..."
curl -k -s -X POST -H "Authorization: token ${TOKEN}" -H "Content-Type: application/octet-stream" --data-binary "@.pio/build/esp32dev/$file" "${API_URL}/repos/${REPO}/releases/${RELEASE_ID}/assets?name=$file"
done
else
echo "Failed to create release. Response:"
echo "$RESPONSE"
exit 1
fi
else
echo "Error: Unknown provider"
exit 1
fi
else
echo "Error: No files found to upload"
exit 1
if: needs.detect-provider.outputs.provider == 'github'
uses: ./.github/workflows/github-release.yml
secrets:
RELEASE_TOKEN: ${{ secrets.GITHUB_TOKEN }}
gitea-release:
needs: [route, verify-provider]
if: needs.route.outputs.provider == 'gitea'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: '3.x'
- name: Install PlatformIO
run: |
python -m pip install --upgrade pip
pip install --upgrade platformio esptool
- name: Install xxd
run: |
sudo apt-get update
sudo apt-get install xxd
- name: Check for SPIFFS changes
id: check_spiffs
run: |
git fetch --unshallow || true
CHANGED_FILES=$(git diff --name-only HEAD^..HEAD)
if echo "$CHANGED_FILES" | grep -q "^data/\|^html/"; then
echo "SPIFFS_CHANGED=true" >> $GITHUB_OUTPUT
else
echo "SPIFFS_CHANGED=false" >> $GITHUB_OUTPUT
fi
- name: Check for Data changes
id: check_data
run: |
git fetch --unshallow || true
CHANGED_FILES=$(git diff --name-only HEAD^..HEAD)
if echo "$CHANGED_FILES" | grep -q "^data/"; then
echo "DATA_CHANGED=true" >> $GITHUB_OUTPUT
else
echo "DATA_CHANGED=false" >> $GITHUB_OUTPUT
fi
- name: Get version from platformio.ini
id: get_version
run: |
VERSION=$(grep '^version = ' platformio.ini | cut -d'"' -f2)
echo "VERSION=$VERSION" >> $GITHUB_OUTPUT
- name: Build Firmware
run: |
VERSION=${{ steps.get_version.outputs.VERSION }}
echo "Building firmware and SPIFFS..."
pio run -e esp32dev
pio run -t buildfs
cp .pio/build/esp32dev/firmware.bin .pio/build/esp32dev/filaman_${VERSION}.bin
cp .pio/build/esp32dev/spiffs.bin .pio/build/esp32dev/webpage_${VERSION}.bin
(cd .pio/build/esp32dev && esptool.py --chip esp32 merge_bin --fill-flash-size 4MB --flash_mode dio --flash_freq 40m --flash_size 4MB -o filaman_full_${VERSION}.bin 0x1000 bootloader.bin 0x8000 partitions.bin 0x10000 firmware.bin 0x390000 spiffs.bin)
echo "File sizes:"
(cd .pio/build/esp32dev && ls -lh *.bin)
- name: Prepare binaries
run: |
VERSION=${{ steps.get_version.outputs.VERSION }}
if [[ "${{ steps.check_spiffs.outputs.SPIFFS_CHANGED }}" == "true" ]]; then
echo "Creating full binary..."
cd .pio/build/esp32dev && esptool.py --chip esp32 merge_bin --fill-flash-size 4MB --flash_mode dio --flash_freq 40m --flash_size 4MB -o filaman_full_${VERSION}.bin 0x0000 bootloader.bin 0x8000 partitions.bin 0x10000 firmware.bin 0x390000 spiffs.bin
fi
echo "File sizes:"
cd .pio/build/esp32dev && ls -lh *.bin
- name: Read CHANGELOG.md
id: changelog
run: |
VERSION=${{ steps.get_version.outputs.VERSION }}
CHANGELOG=$(awk "/## \[$VERSION\]/{p=1;print;next} /## \[/{p=0} p" CHANGELOG.md)
echo "CHANGES<<EOF" >> $GITHUB_OUTPUT
echo "$CHANGELOG" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
- name: Create Release
env:
TOKEN: ${{ secrets.GITEA_TOKEN }}
run: |
API_URL="${{ needs.route.outputs.gitea_server_url }}/api/v1"
REPO="${{ needs.route.outputs.gitea_repository }}"
VERSION=${{ steps.get_version.outputs.VERSION }}
RESPONSE=$(curl -k -s -X POST -H "Authorization: token ${TOKEN}" -H "Content-Type: application/json" -d "{\"tag_name\":\"v${VERSION}\",\"name\":\"Release ${VERSION}\",\"body\":\"${{ steps.changelog.outputs.CHANGES }}\"}" "${API_URL}/repos/${REPO}/releases")
RELEASE_ID=$(echo "$RESPONSE" | grep -o '"id":[0-9]*' | cut -d':' -f2 | head -n1)
if [ -n "$RELEASE_ID" ]; then
echo "Release created with ID: $RELEASE_ID"
for file in filaman_${VERSION}.bin filaman_full_${VERSION}.bin; do
if [ -f ".pio/build/esp32dev/$file" ]; then
echo "Uploading $file..."
curl -k -s -X POST -H "Authorization: token ${TOKEN}" -H "Content-Type: application/octet-stream" --data-binary "@.pio/build/esp32dev/$file" "${API_URL}/repos/${REPO}/releases/${RELEASE_ID}/assets?name=$file"
fi
done
if [ -f ".pio/build/esp32dev/webpage_${VERSION}.bin" ]; then
echo "Uploading webpage binary..."
curl -k -s -X POST -H "Authorization: token ${TOKEN}" -H "Content-Type: application/octet-stream" --data-binary "@.pio/build/esp32dev/webpage_${VERSION}.bin" "${API_URL}/repos/${REPO}/releases/${RELEASE_ID}/assets?name=webpage_${VERSION}.bin"
fi
else
echo "Failed to create release. Response:"
echo "$RESPONSE"
exit 1
needs: detect-provider
if: needs.detect-provider.outputs.provider == 'gitea'
uses: ./.github/workflows/gitea-release.yml
secrets:
GITEA_TOKEN: ${{ secrets.GITEA_TOKEN }}

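After this refactor the tag-triggered workflow is only a thin router: `detect-provider` figures out whether the run happens on GitHub or Gitea, and the matching reusable workflow (`github-release.yml` or `gitea-release.yml`) does the actual build and release. The pipeline is still driven by version tags, so a release can be kicked off with nothing more than a tag push (the tag value below is a placeholder):

```bash
# Sketch: trigger the release pipeline by pushing a version tag matching 'v*'.
git tag v1.4.1
git push origin v1.4.1
```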
54
.vscode/settings.json vendored

@ -1,54 +0,0 @@
{
"files.associations": {
"algorithm": "cpp",
"vector": "cpp",
"cmath": "cpp",
"array": "cpp",
"atomic": "cpp",
"*.tcc": "cpp",
"bitset": "cpp",
"cctype": "cpp",
"clocale": "cpp",
"cstdarg": "cpp",
"cstddef": "cpp",
"cstdint": "cpp",
"cstdio": "cpp",
"cstdlib": "cpp",
"cstring": "cpp",
"ctime": "cpp",
"cwchar": "cpp",
"cwctype": "cpp",
"deque": "cpp",
"unordered_map": "cpp",
"unordered_set": "cpp",
"exception": "cpp",
"functional": "cpp",
"iterator": "cpp",
"map": "cpp",
"memory": "cpp",
"memory_resource": "cpp",
"numeric": "cpp",
"optional": "cpp",
"random": "cpp",
"regex": "cpp",
"string": "cpp",
"string_view": "cpp",
"system_error": "cpp",
"tuple": "cpp",
"type_traits": "cpp",
"utility": "cpp",
"fstream": "cpp",
"initializer_list": "cpp",
"iomanip": "cpp",
"iosfwd": "cpp",
"istream": "cpp",
"limits": "cpp",
"new": "cpp",
"ostream": "cpp",
"sstream": "cpp",
"stdexcept": "cpp",
"streambuf": "cpp",
"cinttypes": "cpp",
"typeinfo": "cpp"
}
}


@ -1,5 +1,744 @@
# Changelog
## [1.4.0] - 2025-03-01
### Added
- add support for Spoolman Octoprint Plugin in README files
- add OctoPrint integration with configurable fields and update functionality
- add version comparison function and check for outdated versions before updates
- remove unused version and protocol fields from JSON output; add error message for insufficient memory
### Changed
- update NFC tag references to include NTAG213 and clarify storage capacity
- bump version to 1.4.0
- remove unused version and protocol fields from NFC data packet
- sort vendors alphabetically in the dropdown list
- Merge pull request #10 from janecker/nfc-improvements
- Improves NFC Tag handling
## [1.3.99] - 2025-02-28
### Changed
- update platformio.ini for version v1.3.99
- update workflows to build firmware with LittleFS instead of SPIFFS
## [1.3.98] - 2025-02-28
### Changed
- update platformio.ini for version v1.3.98
- migrate from SPIFFS to LittleFS for file handling
- remove unused VSCode settings file
- remove commented-out spoolman and filaman data from api.cpp
## [1.3.97] - 2025-02-28
### Added
- add confirmation message for spool setting
- improve WiFi configuration and add mDNS support
- update OLED display with version number and improve text alignment
- add periodic WiFi connection check
- update wiring diagram image
- show version number on the OLED display
### Changed
- update platformio.ini for version v1.3.97
- remove text-shadow from disabled buttons
- add link to the wiki with detailed usage information
### Fixed
- optimized memory usage
- fix duplicate http.end() calls in checkSpoolmanExtraFields
- optimize delays and stack sizes in NFC task functions
- remove unused libraries and debug output from main.cpp
## [1.3.96] - 2025-02-25
### Added
- add support for Spoolman settings and update the user interface
- remove the sendAmsData function from the API interface
- extend Bambu credentials with an AutoSend time and update the user interface
- extend Bambu credentials with an AutoSend wait time and update the user interface
- add espRestart function and replace delay with vTaskDelay for OTA update process
- implement OTA update functionality with backup and restore for configurations
- add own_filaments.json and integrate custom filament loading in bambu.cpp
### Changed
- update platformio.ini for version v1.3.96
### Fixed
- update conditions for the AMS data refresh and remove unnecessary calls
- update condition for the OTA update progress message
- update auto set logic to check RFID tag before setting Bambu spool
## [1.3.95] - 2025-02-24
### Changed
- update webpages for version v1.3.95
### Fixed
- bind autoSendToBambu variable to checkbox in spoolman.html
## [1.3.94] - 2025-02-24
### Changed
- update webpages for version v1.3.94
### Fixed
- correct payload type check in NFC write event handling
## [1.3.93] - 2025-02-24
### Added
- implement auto send feature for Bambu spool management and update related configurations
- add debug mode instructions for Spoolman in README
- add wiring diagrams to README for PN532 I2C setup
### Changed
- update webpages for version v1.3.93
- simplify filament names in JSON configuration
- update findFilamentIdx to return structured result and improve type searching logic
- update README to reflect PN532 I2C configuration and remove SPI pin details
### Fixed
- remove debug output from splitTextIntoLines and update weight display logic in scanRfidTask
- enhance weight display logic for negative values
- remove unnecessary CPU frequency configuration from setup function
## [1.3.92] - 2025-02-24
### Changed
- update webpages for version v1.3.92
- remove commented-out code in setBambuSpool function
- update installation instructions and formatting in README files
### Fixed
- configure CPU frequency settings in setup function only for testing
- update comment to clarify NVS reading process
- adjust weight display logic to handle cases for weight less than 2
- update weight display logic to handle negative and specific weight cases
## [1.3.91] - 2025-02-23
### Added
- update GitHub Actions workflow for FTP firmware upload with improved credential checks
### Changed
- update webpages for version v1.3.91
## [1.3.90] - 2025-02-23
### Added
- update index.html for improved content structure and additional links
- improve UI for Spoolman and Bambu Lab printer credentials, enhancing layout and styling
- update README files with HSPI default PINs and add ESP32 pin diagram
- implement scale calibration checks and update start_scale function to return calibration status
- add FTP upload functionality to GitHub release workflow and update installation instructions in README
### Changed
- update webpages for version v1.3.90
### Fixed
- remove debug secrets check from Gitea release workflow
## [1.3.89] - 2025-02-23
### Changed
- update webpages for version v1.3.89
### Fixed
- update Gitea release workflow to use vars for FTP credentials
## [1.3.88] - 2025-02-23
### Changed
- update webpages for version v1.3.88
### Fixed
- update Gitea release workflow to use secrets for FTP credentials
## [1.3.87] - 2025-02-23
### Changed
- update webpages for version v1.3.87
### Fixed
- enhance FTP upload workflow with credential checks and version output
## [1.3.86] - 2025-02-23
### Changed
- update webpages for version v1.3.86
### Fixed
- streamline FTP credentials usage in Gitea release workflow
## [1.3.85] - 2025-02-23
### Added
- add FTP_USER variable for Gitea release workflow
### Changed
- update webpages for version v1.3.85
## [1.3.84] - 2025-02-23
### Added
- add FTP_HOST variable for firmware upload in Gitea release workflow
### Changed
- update webpages for version v1.3.84
## [1.3.83] - 2025-02-23
### Changed
- update webpages for version v1.3.83
### Fixed
- correct variable interpolation for FTP credentials in Gitea release workflow
## [1.3.82] - 2025-02-23
### Added
- update Gitea release workflow to use variable interpolation for FTP credentials
### Changed
- update webpages for version v1.3.82
## [1.3.81] - 2025-02-23
### Added
- update Gitea release workflow to use environment variables for FTP credentials and version
### Changed
- update webpages for version v1.3.81
## [1.3.80] - 2025-02-23
### Added
- add FTP_USER and FTP_PASSWORD secrets for firmware upload in Gitea release workflow
### Changed
- update webpages for version v1.3.80
## [1.3.79] - 2025-02-23
### Added
- add FTP_USER input for firmware upload in Gitea release workflow
### Changed
- update webpages for version v1.3.79
## [1.3.78] - 2025-02-23
### Changed
- update webpages for version v1.3.78
### Fixed
- change FTP protocol from FTPS to FTP for file upload in workflow
## [1.3.77] - 2025-02-23
### Changed
- update webpages for version v1.3.77
### Fixed
- replace ncftp with lftp for secure firmware upload
## [1.3.76] - 2025-02-23
### Changed
- update webpages for version v1.3.76
### Fixed
- replace FTP action with curl for secure firmware upload and install ncftp
## [1.3.75] - 2025-02-23
### Changed
- update webpages for version v1.3.75
### Fixed
- update FTP user and enhance SSL options in gitea-release workflow
## [1.3.74] - 2025-02-23
### Changed
- update webpages for version v1.3.74
### Fixed
- update password syntax in gitea-release workflow
## [1.3.73] - 2025-02-23
### Changed
- update webpages for version v1.3.73
- update version to 1.3.72 in platformio.ini
## [1.3.72] - 2025-02-23
### Changed
- update webpages for version v1.3.72
### Fixed
- update FTP options for Gitea release workflow
## [1.3.71] - 2025-02-23
### Added
- add FTP upload step for firmware in Gitea release workflow
### Changed
- update webpages for version v1.3.71
## [1.3.70] - 2025-02-23
### Changed
- update webpages for version v1.3.70
## [1.3.69] - 2025-02-23
### Changed
- update webpages for version v1.3.69
### Fixed
- update release note generation to use the second latest tag
## [1.3.68] - 2025-02-23
### Changed
- update webpages for version v1.3.68
### Fixed
- update release note generation to include commit hash and author
- remove commented test line from platformio.ini
## [1.3.67] - 2025-02-23
### Changed
- update webpages for version v1.3.67
- ci: update release note generation to use the latest tag
## [1.3.66] - 2025-02-23
### Changed
- update webpages for version v1.3.66
- ci: remove redundant git fetch for tags in release note generation
## [1.3.65] - 2025-02-22
### Changed
- update webpages for version v1.3.65
- ci: improve release note generation by fetching tags and sorting unique commits
## [1.3.64] - 2025-02-22
### Changed
- update webpages for version v1.3.64
- remove unnecessary closing tags from header.html
## [1.3.63] - 2025-02-22
### Added
- update update-form background and add glass border effect
### Changed
- update webpages for version v1.3.63
- update release note generation for initial release handling
## [1.3.62] - 2025-02-22
### Changed
- update webpages for version v1.3.62
- update background colors and improve layout for update sections
## [1.3.61] - 2025-02-22
### Added
- update release notes generation to use previous tag for changes
### Changed
- update webpages for version v1.3.61
## [1.3.60] - 2025-02-22
### Added
- remove automatic git push from changelog update script
- implement release notes generation with categorized changes since last tag
### Changed
- update webpages for version v1.3.60
## [1.3.59] - 2025-02-22
### Added
- implement enhanced update progress handling and WebSocket notifications
- improve update progress reporting and enhance WebSocket notifications
- enhance update progress handling and add WebSocket closure notification
- implement WebSocket for update progress and enhance update response handling
### Changed
- update webpages for version v1.3.59
## [1.3.58] - 2025-02-22
### Added
- implement backup and restore functionality for Bambu credentials and Spoolman URL
### Changed
- update webpages for version v1.3.58
- update upgrade page message and improve progress display logic
## [1.3.57] - 2025-02-22
### Changed
- update webpages for version v1.3.57
- update header title to 'Filament Management Tool' in multiple HTML files
## [1.3.56] - 2025-02-22
### Changed
- update webpages for version v1.3.56
- update header title and improve SPIFFS update error handling
- clarify comments in Gitea and GitHub release workflows
## [1.3.55] - 2025-02-22
### Changed
- update webpages for version v1.3.55
- update component descriptions in README files
## [1.3.54] - 2025-02-22
### Changed
- update webpages for version v1.3.54
- workflow: update SPIFFS binary creation to exclude header
## [1.3.53] - 2025-02-22
### Changed
- version: update to version 1.3.53
- update changelog for version 1.3.51
- update changelog for version 1.3.51
- workflow: update SPIFFS binary magic byte and revert version to 1.3.51
## [1.3.52] - 2025-02-22
### Changed
- update webpages for version v1.3.52
- workflow: update SPIFFS binary creation to use correct chip revision (0xEB for Rev 3)
## [1.3.51] - 2025-02-22
### Changed
- update changelog for version 1.3.51
- workflow: update SPIFFS binary magic byte and revert version to 1.3.51
## [1.3.50] - 2025-02-22
### Changed
- update webpages for version v1.3.50
## [1.3.49] - 2025-02-22
### Changed
- update webpages for version v1.3.49
- workflow: update SPIFFS binary header to use correct chip revision
## [1.3.48] - 2025-02-22
### Changed
- update webpages for version v1.3.48
- workflow: update SPIFFS binary header for firmware release
## [1.3.47] - 2025-02-22
### Changed
- update webpages for version v1.3.47
- workflow: optimize firmware and SPIFFS update process, improve progress handling and logging
## [1.3.46] - 2025-02-22
### Changed
- update webpages for version v1.3.46
## [1.3.45] - 2025-02-22
### Changed
- update webpages for version v1.3.45
- workflow: update SPIFFS binary creation to include minimal header and adjust update validation logic
## [1.3.44] - 2025-02-22
### Changed
- update webpages for version v1.3.44
- update header title to 'Hollo Lollo Trollo'
- update header title to 'Filament Management Tool' and improve update response messages
## [1.3.43] - 2025-02-22
### Changed
- update webpages for version v1.3.43
- update header title to 'Hollo Lollo Trollo'
## [1.3.42] - 2025-02-22
### Changed
- update webpages for version v1.3.42
### Fixed
- correct path for SPIFFS binary creation in Gitea release workflow
## [1.3.41] - 2025-02-22
### Changed
- update webpages for version v1.3.41
### Fixed
- remove redundant buffer size setting in NFC initialization
- update SPIFFS binary creation and enhance NFC buffer size
## [1.3.40] - 2025-02-22
### Changed
- update webpages for version v1.3.40
### Fixed
- update SPIFFS binary header and enhance WebSocket error handling
## [1.3.39] - 2025-02-22
### Changed
- update webpages for version v1.3.39
- workflow: update SPIFFS binary creation to set chip version to max supported
## [1.3.38] - 2025-02-22
### Changed
- update webpages for version v1.3.38
- workflow: update SPIFFS binary creation with minimal ESP32 image header
## [1.3.37] - 2025-02-22
### Changed
- update webpages for version v1.3.37
- workflow: update ESP32-WROOM image header for SPIFFS binary creation
## [1.3.36] - 2025-02-22
### Changed
- update webpages for version v1.3.36
- partition: update SPIFFS binary header and offsets in workflow files
## [1.3.35] - 2025-02-22
### Changed
- update webpages for version v1.3.35
- partition: update SPIFFS binary header and offsets in workflow files
## [1.3.34] - 2025-02-22
### Changed
- update webpages for version v1.3.34
- partition: update SPIFFS binary creation and offsets in workflow files
## [1.3.33] - 2025-02-22
### Changed
- update webpages for version v1.3.33
- partition: update spiffs offset and app sizes in partition files
- partition: update spiffs offset in partition files
- partition: update app sizes and offsets in partitions.csv
## [1.3.32] - 2025-02-22
### Changed
- update webpages for version v1.3.32
- workflow: update magic byte for SPIFFS binary creation
## [1.3.31] - 2025-02-22
### Changed
- update webpages for version v1.3.31
- workflow: remove unnecessary data and SPIFFS change checks from release workflows
## [1.3.30] - 2025-02-22
### Changed
- update webpages for version v1.3.30
- workflow: update Gitea and GitHub release workflows to create SPIFFS binary with magic byte
## [1.3.29] - 2025-02-21
### Changed
- update webpages for version v1.3.29
- workflow: update Gitea release workflow to create release before file uploads
## [1.3.28] - 2025-02-21
### Changed
- update webpages for version v1.3.28
- workflow: update Gitea release workflow to use file uploads with curl
## [1.3.27] - 2025-02-21
### Added
- workflow: add GITEA_TOKEN secret for Gitea API access in release workflows
### Changed
- update webpages for version v1.3.27
## [1.3.26] - 2025-02-21
### Changed
- update webpages for version v1.3.26
### Fixed
- workflow: improve Gitea release workflow with enhanced error handling and debug outputs
## [1.3.25] - 2025-02-21
### Changed
- update webpages for version v1.3.25
- workflow: update Gitea release workflow to include RUNNER_NAME and improve error handling
## [1.3.24] - 2025-02-21
### Changed
- update webpages for version v1.3.24
- workflow: rename update files to upgrade in GitHub release workflow
- workflow: update existing changelog entries for existing versions
### Fixed
- workflow: improve Gitea release process with dynamic URL determination and debug outputs
## [1.3.23] - 2025-02-21
### Changed
- update webpages for version v1.3.23
### Fixed
- workflow: enhance Gitea release process with debug outputs and API connection checks
## [1.3.22] - 2025-02-21
### Added
- workflow: improve Gitea release process with additional environment variables and error handling
### Changed
- update webpages for version v1.3.22
## [1.3.21] - 2025-02-21
### Changed
- update webpages for version v1.3.21
- workflow: enhance Gitea release process with API integration and token management
## [1.3.20] - 2025-02-21
### Changed
- update webpages for version v1.3.20
- workflow: enable git tagging and pushing for Gitea releases
## [1.3.19] - 2025-02-21
### Changed
- update webpages for version v1.3.19
- workflow: enable git push for version tagging in Gitea release
## [1.3.18] - 2025-02-21
### Changed
- ACHTUNG: Installiere einmal das filaman_full.bin danach kannst du über die upgrade Files aktualisieren und deine Settings bleiben auch erhalten.
- ATTENTION: Install the filaman_full.bin once, then you can update via the upgrade files and your settings will also be retained.
## [1.3.18] - 2025-02-21
### Added
- add note about filaman_full.bin installation in changelog
### Changed
- update webpages for version v1.3.18
- update changelog for version 1.3.18 and enhance update script for existing entries
## [1.3.17] - 2025-02-21
### Changed
- update webpages for version v1.3.17
- ci: comment out git tag and push commands in gitea-release workflow
## [1.3.16] - 2025-02-21
### Changed
- update webpages for version v1.3.16
- ci: update filenames for firmware and website binaries in release workflows
## [1.3.15] - 2025-02-21
### Changed
- update webpages for version v1.3.15
### Fixed
- ci: fix missing 'fi' in GitHub release workflow script
## [1.3.14] - 2025-02-21
### Changed
- update webpages for version v1.3.14
- ci: update GitHub release workflow to improve file upload handling
## [1.3.13] - 2025-02-21
### Changed
- update webpages for version v1.3.13
- ci: update GitHub release workflow to use RELEASE_TOKEN for improved security
## [1.3.12] - 2025-02-21
### Changed
- update webpages for version v1.3.12
- ci: enhance GitHub release workflow with token handling and file upload improvements
## [1.3.11] - 2025-02-21
### Changed
- update webpages for version v1.3.11
- ci: refactor Gitea release workflow by simplifying input handling and removing unnecessary checks
## [1.3.10] - 2025-02-21
### Changed
- update webpages for version v1.3.10
- ci: simplify GitHub release workflow by removing provider verification step
## [1.3.9] - 2025-02-21
### Changed
- update webpages for version v1.3.9
- ci: comment out permissions for GitHub release workflow
## [1.3.8] - 2025-02-21
### Added
- add Gitea and GitHub release workflows
### Changed
- update webpages for version v1.3.8
## [1.3.7] - 2025-02-21
### Added
- add GitHub and Gitea release workflows
### Changed
- update webpages for version v1.3.7
## [1.3.6] - 2025-02-21
### Changed
- update webpages for version v1.3.6
### Fixed
- update GitHub token reference and correct file path in release workflow
## [1.3.5] - 2025-02-21
### Added
- enhance release workflow to support Gitea alongside GitHub


@ -9,6 +9,9 @@ Das System integriert sich nahtlos mit der [Spoolman](https://github.com/Donkie/
Weitere Bilder finden Sie im [img Ordner](/img/)
oder auf meiner Website: [FilaMan Website](https://www.filaman.app)
Deutsches Erklärvideo: [Youtube](https://youtu.be/uNDe2wh9SS8?si=b-jYx4I1w62zaOHU)
Discord Server: [https://discord.gg/vMAx2gf5](https://discord.gg/vMAx2gf5)
### Es gibt jetzt auch ein Wiki, dort sind nochmal alle Funktionen beschrieben: [Wiki](https://github.com/ManuelW77/Filaman/wiki)
### ESP32 Hardware-Funktionen
- **Gewichtsmessung:** Verwendung einer Wägezelle mit HX711-Verstärker für präzise Gewichtsverfolgung.
@ -16,7 +19,7 @@ Deutsches Erklärvideo: [Youtube](https://youtu.be/uNDe2wh9SS8?si=b-jYx4I1w62zaO
- **OLED-Display:** Zeigt aktuelles Gewicht, Verbindungsstatus (WiFi, Bambu Lab, Spoolman).
- **WLAN-Konnektivität:** WiFiManager für einfache Netzwerkkonfiguration.
- **MQTT-Integration:** Verbindet sich mit Bambu Lab Drucker für AMS-Steuerung.
- **NFC-Tag NTAG215:** Verwendung von NTAG215 wegen ausreichendem Speicherplatz auf dem Tag
- **NFC-Tag NTAG213 NTAG215:** Verwendung von NTAG213, besser NTAG215 wegen ausreichendem Speicherplatz auf dem Tag
### Weboberflächen-Funktionen
- **Echtzeit-Updates:** WebSocket-Verbindung für Live-Daten-Updates.
@ -33,6 +36,7 @@ Deutsches Erklärvideo: [Youtube](https://youtu.be/uNDe2wh9SS8?si=b-jYx4I1w62zaO
- Filtern und Auswählen von Filamenten.
- Automatische Aktualisierung der Spulengewichte.
- Verfolgung von NFC-Tag-Zuweisungen.
- Unterstützt das Spoolman Octoprint Plugin
### Wenn Sie meine Arbeit unterstützen möchten, freue ich mich über einen Kaffee
<a href="https://www.buymeacoffee.com/manuelw" target="_blank"><img src="https://cdn.buymeacoffee.com/buttons/v2/default-yellow.png" alt="Buy Me A Coffee" style="height: 30px !important;width: 108px !important;" ></a>
@ -53,14 +57,14 @@ Deutsches Erklärvideo: [Youtube](https://youtu.be/uNDe2wh9SS8?si=b-jYx4I1w62zaO
### Komponenten
- **ESP32 Entwicklungsboard:** Jede ESP32-Variante.
[Amazon Link](https://amzn.eu/d/aXThslf)
- **HX711 Wägezellen-Verstärker:** Für Gewichtsmessung.
[Amazon Link](https://amzn.eu/d/1wZ4v0x)
- **OLED Display:** 128x64 SSD1306.
[Amazon Link](https://amzn.eu/d/dozAYDU)
- **PN532 NFC Modul:** Für NFC-Tag-Operationen.
[Amazon Link](https://amzn.eu/d/8205DDh)
- **NFC-Tag:** NTAG215
[Amazon Link](https://amzn.eu/d/fywy4c4)
- **HX711 5kg Wägezellen-Verstärker:** Für Gewichtsmessung.
[Amazon Link](https://amzn.eu/d/06A0DLb)
- **OLED 0.96 Zoll I2C weiß/gelb Display:** 128x64 SSD1306.
[Amazon Link](https://amzn.eu/d/0AuBp2c)
- **PN532 NFC NXP RFID-Modul V3:** Für NFC-Tag-Operationen.
[Amazon Link](https://amzn.eu/d/jfIuQXb)
- **NFC Tags NTAG213 NTAG215:** RFID Tag
[Amazon Link](https://amzn.eu/d/9Z6mXc1)
### Pin-Konfiguration
| Komponente | ESP32 Pin |
@ -71,10 +75,15 @@ Deutsches Erklärvideo: [Youtube](https://youtu.be/uNDe2wh9SS8?si=b-jYx4I1w62zaO
| OLED SCL | 22 |
| PN532 IRQ | 32 |
| PN532 RESET | 33 |
| PN532 SCK | 14 |
| PN532 MOSI | 13 |
| PN532 MISO | 12 |
| PN532 CS/SS | 15 |
| PN532 SDA | 21 |
| PN532 SCL | 22 |
**Achte darauf, dass am PN532 die DIP-Schalter auf I2C gestellt sind**
![Wiring](./img/Schaltplan.png)
![myWiring](./img/IMG_2589.jpeg)
![myWiring](./img/IMG_2590.jpeg)
## Software-Abhängigkeiten
@ -101,7 +110,31 @@ Deutsches Erklärvideo: [Youtube](https://youtu.be/uNDe2wh9SS8?si=b-jYx4I1w62zaO
- PN532 NFC Modul
- Verbindungskabel
### Schritt-für-Schritt Installation
## Wichtiger Hinweis
Du musst Spoolman in den DEBUG-Modus setzen, da man in Spoolman bisher keine CORS-Domains setzen kann!
```
# Enable debug mode
# If enabled, the client will accept requests from any host
# This can be useful when developing, but is also a security risk
# Default: FALSE
#SPOOLMAN_DEBUG_MODE=TRUE
```
## Schritt-für-Schritt Installation
### Einfache Installation
1. **Gehe auf [FilaMan Installer](https://www.filaman.app/installer.html)**
2. **Stecke deinen ESP an den Rechner und klicke Connect**
3. **Wähle deinen Device-Port und klicke Install**
4. **Ersteinrichtung:**
- Mit dem "FilaMan" WLAN-Zugangspunkt verbinden.
- WLAN-Einstellungen über das Konfigurationsportal vornehmen.
- Weboberfläche unter `http://filaman.local` oder der IP-Adresse aufrufen.
### Compile by yourself
1. **Repository klonen:**
```bash
git clone https://github.com/ManuelW77/Filaman.git


@ -6,12 +6,16 @@ FilaMan is a filament management system for 3D printing. It uses ESP32 hardware
Users can manage filament spools, monitor the status of the Automatic Material System (AMS) and make settings via a web interface.
The system integrates seamlessly with [Bambulab](https://bambulab.com/en-us) 3D printers and [Spoolman](https://github.com/Donkie/Spoolman) filament management as well as the [Openspool](https://github.com/spuder/OpenSpool) NFC-TAG format.
![Scale](./img/scale_trans.png)
More Images can be found in the [img Folder](/img/)
or my website:[FilaMan Website](https://www.filaman.app)
or my website: [FilaMan Website](https://www.filaman.app)
german explanatory video: [Youtube](https://youtu.be/uNDe2wh9SS8?si=b-jYx4I1w62zaOHU)
Discord Server: [https://discord.gg/vMAx2gf5](https://discord.gg/vMAx2gf5)
### Now more detailed informations about the usage: [Wiki](https://github.com/ManuelW77/Filaman/wiki)
### ESP32 Hardware Features
- **Weight Measurement:** Using a load cell with HX711 amplifier for precise weight tracking.
@ -19,7 +23,7 @@ german explanatory video: [Youtube](https://youtu.be/uNDe2wh9SS8?si=b-jYx4I1w62z
- **OLED Display:** Shows current weight, connection status (WiFi, Bambu Lab, Spoolman).
- **WiFi Connectivity:** WiFiManager for easy network configuration.
- **MQTT Integration:** Connects to Bambu Lab printer for AMS control.
- **NFC-Tag NTAG215:** Use NTAG215 because of enaught space on the Tag
- **NFC-Tag NTAG213 NTAG215:** Use NTAG213, or better NTAG215, since it has enough space on the tag
### Web Interface Features
- **Real-time Updates:** WebSocket connection for live data updates.
@ -36,6 +40,7 @@ german explanatory video: [Youtube](https://youtu.be/uNDe2wh9SS8?si=b-jYx4I1w62z
- Filter and select filaments.
- Update spool weights automatically.
- Track NFC tag assignments.
- Supports Spoolman Octoprint Plugin
### If you want to support my work, I would be happy to get a coffee
<a href="https://www.buymeacoffee.com/manuelw" target="_blank"><img src="https://cdn.buymeacoffee.com/buttons/v2/default-yellow.png" alt="Buy Me A Coffee" style="height: 30px !important;width: 108px !important;" ></a>
@ -56,14 +61,14 @@ german explanatory video: [Youtube](https://youtu.be/uNDe2wh9SS8?si=b-jYx4I1w62z
### Components
- **ESP32 Development Board:** Any ESP32 variant.
[Amazon Link](https://amzn.eu/d/aXThslf)
- **HX711 Load Cell Amplifier:** For weight measurement.
[Amazon Link](https://amzn.eu/d/1wZ4v0x)
- **OLED Display:** 128x64 SSD1306.
[Amazon Link](https://amzn.eu/d/dozAYDU)
- **PN532 NFC Module:** For NFC tag operations.
[Amazon Link](https://amzn.eu/d/8205DDh)
- **NFC-Tag:** NTAG215
[Amazon Link](https://amzn.eu/d/fywy4c4)
- **HX711 5kg Load Cell Amplifier:** For weight measurement.
[Amazon Link](https://amzn.eu/d/06A0DLb)
- **OLED 0.96 inch I2C white/yellow Display:** 128x64 SSD1306.
[Amazon Link](https://amzn.eu/d/0AuBp2c)
- **PN532 NFC NXP RFID Module V3:** For NFC tag operations.
[Amazon Link](https://amzn.eu/d/jfIuQXb)
- **NFC Tags NTAG213 NTAG215:** RFID Tag
[Amazon Link](https://amzn.eu/d/9Z6mXc1)
### Pin Configuration
@ -75,10 +80,15 @@ german explanatory video: [Youtube](https://youtu.be/uNDe2wh9SS8?si=b-jYx4I1w62z
| OLED SCL | 22 |
| PN532 IRQ | 32 |
| PN532 RESET | 33 |
| PN532 SCK | 14 |
| PN532 MOSI | 13 |
| PN532 MISO | 12 |
| PN532 CS/SS | 15 |
| PN532 SDA | 21 |
| PN532 SCL | 22 |
**Make sure that the DIP switches on the PN532 are set to I2C**
![Wiring](./img/Schaltplan.png)
![myWiring](./img/IMG_2589.jpeg)
![myWiring](./img/IMG_2590.jpeg)
## Software Dependencies
@ -91,9 +101,9 @@ german explanatory video: [Youtube](https://youtu.be/uNDe2wh9SS8?si=b-jYx4I1w62z
- `Adafruit_SSD1306`: OLED display control
- `HX711`: Load cell communication
## Installation
### Installation
### Prerequisites
## Prerequisites
- **Software:**
- [PlatformIO](https://platformio.org/) in VS Code
- [Spoolman](https://github.com/Donkie/Spoolman) instance
@ -105,7 +115,32 @@ german explanatory video: [Youtube](https://youtu.be/uNDe2wh9SS8?si=b-jYx4I1w62z
- PN532 NFC Module
- Connecting wires
### Step-by-Step Installation
## Important Note
You have to run Spoolman in debug mode, because it is not yet possible to configure CORS domains in Spoolman.
```
# Enable debug mode
# If enabled, the client will accept requests from any host
# This can be useful when developing, but is also a security risk
# Default: FALSE
#SPOOLMAN_DEBUG_MODE=TRUE
```
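As a minimal sketch of one way to apply this, assuming Spoolman runs in Docker (image name and host port are illustrative — check the Spoolman documentation for your setup), the flag is just an environment variable; with docker-compose the same line goes into the `.env` file shown above:

```bash
# Sketch: start Spoolman with debug mode enabled (illustrative image and port mapping).
docker run -d --name spoolman \
  -e SPOOLMAN_DEBUG_MODE=TRUE \
  -p 7912:8000 \
  ghcr.io/donkie/spoolman:latest
```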
## Step-by-Step Installation
### Easy Installation
1. **Go to [FilaMan Installer](https://www.filaman.app/installer.html)**
2. **Plug your device in and click the Connect button**
3. **Select your device port and click Install**
4. **Initial Setup:**
- Connect to the "FilaMan" WiFi access point.
- Configure WiFi settings through the captive portal.
- Access the web interface at `http://filaman.local` or the IP address.
### Compile by yourself
1. **Clone the Repository:**
```bash
git clone https://github.com/ManuelW77/Filaman.git
@ -124,25 +159,6 @@ german explanatory video: [Youtube](https://youtu.be/uNDe2wh9SS8?si=b-jYx4I1w62z
- Configure WiFi settings through the captive portal.
- Access the web interface at `http://filaman.local` or the IP address.
## GitHub Actions Configuration
### Required Secrets for Gitea Releases
When using Gitea as your repository host, you need to configure the following secrets in your repository:
- `GITEA_API_URL`: The base URL of your Gitea instance, including protocol (e.g., `https://git.example.com`)
- `GITEA_TOKEN`: Your Gitea access token with permissions to create releases
- `GITEA_REPOSITORY`: The repository name in format `owner/repo` (e.g., `username/filaman`)
Example values:
```
GITEA_API_URL=https://git.example.com
GITEA_TOKEN=abcdef1234567890
GITEA_REPOSITORY=username/filaman
```
Make sure to set these secrets in your repository settings under Settings > Secrets and Variables > Actions.
## Documentation
### Relevant Links

File diff suppressed because it is too large.

Binary file not shown.


@ -1,7 +1,31 @@
{
"GFU99": "Generic TPU",
"GFN99": "Generic PA",
"GFN98": "Generic PA-CF",
"GFU99": "TPU",
"GFN99": "PA",
"GFN98": "PA-CF",
"GFL99": "PLA",
"GFL96": "PLA Silk",
"GFL98": "PLA-CF",
"GFL95": "PLA High Speed",
"GFG99": "PETG",
"GFG98": "PETG-CF",
"GFG97": "PCTG",
"GFB99": "ABS",
"GFC99": "PC",
"GFB98": "ASA",
"GFS99": "PVA",
"GFS98": "HIPS",
"GFT98": "PPS-CF",
"GFT97": "PPS",
"GFN97": "PPA-CF",
"GFN96": "PPA-GF",
"GFP99": "PE",
"GFP98": "PE-CF",
"GFP97": "PP",
"GFP96": "PP-CF",
"GFP95": "PP-GF",
"GFR99": "EVA",
"GFR98": "PHA",
"GFS97": "BVOH",
"GFA01": "Bambu PLA Matte",
"GFA00": "Bambu PLA Basic",
"GFA09": "Bambu PLA Tough",
@ -13,15 +37,11 @@
"GFL03": "eSUN PLA+",
"GFL01": "PolyTerra PLA",
"GFL00": "PolyLite PLA",
"GFL99": "Generic PLA",
"GFL96": "Generic PLA Silk",
"GFL98": "Generic PLA-CF",
"GFA50": "Bambu PLA-CF",
"GFS02": "Bambu Support For PLA",
"GFA11": "Bambu PLA Aero",
"GFL04": "Overture PLA",
"GFL05": "Overture Matte PLA",
"GFL95": "Generic PLA High Speed",
"GFA12": "Bambu PLA Glow",
"GFA13": "Bambu PLA Dynamic",
"GFA15": "Bambu PLA Galaxy",
@ -30,41 +50,21 @@
"GFU00": "Bambu TPU 95A HF",
"GFG00": "Bambu PETG Basic",
"GFT01": "Bambu PET-CF",
"GFG99": "Generic PETG",
"GFG98": "Generic PETG-CF",
"GFG50": "Bambu PETG-CF",
"GFG60": "PolyLite PETG",
"GFG01": "Bambu PETG Translucent",
"GFG97": "Generic PCTG",
"GFB00": "Bambu ABS",
"GFB99": "Generic ABS",
"GFB60": "PolyLite ABS",
"GFB50": "Bambu ABS-GF",
"GFC00": "Bambu PC",
"GFC99": "Generic PC",
"GFB98": "Generic ASA",
"GFB01": "Bambu ASA",
"GFB61": "PolyLite ASA",
"GFB02": "Bambu ASA-Aero",
"GFS99": "Generic PVA",
"GFS04": "Bambu PVA",
"GFS01": "Bambu Support G",
"GFN03": "Bambu PA-CF",
"GFN04": "Bambu PAHT-CF",
"GFS03": "Bambu Support For PA/PET",
"GFN05": "Bambu PA6-CF",
"GFN08": "Bambu PA6-GF",
"GFS98": "Generic HIPS",
"GFT98": "Generic PPS-CF",
"GFT97": "Generic PPS",
"GFN97": "Generic PPA-CF",
"GFN96": "Generic PPA-GF",
"GFP99": "Generic PE",
"GFP98": "Generic PE-CF",
"GFP97": "Generic PP",
"GFP96": "Generic PP-CF",
"GFP95": "Generic PP-GF",
"GFR99": "Generic EVA",
"GFR98": "Generic PHA",
"GFS97": "Generic BVOH"
"GFN08": "Bambu PA6-GF"
}


@ -44,6 +44,4 @@
<div class="ram-status" id="ramStatus"></div>
</div>
</div>
</body>
</html>


@ -44,12 +44,10 @@
<div class="ram-status" id="ramStatus"></div>
</div>
</div>
</body>
</html>
<!-- head -->
<div class="container">
<div class="content">
<h1>FilaMan</h1>
<p>Filament Management Tool</p>
<p>Your smart solution for <strong>Filament Management</strong> in 3D printing.</p>
@ -57,10 +55,11 @@
<h2>About FilaMan</h2>
<p>
FilaMan is a tool designed to simplify filament spool management. It allows you to identify and weigh filament spools,
automatically sync data with the self-hosted <a href="https://github.com/Donkie/Spoolman" target="_blank">Spoolman</a> platform,
and ensure compatibility with <a href="https://github.com/spuder/OpenSpool" target="_blank">OpenSpool</a> for Bambu printers.
automatically sync data with the self-hosted <a href="https://github.com/Donkie/Spoolman" target="_blank">Spoolman</a> platform.
</p>
<p>Get more information at <a href="https://www.filaman.app" target="_blank">https://www.filaman.app</a> and <a href="https://github.com/ManuelW77/Filaman" target="_blank">https://github.com/ManuelW77/Filaman</a>.</p>
<div class="features">
<div class="feature">
<h3>Spool Identification</h3>
@ -75,12 +74,6 @@
<p>Works with OpenSpool to recognize and activate spools on Bambu printers.</p>
</div>
</div>
<h2>Future Plans</h2>
<p>
We are working on expanding compatibility to support smaller NFC tags like NTag213
and developing custom software to enhance the OpenSpool experience.
</p>
</div>
</body>
</html>

31
html/own_filaments.json Normal file

@ -0,0 +1,31 @@
{
"TPU": "GFU99",
"PA": "GFN99",
"PA-CF": "GFN98",
"PLA": "GFL99",
"PLA Silk": "GFL96",
"PLA-CF": "GFL98",
"PLA High Speed": "GFL95",
"PETG": "GFG99",
"PETG-CF": "GFG98",
"PCTG": "GFG97",
"ABS": "GFB99",
"ABS+HS": "GFB99",
"PC": "GFC99",
"PC/ABS": "GFC99",
"ASA": "GFB98",
"PVA": "GFS99",
"HIPS": "GFS98",
"PPS-CF": "GFT98",
"PPS": "GFT97",
"PPA-CF": "GFN97",
"PPA-GF": "GFN96",
"PE": "GFP99",
"PE-CF": "GFP98",
"PP": "GFP97",
"PP-CF": "GFP96",
"PP-GF": "GFP95",
"EVA": "GFR99",
"PHA": "GFR98",
"BVOH": "GFS97"
}
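The new `html/own_filaments.json` maps plain material names to Bambu `tray_info_idx` codes (the inverse direction of the filament list above). A quick way to look up a code locally, assuming `jq` is installed:

```bash
# Sketch: resolve a material name to its Bambu tray_info_idx code.
jq -r '."PETG-CF"' html/own_filaments.json   # prints GFG98
```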


@ -44,8 +44,6 @@
<div class="ram-status" id="ramStatus"></div>
</div>
</div>
</body>
</html>
<!-- head -->


@ -150,6 +150,13 @@ function initWebSocket() {
ramStatus.textContent = `${data.freeHeap}k`;
}
}
else if (data.type === 'setSpoolmanSettings') {
if (data.payload == 'success') {
showNotification(`Spoolman Settings set successfully`, true);
} else {
showNotification(`Error setting Spoolman Settings`, false);
}
}
};
} catch (error) {
isConnected = false;
@ -285,6 +292,14 @@ function displayAmsData(amsData) {
<img src="spool_in.png" alt="Spool In" style="width: 48px; height: 48px; transform: rotate(180deg) scaleX(-1);">
</button>`;
const spoolmanButtonHtml = `
<button class="spool-button" onclick="handleSpoolmanSettings('${tray.tray_info_idx}', '${tray.setting_id}', '${tray.cali_idx}', '${tray.nozzle_temp_min}', '${tray.nozzle_temp_max}')"
style="position: absolute; bottom: 0px; right: 0px;
background: none; border: none; padding: 0;
cursor: pointer; display: none;">
<img src="set_spoolman.png" alt="Spool In" style="width: 38px; height: 38px;">
</button>`;
if (!hasAnyContent) {
return `
<div class="tray">
@ -348,6 +363,7 @@ function displayAmsData(amsData) {
${trayDetails}
${tempHTML}
${(ams.ams_id === 255 && tray.tray_type !== '') ? outButtonHtml : ''}
${(tray.setting_id != "" && tray.setting_id != "null") ? spoolmanButtonHtml : ''}
</div>
</div>`;
@ -373,6 +389,36 @@ function updateSpoolButtons(show) {
});
}
function handleSpoolmanSettings(tray_info_idx, setting_id, cali_idx, nozzle_temp_min, nozzle_temp_max) {
// Get the selected filament
const selectedText = document.getElementById("selected-filament").textContent;
// Find the selected spool in the data
const selectedSpool = spoolsData.find(spool =>
`${spool.id} | ${spool.filament.name} (${spool.filament.material})` === selectedText
);
const payload = {
type: 'setSpoolmanSettings',
payload: {
filament_id: selectedSpool.filament.id,
tray_info_idx: tray_info_idx,
setting_id: setting_id,
cali_idx: cali_idx,
temp_min: nozzle_temp_min,
temp_max: nozzle_temp_max
}
};
try {
socket.send(JSON.stringify(payload));
showNotification(`Settings sent to Spoolman`, true);
} catch (error) {
console.error("Error while sending settings to Spoolman:", error);
showNotification("Error while sending!", false);
}
}
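handleSpoolmanSettings() defines a small WebSocket message of its own: type 'setSpoolmanSettings' plus the tray's Bambu identifiers and temperature range. The firmware-side handler is not part of this excerpt; judging from updateSpoolBambuData() in api.cpp further down, its routing presumably looks roughly like this (the handler name and reply shape are assumptions):

```cpp
// Hypothetical sketch of the firmware branch that serves this message.
#include <ESPAsyncWebServer.h>
#include <ArduinoJson.h>

bool updateSpoolBambuData(String payload);               // declared in the API header below

void handleWsMessage(AsyncWebSocketClient* client, const String& message) {
    JsonDocument doc;
    if (deserializeJson(doc, message)) return;           // ignore malformed JSON

    if (doc["type"] == "setSpoolmanSettings") {
        String payload;
        serializeJson(doc["payload"], payload);           // forward only the payload object
        bool ok = updateSpoolBambuData(payload);          // PATCHes the filament's extra fields

        JsonDocument reply;
        reply["type"]    = "setSpoolmanSettings";
        reply["payload"] = ok ? "success" : "error";      // matches the check in initWebSocket()
        String out;
        serializeJson(reply, out);
        client->text(out);
    }
}
```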
function handleSpoolOut() {
// Build the payload
const payload = {
@ -594,8 +640,6 @@ function writeNfcTag() {
// Create the NFC data packet with correct data types
const nfcData = {
version: "2.0",
protocol: "openspool",
color_hex: selectedSpool.filament.color_hex || "FFFFFF",
type: selectedSpool.filament.material,
min_temp: minTemp,

BIN
html/set_spoolman.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 9.2 KiB

View File

@ -44,8 +44,6 @@
<div class="ram-status" id="ramStatus"></div>
</div>
</div>
</body>
</html>
<!-- head -->
@ -54,11 +52,18 @@
if (spoolmanUrl && spoolmanUrl.trim() !== "") {
document.getElementById('spoolmanUrl').value = spoolmanUrl;
}
// Initialize OctoPrint fields visibility
toggleOctoFields();
};
function checkSpoolmanInstance() {
const url = document.getElementById('spoolmanUrl').value;
fetch(`/api/checkSpoolman?url=${encodeURIComponent(url)}`)
const spoolmanOctoEnabled = document.getElementById('spoolmanOctoEnabled').checked;
const spoolmanOctoUrl = document.getElementById('spoolmanOctoUrl').value;
const spoolmanOctoToken = document.getElementById('spoolmanOctoToken').value;
fetch(`/api/checkSpoolman?url=${encodeURIComponent(url)}&octoEnabled=${spoolmanOctoEnabled}&octoUrl=${spoolmanOctoUrl}&octoToken=${spoolmanOctoToken}`)
.then(response => response.json())
.then(data => {
if (data.healthy) {
@ -76,8 +81,10 @@
const ip = document.getElementById('bambuIp').value;
const serial = document.getElementById('bambuSerial').value;
const code = document.getElementById('bambuCode').value;
const autoSend = document.getElementById('autoSend').checked;
const autoSendTime = document.getElementById('autoSendTime').value;
fetch(`/api/bambu?bambu_ip=${encodeURIComponent(ip)}&bambu_serialnr=${encodeURIComponent(serial)}&bambu_accesscode=${encodeURIComponent(code)}`)
fetch(`/api/bambu?bambu_ip=${encodeURIComponent(ip)}&bambu_serialnr=${encodeURIComponent(serial)}&bambu_accesscode=${encodeURIComponent(code)}&autoSend=${autoSend}&autoSendTime=${autoSendTime}`)
.then(response => response.json())
.then(data => {
if (data.healthy) {
@ -90,6 +97,15 @@
document.getElementById('bambuStatusMessage').innerText = 'Error while saving: ' + error.message;
});
}
/**
* Controls visibility of OctoPrint configuration fields based on checkbox state
* Called on page load and when checkbox changes
*/
function toggleOctoFields() {
const octoEnabled = document.getElementById('spoolmanOctoEnabled').checked;
document.getElementById('octoFields').style.display = octoEnabled ? 'block' : 'none';
}
</script>
<script>
var spoolmanUrl = "{{spoolmanUrl}}";
@ -97,12 +113,30 @@
<div class="content">
<h1>Spoolman API URL / Bambu Credentials</h1>
<label for="spoolmanUrl">Set URL/IP to your Spoolman-Instanz:</label>
<div class="card">
<div class="card-body">
<h5 class="card-title">Set the URL/IP of your Spoolman instance</h5>
<input type="text" id="spoolmanUrl" placeholder="http://ip-or-url-of-your-spoolman-instance:port">
<h5 class="card-title">Optionally send the active spool to the Spoolman OctoPrint plugin:</h5>
<p>
<input type="checkbox" id="spoolmanOctoEnabled" {{spoolmanOctoEnabled}} onchange="toggleOctoFields()"> Send to Octo-Plugin
</p>
<div id="octoFields" style="display: none;">
<p>
<input type="text" id="spoolmanOctoUrl" placeholder="http://ip-or-url-of-your-octoprint-instance:port" value="{{spoolmanOctoUrl}}">
<input type="text" id="spoolmanOctoToken" placeholder="Your Octoprint Token" value="{{spoolmanOctoToken}}">
</p>
</div>
<button onclick="checkSpoolmanInstance()">Save Spoolman URL</button>
<p id="statusMessage"></p>
</div>
</div>
<h2>Bambu Lab Printer Credentials</h2>
<div class="card">
<div class="card-body">
<h5 class="card-title">Bambu Lab Printer Credentials</h5>
<div class="bambu-settings">
<div class="input-group">
<label for="bambuIp">Bambu printer IP address:</label>
@ -116,9 +150,22 @@
<label for="bambuCode">Access Code:</label>
<input type="text" id="bambuCode" placeholder="Access code from the printer" value="{{bambuCode}}">
</div>
<button onclick="saveBambuCredentials()">Save Bambu Credentials</button>
<hr>
<p>If activated, FilaMan automatically assigns the last scanned and weighed spool to the next tray that is filled.</p>
<div class="input-group" style="display: flex; margin-bottom: 0;">
<label for="autoSend" style="width: 250px; margin-right: 5px;">Auto Send to Bambu:</label>
<label for="autoSendTime" style="width: 250px; margin-right: 5px;">Wait for Spool in Sec:</label>
</div>
<div class="input-group" style="display: flex;">
<input type="checkbox" id="autoSend" {{autoSendToBambu}} style="width: 190px; margin-right: 10px;">
<input type="number" min="60" id="autoSendTime" placeholder="Time to wait" value="{{autoSendTime}}" style="width: 100px;">
</div>
<button style="margin: 0;" onclick="saveBambuCredentials()">Save Bambu Credentials</button>
<p id="bambuStatusMessage"></p>
</div>
</div>
</div>
</div>
</body>
</html>
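The new auto-send option travels to the device as two plain query parameters (autoSend as the string "true"/"false", autoSendTime in seconds). The /api/bambu endpoint itself is not in this excerpt; a rough sketch of how it might hand the values to saveBambuCredentials() (declared later in bambu.h), assuming ESPAsyncWebServer:

```cpp
// Hypothetical route registration; error handling omitted for brevity.
#include <ESPAsyncWebServer.h>

// saveBambuCredentials() is declared in bambu.h, shown later in this diff.
bool saveBambuCredentials(const String& ip, const String& serialnr, const String& accesscode,
                          const bool autoSend, const String& autoSendTime);

void registerBambuRoute(AsyncWebServer& server) {
    server.on("/api/bambu", HTTP_GET, [](AsyncWebServerRequest* request) {
        String ip       = request->getParam("bambu_ip")->value();
        String serial   = request->getParam("bambu_serialnr")->value();
        String code     = request->getParam("bambu_accesscode")->value();
        bool   autoSend = request->hasParam("autoSend") &&
                          request->getParam("autoSend")->value() == "true";
        String autoTime = request->hasParam("autoSendTime")
                              ? request->getParam("autoSendTime")->value() : "";

        bool healthy = saveBambuCredentials(ip, serial, code, autoSend, autoTime);
        request->send(200, "application/json",
                      String("{\"healthy\":") + (healthy ? "true" : "false") + "}");
    });
}
```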

View File

@ -86,7 +86,7 @@ function populateVendorDropdown(data, selectedSmId = null) {
});
// After the loop: format the total length
console.log("Total Lenght: ", totalLength);
console.log("Total Length: ", totalLength);
const formattedLength = totalLength > 1000
? (totalLength / 1000).toFixed(2) + " km"
: totalLength.toFixed(2) + " m";
@ -97,8 +97,10 @@ function populateVendorDropdown(data, selectedSmId = null) {
? (weightInKg / 1000).toFixed(2) + " t"
: weightInKg.toFixed(2) + " kg";
// Dropdown mit gefilterten Herstellern befüllen
Object.entries(filteredVendors).forEach(([id, name]) => {
// Populate the dropdown with the filtered vendors - sorted alphabetically
Object.entries(filteredVendors)
.sort(([, nameA], [, nameB]) => nameA.localeCompare(nameB)) // Sort vendors alphabetically by name
.forEach(([id, name]) => {
const option = document.createElement("option");
option.value = id;
option.textContent = name;

View File

@ -188,14 +188,18 @@ label {
font-weight: bold;
}
input[type="text"], input[type="submit"] {
input[type="text"], input[type="submit"], input[type="number"] {
padding: 10px;
border: 1px solid #ccc;
border-radius: 5px;
font-size: 16px;
}
input[type="text"]:focus {
input[type="number"] {
width: 108px !important;
}
input[type="text"]:focus, input[type="number"]:focus {
border-color: #007bff;
outline: none;
}
@ -279,9 +283,10 @@ a:hover {
/* Card style for visual separation */
.card {
background: #f9f9f9;
background: var(--primary-color);
width: 500px;
padding: 15px;
margin: 20px 0;
margin: 20px auto;
border-radius: 8px;
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
}
@ -760,17 +765,19 @@ a:hover {
right: 20px;
padding: 15px 25px;
border-radius: 4px;
color: white;
color: black;
z-index: 1000;
animation: slideIn 0.3s ease-out;
}
.notification.success {
background-color: #28a745;
color: black !important;
}
.notification.error {
background-color: #dc3545;
color: white !important;
}
.notification.fade-out {
@ -959,7 +966,6 @@ input[type="submit"]:disabled,
/* Bambu Settings Erweiterung */
.bambu-settings {
background: white;
padding: 20px;
border-radius: 8px;
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
@ -1013,6 +1019,7 @@ input[type="submit"]:disabled,
color: #000;
vertical-align: middle;
margin-left: 0.5rem;
text-shadow: none !important;
}
.progress-container {
@ -1051,9 +1058,10 @@ input[type="submit"]:disabled,
}
.update-form {
background: var(--primary-color);
box-shadow: 0 4px 8px rgba(0, 0, 0, 0.05);
border: var(--glass-border);
padding: 20px;
border-radius: 8px;
box-shadow: 0 2px 4px rgba(0,0,0,0.1);
margin: 0 auto;
width: 400px;
text-align: center;
@ -1064,7 +1072,7 @@ input[type="submit"]:disabled,
padding: 8px;
border: 1px solid #ddd;
border-radius: 4px;
background: white;
background-color: #4CAF50;
}
.update-form input[type="submit"] {
background-color: #4CAF50;
@ -1086,10 +1094,66 @@ input[type="submit"]:disabled,
.warning {
background-color: var(--primary-color);
border: 1px solid #ffe0b2;
color: white;
padding: 15px;
margin: 20px auto;
border-radius: 4px;
max-width: 600px;
text-align: center;
color: #e65100;
padding: 15px;
}
.update-options {
display: flex;
gap: 2rem;
margin: 2rem 0;
}
.update-section {
flex: 1;
background: var(--background-green);
padding: 1.5rem;
border-radius: 8px;
}
.update-section h2 {
margin-top: 0;
color: #333;
}
.update-section p {
color: #666;
margin-bottom: 1rem;
}
.progress-container {
margin: 20px 0;
background: #f0f0f0;
border-radius: 4px;
overflow: hidden;
}
.progress-bar {
width: 0;
height: 20px;
background: #4CAF50;
transition: width 0.3s ease-in-out;
text-align: center;
line-height: 20px;
color: white;
}
.status {
margin-top: 20px;
padding: 10px;
border-radius: 4px;
display: none;
}
.status.success {
background: #e8f5e9;
color: #2e7d32;
}
.status.error {
background: #ffebee;
color: #c62828;
}
.warning {
background: #fff3e0;
color: #e65100;
padding: 15px;
border-radius: 4px;
margin-bottom: 20px;
}

View File

@ -44,8 +44,6 @@
<div class="ram-status" id="ramStatus"></div>
</div>
</div>
</body>
</html>
<!-- head -->
@ -86,64 +84,6 @@
<div class="status"></div>
</div>
<style>
.update-options {
display: flex;
gap: 2rem;
margin: 2rem 0;
}
.update-section {
flex: 1;
background: #f5f5f5;
padding: 1.5rem;
border-radius: 8px;
}
.update-section h2 {
margin-top: 0;
color: #333;
}
.update-section p {
color: #666;
margin-bottom: 1rem;
}
.progress-container {
margin: 20px 0;
background: #f0f0f0;
border-radius: 4px;
overflow: hidden;
}
.progress-bar {
width: 0;
height: 20px;
background: #4CAF50;
transition: width 0.3s ease-in-out;
text-align: center;
line-height: 20px;
color: white;
}
.status {
margin-top: 20px;
padding: 10px;
border-radius: 4px;
display: none;
}
.status.success {
background: #e8f5e9;
color: #2e7d32;
}
.status.error {
background: #ffebee;
color: #c62828;
}
.warning {
background: #fff3e0;
color: #e65100;
padding: 15px;
border-radius: 4px;
margin-bottom: 20px;
}
</style>
<script>
// Hide status indicators during update
const statusContainer = document.querySelector('.status-container');
@ -154,6 +94,96 @@
const progress = document.querySelector('.progress-bar');
const progressContainer = document.querySelector('.progress-container');
const status = document.querySelector('.status');
let updateInProgress = false;
let lastReceivedProgress = 0;
// WebSocket Handling
let ws = null;
let wsReconnectTimer = null;
function connectWebSocket() {
ws = new WebSocket('ws://' + window.location.host + '/ws');
ws.onmessage = function(event) {
try {
const data = JSON.parse(event.data);
if (data.type === "updateProgress" && updateInProgress) {
// Show the progress bar
progressContainer.style.display = 'block';
// Only update the progress if it has increased
const newProgress = parseInt(data.progress);
if (!isNaN(newProgress) && newProgress >= lastReceivedProgress) {
progress.style.width = newProgress + '%';
progress.textContent = newProgress + '%';
lastReceivedProgress = newProgress;
}
// Show the status message
if (data.message || data.status) {
status.textContent = data.message || getStatusMessage(data.status);
status.className = 'status success';
status.style.display = 'block';
// Trigger the reload once the update succeeded
if (data.status === 'success' || lastReceivedProgress >= 98) {
clearTimeout(wsReconnectTimer);
setTimeout(() => {
window.location.href = '/';
}, 30000);
}
}
}
} catch (e) {
console.error('WebSocket message error:', e);
}
};
ws.onclose = function() {
if (updateInProgress) {
// If the progress got far enough, assume the update was successful
if (lastReceivedProgress >= 85) {
status.textContent = "Update appears successful! Device is restarting... Page will reload in 30 seconds.";
status.className = 'status success';
status.style.display = 'block';
clearTimeout(wsReconnectTimer);
setTimeout(() => {
window.location.href = '/';
}, 30000);
} else {
// Try to reconnect while progress is still low
wsReconnectTimer = setTimeout(connectWebSocket, 1000);
}
}
};
ws.onerror = function(err) {
console.error('WebSocket error:', err);
if (updateInProgress && lastReceivedProgress >= 85) {
status.textContent = "Update appears successful! Device is restarting... Page will reload in 30 seconds.";
status.className = 'status success';
status.style.display = 'block';
setTimeout(() => {
window.location.href = '/';
}, 30000);
}
};
}
// Initial WebSocket connection
connectWebSocket();
function getStatusMessage(status) {
switch(status) {
case 'starting': return 'Starting update...';
case 'uploading': return 'Uploading...';
case 'finalizing': return 'Finalizing update...';
case 'restoring': return 'Restoring configurations...';
case 'preparing': return 'Preparing for restart...';
case 'success': return 'Update successful! Device is restarting... Page will reload in 30 seconds.';
default: return 'Updating...';
}
}
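The page drives the progress bar from "updateProgress" WebSocket messages carrying a numeric progress value plus either a free-text message or one of the status keys handled in getStatusMessage(). The sending side lives in the firmware and is not shown here; a sketch of what such a sender might look like (the global ws object and the function name are assumptions):

```cpp
// Hypothetical firmware-side sender for the "updateProgress" messages above.
#include <ESPAsyncWebServer.h>
#include <ArduinoJson.h>

extern AsyncWebSocket ws;                 // assumed: the firmware's "/ws" endpoint

void sendUpdateProgress(int percent, const char* status) {
    JsonDocument doc;
    doc["type"]     = "updateProgress";
    doc["progress"] = percent;            // the page parses this with parseInt()
    doc["status"]   = status;             // e.g. "uploading", "finalizing", "success"

    String out;
    serializeJson(doc, out);
    ws.textAll(out);                      // broadcast to all connected clients
}
```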
function handleUpdate(e) {
e.preventDefault();
@ -167,85 +197,48 @@
}
// Validate file name pattern
if (updateType === 'firmware' && !file.name.startsWith('filaman_')) {
alert('Please select a valid firmware file (filaman_*.bin)');
if (updateType === 'firmware' && !file.name.startsWith('upgrade_filaman_firmware_')) {
alert('Please select a valid firmware file (upgrade_filaman_firmware_*.bin)');
return;
}
if (updateType === 'webpage' && !file.name.startsWith('webpage_')) {
alert('Please select a valid webpage file (webpage_*.bin)');
if (updateType === 'webpage' && !file.name.startsWith('upgrade_filaman_website_')) {
alert('Please select a valid webpage file (upgrade_filaman_website_*.bin)');
return;
}
// Reset UI
updateInProgress = true;
progressContainer.style.display = 'block';
status.style.display = 'none';
status.className = 'status';
// Reset progress bar
progress.style.width = '0%';
progress.textContent = '0%';
// Disable both forms during update
// Disable submit buttons
document.querySelectorAll('form input[type=submit]').forEach(btn => btn.disabled = true);
// Send update
const xhr = new XMLHttpRequest();
xhr.open('POST', '/update', true);
xhr.upload.onprogress = (e) => {
if (e.lengthComputable) {
const percentComplete = (e.loaded / e.total) * 100;
progress.style.width = percentComplete + '%';
progress.textContent = Math.round(percentComplete) + '%';
}
};
xhr.onload = function() {
try {
let response = this.responseText;
try {
const jsonResponse = JSON.parse(response);
response = jsonResponse.message;
if (jsonResponse.restart) {
status.textContent = response + " Redirecting in 20 seconds...";
let countdown = 20;
const timer = setInterval(() => {
countdown--;
if (countdown <= 0) {
clearInterval(timer);
window.location.href = '/';
} else {
status.textContent = response + ` Redirecting in ${countdown} seconds...`;
}
}, 1000);
}
} catch (e) {
if (!isNaN(response)) {
const percent = parseInt(response);
progress.style.width = percent + '%';
progress.textContent = percent + '%';
return;
}
}
status.textContent = response;
status.classList.add(xhr.status === 200 ? 'success' : 'error');
status.style.display = 'block';
if (xhr.status !== 200) {
document.querySelectorAll('form input[type=submit]').forEach(btn => btn.disabled = false);
}
} catch (error) {
status.textContent = 'Error: ' + error.message;
status.classList.add('error');
if (xhr.status !== 200 && !progress.textContent.startsWith('100')) {
status.textContent = "Update failed: " + (xhr.responseText || "Unknown error");
status.className = 'status error';
status.style.display = 'block';
updateInProgress = false;
document.querySelectorAll('form input[type=submit]').forEach(btn => btn.disabled = false);
}
};
xhr.onerror = function() {
status.textContent = 'Update failed: Network error';
status.classList.add('error');
if (!progress.textContent.startsWith('100')) {
status.textContent = "Network error during update";
status.className = 'status error';
status.style.display = 'block';
updateInProgress = false;
document.querySelectorAll('form input[type=submit]').forEach(btn => btn.disabled = false);
}
};
const formData = new FormData();

View File

@ -44,8 +44,6 @@
<div class="ram-status" id="ramStatus"></div>
</div>
</div>
</body>
</html>
<!-- head -->

View File

@ -44,8 +44,6 @@
<div class="ram-status" id="ramStatus"></div>
</div>
</div>
</body>
</html>
<!-- head -->

BIN
img/7-enable.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 52 KiB

BIN
img/ESP32-SPI-Pins.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 143 KiB

BIN
img/IMG_2589.jpeg Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 136 KiB

BIN
img/IMG_2590.jpeg Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 143 KiB

BIN
img/Schaltplan.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 283 KiB

View File

@ -1,6 +1,6 @@
# Name, Type, SubType, Offset, Size, Flags
nvs, data, nvs, 0x9000, 0x5000,
otadata, data, ota, 0xe000, 0x2000,
app0, app, ota_0, 0x10000, 0x180000,
app1, app, ota_1, 0x190000, 0x180000,
spiffs, data, spiffs, 0x310000, 0xE0000,
app0, app, ota_0, 0x10000, 0x1E0000,
app1, app, ota_1, 0x1F0000, 0x1E0000,
spiffs, data, spiffs, 0x3D0000, 0x30000,
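The new layout trades filesystem space for larger OTA app slots. A quick arithmetic check (values taken from the table above) that the offsets are consistent and the layout still ends exactly at the 4 MB flash boundary:

```cpp
// Compile-time sanity check of the new partition layout.
#include <cstdint>

constexpr uint32_t APP0_OFFSET = 0x10000;
constexpr uint32_t APP_SIZE    = 0x1E0000;                 // 1.875 MiB per OTA slot (was 0x180000 = 1.5 MiB)
constexpr uint32_t APP1_OFFSET = APP0_OFFSET + APP_SIZE;   // 0x1F0000
constexpr uint32_t FS_OFFSET   = APP1_OFFSET + APP_SIZE;   // 0x3D0000
constexpr uint32_t FS_SIZE     = 0x30000;                  // 192 KiB LittleFS (was 0xE0000 = 896 KiB)

static_assert(APP1_OFFSET == 0x1F0000, "app1 offset matches partitions.csv");
static_assert(FS_OFFSET   == 0x3D0000, "filesystem offset matches partitions.csv");
static_assert(FS_OFFSET + FS_SIZE == 0x400000, "layout ends exactly at 4 MB");
```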

View File

@ -9,8 +9,10 @@
; https://docs.platformio.org/page/projectconf.html
[common]
version = "1.3.5"
version = "1.4.0"
to_old_version = "1.4.0"
##
[env:esp32dev]
platform = espressif32
board = esp32dev
@ -33,7 +35,8 @@ lib_deps =
digitaldragon/SSLClient @ ^1.3.2
; Enable SPIFFS upload
board_build.filesystem = spiffs
#board_build.filesystem = spiffs
board_build.filesystem = littlefs
; Update partition settings
board_build.partitions = partitions.csv
board_upload.flash_size = 4MB
@ -44,20 +47,17 @@ build_flags =
-Os
-ffunction-sections
-fdata-sections
-DNDEBUG
#-DNDEBUG
-mtext-section-literals
-DVERSION=\"${common.version}\"
-DTOOLDVERSION=\"${common.to_old_version}\"
-DASYNCWEBSERVER_REGEX
-DCORE_DEBUG_LEVEL=3
#-DCORE_DEBUG_LEVEL=3
-DCONFIG_ARDUHAL_LOG_COLORS=1
-DOTA_DEBUG=1
#-DOTA_DEBUG=1
-DCONFIG_OPTIMIZATION_LEVEL_DEBUG=1
-DCONFIG_ESP32_PANIC_PRINT_REBOOT
-DBOOT_APP_PARTITION_OTA_0=1
-DCONFIG_LOG_DEFAULT_LEVEL=3
-DCONFIG_LWIP_TCP_MSL=60000
-DCONFIG_LWIP_TCP_WND_DEFAULT=8192
-DCONFIG_LWIP_TCP_SND_BUF_DEFAULT=4096
-DCONFIG_LWIP_TCP_RCV_BUF_DEFAULT=4096
-DCONFIG_LWIP_MAX_ACTIVE_TCP=16

View File

@ -64,29 +64,10 @@ def get_changes_from_git():
return changes
def push_changes(version):
"""Push changes to upstream"""
try:
# Stage the CHANGELOG.md
subprocess.run(['git', 'add', 'CHANGELOG.md'], check=True)
# Commit the changelog
commit_msg = f"docs: update changelog for version {version}"
subprocess.run(['git', 'commit', '-m', commit_msg], check=True)
# Push to origin (local)
subprocess.run(['git', 'push', 'origin'], check=True)
print("Successfully pushed to origin")
except subprocess.CalledProcessError as e:
print(f"Error during git operations: {e}")
return False
return True
def update_changelog():
print("Starting changelog update...") # Add this line
print("Starting changelog update...")
version = get_version()
print(f"Current version: {version}") # Add this line
print(f"Current version: {version}")
today = datetime.now().strftime('%Y-%m-%d')
script_dir = os.path.dirname(os.path.abspath(__file__))
@ -111,7 +92,7 @@ def update_changelog():
if not os.path.exists(changelog_path):
with open(changelog_path, 'w') as f:
f.write(f"# Changelog\n\n{changelog_entry}")
push_changes(version)
print(f"Created new changelog file with version {version}")
else:
with open(changelog_path, 'r') as f:
content = f.read()
@ -120,9 +101,30 @@ def update_changelog():
updated_content = content.replace("# Changelog\n", f"# Changelog\n\n{changelog_entry}")
with open(changelog_path, 'w') as f:
f.write(updated_content)
push_changes(version)
print(f"Added new version {version} to changelog")
else:
print(f"Version {version} already exists in changelog")
# Version already exists, update its existing entries
version_pattern = f"## \\[{version}\\] - \\d{{4}}-\\d{{2}}-\\d{{2}}"
next_version_pattern = "## \\[.*?\\] - \\d{4}-\\d{2}-\\d{2}"
# Find the start of the current version
version_match = re.search(version_pattern, content)
if version_match:
version_start = version_match.start()
# Look for the next version
next_version_match = re.search(next_version_pattern, content[version_start + 1:])
if next_version_match:
# Replace the content between the current and the next version
next_version_pos = version_start + 1 + next_version_match.start()
updated_content = content[:version_start] + changelog_entry + content[next_version_pos:]
else:
# If no next version exists, replace up to the end
updated_content = content[:version_start] + changelog_entry + "\n"
with open(changelog_path, 'w') as f:
f.write(updated_content)
print(f"Updated entries for version {version}")
if __name__ == "__main__":
update_changelog()

View File

@ -5,41 +5,20 @@
bool spoolman_connected = false;
String spoolmanUrl = "";
bool octoEnabled = false;
String octoUrl = "";
String octoToken = "";
struct SendToApiParams {
String httpType;
String spoolsUrl;
String updatePayload;
String octoToken;
};
/*
// Spoolman Data
{
"version":"1.0",
"protocol":"openspool",
"color_hex":"AF7933",
"type":"ABS",
"min_temp":175,
"max_temp":275,
"brand":"Overture"
}
// FilaMan Data
{
"version":"1.0",
"protocol":"openspool",
"color_hex":"AF7933",
"type":"ABS",
"min_temp":175,
"max_temp":275,
"brand":"Overture",
"sm_id":
}
*/
JsonDocument fetchSpoolsForWebsite() {
JsonDocument fetchSingleSpoolInfo(int spoolId) {
HTTPClient http;
String spoolsUrl = spoolmanUrl + apiUrl + "/spool";
String spoolsUrl = spoolmanUrl + apiUrl + "/spool/" + spoolId;
Serial.print("Rufe Spool-Daten von: ");
Serial.println(spoolsUrl);
@ -56,84 +35,45 @@ JsonDocument fetchSpoolsForWebsite() {
Serial.print("Fehler beim Parsen der JSON-Antwort: ");
Serial.println(error.c_str());
} else {
JsonArray spools = doc.as<JsonArray>();
JsonArray filteredSpools = filteredDoc.to<JsonArray>();
String filamentType = doc["filament"]["material"].as<String>();
String filamentBrand = doc["filament"]["vendor"]["name"].as<String>();
for (JsonObject spool : spools) {
JsonObject filteredSpool = filteredSpools.add<JsonObject>();
filteredSpool["extra"]["nfc_id"] = spool["extra"]["nfc_id"];
int nozzle_temp_min = 0;
int nozzle_temp_max = 0;
if (doc["filament"]["extra"]["nozzle_temperature"].is<String>()) {
String tempString = doc["filament"]["extra"]["nozzle_temperature"].as<String>();
tempString.replace("[", "");
tempString.replace("]", "");
int commaIndex = tempString.indexOf(',');
JsonObject filament = filteredSpool["filament"].to<JsonObject>();
filament["sm_id"] = spool["id"];
filament["id"] = spool["filament"]["id"];
filament["name"] = spool["filament"]["name"];
filament["material"] = spool["filament"]["material"];
filament["color_hex"] = spool["filament"]["color_hex"];
filament["nozzle_temperature"] = spool["filament"]["extra"]["nozzle_temperature"]; // [190,230]
filament["price_meter"] = spool["filament"]["extra"]["price_meter"];
filament["price_gramm"] = spool["filament"]["extra"]["price_gramm"];
JsonObject vendor = filament["vendor"].to<JsonObject>();
vendor["id"] = spool["filament"]["vendor"]["id"];
vendor["name"] = spool["filament"]["vendor"]["name"];
if (commaIndex != -1) {
nozzle_temp_min = tempString.substring(0, commaIndex).toInt();
nozzle_temp_max = tempString.substring(commaIndex + 1).toInt();
}
}
} else {
Serial.print("Fehler beim Abrufen der Spool-Daten. HTTP-Code: ");
Serial.println(httpCode);
}
http.end();
return filteredDoc;
}
String filamentColor = doc["filament"]["color_hex"].as<String>();
filamentColor.toUpperCase();
JsonDocument fetchAllSpoolsInfo() {
HTTPClient http;
String spoolsUrl = spoolmanUrl + apiUrl + "/spool";
String tray_info_idx = doc["filament"]["extra"]["bambu_idx"].as<String>();
tray_info_idx.replace("\"", "");
Serial.print("Rufe Spool-Daten von: ");
Serial.println(spoolsUrl);
String cali_idx = doc["filament"]["extra"]["bambu_cali_id"].as<String>(); // "\"153\""
cali_idx.replace("\"", "");
http.begin(spoolsUrl);
int httpCode = http.GET();
String bambu_setting_id = doc["filament"]["extra"]["bambu_setting_id"].as<String>(); // "\"PFUSf40e9953b40d3d\""
bambu_setting_id.replace("\"", "");
JsonDocument filteredDoc;
if (httpCode == HTTP_CODE_OK) {
String payload = http.getString();
JsonDocument doc;
DeserializationError error = deserializeJson(doc, payload);
if (error) {
Serial.print("Fehler beim Parsen der JSON-Antwort: ");
Serial.println(error.c_str());
} else {
JsonArray spools = doc.as<JsonArray>();
JsonArray filteredSpools = filteredDoc.to<JsonArray>();
doc.clear();
for (JsonObject spool : spools) {
JsonObject filteredSpool = filteredSpools.add<JsonObject>();
filteredSpool["price"] = spool["price"];
filteredSpool["remaining_weight"] = spool["remaining_weight"];
filteredSpool["used_weight"] = spool["used_weight"];
filteredSpool["extra"]["nfc_id"] = spool["extra"]["nfc_id"];
JsonObject filament = filteredSpool["filament"].to<JsonObject>();
filament["id"] = spool["filament"]["id"];
filament["name"] = spool["filament"]["name"];
filament["material"] = spool["filament"]["material"];
filament["density"] = spool["filament"]["density"];
filament["diameter"] = spool["filament"]["diameter"];
filament["spool_weight"] = spool["filament"]["spool_weight"];
filament["color_hex"] = spool["filament"]["color_hex"];
JsonObject vendor = filament["vendor"].to<JsonObject>();
vendor["id"] = spool["filament"]["vendor"]["id"];
vendor["name"] = spool["filament"]["vendor"]["name"];
JsonObject extra = filament["extra"].to<JsonObject>();
extra["nozzle_temperature"] = spool["filament"]["extra"]["nozzle_temperature"];
extra["price_gramm"] = spool["filament"]["extra"]["price_gramm"];
extra["price_meter"] = spool["filament"]["extra"]["price_meter"];
}
filteredDoc["color"] = filamentColor;
filteredDoc["type"] = filamentType;
filteredDoc["nozzle_temp_min"] = nozzle_temp_min;
filteredDoc["nozzle_temp_max"] = nozzle_temp_max;
filteredDoc["brand"] = filamentBrand;
filteredDoc["tray_info_idx"] = tray_info_idx;
filteredDoc["cali_idx"] = cali_idx;
filteredDoc["bambu_setting_id"] = bambu_setting_id;
}
} else {
Serial.print("Fehler beim Abrufen der Spool-Daten. HTTP-Code: ");
@ -151,19 +91,21 @@ void sendToApi(void *parameter) {
String httpType = params->httpType;
String spoolsUrl = params->spoolsUrl;
String updatePayload = params->updatePayload;
String octoToken = params->octoToken;
HTTPClient http;
http.begin(spoolsUrl);
http.addHeader("Content-Type", "application/json");
if (octoEnabled && octoToken != "") http.addHeader("X-Api-Key", octoToken);
int httpCode = http.PUT(updatePayload);
if (httpType == "PATCH") httpCode = http.PATCH(updatePayload);
if (httpType == "POST") httpCode = http.POST(updatePayload);
if (httpCode == HTTP_CODE_OK) {
Serial.println("Gewicht der Spule erfolgreich aktualisiert");
Serial.println("Spoolman erfolgreich aktualisiert");
} else {
Serial.println("Fehler beim Aktualisieren des Gewichts der Spule");
Serial.println("Fehler beim Senden an Spoolman!");
oledShowMessage("Spoolman update failed");
vTaskDelay(2000 / portTICK_PERIOD_MS);
}
@ -262,6 +204,89 @@ uint8_t updateSpoolWeight(String spoolId, uint16_t weight) {
return 1;
}
bool updateSpoolOcto(int spoolId) {
String spoolsUrl = octoUrl + "/plugin/Spoolman/selectSpool";
Serial.print("Update Spule in Octoprint mit URL: ");
Serial.println(spoolsUrl);
JsonDocument updateDoc;
updateDoc["spool_id"] = spoolId;
updateDoc["tool"] = "tool0";
String updatePayload;
serializeJson(updateDoc, updatePayload);
Serial.print("Update Payload: ");
Serial.println(updatePayload);
SendToApiParams* params = new SendToApiParams();
if (params == nullptr) {
Serial.println("Fehler: Kann Speicher für Task-Parameter nicht allokieren.");
return false;
}
params->httpType = "POST";
params->spoolsUrl = spoolsUrl;
params->updatePayload = updatePayload;
params->octoToken = octoToken;
// Erstelle die Task
BaseType_t result = xTaskCreate(
sendToApi, // Task-Funktion
"SendToApiTask", // Task-Name
4096, // Stackgröße in Bytes
(void*)params, // Parameter
0, // Priorität
NULL // Task-Handle (nicht benötigt)
);
return true;
}
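updateSpoolOcto() wraps the OctoPrint Spoolman plugin call (a POST to /plugin/Spoolman/selectSpool, with the X-Api-Key header added in sendToApi() above). Where it gets called from is not part of this excerpt; presumably the NFC/scale flow invokes it once a spool has been identified, roughly like this (the function name is an assumption):

```cpp
// Hypothetical call site: after a tag with a Spoolman spool ID has been read,
// forward the selection to OctoPrint if the option is enabled.
void onSpoolIdentified(int spoolId) {
    if (octoEnabled && spoolId > 0) {
        updateSpoolOcto(spoolId);   // POSTs {"spool_id": ..., "tool": "tool0"}
    }
}
```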
bool updateSpoolBambuData(String payload) {
JsonDocument doc;
DeserializationError error = deserializeJson(doc, payload);
if (error) {
Serial.print("Fehler beim JSON-Parsing: ");
Serial.println(error.c_str());
return false;
}
String spoolsUrl = spoolmanUrl + apiUrl + "/filament/" + doc["filament_id"].as<String>();
Serial.print("Update Spule mit URL: ");
Serial.println(spoolsUrl);
JsonDocument updateDoc;
updateDoc["extra"]["bambu_setting_id"] = "\"" + doc["setting_id"].as<String>() + "\"";
updateDoc["extra"]["bambu_cali_id"] = "\"" + doc["cali_idx"].as<String>() + "\"";
updateDoc["extra"]["bambu_idx"] = "\"" + doc["tray_info_idx"].as<String>() + "\"";
updateDoc["extra"]["nozzle_temperature"] = "[" + doc["temp_min"].as<String>() + "," + doc["temp_max"].as<String>() + "]";
String updatePayload;
serializeJson(updateDoc, updatePayload);
Serial.print("Update Payload: ");
Serial.println(updatePayload);
SendToApiParams* params = new SendToApiParams();
if (params == nullptr) {
Serial.println("Fehler: Kann Speicher für Task-Parameter nicht allokieren.");
return false;
}
params->httpType = "PATCH";
params->spoolsUrl = spoolsUrl;
params->updatePayload = updatePayload;
// Erstelle die Task
BaseType_t result = xTaskCreate(
sendToApi, // Task-Funktion
"SendToApiTask", // Task-Name
4096, // Stackgröße in Bytes
(void*)params, // Parameter
0, // Priorität
NULL // Task-Handle (nicht benötigt)
);
return true;
}
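The escaped inner quotes above mirror how these Spoolman extra fields are stored as JSON-encoded strings. With illustrative values (setting_id "PFUSf40e9953b40d3d", cali_idx "153", tray_info_idx "GFG00", temps 190/230), the PATCH body built by this function serializes to something like the output noted below:

```cpp
// Illustrative only: reproduces the PATCH body format built above.
#include <Arduino.h>
#include <ArduinoJson.h>

void printExampleBambuPatch() {
    JsonDocument updateDoc;
    updateDoc["extra"]["bambu_setting_id"]   = "\"PFUSf40e9953b40d3d\"";
    updateDoc["extra"]["bambu_cali_id"]      = "\"153\"";
    updateDoc["extra"]["bambu_idx"]          = "\"GFG00\"";
    updateDoc["extra"]["nozzle_temperature"] = "[190,230]";

    String body;
    serializeJson(updateDoc, body);
    Serial.println(body);
    // -> {"extra":{"bambu_setting_id":"\"PFUSf40e9953b40d3d\"","bambu_cali_id":"\"153\"",
    //              "bambu_idx":"\"GFG00\"","nozzle_temperature":"[190,230]"}}
}
```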
// #### Spoolman init
bool checkSpoolmanExtraFields() {
HTTPClient http;
@ -403,12 +428,13 @@ bool checkSpoolmanExtraFields() {
}
}
}
http.end();
}
Serial.println("-------- ENDE Prüfe Felder --------");
Serial.println();
http.end();
return true;
}
@ -452,17 +478,24 @@ bool checkSpoolmanInstance(const String& url) {
return false;
}
bool saveSpoolmanUrl(const String& url) {
bool saveSpoolmanUrl(const String& url, bool octoOn, const String& octoWh, const String& octoTk) {
if (!checkSpoolmanInstance(url)) return false;
JsonDocument doc;
doc["url"] = url;
Serial.print("Speichere URL in Datei: ");
Serial.println(url);
doc["octoEnabled"] = octoOn;
doc["octoUrl"] = octoWh;
doc["octoToken"] = octoTk;
Serial.print("Speichere Spoolman Data in Datei: ");
Serial.println(doc.as<String>());
if (!saveJsonValue("/spoolman_url.json", doc)) {
Serial.println("Fehler beim Speichern der Spoolman-URL.");
return false;
}
spoolmanUrl = url;
octoEnabled = octoOn;
octoUrl = octoWh;
octoToken = octoTk;
return true;
}
@ -470,6 +503,13 @@ bool saveSpoolmanUrl(const String& url) {
String loadSpoolmanUrl() {
JsonDocument doc;
if (loadJsonValue("/spoolman_url.json", doc) && doc["url"].is<String>()) {
octoEnabled = (doc["octoEnabled"].is<bool>()) ? doc["octoEnabled"].as<bool>() : false;
if (octoEnabled && doc["octoToken"].is<String>() && doc["octoUrl"].is<String>())
{
octoUrl = doc["octoUrl"].as<String>();
octoToken = doc["octoToken"].as<String>();
}
return doc["url"].as<String>();
}
Serial.println("Keine gültige Spoolman-URL gefunden.");

View File

@ -9,16 +9,19 @@
extern bool spoolman_connected;
extern String spoolmanUrl;
extern bool octoEnabled;
extern String octoUrl;
extern String octoToken;
bool checkSpoolmanInstance(const String& url);
bool saveSpoolmanUrl(const String& url);
bool saveSpoolmanUrl(const String& url, bool octoOn, const String& octoWh, const String& octoTk);
String loadSpoolmanUrl(); // Loads the stored Spoolman URL
bool checkSpoolmanExtraFields(); // Checks the required extra fields
JsonDocument fetchSpoolsForWebsite(); // API function for the web page
JsonDocument fetchAllSpoolsInfo();
void sendAmsData(AsyncWebSocketClient *client); // Sends AMS data to WebSocket clients
JsonDocument fetchSingleSpoolInfo(int spoolId); // API function for the web page
bool updateSpoolTagId(String uidString, const char* payload); // Updates a spool's NFC tag ID
uint8_t updateSpoolWeight(String spoolId, uint16_t weight); // Updates a spool's weight
bool initSpoolman(); // Initializes Spoolman
bool updateSpoolBambuData(String payload); // Updates the Bambu-related filament data
bool updateSpoolOcto(int spoolId); // Selects the spool in the OctoPrint plugin
#endif

View File

@ -23,14 +23,21 @@ const char* bambu_username = "bblp";
const char* bambu_ip = nullptr;
const char* bambu_accesscode = nullptr;
const char* bambu_serialnr = nullptr;
String g_bambu_ip = "";
String g_bambu_accesscode = "";
String g_bambu_serialnr = "";
bool bambu_connected = false;
bool autoSendToBambu = false;
int autoSetToBambuSpoolId = 0;
// Globale Variablen für AMS-Daten
int ams_count = 0;
String amsJsonData; // Speichert das fertige JSON für WebSocket-Clients
AMSData ams_data[MAX_AMS]; // Definition des Arrays
AMSData ams_data[MAX_AMS]; // Definition des Arrays;
bool saveBambuCredentials(const String& ip, const String& serialnr, const String& accesscode) {
bool saveBambuCredentials(const String& ip, const String& serialnr, const String& accesscode, bool autoSend, const String& autoSendTime) {
if (BambuMqttTask) {
vTaskDelete(BambuMqttTask);
}
@ -39,6 +46,8 @@ bool saveBambuCredentials(const String& ip, const String& serialnr, const String
doc["bambu_ip"] = ip;
doc["bambu_accesscode"] = accesscode;
doc["bambu_serialnr"] = serialnr;
doc["autoSendToBambu"] = autoSend;
doc["autoSendTime"] = (autoSendTime != "") ? autoSendTime.toInt() : autoSetBambuAmsCounter;
if (!saveJsonValue("/bambu_credentials.json", doc)) {
Serial.println("Fehler beim Speichern der Bambu-Credentials.");
@ -49,6 +58,8 @@ bool saveBambuCredentials(const String& ip, const String& serialnr, const String
bambu_ip = ip.c_str();
bambu_accesscode = accesscode.c_str();
bambu_serialnr = serialnr.c_str();
autoSendToBambu = autoSend;
autoSetBambuAmsCounter = autoSendTime.toInt();
vTaskDelay(100 / portTICK_PERIOD_MS);
if (!setupMqtt()) return false;
@ -64,14 +75,21 @@ bool loadBambuCredentials() {
String code = doc["bambu_accesscode"].as<String>();
String serial = doc["bambu_serialnr"].as<String>();
g_bambu_ip = ip;
g_bambu_accesscode = code;
g_bambu_serialnr = serial;
if (doc["autoSendToBambu"].is<bool>()) autoSendToBambu = doc["autoSendToBambu"].as<bool>();
if (doc["autoSendTime"].is<int>()) autoSetBambuAmsCounter = doc["autoSendTime"].as<int>();
ip.trim();
code.trim();
serial.trim();
// Dynamische Speicherallokation für die globalen Pointer
bambu_ip = strdup(ip.c_str());
bambu_accesscode = strdup(code.c_str());
bambu_serialnr = strdup(serial.c_str());
bambu_ip = g_bambu_ip.c_str();
bambu_accesscode = g_bambu_accesscode.c_str();
bambu_serialnr = g_bambu_serialnr.c_str();
report_topic = "device/" + String(bambu_serialnr) + "/report";
//request_topic = "device/" + String(bambu_serialnr) + "/request";
@ -81,19 +99,49 @@ bool loadBambuCredentials() {
return false;
}
String findFilamentIdx(String brand, String type) {
struct FilamentResult {
String key;
String type;
};
FilamentResult findFilamentIdx(String brand, String type) {
// JSON-Dokument für die Filament-Daten erstellen
JsonDocument doc;
// Laden der own_filaments.json
String ownFilament = "";
if (!loadJsonValue("/own_filaments.json", doc))
{
Serial.println("Fehler beim Laden der eigenen Filament-Daten");
}
else
{
// Durchsuche direkt nach dem Type als Schlüssel
if (doc[type].is<String>()) {
ownFilament = doc[type].as<String>();
}
doc.clear();
}
doc.clear();
// Laden der bambu_filaments.json
if (!loadJsonValue("/bambu_filaments.json", doc)) {
if (!loadJsonValue("/bambu_filaments.json", doc))
{
Serial.println("Fehler beim Laden der Filament-Daten");
return "GFL99"; // Fallback auf Generic PLA
return {"GFL99", "PLA"}; // Fallback auf Generic PLA
}
String searchKey;
// Wenn eigener Typ
if (ownFilament != "")
{
if (doc[ownFilament].is<String>())
{
return {ownFilament, doc[ownFilament].as<String>()};
}
}
// 1. Suche nach Brand + Type Kombination
// 1. Erst versuchen wir die exakte Brand + Type Kombination zu finden
String searchKey;
if (brand == "Bambu" || brand == "Bambulab") {
searchKey = "Bambu " + type;
} else if (brand == "PolyLite") {
@ -109,23 +157,46 @@ String findFilamentIdx(String brand, String type) {
// Durchsuche alle Einträge nach der Brand + Type Kombination
for (JsonPair kv : doc.as<JsonObject>()) {
if (kv.value().as<String>() == searchKey) {
return kv.key().c_str();
return {kv.key().c_str(), kv.value().as<String>()};
}
}
// 2. Wenn nicht gefunden, suche nach Generic + Type
searchKey = "Generic " + type;
// 2. Wenn nicht gefunden, zerlege den type String in Wörter und suche nach jedem Wort
// Sammle alle vorhandenen Filamenttypen aus der JSON
std::vector<String> knownTypes;
for (JsonPair kv : doc.as<JsonObject>()) {
if (kv.value().as<String>() == searchKey) {
return kv.key().c_str();
String value = kv.value().as<String>();
// Extrahiere den Typ ohne Markennamen
if (value.indexOf(" ") != -1) {
value = value.substring(value.indexOf(" ") + 1);
}
if (!value.isEmpty()) {
knownTypes.push_back(value);
}
}
// Zerlege den Input-Type in Wörter
String typeStr = type;
typeStr.trim();
// Durchsuche für jedes bekannte Filament, ob es im Input vorkommt
for (const String& knownType : knownTypes) {
if (typeStr.indexOf(knownType) != -1) {
// Suche nach diesem Typ in der Original-JSON
for (JsonPair kv : doc.as<JsonObject>()) {
String value = kv.value().as<String>();
if (value.indexOf(knownType) != -1) {
return {kv.key().c_str(), knownType};
}
}
}
}
// 3. Wenn immer noch nichts gefunden, gebe GFL99 zurück (Generic PLA)
return "GFL99";
return {"GFL99", "PLA"};
}
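The refactored lookup now resolves in four stages: own_filaments.json override, exact brand + type match, substring match against the known Bambu type names, and finally the Generic PLA fallback. A few illustrative calls (expected results follow from the JSON entries shown earlier in this diff; they are illustrations, not a test suite):

```cpp
// Illustrative usage of the lookup above.
void demoFilamentLookup() {
    FilamentResult a = findFilamentIdx("Bambu", "PETG Basic");
    // exact match on "Bambu PETG Basic"          -> {"GFG00", "Bambu PETG Basic"}

    FilamentResult b = findFilamentIdx("Prusament", "PETG");
    // no exact brand match, but "PETG" is a
    // known type substring                       -> a GFG* PETG entry

    FilamentResult c = findFilamentIdx("Acme", "Wood");
    // nothing matches                            -> {"GFL99", "PLA"} (Generic PLA fallback)
}
```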
bool sendMqttMessage(String payload) {
bool sendMqttMessage(const String& payload) {
Serial.println("Sending MQTT message");
Serial.println(payload);
if (client.publish(report_topic.c_str(), payload.c_str()))
@ -156,15 +227,22 @@ bool setBambuSpool(String payload) {
int minTemp = doc["nozzle_temp_min"];
int maxTemp = doc["nozzle_temp_max"];
String type = doc["type"].as<String>();
(type == "PLA+") ? type = "PLA" : type;
String brand = doc["brand"].as<String>();
String tray_info_idx = (doc["tray_info_idx"].as<String>() != "-1") ? doc["tray_info_idx"].as<String>() : "";
if (tray_info_idx == "") tray_info_idx = (brand != "" && type != "") ? findFilamentIdx(brand, type) : "";
if (tray_info_idx == "") {
if (brand != "" && type != "") {
FilamentResult result = findFilamentIdx(brand, type);
tray_info_idx = result.key;
type = result.type; // Aktualisiere den type mit dem gefundenen Basistyp
}
}
String setting_id = doc["bambu_setting_id"].as<String>();
String cali_idx = doc["cali_idx"].as<String>();
doc.clear();
doc["print"]["sequence_id"] = 0;
doc["print"]["sequence_id"] = "0";
doc["print"]["command"] = "ams_filament_setting";
doc["print"]["ams_id"] = amsId < 200 ? amsId : 255;
doc["print"]["tray_id"] = trayId < 200 ? trayId : 254;
@ -172,7 +250,7 @@ bool setBambuSpool(String payload) {
doc["print"]["nozzle_temp_min"] = minTemp;
doc["print"]["nozzle_temp_max"] = maxTemp;
doc["print"]["tray_type"] = type;
doc["print"]["cali_idx"] = (cali_idx != "") ? cali_idx : "";
//doc["print"]["cali_idx"] = (cali_idx != "") ? cali_idx : "";
doc["print"]["tray_info_idx"] = tray_info_idx;
doc["print"]["setting_id"] = setting_id;
@ -194,13 +272,13 @@ bool setBambuSpool(String payload) {
if (cali_idx != "") {
yield();
doc["print"]["sequence_id"] = 0;
doc["print"]["sequence_id"] = "0";
doc["print"]["command"] = "extrusion_cali_sel";
doc["print"]["filament_id"] = tray_info_idx;
doc["print"]["nozzle_diameter"] = "0.4";
doc["print"]["cali_idx"] = cali_idx.toInt();
doc["print"]["tray_id"] = trayId < 200 ? trayId : 254;
doc["print"]["ams_id"] = amsId < 200 ? amsId : 255;
//doc["print"]["ams_id"] = amsId < 200 ? amsId : 255;
// Serialize the JSON
String output;
@ -218,44 +296,120 @@ bool setBambuSpool(String payload) {
doc.clear();
yield();
}
/*
if (setting_id != "") {
yield();
doc["print"]["sequence_id"] = 0;
doc["print"]["command"] = "ams_filament_setting";
doc["print"]["nozzle_temp_min"] = minTemp;
doc["print"]["nozzle_temp_max"] = maxTemp;
doc["print"]["setting_id"] = setting_id;
doc["print"]["tray_color"] = color.length() == 8 ? color : color+"FF";
doc["print"]["ams_id"] = amsId < 200 ? amsId : 255;
doc["print"]["tray_id"] = trayId < 200 ? trayId : 254;
doc["print"]["tray_info_idx"] = tray_info_idx;
doc["print"]["tray_type"] = type;
// Serialize the JSON
String output;
serializeJson(doc, output);
return true;
}
if (sendMqttMessage(output)) {
Serial.println("Filament Setting successfully set");
void autoSetSpool(int spoolId, uint8_t trayId) {
// wenn neue spule erkannt und autoSetToBambu > 0
JsonDocument spoolInfo = fetchSingleSpoolInfo(spoolId);
if (!spoolInfo.isNull())
{
// AMS und TRAY id ergänzen
spoolInfo["amsId"] = 0;
spoolInfo["trayId"] = trayId;
Serial.println("Auto set spool");
Serial.println(spoolInfo.as<String>());
setBambuSpool(spoolInfo.as<String>());
oledShowMessage("Spool set");
}
// id wieder zurücksetzen damit abgeschlossen
autoSetToBambuSpoolId = 0;
}
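autoSetSpool() closes the loop for the new "Auto Send to Bambu" option: once a spool has been scanned and weighed, its Spoolman ID is parked in autoSetToBambuSpoolId, and the next detected tray change pushes it to the printer. The arming side (the scale/NFC flow) is not part of this excerpt; it presumably looks something like this:

```cpp
// Hypothetical arming sketch; the real scale/NFC flow lives elsewhere in the firmware.
void armAutoSend(int spoolmanSpoolId) {
    if (autoSendToBambu && spoolmanSpoolId > 0) {
        autoSetToBambuSpoolId = spoolmanSpoolId;
        // autoSetBambuAmsCounter (default 60 s, configurable on the settings page)
        // presumably bounds how long this request stays armed.
    }
}
```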
void updateAmsWsData(JsonDocument& doc, JsonArray& amsArray, int& ams_count, JsonObject& vtTray) {
// Fortfahren mit der bestehenden Verarbeitung, da Änderungen gefunden wurden
ams_count = amsArray.size();
for (int i = 0; i < ams_count && i < 16; i++) {
JsonObject amsObj = amsArray[i];
JsonArray trayArray = amsObj["tray"].as<JsonArray>();
ams_data[i].ams_id = i; // Setze die AMS-ID
for (int j = 0; j < trayArray.size() && j < 4; j++) { // Annahme: Maximal 4 Trays pro AMS
JsonObject trayObj = trayArray[j];
ams_data[i].trays[j].id = trayObj["id"].as<uint8_t>();
ams_data[i].trays[j].tray_info_idx = trayObj["tray_info_idx"].as<String>();
ams_data[i].trays[j].tray_type = trayObj["tray_type"].as<String>();
ams_data[i].trays[j].tray_sub_brands = trayObj["tray_sub_brands"].as<String>();
ams_data[i].trays[j].tray_color = trayObj["tray_color"].as<String>();
ams_data[i].trays[j].nozzle_temp_min = trayObj["nozzle_temp_min"].as<int>();
ams_data[i].trays[j].nozzle_temp_max = trayObj["nozzle_temp_max"].as<int>();
if (trayObj["tray_type"].as<String>() == "") ams_data[i].trays[j].setting_id = "";
ams_data[i].trays[j].cali_idx = trayObj["cali_idx"].as<String>();
}
}
// Setze ams_count auf die Anzahl der normalen AMS
ams_count = amsArray.size();
// Wenn externe Spule vorhanden, füge sie hinzu
if (doc["print"]["vt_tray"].is<JsonObject>()) {
//JsonObject vtTray = doc["print"]["vt_tray"];
int extIdx = ams_count; // Index für externe Spule
ams_data[extIdx].ams_id = 255; // Spezielle ID für externe Spule
ams_data[extIdx].trays[0].id = 254; // Spezielle ID für externes Tray
ams_data[extIdx].trays[0].tray_info_idx = vtTray["tray_info_idx"].as<String>();
ams_data[extIdx].trays[0].tray_type = vtTray["tray_type"].as<String>();
ams_data[extIdx].trays[0].tray_sub_brands = vtTray["tray_sub_brands"].as<String>();
ams_data[extIdx].trays[0].tray_color = vtTray["tray_color"].as<String>();
ams_data[extIdx].trays[0].nozzle_temp_min = vtTray["nozzle_temp_min"].as<int>();
ams_data[extIdx].trays[0].nozzle_temp_max = vtTray["nozzle_temp_max"].as<int>();
if (doc["print"]["vt_tray"]["tray_type"].as<String>() != "")
{
//ams_data[extIdx].trays[0].setting_id = vtTray["setting_id"].as<String>();
ams_data[extIdx].trays[0].cali_idx = vtTray["cali_idx"].as<String>();
}
else
{
Serial.println("Failed to set Filament setting");
return false;
ams_data[extIdx].trays[0].setting_id = "";
ams_data[extIdx].trays[0].cali_idx = "";
}
ams_count++; // Erhöhe ams_count für die externe Spule
}
doc.clear();
yield();
}
*/
// Erstelle JSON für WebSocket-Clients
JsonDocument wsDoc;
JsonArray wsArray = wsDoc.to<JsonArray>();
return true;
for (int i = 0; i < ams_count; i++) {
JsonObject amsObj = wsArray.add<JsonObject>();
amsObj["ams_id"] = ams_data[i].ams_id;
JsonArray trays = amsObj["tray"].to<JsonArray>();
int maxTrays = (ams_data[i].ams_id == 255) ? 1 : 4;
for (int j = 0; j < maxTrays; j++) {
JsonObject trayObj = trays.add<JsonObject>();
trayObj["id"] = ams_data[i].trays[j].id;
trayObj["tray_info_idx"] = ams_data[i].trays[j].tray_info_idx;
trayObj["tray_type"] = ams_data[i].trays[j].tray_type;
trayObj["tray_sub_brands"] = ams_data[i].trays[j].tray_sub_brands;
trayObj["tray_color"] = ams_data[i].trays[j].tray_color;
trayObj["nozzle_temp_min"] = ams_data[i].trays[j].nozzle_temp_min;
trayObj["nozzle_temp_max"] = ams_data[i].trays[j].nozzle_temp_max;
trayObj["setting_id"] = ams_data[i].trays[j].setting_id;
trayObj["cali_idx"] = ams_data[i].trays[j].cali_idx;
}
}
serializeJson(wsArray, amsJsonData);
wsDoc.clear();
Serial.println("AMS data updated");
sendAmsData(nullptr);
}
// init
void mqtt_callback(char* topic, byte* payload, unsigned int length) {
String message;
for (int i = 0; i < length; i++) {
message += (char)payload[i];
}
@ -263,16 +417,20 @@ void mqtt_callback(char* topic, byte* payload, unsigned int length) {
// JSON-Dokument parsen
JsonDocument doc;
DeserializationError error = deserializeJson(doc, message);
if (error) {
message = "";
if (error)
{
Serial.print("Fehler beim Parsen des JSON: ");
Serial.println(error.c_str());
return;
}
// Prüfen, ob "print->upgrade_state" und "print.ams.ams" existieren
if (doc["print"]["upgrade_state"].is<String>()) {
if (doc["print"]["upgrade_state"].is<JsonObject>() || (doc["print"]["command"].is<String>() && doc["print"]["command"] == "push_status"))
{
// Prüfen ob AMS-Daten vorhanden sind
if (!doc["print"]["ams"].is<String>() || !doc["print"]["ams"]["ams"].is<String>()) {
if (!doc["print"]["ams"].is<JsonObject>() || !doc["print"]["ams"]["ams"].is<JsonArray>())
{
return;
}
@ -304,154 +462,81 @@ void mqtt_callback(char* topic, byte* payload, unsigned int length) {
// Vergleiche die Trays
for (int j = 0; j < trayArray.size() && j < 4 && !hasChanges; j++) {
JsonObject trayObj = trayArray[j];
if (trayObj["tray_type"].as<String>() == "") ams_data[storedIndex].trays[j].setting_id = "";
if (trayObj["setting_id"].isNull()) trayObj["setting_id"] = "";
if (trayObj["tray_info_idx"].as<String>() != ams_data[storedIndex].trays[j].tray_info_idx ||
trayObj["tray_type"].as<String>() != ams_data[storedIndex].trays[j].tray_type ||
trayObj["tray_color"].as<String>() != ams_data[storedIndex].trays[j].tray_color ||
(trayObj["setting_id"].as<String>() != "" && trayObj["setting_id"].as<String>() != ams_data[storedIndex].trays[j].setting_id) ||
trayObj["cali_idx"].as<String>() != ams_data[storedIndex].trays[j].cali_idx) {
hasChanges = true;
if (autoSendToBambu && autoSetToBambuSpoolId > 0 && hasChanges)
{
autoSetSpool(autoSetToBambuSpoolId, ams_data[storedIndex].trays[j].id);
}
break;
}
}
}
// Prüfe die externe Spule
if (!hasChanges && doc["print"]["vt_tray"].is<String>()) {
JsonObject vtTray = doc["print"]["vt_tray"];
bool foundExternal = false;
if (doc["print"]["vt_tray"].is<JsonObject>()) {
for (int i = 0; i < ams_count; i++) {
if (ams_data[i].ams_id == 255) {
foundExternal = true;
if (vtTray["tray_type"].as<String>() == "") ams_data[i].trays[0].setting_id = "";
if (vtTray["setting_id"].isNull()) vtTray["setting_id"] = "";
if (vtTray["tray_info_idx"].as<String>() != ams_data[i].trays[0].tray_info_idx ||
vtTray["tray_type"].as<String>() != ams_data[i].trays[0].tray_type ||
vtTray["tray_color"].as<String>() != ams_data[i].trays[0].tray_color ||
vtTray["cali_idx"].as<String>() != ams_data[i].trays[0].cali_idx) {
(vtTray["setting_id"].as<String>() != "" && vtTray["setting_id"].as<String>() != ams_data[i].trays[0].setting_id) ||
(vtTray["tray_type"].as<String>() != "" && vtTray["cali_idx"].as<String>() != ams_data[i].trays[0].cali_idx)) {
hasChanges = true;
if (autoSendToBambu && autoSetToBambuSpoolId > 0 && hasChanges)
{
autoSetSpool(autoSetToBambuSpoolId, 254);
}
}
break;
}
}
if (!foundExternal) hasChanges = true;
}
if (!hasChanges) return;
// Fortfahren mit der bestehenden Verarbeitung, da Änderungen gefunden wurden
ams_count = amsArray.size();
for (int i = 0; i < ams_count && i < 16; i++) {
JsonObject amsObj = amsArray[i];
JsonArray trayArray = amsObj["tray"].as<JsonArray>();
ams_data[i].ams_id = i; // Setze die AMS-ID
for (int j = 0; j < trayArray.size() && j < 4; j++) { // Annahme: Maximal 4 Trays pro AMS
JsonObject trayObj = trayArray[j];
ams_data[i].trays[j].id = trayObj["id"].as<uint8_t>();
ams_data[i].trays[j].tray_info_idx = trayObj["tray_info_idx"].as<String>();
ams_data[i].trays[j].tray_type = trayObj["tray_type"].as<String>();
ams_data[i].trays[j].tray_sub_brands = trayObj["tray_sub_brands"].as<String>();
ams_data[i].trays[j].tray_color = trayObj["tray_color"].as<String>();
ams_data[i].trays[j].nozzle_temp_min = trayObj["nozzle_temp_min"].as<int>();
ams_data[i].trays[j].nozzle_temp_max = trayObj["nozzle_temp_max"].as<int>();
ams_data[i].trays[j].setting_id = trayObj["setting_id"].as<String>();
ams_data[i].trays[j].cali_idx = trayObj["cali_idx"].as<String>();
}
updateAmsWsData(doc, amsArray, ams_count, vtTray);
}
// Setze ams_count auf die Anzahl der normalen AMS
ams_count = amsArray.size();
// Wenn externe Spule vorhanden, füge sie hinzu
if (doc["print"]["vt_tray"].is<String>()) {
JsonObject vtTray = doc["print"]["vt_tray"];
int extIdx = ams_count; // Index für externe Spule
ams_data[extIdx].ams_id = 255; // Spezielle ID für externe Spule
ams_data[extIdx].trays[0].id = 254; // Spezielle ID für externes Tray
ams_data[extIdx].trays[0].tray_info_idx = vtTray["tray_info_idx"].as<String>();
ams_data[extIdx].trays[0].tray_type = vtTray["tray_type"].as<String>();
ams_data[extIdx].trays[0].tray_sub_brands = vtTray["tray_sub_brands"].as<String>();
ams_data[extIdx].trays[0].tray_color = vtTray["tray_color"].as<String>();
ams_data[extIdx].trays[0].nozzle_temp_min = vtTray["nozzle_temp_min"].as<int>();
ams_data[extIdx].trays[0].nozzle_temp_max = vtTray["nozzle_temp_max"].as<int>();
ams_data[extIdx].trays[0].setting_id = vtTray["setting_id"].as<String>();
ams_data[extIdx].trays[0].cali_idx = vtTray["cali_idx"].as<String>();
ams_count++; // Erhöhe ams_count für die externe Spule
}
// Sende die aktualisierten AMS-Daten
//sendAmsData(nullptr);
// Erstelle JSON für WebSocket-Clients
JsonDocument wsDoc;
JsonArray wsArray = wsDoc.to<JsonArray>();
for (int i = 0; i < ams_count; i++) {
JsonObject amsObj = wsArray.add<JsonObject>();
amsObj["ams_id"] = ams_data[i].ams_id;
JsonArray trays = amsObj["tray"].to<JsonArray>();
int maxTrays = (ams_data[i].ams_id == 255) ? 1 : 4;
for (int j = 0; j < maxTrays; j++) {
JsonObject trayObj = trays.add<JsonObject>();
trayObj["id"] = ams_data[i].trays[j].id;
trayObj["tray_info_idx"] = ams_data[i].trays[j].tray_info_idx;
trayObj["tray_type"] = ams_data[i].trays[j].tray_type;
trayObj["tray_sub_brands"] = ams_data[i].trays[j].tray_sub_brands;
trayObj["tray_color"] = ams_data[i].trays[j].tray_color;
trayObj["nozzle_temp_min"] = ams_data[i].trays[j].nozzle_temp_min;
trayObj["nozzle_temp_max"] = ams_data[i].trays[j].nozzle_temp_max;
trayObj["setting_id"] = ams_data[i].trays[j].setting_id;
trayObj["cali_idx"] = ams_data[i].trays[j].cali_idx;
}
}
serializeJson(wsArray, amsJsonData);
sendAmsData(nullptr);
}
// Neue Bedingung für ams_filament_setting
else if (doc["print"]["command"] == "ams_filament_setting") {
if (doc["print"]["command"] == "ams_filament_setting") {
int amsId = doc["print"]["ams_id"].as<int>();
int trayId = doc["print"]["tray_id"].as<int>();
String settingId = doc["print"]["setting_id"].as<String>();
String settingId = (doc["print"]["setting_id"].is<String>()) ? doc["print"]["setting_id"].as<String>() : "";
// Finde das entsprechende AMS und Tray
for (int i = 0; i < ams_count; i++) {
if (ams_data[i].ams_id == amsId) {
// Update setting_id im entsprechenden Tray
ams_data[i].trays[trayId].setting_id = settingId;
// Erstelle neues JSON für WebSocket-Clients
JsonDocument wsDoc;
JsonArray wsArray = wsDoc.to<JsonArray>();
if (trayId == 254)
{
// Suche AMS mit ID 255 (externe Spule)
for (int j = 0; j < ams_count; j++) {
JsonObject amsObj = wsArray.add<JsonObject>();
amsObj["ams_id"] = ams_data[j].ams_id;
JsonArray trays = amsObj["tray"].to<JsonArray>();
int maxTrays = (ams_data[j].ams_id == 255) ? 1 : 4;
for (int k = 0; k < maxTrays; k++) {
JsonObject trayObj = trays.add<JsonObject>();
trayObj["id"] = ams_data[j].trays[k].id;
trayObj["tray_info_idx"] = ams_data[j].trays[k].tray_info_idx;
trayObj["tray_type"] = ams_data[j].trays[k].tray_type;
trayObj["tray_sub_brands"] = ams_data[j].trays[k].tray_sub_brands;
trayObj["tray_color"] = ams_data[j].trays[k].tray_color;
trayObj["nozzle_temp_min"] = ams_data[j].trays[k].nozzle_temp_min;
trayObj["nozzle_temp_max"] = ams_data[j].trays[k].nozzle_temp_max;
trayObj["setting_id"] = ams_data[j].trays[k].setting_id;
trayObj["cali_idx"] = ams_data[j].trays[k].cali_idx;
if (ams_data[j].ams_id == 255) {
ams_data[j].trays[0].setting_id = settingId;
break;
}
}
// Aktualisiere das globale amsJsonData
amsJsonData = "";
serializeJson(wsArray, amsJsonData);
}
else
{
ams_data[i].trays[trayId].setting_id = settingId;
}
// Sende an WebSocket Clients
Serial.println("Filament setting updated");
sendAmsData(nullptr);
break;
}
@ -461,15 +546,16 @@ void mqtt_callback(char* topic, byte* payload, unsigned int length) {
void reconnect() {
// Loop until we're reconnected
uint8_t retries = 0;
while (!client.connected()) {
Serial.print("Attempting MQTT connection...");
Serial.println("Attempting MQTT re/connection...");
bambu_connected = false;
oledShowTopRow();
// Attempt to connect
if (client.connect(bambu_serialnr, bambu_username, bambu_accesscode)) {
Serial.println("... re-connected");
// ... and resubscribe
Serial.println("MQTT re/connected");
client.subscribe(report_topic.c_str());
bambu_connected = true;
oledShowTopRow();
@ -479,14 +565,23 @@ void reconnect() {
Serial.println(" try again in 5 seconds");
bambu_connected = false;
oledShowTopRow();
// Wait 5 seconds before retrying
yield();
vTaskDelay(5000 / portTICK_PERIOD_MS);
if (retries > 5) {
Serial.println("Disable Bambu MQTT Task after 5 retries");
//vTaskSuspend(BambuMqttTask);
vTaskDelete(BambuMqttTask);
break;
}
retries++;
}
}
}
void mqtt_loop(void * parameter) {
Serial.println("Bambu MQTT Task gestartet");
for(;;) {
if (pauseBambuMqttTask) {
vTaskDelay(10000);
@ -500,6 +595,7 @@ void mqtt_loop(void * parameter) {
}
client.loop();
yield();
esp_task_wdt_reset();
vTaskDelay(100);
}
}
@ -507,7 +603,6 @@ void mqtt_loop(void * parameter) {
bool setupMqtt() {
// Wenn Bambu Daten vorhanden
bool success = loadBambuCredentials();
vTaskDelay(100 / portTICK_PERIOD_MS);
if (!success) {
Serial.println("Failed to load Bambu credentials");
@ -540,7 +635,7 @@ bool setupMqtt() {
xTaskCreatePinnedToCore(
mqtt_loop, /* Function to implement the task */
"BambuMqtt", /* Name of the task */
10000, /* Stack size in words */
8192, /* Stack size in words */
NULL, /* Task input parameter */
mqttTaskPrio, /* Priority of the task */
&BambuMqttTask, /* Task handle. */
@ -571,6 +666,7 @@ bool setupMqtt() {
void bambu_restart() {
if (BambuMqttTask) {
vTaskDelete(BambuMqttTask);
delay(10);
}
setupMqtt();
}

View File

@ -28,9 +28,11 @@ extern bool bambu_connected;
extern int ams_count;
extern AMSData ams_data[MAX_AMS];
extern bool autoSendToBambu;
extern int autoSetToBambuSpoolId;
bool loadBambuCredentials();
bool saveBambuCredentials(const String& bambu_ip, const String& bambu_serialnr, const String& bambu_accesscode);
bool saveBambuCredentials(const String& bambu_ip, const String& bambu_serialnr, const String& bambu_accesscode, const bool autoSend, const String& autoSendTime);
bool setupMqtt();
void mqtt_loop(void * parameter);
bool setBambuSpool(String payload);

View File

@ -1,8 +1,8 @@
#include "commonFS.h"
#include <SPIFFS.h>
#include <LittleFS.h>
bool saveJsonValue(const char* filename, const JsonDocument& doc) {
File file = SPIFFS.open(filename, "w");
File file = LittleFS.open(filename, "w");
if (!file) {
Serial.print("Fehler beim Öffnen der Datei zum Schreiben: ");
Serial.println(filename);
@ -20,7 +20,7 @@ bool saveJsonValue(const char* filename, const JsonDocument& doc) {
}
bool loadJsonValue(const char* filename, JsonDocument& doc) {
File file = SPIFFS.open(filename, "r");
File file = LittleFS.open(filename, "r");
if (!file) {
Serial.print("Fehler beim Öffnen der Datei zum Lesen: ");
Serial.println(filename);
@ -36,12 +36,12 @@ bool loadJsonValue(const char* filename, JsonDocument& doc) {
return true;
}
void initializeSPIFFS() {
if (!SPIFFS.begin(true, "/spiffs", 10, "spiffs")) {
Serial.println("SPIFFS Mount Failed");
void initializeFileSystem() {
if (!LittleFS.begin(true)) {
Serial.println("LittleFS Mount Failed");
return;
}
Serial.printf("SPIFFS Total: %u bytes\n", SPIFFS.totalBytes());
Serial.printf("SPIFFS Used: %u bytes\n", SPIFFS.usedBytes());
Serial.printf("SPIFFS Free: %u bytes\n", SPIFFS.totalBytes() - SPIFFS.usedBytes());
Serial.printf("LittleFS Total: %u bytes\n", LittleFS.totalBytes());
Serial.printf("LittleFS Used: %u bytes\n", LittleFS.usedBytes());
Serial.printf("LittleFS Free: %u bytes\n", LittleFS.totalBytes() - LittleFS.usedBytes());
}
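A minimal usage sketch for the two helpers above, assuming the /spoolman_url.json file used elsewhere in the project; the JSON key and value are assumptions for illustration only.
JsonDocument doc;
doc["url"] = "http://192.168.1.50:7912"; // assumed key and example value
if (!saveJsonValue("/spoolman_url.json", doc)) {
    Serial.println("Saving /spoolman_url.json failed");
}

JsonDocument loaded;
if (loadJsonValue("/spoolman_url.json", loaded)) {
    Serial.println(loaded["url"].as<String>());
}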

View File

@ -2,11 +2,11 @@
#define COMMONFS_H
#include <Arduino.h>
#include <SPIFFS.h>
#include <ArduinoJson.h>
#include <LittleFS.h>
bool saveJsonValue(const char* filename, const JsonDocument& doc);
bool loadJsonValue(const char* filename, JsonDocument& doc);
void initializeSPIFFS();
void initializeFileSystem();
#endif

View File

@ -40,6 +40,10 @@ const uint8_t webserverPort = 80;
const char* apiUrl = "/api/v1";
// ***** API
// ***** Bambu Auto Set Spool
uint8_t autoSetBambuAmsCounter = 60;
// ***** Bambu Auto Set Spool
// ***** Task Prios
uint8_t rfidTaskCore = 1;
uint8_t rfidTaskPrio = 1;

View File

@ -23,6 +23,8 @@ extern const uint8_t OLED_DATA_END;
extern const char* apiUrl;
extern const uint8_t webserverPort;
extern uint8_t autoSetBambuAmsCounter;
extern const unsigned char wifi_on[];
extern const unsigned char wifi_off[];
extern const unsigned char cloud_on[];

View File

@ -20,9 +20,9 @@ void setupDisplay() {
// the library initializes this with an Adafruit splash screen.
display.setTextColor(WHITE);
display.display();
delay(1000); // Pause for 2 seconds
oledShowTopRow();
delay(2000);
oledShowMessage("FilaMan v" + String(VERSION));
vTaskDelay(2000 / portTICK_PERIOD_MS);
}
void oledclearline() {
@ -117,7 +117,6 @@ std::vector<String> splitTextIntoLines(String text, uint8_t textSize) {
lines.push_back(currentLine);
}
Serial.println(lines.size());
return lines;
}
@ -140,8 +139,9 @@ void oledShowMultilineMessage(String message, uint8_t size) {
int totalHeight = lines.size() * lineHeight;
int startY = OLED_DATA_START + ((OLED_DATA_END - OLED_DATA_START - totalHeight) / 2);
uint8_t lineDistance = (lines.size() == 2) ? 5 : 0;
for (size_t i = 0; i < lines.size(); i++) {
display.setCursor(oled_center_h(lines[i]), startY + (i * lineHeight));
display.setCursor(oled_center_h(lines[i]), startY + (i * lineHeight) + (i == 1 ? lineDistance : 0));
display.print(lines[i]);
}

View File

@ -1,6 +1,4 @@
#include <Arduino.h>
#include <DNSServer.h>
#include <ESPmDNS.h>
#include <Wire.h>
#include <WiFi.h>
@ -19,8 +17,14 @@
void setup() {
Serial.begin(115200);
uint64_t chipid;
chipid = ESP.getEfuseMac(); // The chip ID is essentially its MAC address (length: 6 bytes).
Serial.printf("ESP32 Chip ID = %04X", (uint16_t)(chipid >> 32)); // print high 2 bytes
Serial.printf("%08X\n", (uint32_t)chipid); // print low 4 bytes.
// Initialize the file system
initializeSPIFFS();
initializeFileSystem();
// Start Display
setupDisplay();
@ -29,7 +33,6 @@ void setup() {
initWiFi();
// Webserver
Serial.println("Starte Webserver");
setupWebserver(server);
// Spoolman API
@ -37,22 +40,27 @@ void setup() {
initSpoolman();
// Bambu MQTT
// bambu.cpp
setupMqtt();
// mDNS
Serial.println("Starte MDNS");
if (!MDNS.begin("filaman")) { // Set the hostname to "esp32.local"
Serial.println("Error setting up MDNS responder!");
while(1) {
delay(1000);
}
}
Serial.println("mDNS responder started");
// NFC Reader
startNfc();
start_scale();
uint8_t scaleCalibrated = start_scale();
if (scaleCalibrated == 3) {
oledShowMessage("Scale not calibrated!");
for (uint16_t i = 0; i < 50000; i++) {
yield();
vTaskDelay(pdMS_TO_TICKS(1));
esp_task_wdt_reset();
}
} else if (scaleCalibrated == 0) {
oledShowMessage("HX711 not found");
for (uint16_t i = 0; i < 50000; i++) {
yield();
vTaskDelay(pdMS_TO_TICKS(1));
esp_task_wdt_reset();
}
}
// Initialize the WDT with a 10 second timeout
bool panic = true; // If true, a WDT timeout triggers a system panic
@ -66,45 +74,85 @@ void setup() {
}
/**
* Safe interval check that handles millis() overflow
* @param currentTime Current millis() value
* @param lastTime Last recorded time
* @param interval Desired interval in milliseconds
* @return True if interval has elapsed
*/
bool intervalElapsed(unsigned long currentTime, unsigned long &lastTime, unsigned long interval) {
if (currentTime - lastTime >= interval || currentTime < lastTime) {
lastTime = currentTime;
return true;
}
return false;
}
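A short usage sketch with the globals defined just below; because the comparison is done on unsigned values and lastTime is reset whenever currentTime has wrapped, the check stays correct across the millis() overflow after roughly 49.7 days.
unsigned long now = millis();
if (intervalElapsed(now, lastWeightReadTime, weightReadInterval)) {
    // Runs at most once per weightReadInterval; lastTime was already updated inside the helper.
}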
unsigned long lastWeightReadTime = 0;
const unsigned long weightReadInterval = 1000; // 1 second
unsigned long lastAmsSendTime = 0;
const unsigned long amsSendInterval = 60000; // 1 minute
unsigned long lastAutoSetBambuAmsTime = 0;
const unsigned long autoSetBambuAmsInterval = 1000; // 1 second
uint8_t autoAmsCounter = 0;
uint8_t weightSend = 0;
int16_t lastWeight = 0;
uint8_t wifiErrorCounter = 0;
unsigned long lastWifiCheckTime = 0;
const unsigned long wifiCheckInterval = 60000; // check every 60 seconds (60000 ms)
// ##### PROGRAM START #####
void loop() {
/*
// Überprüfe den WLAN-Status
if (WiFi.status() != WL_CONNECTED) {
wifiErrorCounter++;
wifiOn = false;
} else {
wifiErrorCounter = 0;
wifiOn = true;
}
if (wifiErrorCounter > 20) ESP.restart();
*/
unsigned long currentMillis = millis();
// Send AMS data at least once per minute
if (currentMillis - lastAmsSendTime >= amsSendInterval) {
lastAmsSendTime = currentMillis;
sendAmsData(nullptr);
// Periodically check the WiFi connection
if (intervalElapsed(currentMillis, lastWifiCheckTime, wifiCheckInterval)) {
checkWiFiConnection();
}
// If Bambu auto-set spool is active
if (autoSendToBambu && autoSetToBambuSpoolId > 0) {
if (intervalElapsed(currentMillis, lastAutoSetBambuAmsTime, autoSetBambuAmsInterval))
{
if (hasReadRfidTag == 0)
{
lastAutoSetBambuAmsTime = currentMillis;
oledShowMessage("Auto Set " + String(autoSetBambuAmsCounter - autoAmsCounter) + "s");
autoAmsCounter++;
if (autoAmsCounter >= autoSetBambuAmsCounter)
{
autoSetToBambuSpoolId = 0;
autoAmsCounter = 0;
oledShowWeight(weight);
}
}
else
{
autoAmsCounter = 0;
}
}
}
// If the scale is not calibrated
if (scaleCalibrated == 3)
{
oledShowMessage("Scale not calibrated!");
vTaskDelay(5000 / portTICK_PERIOD_MS);
yield();
esp_task_wdt_reset();
return;
}
// Show the weight on the display
if (pauseMainTask == 0 && weight != lastWeight && hasReadRfidTag == 0)
if (pauseMainTask == 0 && weight != lastWeight && hasReadRfidTag == 0 && (!autoSendToBambu || autoSetToBambuSpoolId == 0))
{
(weight < 0) ? oledShowMessage("!! -1") : oledShowWeight(weight);
(weight < 2) ? ((weight < -2) ? oledShowMessage("!! -0") : oledShowWeight(0)) : oledShowWeight(weight);
}
// If the timer has elapsed and no RFID tag is currently being written
if (currentMillis - lastWeightReadTime >= weightReadInterval && hasReadRfidTag < 3)
{
@ -148,6 +196,12 @@ void loop() {
oledShowIcon("success");
vTaskDelay(2000 / portTICK_PERIOD_MS);
weightSend = 1;
autoSetToBambuSpoolId = spoolId.toInt();
if (octoEnabled)
{
updateSpoolOcto(autoSetToBambuSpoolId);
}
}
else
{

View File

@ -44,8 +44,6 @@ void payloadToJson(uint8_t *data) {
DeserializationError error = deserializeJson(doc, jsonString);
if (!error) {
const char* version = doc["version"];
const char* protocol = doc["protocol"];
const char* color_hex = doc["color_hex"];
const char* type = doc["type"];
int min_temp = doc["min_temp"];
@ -55,8 +53,6 @@ void payloadToJson(uint8_t *data) {
Serial.println();
Serial.println("-----------------");
Serial.println("JSON-Parsed Data:");
Serial.println(version);
Serial.println(protocol);
Serial.println(color_hex);
Serial.println(type);
Serial.println(min_temp);
@ -93,8 +89,16 @@ bool formatNdefTag() {
return success;
}
uint16_t readTagSize()
{
uint8_t buffer[4];
memset(buffer, 0, 4);
nfc.ntag2xx_ReadPage(3, buffer);
return buffer[2]*8;
}
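readTagSize() reads page 3 of the tag, whose third byte is the NDEF capability container size field; multiplied by 8 it gives the usable NDEF area. Assuming standard NTAG2xx values (taken from the NXP datasheets, not from this source), that works out to for example:
// Capability container byte -> size returned by readTagSize():
// NTAG213: 0x12 * 8 = 144 bytes
// NTAG215: 0x3E * 8 = 496 bytes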
uint8_t ntag2xx_WriteNDEF(const char *payload) {
uint8_t tagSize = 240; // 144 bytes is maximum for NTAG213
uint16_t tagSize = readTagSize();
Serial.print("Tag Size: ");Serial.println(tagSize);
uint8_t pageBuffer[4] = {0, 0, 0, 0};
@ -136,6 +140,8 @@ uint8_t ntag2xx_WriteNDEF(const char *payload) {
if (combinedData == NULL)
{
Serial.println("Fehler: Nicht genug Speicher vorhanden.");
oledShowMessage("Tag too small");
vTaskDelay(2000 / portTICK_PERIOD_MS);
return 0;
}
@ -238,10 +244,12 @@ void writeJsonToTag(void *parameter) {
hasReadRfidTag = 3;
vTaskSuspend(RfidReaderTask);
vTaskDelay(500 / portTICK_PERIOD_MS);
vTaskDelay(50 / portTICK_PERIOD_MS);
//pauseBambuMqttTask = true;
// Update the website when the status changes
sendNfcData(nullptr);
vTaskDelay(100 / portTICK_PERIOD_MS);
oledShowMessage("Waiting for NFC-Tag");
// Wait 10sec for tag
@ -331,7 +339,7 @@ void startWriteJsonToTag(const char* payload) {
xTaskCreate(
writeJsonToTag, // Task-Funktion
"WriteJsonToTagTask", // Task-Name
4096, // Stack size in bytes
5115, // Stack size in bytes
(void*)payloadCopy, // Parameter
rfidWriteTaskPrio, // Priorität
NULL // Task-Handle (nicht benötigt)
@ -367,21 +375,19 @@ void scanRfidTask(void * parameter) {
if (uidLength == 7)
{
uint8_t data[256];
uint16_t tagSize = readTagSize();
if(tagSize > 0)
{
// Create a buffer depending on the size of the tag
uint8_t* data = (uint8_t*)malloc(tagSize);
memset(data, 0, tagSize);
// We probably have an NTAG2xx card (though it could be Ultralight as well)
Serial.println("Seems to be an NTAG2xx tag (7 byte UID)");
for (uint8_t i = 0; i < 45; i++) {
/*
if (i < uidLength) {
uidString += String(uid[i], HEX);
if (i < uidLength - 1) {
uidString += ":"; // Optional: Trennzeichen hinzufügen
}
}
*/
if (!nfc.mifareclassic_ReadDataBlock(i, data + (i - 4) * 4))
uint8_t numPages = readTagSize()/4;
for (uint8_t i = 4; i < 4+numPages; i++) {
if (!nfc.ntag2xx_ReadPage(i, data+(i-4) * 4))
{
break; // Stop if reading fails
}
@ -407,6 +413,13 @@ void scanRfidTask(void * parameter) {
hasReadRfidTag = 1;
}
free(data);
}
else
{
oledShowMessage("NFC-Tag read error");
hasReadRfidTag = 2;
}
}
else
{
@ -420,7 +433,7 @@ void scanRfidTask(void * parameter) {
//uidString = "";
nfcJsonData = "";
Serial.println("Tag entfernt");
oledShowWeight(0);
if (!autoSendToBambu) oledShowWeight(weight);
}
// Update the website when the status changes
@ -456,7 +469,7 @@ void startNfc() {
BaseType_t result = xTaskCreatePinnedToCore(
scanRfidTask, /* Function to implement the task */
"RfidReader", /* Name of the task */
10000, /* Stack size in words */
5115, /* Stack size in words */
NULL, /* Task input parameter */
rfidTaskPrio, /* Priority of the task */
&RfidReaderTask, /* Task handle. */

src/ota.cpp (new file, 243 lines)
View File

@ -0,0 +1,243 @@
#include <Arduino.h>
#include <website.h>
#include <commonFS.h>
// Global variables for config backups
String bambuCredentialsBackup;
String spoolmanUrlBackup;
// Global variable for the update type
static int currentUpdateCommand = 0;
// Global update variables
static size_t updateTotalSize = 0;
static size_t updateWritten = 0;
static bool isSpiffsUpdate = false;
/**
* Compares two version strings and determines if version1 is less than version2
*
* @param version1 First version string (format: x.y.z)
* @param version2 Second version string (format: x.y.z)
* @return true if version1 is less than version2
*/
bool isVersionLessThan(const String& version1, const String& version2) {
int major1 = 0, minor1 = 0, patch1 = 0;
int major2 = 0, minor2 = 0, patch2 = 0;
// Parse version1
sscanf(version1.c_str(), "%d.%d.%d", &major1, &minor1, &patch1);
// Parse version2
sscanf(version2.c_str(), "%d.%d.%d", &major2, &minor2, &patch2);
// Compare major version
if (major1 < major2) return true;
if (major1 > major2) return false;
// Major versions equal, compare minor
if (minor1 < minor2) return true;
if (minor1 > minor2) return false;
// Minor versions equal, compare patch
return patch1 < patch2;
}
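A few spot checks of the comparison above (illustrative calls, not part of the source):
isVersionLessThan("1.3.5", "1.4.0"); // true:  same major, minor 3 < 4
isVersionLessThan("1.4.0", "1.4.0"); // false: versions are equal
isVersionLessThan("2.0.0", "1.9.9"); // false: major 2 > 1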
void backupJsonConfigs() {
// Bambu Credentials backup
if (LittleFS.exists("/bambu_credentials.json")) {
File file = LittleFS.open("/bambu_credentials.json", "r");
if (file) {
bambuCredentialsBackup = file.readString();
file.close();
Serial.println("Bambu credentials backed up");
}
}
// Spoolman URL backup
if (LittleFS.exists("/spoolman_url.json")) {
File file = LittleFS.open("/spoolman_url.json", "r");
if (file) {
spoolmanUrlBackup = file.readString();
file.close();
Serial.println("Spoolman URL backed up");
}
}
}
void restoreJsonConfigs() {
// Restore Bambu credentials
if (bambuCredentialsBackup.length() > 0) {
File file = LittleFS.open("/bambu_credentials.json", "w");
if (file) {
file.print(bambuCredentialsBackup);
file.close();
Serial.println("Bambu credentials restored");
}
bambuCredentialsBackup = ""; // Clear backup
}
// Restore Spoolman URL
if (spoolmanUrlBackup.length() > 0) {
File file = LittleFS.open("/spoolman_url.json", "w");
if (file) {
file.print(spoolmanUrlBackup);
file.close();
Serial.println("Spoolman URL restored");
}
spoolmanUrlBackup = ""; // Clear backup
}
}
void espRestart() {
yield();
vTaskDelay(5000 / portTICK_PERIOD_MS);
ESP.restart();
}
void sendUpdateProgress(int progress, const char* status = nullptr, const char* message = nullptr) {
static int lastSentProgress = -1;
// Avoid sending updates too frequently
if (progress == lastSentProgress && !status && !message) {
return;
}
String progressMsg = "{\"type\":\"updateProgress\",\"progress\":" + String(progress);
if (status) {
progressMsg += ",\"status\":\"" + String(status) + "\"";
}
if (message) {
progressMsg += ",\"message\":\"" + String(message) + "\"";
}
progressMsg += "}";
if (progress >= 100) {
// Send the completion message only once
ws.textAll("{\"type\":\"updateProgress\",\"progress\":100,\"status\":\"success\",\"message\":\"Update successful! Restarting device...\"}");
delay(50);
}
// Send important updates several times, with a delay in between
if (status || abs(progress - lastSentProgress) >= 10 || progress == 100) {
for (int i = 0; i < 2; i++) {
ws.textAll(progressMsg);
delay(100); // longer delay between messages
}
} else {
ws.textAll(progressMsg);
delay(50);
}
lastSentProgress = progress;
}
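The WebSocket frames assembled above have the following JSON shape; the progress values are illustrative, and status/message only appear when they were passed in:
{"type":"updateProgress","progress":42}
{"type":"updateProgress","progress":50,"status":"uploading"}
{"type":"updateProgress","progress":100,"status":"success","message":"Update successful! Restarting device..."}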
void handleUpdate(AsyncWebServer &server) {
AsyncCallbackWebHandler* updateHandler = new AsyncCallbackWebHandler();
updateHandler->setUri("/update");
updateHandler->setMethod(HTTP_POST);
// Reject the update if the current version is older than the defined minimum tool version (a full upgrade is required first)
if (isVersionLessThan(VERSION, TOOLDVERSION)) {
updateHandler->onRequest([](AsyncWebServerRequest *request) {
request->send(400, "application/json",
"{\"success\":false,\"message\":\"Your current version is too old. Please perform a full upgrade.\"}");
});
server.addHandler(updateHandler);
return;
}
updateHandler->onUpload([](AsyncWebServerRequest *request, String filename,
size_t index, uint8_t *data, size_t len, bool final) {
if (!index) {
updateTotalSize = request->contentLength();
updateWritten = 0;
isSpiffsUpdate = (filename.indexOf("website") > -1);
if (isSpiffsUpdate) {
// Back up configs before the update
sendUpdateProgress(0, "backup", "Backing up configurations...");
delay(200);
backupJsonConfigs();
delay(200);
const esp_partition_t *partition = esp_partition_find_first(ESP_PARTITION_TYPE_DATA, ESP_PARTITION_SUBTYPE_DATA_SPIFFS, NULL);
if (!partition || !Update.begin(partition->size, U_SPIFFS)) {
request->send(400, "application/json", "{\"success\":false,\"message\":\"Update initialization failed\"}");
return;
}
sendUpdateProgress(5, "starting", "Starting SPIFFS update...");
delay(200);
} else {
if (!Update.begin(updateTotalSize)) {
request->send(400, "application/json", "{\"success\":false,\"message\":\"Update initialization failed\"}");
return;
}
sendUpdateProgress(0, "starting", "Starting firmware update...");
delay(200);
}
}
if (len) {
if (Update.write(data, len) != len) {
request->send(400, "application/json", "{\"success\":false,\"message\":\"Write failed\"}");
return;
}
updateWritten += len;
int currentProgress;
// Calculate progress based on the update type
if (isSpiffsUpdate) {
// SPIFFS: 5-75% for the upload
currentProgress = 6 + (updateWritten * 100) / updateTotalSize;
} else {
// Firmware: 0-100% for the upload
currentProgress = 1 + (updateWritten * 100) / updateTotalSize;
}
static int lastProgress = -1;
if (currentProgress != lastProgress && (currentProgress % 10 == 0 || final)) {
sendUpdateProgress(currentProgress, "uploading");
oledShowMessage("Update: " + String(currentProgress) + "%");
delay(50);
lastProgress = currentProgress;
}
}
if (final) {
if (Update.end(true)) {
if (isSpiffsUpdate) {
restoreJsonConfigs();
}
} else {
request->send(400, "application/json", "{\"success\":false,\"message\":\"Update finalization failed\"}");
}
}
});
updateHandler->onRequest([](AsyncWebServerRequest *request) {
if (Update.hasError()) {
request->send(400, "application/json", "{\"success\":false,\"message\":\"Update failed\"}");
return;
}
// First 100% message
ws.textAll("{\"type\":\"updateProgress\",\"progress\":100,\"status\":\"success\",\"message\":\"Update successful! Restarting device...\"}");
vTaskDelay(2000 / portTICK_PERIOD_MS);
AsyncWebServerResponse *response = request->beginResponse(200, "application/json",
"{\"success\":true,\"message\":\"Update successful! Restarting device...\"}");
response->addHeader("Connection", "close");
request->send(response);
// Second 100% message as a safeguard
ws.textAll("{\"type\":\"updateProgress\",\"progress\":100,\"status\":\"success\",\"message\":\"Update successful! Restarting device...\"}");
espRestart();
});
server.addHandler(updateHandler);
}

src/ota.h (new file, 9 lines)
View File

@ -0,0 +1,9 @@
#ifndef OTA_H
#define OTA_H
#include <ArduinoOTA.h>
#include <ESPAsyncWebServer.h>
void handleUpdate(AsyncWebServer &server);
#endif

View File

@ -16,6 +16,7 @@ int16_t weight = 0;
uint8_t weigthCouterToApi = 0;
uint8_t scale_tare_counter = 0;
uint8_t pauseMainTask = 0;
uint8_t scaleCalibrated = 1;
Preferences preferences;
const char* NVS_NAMESPACE = "scale";
@ -46,15 +47,15 @@ void scale_loop(void * parameter) {
weight = round(scale.get_units());
}
vTaskDelay(pdMS_TO_TICKS(100)); // Verzögerung, um die CPU nicht zu überlasten
vTaskDelay(pdMS_TO_TICKS(100));
}
}
void start_scale() {
uint8_t start_scale() {
Serial.println("Prüfe Calibration Value");
long calibrationValue;
// NVS
// Read from NVS
preferences.begin(NVS_NAMESPACE, true); // true = readonly
calibrationValue = preferences.getLong(NVS_KEY_CALIBRATION, defaultScaleCalibrationValue);
preferences.end();
@ -64,7 +65,10 @@ void start_scale() {
scale.begin(LOADCELL_DOUT_PIN, LOADCELL_SCK_PIN);
if (isnan(calibrationValue) || calibrationValue < 1) calibrationValue = defaultScaleCalibrationValue;
if (isnan(calibrationValue) || calibrationValue < 1) {
calibrationValue = defaultScaleCalibrationValue;
scaleCalibrated = 0;
}
oledShowMessage("Scale Tare Please remove all");
for (uint16_t i = 0; i < 2000; i++) {
@ -86,7 +90,7 @@ void start_scale() {
BaseType_t result = xTaskCreatePinnedToCore(
scale_loop, /* Function to implement the task */
"ScaleLoop", /* Name of the task */
10000, /* Stack size in words */
2048, /* Stack size in words */
NULL, /* Task input parameter */
scaleTaskPrio, /* Priority of the task */
&ScaleTask, /* Task handle. */
@ -97,6 +101,8 @@ void start_scale() {
} else {
Serial.println("ScaleLoop-Task erfolgreich erstellt");
}
return (scaleCalibrated == 1) ? 1 : 3;
}
uint8_t calibrate_scale() {
@ -104,6 +110,7 @@ uint8_t calibrate_scale() {
//vTaskSuspend(RfidReaderTask);
vTaskDelete(RfidReaderTask);
vTaskDelete(ScaleTask);
pauseBambuMqttTask = true;
pauseMainTask = 1;
@ -171,8 +178,6 @@ uint8_t calibrate_scale() {
vTaskDelay(pdMS_TO_TICKS(1));
esp_task_wdt_reset();
}
//ESP.restart();
}
else
{
@ -206,8 +211,7 @@ uint8_t calibrate_scale() {
oledShowMessage("Scale Ready");
Serial.println("starte Scale Task");
Serial.println("restart Scale Task");
start_scale();
pauseBambuMqttTask = false;

View File

@ -5,7 +5,7 @@
#include "HX711.h"
void start_scale();
uint8_t start_scale();
uint8_t calibrate_scale();
uint8_t tareScale();
@ -14,6 +14,7 @@ extern int16_t weight;
extern uint8_t weigthCouterToApi;
extern uint8_t scale_tare_counter;
extern uint8_t pauseMainTask;
extern uint8_t scaleCalibrated;
extern TaskHandle_t ScaleTask;

View File

@ -8,6 +8,8 @@
#include "scale.h"
#include "esp_task_wdt.h"
#include <Update.h>
#include "display.h"
#include "ota.h"
#ifndef VERSION
#define VERSION "1.1.0"
@ -22,6 +24,7 @@ AsyncWebSocket ws("/ws");
uint8_t lastSuccess = 0;
uint8_t lastHasReadRfidTag = 0;
void onWsEvent(AsyncWebSocket *server, AsyncWebSocketClient *client, AwsEventType type, void *arg, uint8_t *data, size_t len) {
if (type == WS_EVT_CONNECT) {
Serial.println("Neuer Client verbunden!");
@ -32,6 +35,10 @@ void onWsEvent(AsyncWebSocket *server, AsyncWebSocketClient *client, AwsEventTyp
sendWriteResult(client, 3);
} else if (type == WS_EVT_DISCONNECT) {
Serial.println("Client getrennt.");
} else if (type == WS_EVT_ERROR) {
Serial.printf("WebSocket Client #%u error(%u): %s\n", client->id(), *((uint16_t*)arg), (char*)data);
} else if (type == WS_EVT_PONG) {
Serial.printf("WebSocket Client #%u pong\n", client->id());
} else if (type == WS_EVT_DATA) {
String message = String((char*)data);
JsonDocument doc;
@ -48,7 +55,7 @@ void onWsEvent(AsyncWebSocket *server, AsyncWebSocketClient *client, AwsEventTyp
}
else if (doc["type"] == "writeNfcTag") {
if (doc["payload"].is<String>()) {
if (doc["payload"].is<JsonObject>()) {
// Versuche NFC-Daten zu schreiben
String payloadString;
serializeJson(doc["payload"], payloadString);
@ -88,6 +95,15 @@ void onWsEvent(AsyncWebSocket *server, AsyncWebSocketClient *client, AwsEventTyp
setBambuSpool(doc["payload"]);
}
else if (doc["type"] == "setSpoolmanSettings") {
Serial.println(doc["payload"].as<String>());
if (updateSpoolBambuData(doc["payload"].as<String>())) {
ws.textAll("{\"type\":\"setSpoolmanSettings\",\"payload\":\"success\"}");
} else {
ws.textAll("{\"type\":\"setSpoolmanSettings\",\"payload\":\"error\"}");
}
}
else {
Serial.println("Unbekannter WebSocket-Typ: " + doc["type"].as<String>());
}
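For orientation, the handler above dispatches on the "type" field of incoming JSON frames. The type strings below all appear in this handler; the payload shapes are assumptions for illustration only:
{"type":"writeNfcTag","payload":{ ... spool data to write to the tag ... }}
{"type":"setBambuSpool","payload": ... spool selection passed to setBambuSpool() ... }
{"type":"setSpoolmanSettings","payload":{ ... settings passed to updateSpoolBambuData() ... }}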
@ -97,12 +113,12 @@ void onWsEvent(AsyncWebSocket *server, AsyncWebSocketClient *client, AwsEventTyp
// Loads an HTML file and replaces its header placeholder
String loadHtmlWithHeader(const char* filename) {
Serial.println("Lade HTML-Datei: " + String(filename));
if (!SPIFFS.exists(filename)) {
if (!LittleFS.exists(filename)) {
Serial.println("Fehler: Datei nicht gefunden!");
return "Fehler: Datei nicht gefunden!";
}
File file = SPIFFS.open(filename, "r");
File file = LittleFS.open(filename, "r");
String html = file.readString();
file.close();
@ -160,6 +176,9 @@ void sendAmsData(AsyncWebSocketClient *client) {
}
void setupWebserver(AsyncWebServer &server) {
// Disable all debug output
Serial.setDebugOutput(false);
// WebSocket optimizations
ws.onEvent(onWsEvent);
ws.enable(true);
@ -176,7 +195,7 @@ void setupWebserver(AsyncWebServer &server) {
// Route for the about page
server.on("/about", HTTP_GET, [](AsyncWebServerRequest *request){
Serial.println("Anfrage für /about erhalten");
AsyncWebServerResponse *response = request->beginResponse(SPIFFS, "/index.html.gz", "text/html");
AsyncWebServerResponse *response = request->beginResponse(LittleFS, "/index.html.gz", "text/html");
response->addHeader("Content-Encoding", "gzip");
response->addHeader("Cache-Control", CACHE_CONTROL);
request->send(response);
@ -185,7 +204,7 @@ void setupWebserver(AsyncWebServer &server) {
// Route for the scale page
server.on("/waage", HTTP_GET, [](AsyncWebServerRequest *request){
Serial.println("Anfrage für /waage erhalten");
AsyncWebServerResponse *response = request->beginResponse(SPIFFS, "/waage.html.gz", "text/html");
AsyncWebServerResponse *response = request->beginResponse(LittleFS, "/waage.html.gz", "text/html");
response->addHeader("Content-Encoding", "gzip");
response->addHeader("Cache-Control", CACHE_CONTROL);
request->send(response);
@ -194,24 +213,13 @@ void setupWebserver(AsyncWebServer &server) {
// Route for the RFID page
server.on("/", HTTP_GET, [](AsyncWebServerRequest *request){
Serial.println("Anfrage für /rfid erhalten");
AsyncWebServerResponse *response = request->beginResponse(SPIFFS, "/rfid.html.gz", "text/html");
AsyncWebServerResponse *response = request->beginResponse(LittleFS, "/rfid.html.gz", "text/html");
response->addHeader("Content-Encoding", "gzip");
response->addHeader("Cache-Control", CACHE_CONTROL);
request->send(response);
Serial.println("RFID-Seite gesendet");
});
/*
// Neue API-Route für das Abrufen der Spool-Daten
server.on("/api/spools", HTTP_GET, [](AsyncWebServerRequest *request){
Serial.println("API-Aufruf: /api/spools");
JsonDocument spoolsData = fetchSpoolsForWebsite();
String response;
serializeJson(spoolsData, response);
request->send(200, "application/json", response);
});
*/
server.on("/api/url", HTTP_GET, [](AsyncWebServerRequest *request){
Serial.println("API-Aufruf: /api/url");
String jsonResponse = "{\"spoolman_url\": \"" + String(spoolmanUrl) + "\"}";
@ -221,7 +229,7 @@ void setupWebserver(AsyncWebServer &server) {
// Route for the WiFi page
server.on("/wifi", HTTP_GET, [](AsyncWebServerRequest *request){
Serial.println("Anfrage für /wifi erhalten");
AsyncWebServerResponse *response = request->beginResponse(SPIFFS, "/wifi.html.gz", "text/html");
AsyncWebServerResponse *response = request->beginResponse(LittleFS, "/wifi.html.gz", "text/html");
response->addHeader("Content-Encoding", "gzip");
response->addHeader("Cache-Control", CACHE_CONTROL);
request->send(response);
@ -231,13 +239,18 @@ void setupWebserver(AsyncWebServer &server) {
server.on("/spoolman", HTTP_GET, [](AsyncWebServerRequest *request){
Serial.println("Anfrage für /spoolman erhalten");
String html = loadHtmlWithHeader("/spoolman.html");
html.replace("{{spoolmanUrl}}", spoolmanUrl);
html.replace("{{spoolmanUrl}}", (spoolmanUrl != "") ? spoolmanUrl : "");
html.replace("{{spoolmanOctoEnabled}}", octoEnabled ? "checked" : "");
html.replace("{{spoolmanOctoUrl}}", (octoUrl != "") ? octoUrl : "");
html.replace("{{spoolmanOctoToken}}", (octoToken != "") ? octoToken : "");
JsonDocument doc;
if (loadJsonValue("/bambu_credentials.json", doc) && doc["bambu_ip"].is<String>()) {
if (loadJsonValue("/bambu_credentials.json", doc) && doc["bambu_ip"].is<String>())
{
String bambuIp = doc["bambu_ip"].as<String>();
String bambuSerial = doc["bambu_serialnr"].as<String>();
String bambuCode = doc["bambu_accesscode"].as<String>();
autoSendToBambu = doc["autoSendToBambu"].as<bool>();
bambuIp.trim();
bambuSerial.trim();
bambuCode.trim();
@ -245,6 +258,16 @@ void setupWebserver(AsyncWebServer &server) {
html.replace("{{bambuIp}}", bambuIp ? bambuIp : "");
html.replace("{{bambuSerial}}", bambuSerial ? bambuSerial : "");
html.replace("{{bambuCode}}", bambuCode ? bambuCode : "");
html.replace("{{autoSendToBambu}}", autoSendToBambu ? "checked" : "");
html.replace("{{autoSendTime}}", String(autoSetBambuAmsCounter));
}
else
{
html.replace("{{bambuIp}}", "");
html.replace("{{bambuSerial}}", "");
html.replace("{{bambuCode}}", "");
html.replace("{{autoSendToBambu}}", "");
html.replace("{{autoSendTime}}", String(autoSetBambuAmsCounter));
}
request->send(200, "text/html", html);
@ -257,10 +280,21 @@ void setupWebserver(AsyncWebServer &server) {
return;
}
String url = request->getParam("url")->value();
url.trim();
if (request->getParam("octoEnabled")->value() == "true" && (!request->hasParam("octoUrl") || !request->hasParam("octoToken"))) {
request->send(400, "application/json", "{\"success\": false, \"error\": \"Missing OctoPrint URL or Token parameter\"}");
return;
}
bool healthy = saveSpoolmanUrl(url);
String url = request->getParam("url")->value();
bool octoEnabled = (request->getParam("octoEnabled")->value() == "true") ? true : false;
String octoUrl = request->getParam("octoUrl")->value();
String octoToken = (request->getParam("octoToken")->value() != "") ? request->getParam("octoToken")->value() : "";
url.trim();
octoUrl.trim();
octoToken.trim();
bool healthy = saveSpoolmanUrl(url, octoEnabled, octoUrl, octoToken);
String jsonResponse = "{\"healthy\": " + String(healthy ? "true" : "false") + "}";
request->send(200, "application/json", jsonResponse);
@ -276,16 +310,20 @@ void setupWebserver(AsyncWebServer &server) {
String bambu_ip = request->getParam("bambu_ip")->value();
String bambu_serialnr = request->getParam("bambu_serialnr")->value();
String bambu_accesscode = request->getParam("bambu_accesscode")->value();
bool autoSend = (request->getParam("autoSend")->value() == "true") ? true : false;
String autoSendTime = request->getParam("autoSendTime")->value();
bambu_ip.trim();
bambu_serialnr.trim();
bambu_accesscode.trim();
autoSendTime.trim();
if (bambu_ip.length() == 0 || bambu_serialnr.length() == 0 || bambu_accesscode.length() == 0) {
request->send(400, "application/json", "{\"success\": false, \"error\": \"Empty parameter\"}");
return;
}
bool success = saveBambuCredentials(bambu_ip, bambu_serialnr, bambu_accesscode);
bool success = saveBambuCredentials(bambu_ip, bambu_serialnr, bambu_accesscode, autoSend, autoSendTime);
request->send(200, "application/json", "{\"healthy\": " + String(success ? "true" : "false") + "}");
});
@ -298,7 +336,7 @@ void setupWebserver(AsyncWebServer &server) {
// Route for serving the CSS file
server.on("/style.css", HTTP_GET, [](AsyncWebServerRequest *request){
Serial.println("Lade style.css");
AsyncWebServerResponse *response = request->beginResponse(SPIFFS, "/style.css.gz", "text/css");
AsyncWebServerResponse *response = request->beginResponse(LittleFS, "/style.css.gz", "text/css");
response->addHeader("Content-Encoding", "gzip");
response->addHeader("Cache-Control", CACHE_CONTROL);
request->send(response);
@ -307,7 +345,7 @@ void setupWebserver(AsyncWebServer &server) {
// Route for the logo
server.on("/logo.png", HTTP_GET, [](AsyncWebServerRequest *request){
AsyncWebServerResponse *response = request->beginResponse(SPIFFS, "/logo.png.gz", "image/png");
AsyncWebServerResponse *response = request->beginResponse(LittleFS, "/logo.png.gz", "image/png");
response->addHeader("Content-Encoding", "gzip");
response->addHeader("Cache-Control", CACHE_CONTROL);
request->send(response);
@ -316,7 +354,7 @@ void setupWebserver(AsyncWebServer &server) {
// Route for the favicon
server.on("/favicon.ico", HTTP_GET, [](AsyncWebServerRequest *request){
AsyncWebServerResponse *response = request->beginResponse(SPIFFS, "/favicon.ico", "image/x-icon");
AsyncWebServerResponse *response = request->beginResponse(LittleFS, "/favicon.ico", "image/x-icon");
response->addHeader("Cache-Control", CACHE_CONTROL);
request->send(response);
Serial.println("favicon.ico gesendet");
@ -324,17 +362,26 @@ void setupWebserver(AsyncWebServer &server) {
// Route for spool_in.png
server.on("/spool_in.png", HTTP_GET, [](AsyncWebServerRequest *request){
AsyncWebServerResponse *response = request->beginResponse(SPIFFS, "/spool_in.png.gz", "image/png");
AsyncWebServerResponse *response = request->beginResponse(LittleFS, "/spool_in.png.gz", "image/png");
response->addHeader("Content-Encoding", "gzip");
response->addHeader("Cache-Control", CACHE_CONTROL);
request->send(response);
Serial.println("spool_in.png gesendet");
});
// Route for set_spoolman.png
server.on("/set_spoolman.png", HTTP_GET, [](AsyncWebServerRequest *request){
AsyncWebServerResponse *response = request->beginResponse(LittleFS, "/set_spoolman.png.gz", "image/png");
response->addHeader("Content-Encoding", "gzip");
response->addHeader("Cache-Control", CACHE_CONTROL);
request->send(response);
Serial.println("set_spoolman.png gesendet");
});
// Routes for JavaScript files
server.on("/spoolman.js", HTTP_GET, [](AsyncWebServerRequest *request){
Serial.println("Anfrage für /spoolman.js erhalten");
AsyncWebServerResponse *response = request->beginResponse(SPIFFS, "/spoolman.js.gz", "text/javascript");
AsyncWebServerResponse *response = request->beginResponse(LittleFS, "/spoolman.js.gz", "text/javascript");
response->addHeader("Content-Encoding", "gzip");
response->addHeader("Cache-Control", CACHE_CONTROL);
request->send(response);
@ -343,7 +390,7 @@ void setupWebserver(AsyncWebServer &server) {
server.on("/rfid.js", HTTP_GET, [](AsyncWebServerRequest *request){
Serial.println("Anfrage für /rfid.js erhalten");
AsyncWebServerResponse *response = request->beginResponse(SPIFFS,"/rfid.js.gz", "text/javascript");
AsyncWebServerResponse *response = request->beginResponse(LittleFS,"/rfid.js.gz", "text/javascript");
response->addHeader("Content-Encoding", "gzip");
response->addHeader("Cache-Control", CACHE_CONTROL);
request->send(response);
@ -352,72 +399,14 @@ void setupWebserver(AsyncWebServer &server) {
// Simplified update handler
server.on("/upgrade", HTTP_GET, [](AsyncWebServerRequest *request) {
AsyncWebServerResponse *response = request->beginResponse(SPIFFS, "/upgrade.html.gz", "text/html");
AsyncWebServerResponse *response = request->beginResponse(LittleFS, "/upgrade.html.gz", "text/html");
response->addHeader("Content-Encoding", "gzip");
response->addHeader("Cache-Control", "no-store");
request->send(response);
});
// Update-Handler mit verbesserter Fehlerbehandlung
server.on("/update", HTTP_POST,
[](AsyncWebServerRequest *request) {
// Nach Update-Abschluss
bool success = !Update.hasError();
AsyncWebServerResponse *response = request->beginResponse(
success ? 200 : 400,
"application/json",
success ? "{\"success\":true,\"message\":\"Update successful\"}"
: "{\"success\":false,\"message\":\"Update failed\"}"
);
response->addHeader("Connection", "close");
request->send(response);
if (success) {
delay(500);
ESP.restart();
}
},
[](AsyncWebServerRequest *request, const String& filename, size_t index, uint8_t *data, size_t len, bool final) {
static size_t updateSize = 0;
static int command = 0;
if (!index) {
updateSize = request->contentLength();
command = (filename.indexOf("spiffs") > -1) ? U_SPIFFS : U_FLASH;
Serial.printf("Update Start: %s\nSize: %u\nCommand: %d\n", filename.c_str(), updateSize, command);
if (!Update.begin(updateSize, command)) {
Serial.printf("Update Begin Error: ");
Update.printError(Serial);
String errorMsg = String("Update begin failed: ") + Update.errorString();
request->send(400, "application/json", "{\"success\":false,\"message\":\"" + errorMsg + "\"}");
return;
}
}
if (len) {
if (Update.write(data, len) != len) {
Serial.printf("Update Write Error: ");
Update.printError(Serial);
String errorMsg = String("Write failed: ") + Update.errorString();
request->send(400, "application/json", "{\"success\":false,\"message\":\"" + errorMsg + "\"}");
return;
}
Serial.printf("Progress: %u/%u\r", index + len, updateSize);
}
if (final) {
if (!Update.end(true)) {
Serial.printf("Update End Error: ");
Update.printError(Serial);
String errorMsg = String("Update end failed: ") + Update.errorString();
request->send(400, "application/json", "{\"success\":false,\"message\":\"" + errorMsg + "\"}");
return;
}
Serial.printf("\nUpdate Success: %uB\n", index+len);
}
}
);
// Register the update handler
handleUpdate(server);
server.on("/api/version", HTTP_GET, [](AsyncWebServerRequest *request){
String fm_version = VERSION;
@ -441,80 +430,3 @@ void setupWebserver(AsyncWebServer &server) {
server.begin();
Serial.println("Webserver gestartet");
}
void handleOTAUpload(AsyncWebServerRequest *request, const String& filename, size_t index, uint8_t *data, size_t len, bool final) {
static bool isSpiffsUpdate = false;
if (!index) {
// Start eines neuen Uploads
Serial.println("Update Start: " + filename);
// Überprüfe den Dateityp basierend auf dem Dateinamen
bool isFirmware = filename.startsWith("filaman_");
isSpiffsUpdate = filename.startsWith("webpage_");
if (!isFirmware && !isSpiffsUpdate) {
request->send(400, "application/json", "{\"message\":\"Invalid file type. File must start with 'filaman_' or 'webpage_'\"}");
return;
}
// Wähle den Update-Typ basierend auf dem Dateinamen
if (isSpiffsUpdate) {
if (!Update.begin(SPIFFS.totalBytes(), U_SPIFFS)) {
Update.printError(Serial);
request->send(400, "application/json", "{\"message\":\"SPIFFS Update failed: " + String(Update.errorString()) + "\"}");
return;
}
// Backup JSON configs before SPIFFS update
backupJsonConfigs();
} else {
if (!Update.begin(UPDATE_SIZE_UNKNOWN, U_FLASH)) {
Update.printError(Serial);
request->send(400, "application/json", "{\"message\":\"Firmware Update failed: " + String(Update.errorString()) + "\"}");
return;
}
}
}
if (Update.write(data, len) != len) {
Update.printError(Serial);
request->send(400, "application/json", "{\"message\":\"Write failed: " + String(Update.errorString()) + "\"}");
return;
}
if (final) {
if (!Update.end(true)) {
Update.printError(Serial);
request->send(400, "application/json", "{\"message\":\"Update failed: " + String(Update.errorString()) + "\"}");
return;
}
if (isSpiffsUpdate) {
// Restore JSON configs after SPIFFS update
restoreJsonConfigs();
}
request->send(200, "application/json", "{\"message\":\"Update successful!\", \"restart\": true}");
delay(500);
ESP.restart();
}
}
void backupJsonConfigs() {
const char* configs[] = {"/bambu_credentials.json", "/spoolman_url.json"};
for (const char* config : configs) {
if (SPIFFS.exists(config)) {
String backupPath = String(config) + ".bak";
SPIFFS.remove(backupPath);
SPIFFS.rename(config, backupPath);
}
}
}
void restoreJsonConfigs() {
const char* configs[] = {"/bambu_credentials.json", "/spoolman_url.json"};
for (const char* config : configs) {
String backupPath = String(config) + ".bak";
if (SPIFFS.exists(backupPath)) {
SPIFFS.remove(config);
SPIFFS.rename(backupPath, config);
}
}
}

View File

@ -19,7 +19,6 @@ extern AsyncWebSocket ws;
// Server initialization and handlers
void initWebServer();
void handleUpload(AsyncWebServerRequest *request, String filename, size_t index, uint8_t *data, size_t len, bool final);
void handleBody(AsyncWebServerRequest *request, uint8_t *data, size_t len, size_t index, size_t total);
void setupWebserver(AsyncWebServer &server);
@ -29,8 +28,4 @@ void sendNfcData(AsyncWebSocketClient *client);
void foundNfcTag(AsyncWebSocketClient *client, uint8_t success);
void sendWriteResult(AsyncWebSocketClient *client, uint8_t success);
// Upgrade-Funktionen
void backupJsonConfigs();
void restoreJsonConfigs();
#endif

View File

@ -3,16 +3,20 @@
#include <WiFi.h>
#include <esp_wifi.h>
#include <WiFiManager.h>
#include <DNSServer.h>
#include <ESPmDNS.h>
#include "display.h"
#include "config.h"
WiFiManager wm;
bool wm_nonblocking = false;
uint8_t wifiErrorCounter = 0;
void initWiFi() {
void wifiSettings() {
// Optimized WiFi settings
WiFi.mode(WIFI_STA); // explicitly set mode, esp defaults to STA+AP
WiFi.setSleep(false); // disable sleep mode
WiFi.setHostname("FilaMan");
esp_wifi_set_ps(WIFI_PS_NONE);
// Maximum transmit power
@ -23,19 +27,45 @@ void initWiFi() {
// Enable WiFi roaming for better stability
esp_wifi_set_rssi_threshold(-80);
}
void startMDNS() {
if (!MDNS.begin("filaman")) {
Serial.println("Error setting up MDNS responder!");
while(1) {
vTaskDelay(1000 / portTICK_PERIOD_MS);
}
}
Serial.println("mDNS responder started");
}
void configModeCallback (WiFiManager *myWiFiManager) {
Serial.println("Entered config mode");
oledShowTopRow();
oledShowMessage("WiFi Config Mode");
}
void initWiFi() {
// load Wifi settings
wifiSettings();
wm.setAPCallback(configModeCallback);
wm.setSaveConfigCallback([]() {
Serial.println("Configurations updated");
ESP.restart();
});
if(wm_nonblocking) wm.setConfigPortalBlocking(false);
wm.setConfigPortalTimeout(320); // close the portal after about 5 minutes
//wm.setConfigPortalTimeout(320); // close the portal after about 5 minutes
wm.setWiFiAutoReconnect(true);
wm.setConnectTimeout(5);
oledShowTopRow();
oledShowMessage("WiFi Setup");
bool res;
// res = wm.autoConnect(); // auto generated AP name from chipid
res = wm.autoConnect("FilaMan"); // anonymous ap
// res = wm.autoConnect("spoolman","password"); // password protected ap
if(!res) {
//bool res = wm.autoConnect("FilaMan"); // anonymous ap
if(!wm.autoConnect("FilaMan")) {
Serial.println("Failed to connect or hit timeout");
// ESP.restart();
oledShowTopRow();
@ -51,5 +81,49 @@ void initWiFi() {
oledShowTopRow();
display.display();
vTaskDelay(500 / portTICK_PERIOD_MS);
// mDNS
startMDNS();
}
}
void checkWiFiConnection() {
if (WiFi.status() != WL_CONNECTED)
{
Serial.println("WiFi connection lost. Reconnecting...");
wifiOn = false;
oledShowTopRow();
oledShowMessage("WiFi reconnecting");
WiFi.reconnect(); // try to re-establish the connection
vTaskDelay(5000 / portTICK_PERIOD_MS); // wait 5 seconds before checking again
if (WiFi.status() != WL_CONNECTED)
{
Serial.println("Failed to reconnect. Restarting WiFi...");
WiFi.disconnect();
Serial.println("WiFi disconnected.");
vTaskDelay(1000 / portTICK_PERIOD_MS);
wifiErrorCounter++;
//wifiSettings();
WiFi.reconnect();
Serial.println("WiFi reconnecting...");
WiFi.waitForConnectResult();
}
else
{
Serial.println("WiFi reconnected.");
wifiErrorCounter = 0;
wifiOn = true;
oledShowTopRow();
startMDNS();
}
}
if (wifiErrorCounter >= 5)
{
Serial.println("Too many WiFi errors. Restarting...");
ESP.restart();
}
}

View File

@ -4,5 +4,6 @@
#include <Arduino.h>
void initWiFi();
void checkWiFiConnection();
#endif