Compare commits

...

510 Commits

Author SHA1 Message Date
8a558c3121 refactor: remove unnecessary delay in MQTT setup and add delay before restart 2025-03-03 16:58:24 +01:00
5afb60df32 fix: correct typo in console log for total length 2025-03-02 20:21:27 +01:00
3394e6eb01 feat: add new 3D print file for Filaman scale 2025-03-02 08:06:59 +01:00
3818c2c059 refactor: remove redundant scale calibration checks and enhance task management 2025-03-01 18:50:20 +01:00
0afc543b5f refactor: enhance AMS data handling and streamline spool auto-setting logic 2025-03-01 18:44:35 +01:00
adee46e3fc refactor: adjust stack size and improve scale calibration logic 2025-03-01 18:44:29 +01:00
1db74867e6 refactor: update labels and input types for better clarity and functionality 2025-03-01 18:44:17 +01:00
0f24a63d32 added Discord Server 2025-03-01 15:33:39 +01:00
3640809502 update documentation for clarity and accuracy 2025-03-01 13:04:28 +01:00
289d5357be docs: update changelog and header for version v1.4.0
All checks were successful
Release Workflow / detect-provider (push) Successful in 5s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m45s
2025-03-01 12:46:18 +01:00
315530d1ea update NFC tag references to include NTAG213 and clarify storage capacity 2025-03-01 12:45:55 +01:00
f36773a4c4 bump version to 1.4.0 2025-03-01 12:37:50 +01:00
b35163936f add support for Spoolman Octoprint Plugin in README files 2025-03-01 12:33:26 +01:00
7a2c9d6d17 add OctoPrint integration with configurable fields and update functionality 2025-03-01 12:18:33 +01:00
eb2a8dc128 add version comparison function and check for outdated versions before updates 2025-03-01 12:18:21 +01:00
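For illustration, the version check referenced in commit eb2a8dc128 could look roughly like the sketch below; the function names and comparison rules are assumptions, not code taken from the repository.

```cpp
// Illustrative sketch only (assumed helper, not the repository's actual code):
// compares two dotted version strings such as "1.3.99" and "1.4.0" and
// returns true when `current` is older than `remote`.
#include <sstream>
#include <string>
#include <vector>

static std::vector<int> splitVersion(const std::string &v) {
    std::vector<int> parts;
    std::stringstream ss(v);
    std::string item;
    while (std::getline(ss, item, '.')) {
        parts.push_back(item.empty() ? 0 : std::stoi(item));
    }
    return parts;
}

bool isVersionOutdated(const std::string &current, const std::string &remote) {
    std::vector<int> a = splitVersion(current);
    std::vector<int> b = splitVersion(remote);
    size_t n = (a.size() > b.size()) ? a.size() : b.size();
    for (size_t i = 0; i < n; ++i) {
        int x = (i < a.size()) ? a[i] : 0;   // missing components count as 0
        int y = (i < b.size()) ? b[i] : 0;
        if (x != y) return x < y;            // first differing component decides
    }
    return false;                            // versions are equal
}
```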
bec2c91331 remove unused version and protocol fields from JSON output; add error message for insufficient memory 2025-03-01 10:42:06 +01:00
c6e727de06 remove unused version and protocol fields from NFC data packet 2025-03-01 10:41:51 +01:00
3253e7d407 sort vendors alphabetically in the dropdown list 2025-03-01 10:41:44 +01:00
bce2ad2ed8 Merge pull request #10 from janecker/nfc-improvements
Improves NFC Tag handling
2025-03-01 10:03:46 +01:00
Jan Philipp Ecker
0eff29ef4a Improves NFC Tag handling
Fixes a memory underflow when reading tags. Reads tags with their actual data size and uses that size, instead of a constant value, for the tag size when writing a tag.
2025-02-28 22:35:34 +01:00
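The fix in 0eff29ef4a reads the tag's actual NDEF payload length instead of relying on a constant. A minimal sketch of that idea, assuming an NTAG21x-style layout (NDEF TLV starting at page 4) and a hypothetical readPage() hook rather than the project's real NFC API:

```cpp
#include <cstdint>
#include <functional>
#include <vector>

// readPage(page, out4) reads one 4-byte page from the tag; it stands in for the
// project's NFC driver call and is a hypothetical hook, not the real API.
using PageReader = std::function<bool(uint8_t page, uint8_t out[4])>;

std::vector<uint8_t> readNdefMessage(const PageReader &readPage) {
    uint8_t page[4];
    if (!readPage(4, page) || page[0] != 0x03) return {};  // 0x03 marks the NDEF TLV
    const uint16_t length = page[1];                       // short-form TLV length (< 255 bytes)
    std::vector<uint8_t> data(page + 2, page + 4);         // first two payload bytes
    for (uint8_t p = 5; data.size() < length && readPage(p, page); ++p) {
        data.insert(data.end(), page, page + 4);
    }
    if (data.size() > length) data.resize(length);         // trim to the actual data size
    return data;
}
```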
492bf6cdb8 docs: update changelog and header for version v1.3.99
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m39s
2025-02-28 18:35:16 +01:00
b0317f4001 docs: update platformio.ini for version v1.3.99 2025-02-28 18:35:16 +01:00
58ff6458b0 refactor: update workflows to build firmware with LittleFS instead of SPIFFS 2025-02-28 18:35:05 +01:00
d9c40f5124 docs: update changelog and header for version v1.3.98
Some checks failed
Release Workflow / detect-provider (push) Successful in 4s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m51s
2025-02-28 18:12:56 +01:00
68bc31e29a docs: update platformio.ini for version v1.3.98 2025-02-28 18:12:56 +01:00
9b23ac5fd2 refactor: migrate from SPIFFS to LittleFS for file handling 2025-02-28 18:12:42 +01:00
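For reference, the SPIFFS-to-LittleFS migration in 9b23ac5fd2 essentially amounts to swapping the filesystem object on Arduino-ESP32; the sketch below is illustrative, and the file path is a placeholder.

```cpp
// Sketch of the SPIFFS -> LittleFS switch on Arduino-ESP32; the path shown
// here is a placeholder, not necessarily a file the project uses.
#include <Arduino.h>
#include <LittleFS.h>

bool mountFilesystem() {
    // Previously: SPIFFS.begin(true);
    if (!LittleFS.begin(true)) {                  // true = format the partition if mounting fails
        Serial.println("LittleFS mount failed");
        return false;
    }
    File f = LittleFS.open("/config.json", "r");  // same File API as SPIFFS
    if (f) {
        Serial.printf("config size: %u bytes\n", (unsigned)f.size());
        f.close();
    }
    return true;
}
```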
d31bff14c3 chore: remove unused VSCode settings file 2025-02-28 09:29:34 +01:00
150f92484a refactor: remove commented-out spoolman and filaman data from api.cpp 2025-02-28 09:26:09 +01:00
fa74832fb9 docs: update changelog and header for version v1.3.97
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m47s
2025-02-28 08:53:08 +01:00
2eab3db77d docs: update platformio.ini for version v1.3.97 2025-02-28 08:53:07 +01:00
0a1bf22f7e feat: add confirmation message for spool setting 2025-02-27 22:07:47 +01:00
d58244c1f8 fix: optimize memory usage 2025-02-27 21:56:31 +01:00
db626ea516 fix: fix duplicate http.end() calls in checkSpoolmanExtraFields 2025-02-27 21:54:47 +01:00
fd8f7685a1 fix: optimize delays and stack sizes in NFC task functions 2025-02-27 21:54:32 +01:00
944b156528 feat: improve WiFi configuration and add mDNS support 2025-02-27 21:53:48 +01:00
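Commit 944b156528 adds mDNS support; a minimal Arduino-ESP32 sketch of that idea is shown below, with the hostname chosen purely as an example.

```cpp
// Sketch only; "filaman" is an example hostname, not necessarily the one the firmware uses.
#include <Arduino.h>
#include <WiFi.h>
#include <ESPmDNS.h>

void startMdns() {
    if (WiFi.status() != WL_CONNECTED) return;   // mDNS needs an active connection
    if (MDNS.begin("filaman")) {                 // device reachable as http://filaman.local
        MDNS.addService("http", "tcp", 80);      // advertise the web UI
    } else {
        Serial.println("mDNS setup failed");
    }
}
```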
76100593cc refactor: remove unused libraries and debug output from main.cpp 2025-02-27 15:50:04 +01:00
732d590344 feat: update OLED display with version number and improve text alignment 2025-02-27 14:35:53 +01:00
46cd953b80 feat: add periodic WiFi connection check 2025-02-27 09:38:54 +01:00
c645035bbe feat: update wiring diagram image 2025-02-26 18:29:37 +01:00
9e76620cd3 style: remove text-shadow from disabled buttons 2025-02-26 18:07:22 +01:00
faddda6201 feat: show version number on the OLED display 2025-02-26 18:01:35 +01:00
de9c1706c0 docs: add link to the wiki for detailed usage information 2025-02-25 20:19:04 +01:00
9f7ee13e78 docs: update changelog and header for version v1.3.96
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 3m1s
2025-02-25 16:29:37 +01:00
cf3f6f6741 docs: update platformio.ini for version v1.3.96 2025-02-25 16:29:37 +01:00
b87d43c64e feat: add support for Spoolman settings and update the user interface 2025-02-25 16:24:22 +01:00
3d0411e3c1 feat: remove the sendAmsData function from the API interface 2025-02-25 14:52:47 +01:00
9c61b708aa fix: update conditions for the AMS data refresh and remove unnecessary calls 2025-02-25 14:52:27 +01:00
90f800d042 fix: update condition for the OTA update progress message 2025-02-25 12:19:24 +01:00
a7b1721e1d feat: extend Bambu credentials with AutoSend time and update the user interface 2025-02-25 12:17:20 +01:00
e4825d2905 feat: extend Bambu credentials with AutoSend wait time and update the user interface 2025-02-25 11:32:57 +01:00
c1733848d3 feat: add espRestart function and replace delay with vTaskDelay for OTA update process 2025-02-25 11:02:54 +01:00
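The espRestart helper introduced in c1733848d3 presumably defers the reset without blocking the FreeRTOS scheduler; a hedged sketch, with an illustrative delay value:

```cpp
// Sketch of deferring a restart without blocking the FreeRTOS scheduler;
// the 2-second delay is illustrative, not the firmware's actual value.
#include <Arduino.h>

void espRestart() {
    Serial.println("Restarting after OTA update...");
    vTaskDelay(pdMS_TO_TICKS(2000));   // let pending WebSocket/HTTP responses flush
    ESP.restart();                     // software reset of the ESP32
}
```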
484c95523d feat: implement OTA update functionality with backup and restore for configurations 2025-02-25 10:57:49 +01:00
8499613215 fix: update auto set logic to check RFID tag before setting Bambu spool 2025-02-25 10:57:36 +01:00
08f37186b4 feat: add own_filaments.json and integrate custom filament loading in bambu.cpp 2025-02-25 09:02:11 +01:00
2948a35fa8 docs: update changelog and header for version v1.3.95
All checks were successful
Release Workflow / detect-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m45s
2025-02-24 19:56:19 +01:00
730724fe58 docs: update webpages for version v1.3.95 2025-02-24 19:56:18 +01:00
714b7065e7 fix: bind autoSendToBambu variable to checkbox in spoolman.html 2025-02-24 19:56:01 +01:00
2d8aec515d docs: update changelog and header for version v1.3.94
All checks were successful
Release Workflow / detect-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m41s
2025-02-24 19:47:24 +01:00
b245a206ce docs: update webpages for version v1.3.94 2025-02-24 19:47:24 +01:00
f1489e75cc fix: correct payload type check in NFC write event handling 2025-02-24 19:46:58 +01:00
d9ae829503 docs: update changelog and header for version v1.3.93
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 3m1s
2025-02-24 19:15:03 +01:00
2247b8ed6c docs: update webpages for version v1.3.93 2025-02-24 19:15:03 +01:00
d70b187bf9 feat: implement auto send feature for Bambu spool management and update related configurations 2025-02-24 19:14:51 +01:00
1ade007473 fix: remove debug output from splitTextIntoLines and update weight display logic in scanRfidTask 2025-02-24 19:14:45 +01:00
0af14e2f7d docs: add debug mode instructions for Spoolman in README 2025-02-24 19:14:28 +01:00
de67cdbff3 fix: enhance weight display logic for negative values 2025-02-24 12:28:18 +01:00
98fce15ccc refactor: simplify filament names in JSON configuration 2025-02-24 12:21:27 +01:00
ab417ba64b refactor: update findFilamentIdx to return structured result and improve type searching logic 2025-02-24 12:11:27 +01:00
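Commit ab417ba64b changes findFilamentIdx to return a structured result; a sketch of what such a result type could look like (field names and signature are assumptions):

```cpp
// Sketch only: returning a small struct instead of a bare index, so callers can
// tell "not found" apart from a valid match at index 0.
#include <cstddef>
#include <string>
#include <vector>

struct FilamentResult {
    bool found = false;
    size_t index = 0;
};

FilamentResult findFilamentIdx(const std::vector<std::string> &types, const std::string &searched) {
    for (size_t i = 0; i < types.size(); ++i) {
        if (types[i] == searched) return {true, i};
    }
    return {};   // found == false; callers no longer misread 0 as a hit
}
```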
320057bc49 docs: add wiring diagrams to README for PN532 I2C setup 2025-02-24 10:10:15 +01:00
9007a65fc2 docs: update README to reflect PN532 I2C configuration and remove SPI pin details 2025-02-24 09:36:28 +01:00
2214f5f5de fix: remove unnecessary CPU frequency configuration from setup function 2025-02-24 09:20:44 +01:00
5c5846c52c docs: update changelog and header for version v1.3.92
All checks were successful
Release Workflow / detect-provider (push) Successful in 4s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m49s
2025-02-24 07:47:58 +01:00
517fa37a3d docs: update webpages for version v1.3.92 2025-02-24 07:47:58 +01:00
aaa7a6ee9c fix: configure CPU frequency settings in setup function only for testing 2025-02-24 07:47:50 +01:00
a0b8639488 fix: update comment to clarify NVS reading process 2025-02-23 21:29:38 +01:00
a16c05287e fix: adjust weight display logic to handle cases for weight less than 2 2025-02-23 21:23:46 +01:00
ecb35a97bd fix: update weight display logic to handle negative and specific weight cases 2025-02-23 21:22:50 +01:00
ba968611ec refactor: remove commented-out code in setBambuSpool function 2025-02-23 21:17:55 +01:00
6bd11ddce3 docs: update installation instructions and formatting in README files 2025-02-23 20:35:46 +01:00
3eb313e61a docs: update changelog and header for version v1.3.91
All checks were successful
Release Workflow / detect-provider (push) Successful in 5s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m51s
2025-02-23 20:29:45 +01:00
aad35dc296 docs: update webpages for version v1.3.91 2025-02-23 20:29:45 +01:00
85ac636b1e feat: update GitHub Actions workflow for FTP firmware upload with improved credential checks 2025-02-23 20:29:40 +01:00
6f1804c3fe docs: update changelog and header for version v1.3.90
All checks were successful
Release Workflow / detect-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m52s
2025-02-23 20:28:25 +01:00
89716920dc docs: update webpages for version v1.3.90 2025-02-23 20:28:25 +01:00
78b5078651 feat: update index.html for improved content structure and additional links 2025-02-23 20:27:38 +01:00
6098c3b052 feat: improve UI for Spoolman and Bambu Lab printer credentials, enhancing layout and styling 2025-02-23 20:23:09 +01:00
e7537f94d4 docs: update README files with HSPI default PINs and add ESP32 pin diagram 2025-02-23 20:12:35 +01:00
37717392d0 feat: implement scale calibration checks and update start_scale function to return calibration status 2025-02-23 16:44:43 +01:00
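Commit 37717392d0 has start_scale report whether the scale is calibrated; a rough sketch using the common HX711 Arduino library, with example pins and an assumed signature:

```cpp
// Sketch only: start_scale() returning the calibration status so the UI can
// prompt for calibration. Pins and the signature are assumptions.
#include <Arduino.h>
#include "HX711.h"

HX711 scale;

bool start_scale(float calibrationFactor) {
    scale.begin(16, 17);                 // DOUT, SCK (example pins)
    delay(100);                          // give the HX711 a moment after power-up
    if (!scale.is_ready()) return false; // load cell not responding
    bool calibrated = (calibrationFactor > 0.0f);
    if (calibrated) {
        scale.set_scale(calibrationFactor);
        scale.tare();
    }
    return calibrated;                   // caller can show a "needs calibration" hint
}
```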
c6da28ad6f feat: add FTP upload functionality to GitHub release workflow and update installation instructions in README 2025-02-23 16:13:42 +01:00
d6e38a4e73 fix: remove debug secrets check from Gitea release workflow 2025-02-23 16:01:42 +01:00
4e0d9353c8 docs: update changelog and header for version v1.3.89
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m59s
2025-02-23 15:57:14 +01:00
7059826659 docs: update webpages for version v1.3.89 2025-02-23 15:57:13 +01:00
41faa8bb1c fix: update Gitea release workflow to use vars for FTP credentials 2025-02-23 15:57:09 +01:00
b38e3fa5ef docs: update changelog and header for version v1.3.88
Some checks failed
Release Workflow / detect-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m48s
2025-02-23 15:49:37 +01:00
5280d7e341 docs: update webpages for version v1.3.88 2025-02-23 15:49:37 +01:00
2f95c66d39 fix: update Gitea release workflow to use secrets for FTP credentials 2025-02-23 15:49:33 +01:00
df1b87465c docs: update changelog and header for version v1.3.87
Some checks failed
Release Workflow / detect-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m43s
2025-02-23 15:39:09 +01:00
84f1420999 docs: update webpages for version v1.3.87 2025-02-23 15:39:09 +01:00
b14dd5475d fix: enhance FTP upload workflow with credential checks and version output 2025-02-23 15:38:59 +01:00
975845421b docs: update changelog and header for version v1.3.86
Some checks failed
Release Workflow / detect-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m55s
2025-02-23 15:29:14 +01:00
044ddbe0eb docs: update webpages for version v1.3.86 2025-02-23 15:29:14 +01:00
c385544d67 fix: streamline FTP credentials usage in Gitea release workflow 2025-02-23 15:29:10 +01:00
c6cfd85687 docs: update changelog and header for version v1.3.85
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m41s
2025-02-23 15:20:13 +01:00
84632322e2 docs: update webpages for version v1.3.85 2025-02-23 15:20:13 +01:00
86e55a8696 fix: add FTP_USER variable for Gitea release workflow 2025-02-23 15:20:09 +01:00
d2b40daaca docs: update changelog and header for version v1.3.84
Some checks failed
Release Workflow / detect-provider (push) Successful in 5s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m47s
2025-02-23 15:13:49 +01:00
9d58cbc31c docs: update webpages for version v1.3.84 2025-02-23 15:13:48 +01:00
d09aeaf47c fix: add FTP_HOST variable for firmware upload in Gitea release workflow 2025-02-23 15:13:45 +01:00
9fb82fe51e docs: update changelog and header for version v1.3.83
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m57s
2025-02-23 15:07:40 +01:00
5e0e2c5f6b docs: update webpages for version v1.3.83 2025-02-23 15:07:40 +01:00
a8460503ff fix: correct variable interpolation for FTP credentials in Gitea release workflow 2025-02-23 15:07:35 +01:00
6700a1761f docs: update changelog and header for version v1.3.82
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m51s
2025-02-23 14:59:03 +01:00
7207f36e06 docs: update webpages for version v1.3.82 2025-02-23 14:59:03 +01:00
e79bee3381 feat: update Gitea release workflow to use variable interpolation for FTP credentials 2025-02-23 14:58:57 +01:00
c3918f075b docs: update changelog and header for version v1.3.81
Some checks failed
Release Workflow / detect-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m41s
2025-02-23 14:53:03 +01:00
0c384219c5 docs: update webpages for version v1.3.81 2025-02-23 14:53:03 +01:00
42b9daf4be feat: update Gitea release workflow to use environment variables for FTP credentials and version 2025-02-23 14:53:00 +01:00
13a771682f docs: update changelog and header for version v1.3.80
Some checks failed
Release Workflow / detect-provider (push) Successful in 4s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m39s
2025-02-23 14:43:41 +01:00
f79f87bf09 docs: update webpages for version v1.3.80 2025-02-23 14:43:41 +01:00
9fe3f6c0ff feat: add FTP_USER and FTP_PASSWORD secrets for firmware upload in Gitea release workflow 2025-02-23 14:43:36 +01:00
55e89948bb docs: update changelog and header for version v1.3.79
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m47s
2025-02-23 14:36:19 +01:00
6c5e8c4d07 docs: update webpages for version v1.3.79 2025-02-23 14:36:19 +01:00
4f79700d74 feat: add FTP_USER input for firmware upload in Gitea release workflow 2025-02-23 14:36:14 +01:00
1b4fecf409 docs: update changelog and header for version v1.3.78
Some checks failed
Release Workflow / detect-provider (push) Successful in 4s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 3m7s
2025-02-23 12:35:14 +01:00
89a6101d97 docs: update webpages for version v1.3.78 2025-02-23 12:35:14 +01:00
ee45a74fee fix: change FTP protocol from FTPS to FTP for file upload in workflow 2025-02-23 12:35:09 +01:00
db365aba3c docs: update changelog and header for version v1.3.77
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m49s
2025-02-23 12:05:24 +01:00
63cdfaee6c docs: update webpages for version v1.3.77 2025-02-23 12:05:24 +01:00
eb2e360c35 fix: replace ncftp with lftp for secure firmware upload 2025-02-23 12:05:19 +01:00
7d578640e2 docs: update changelog and header for version v1.3.76
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m44s
2025-02-23 12:00:38 +01:00
b006533a91 docs: update webpages for version v1.3.76 2025-02-23 12:00:37 +01:00
9fa7526623 fix: replace FTP action with curl for secure firmware upload and install ncftp 2025-02-23 12:00:33 +01:00
dfbb2fbd9b docs: update changelog and header for version v1.3.75
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m40s
2025-02-23 11:55:20 +01:00
0302158449 docs: update webpages for version v1.3.75 2025-02-23 11:55:20 +01:00
68c385f9d7 fix: update FTP user and enhance SSL options in gitea-release workflow 2025-02-23 11:55:11 +01:00
9a8bd58cb3 docs: update changelog and header for version v1.3.74
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Has been cancelled
2025-02-23 11:39:55 +01:00
0d8b8918c1 docs: update webpages for version v1.3.74 2025-02-23 11:39:54 +01:00
a892b854b5 fix: update password syntax in gitea-release workflow 2025-02-23 11:39:51 +01:00
0f02f6c848 docs: update changelog and header for version v1.3.73
Some checks failed
Release Workflow / detect-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 3m18s
2025-02-23 11:34:36 +01:00
96c054827e docs: update webpages for version v1.3.73 2025-02-23 11:34:36 +01:00
f93eedf775 chore: update version to 1.3.72 in platformio.ini 2025-02-23 11:34:32 +01:00
68a10dfeb2 docs: update changelog and header for version v1.3.72
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m46s
2025-02-23 11:17:17 +01:00
632b7a089e docs: update webpages for version v1.3.72 2025-02-23 11:17:17 +01:00
c0e3650bf4 fix: update FTP options for Gitea release workflow 2025-02-23 11:17:13 +01:00
8e3dfc93f7 docs: update changelog and header for version v1.3.71
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m37s
2025-02-23 11:09:57 +01:00
5016285dce docs: update webpages for version v1.3.71 2025-02-23 11:09:57 +01:00
9b1a232fde feat: add FTP upload step for firmware in Gitea release workflow 2025-02-23 11:09:49 +01:00
37e79b7a49 docs: update changelog and header for version v1.3.70
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m41s
2025-02-23 09:58:18 +01:00
6bd23f31c1 docs: update webpages for version v1.3.70 2025-02-23 09:58:17 +01:00
3099e9ded9 docs: update changelog and header for version v1.3.69
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m37s
2025-02-23 09:54:00 +01:00
4952ad3150 docs: update webpages for version v1.3.69 2025-02-23 09:54:00 +01:00
2055da9962 fix: update release note generation to use the second latest tag 2025-02-23 09:53:55 +01:00
459a31cad3 docs: update changelog and header for version v1.3.68
All checks were successful
Release Workflow / detect-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 3m3s
2025-02-23 09:48:22 +01:00
4b1930209b docs: update webpages for version v1.3.68 2025-02-23 09:48:21 +01:00
7dde07b5ab fix: update release note generation to include commit hash and author 2025-02-23 09:48:15 +01:00
33a5406248 fix: remove commented test line from platformio.ini 2025-02-23 09:47:18 +01:00
b016a31ff0 docs: update changelog and header for version v1.3.67
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m37s
2025-02-23 09:18:57 +01:00
19bc4927e4 docs: update webpages for version v1.3.67 2025-02-23 09:18:57 +01:00
cd55cb86ba ci: update release note generation to use the latest tag 2025-02-23 09:18:52 +01:00
8ab16b351b docs: update changelog and header for version v1.3.66
All checks were successful
Release Workflow / detect-provider (push) Successful in 6s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m52s
2025-02-23 09:02:44 +01:00
400a37d3ac docs: update webpages for version v1.3.66 2025-02-23 09:02:44 +01:00
eb4f809435 ci: remove redundant git fetch for tags in release note generation 2025-02-23 09:02:40 +01:00
1148947b8e docs: update changelog and header for version v1.3.65
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 3m18s
2025-02-22 20:49:15 +01:00
3b01336999 docs: update webpages for version v1.3.65 2025-02-22 20:49:15 +01:00
44614b58dc ci: improve release note generation by fetching tags and sorting unique commits 2025-02-22 20:49:10 +01:00
ed8d618272 docs: update changelog and header for version v1.3.64
All checks were successful
Release Workflow / detect-provider (push) Successful in 4s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 3m6s
2025-02-22 20:37:49 +01:00
cd2ac54e98 docs: update webpages for version v1.3.64 2025-02-22 20:37:49 +01:00
92f675b24c style: remove unnecessary closing tags from header.html 2025-02-22 20:37:46 +01:00
c342877558 docs: update changelog and header for version v1.3.63
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m40s
2025-02-22 20:33:55 +01:00
f5743cbd7b docs: update webpages for version v1.3.63 2025-02-22 20:33:55 +01:00
8a62597705 style: update release note generation for initial release handling 2025-02-22 20:33:51 +01:00
374721d1e5 style: update update-form background and add glass border effect 2025-02-22 20:30:30 +01:00
ea6f708c6e docs: update changelog and header for version v1.3.62
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m37s
2025-02-22 20:25:08 +01:00
78169dfdb1 docs: update webpages for version v1.3.62 2025-02-22 20:25:08 +01:00
074bfb658d style: update background colors and improve layout for update sections 2025-02-22 20:25:00 +01:00
989076e794 docs: update changelog and header for version v1.3.61
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m47s
2025-02-22 20:14:35 +01:00
aa0d056d10 docs: update webpages for version v1.3.61 2025-02-22 20:14:35 +01:00
cd619b8f2a feat: update release notes generation to use previous tag for changes 2025-02-22 20:13:15 +01:00
6d8358cbb9 docs: update changelog and header for version v1.3.60
All checks were successful
Release Workflow / detect-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m48s
2025-02-22 20:06:25 +01:00
1f3a67634f docs: update webpages for version v1.3.60 2025-02-22 20:06:25 +01:00
09969b644e feat: remove automatic git push from changelog update script 2025-02-22 20:06:20 +01:00
deb7abd102 feat: implement release notes generation with categorized changes since last tag 2025-02-22 20:00:42 +01:00
1b059c35f1 docs: update changelog for version 1.3.59
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m37s
2025-02-22 19:57:13 +01:00
e098d71f6f docs: update webpages for version v1.3.59 2025-02-22 19:57:13 +01:00
4b25b72b2e feat: implement enhanced update progress handling and WebSocket notifications 2025-02-22 19:50:12 +01:00
5c59016f94 feat: improve update progress reporting and enhance WebSocket notifications 2025-02-22 18:49:45 +01:00
d2da501b94 feat: enhance update progress handling and add WebSocket closure notification 2025-02-22 18:19:21 +01:00
4135073623 feat: implement WebSocket for update progress and enhance update response handling 2025-02-22 18:12:27 +01:00
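The update-progress WebSocket from 4135073623 could be sketched as follows with ESPAsyncWebServer; the endpoint path and JSON shape are assumptions for illustration.

```cpp
// Sketch only: pushes OTA progress over a WebSocket using ESPAsyncWebServer.
// The "/ws" path and the message format are illustrative.
#include <Arduino.h>
#include <ESPAsyncWebServer.h>

AsyncWebServer server(80);
AsyncWebSocket ws("/ws");

void setupUpdateSocket() {
    server.addHandler(&ws);
    server.begin();
}

void sendUpdateProgress(size_t done, size_t total) {
    if (total == 0) return;
    int percent = (int)((done * 100) / total);
    // Broadcast to every connected client watching the upgrade page.
    ws.textAll(String("{\"type\":\"updateProgress\",\"progress\":") + percent + "}");
}
```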
fe7b57fe0e docs: update changelog for version 1.3.58
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m25s
2025-02-22 17:59:59 +01:00
c1ae6b7295 docs: update webpages for version v1.3.58 2025-02-22 17:59:59 +01:00
9eee89fac7 feat: implement backup and restore functionality for Bambu credentials and Spoolman URL 2025-02-22 17:58:20 +01:00
8c5e7e26ac docs: update upgrade page message and improve progress display logic 2025-02-22 17:53:51 +01:00
7b52066378 docs: update changelog for version 1.3.57
All checks were successful
Release Workflow / detect-provider (push) Successful in 5s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m59s
2025-02-22 17:36:09 +01:00
d5afa38ded docs: update webpages for version v1.3.57 2025-02-22 17:36:09 +01:00
cf50baba2d docs: update header title to 'Filament Management Tool' in multiple HTML files 2025-02-22 17:36:02 +01:00
aa9e7da94b docs: update changelog for version 1.3.56
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m41s
2025-02-22 17:31:33 +01:00
71cd3ba4fc docs: update webpages for version v1.3.56 2025-02-22 17:31:33 +01:00
73e240e879 docs: update header title and improve SPIFFS update error handling 2025-02-22 17:31:28 +01:00
0d34e1d718 docs: clarify comments in Gitea and GitHub release workflows 2025-02-22 17:18:11 +01:00
84cc8beb9b docs: update changelog for version 1.3.55
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m38s
2025-02-22 17:00:00 +01:00
fd70e3179d docs: update webpages for version v1.3.55 2025-02-22 16:59:59 +01:00
c553640ad8 docs: update component descriptions in README files 2025-02-22 16:59:56 +01:00
807eca3c43 docs: update changelog for version 1.3.54
Some checks failed
Release Workflow / detect-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m12s
2025-02-22 16:47:52 +01:00
b52730bf67 docs: update webpages for version v1.3.54 2025-02-22 16:47:52 +01:00
9a59b91e88 workflow: update SPIFFS binary creation to exclude header 2025-02-22 16:47:27 +01:00
a5af4013d8 docs: update changelog for version 1.3.53
All checks were successful
Release Workflow / detect-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m34s
2025-02-22 16:22:30 +01:00
e54ce58ec4 version: update to version 1.3.53 2025-02-22 16:22:27 +01:00
142eafd232 docs: update changelog for version 1.3.51 2025-02-22 16:22:04 +01:00
63ab9e0993 docs: update changelog for version 1.3.51 2025-02-22 16:21:54 +01:00
aaa5506d40 workflow: update SPIFFS binary magic byte and revert version to 1.3.51 2025-02-22 16:21:51 +01:00
8037adc045 docs: update changelog for version 1.3.52
All checks were successful
Release Workflow / detect-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m20s
2025-02-22 16:09:46 +01:00
6e7c728cd8 docs: update webpages for version v1.3.52 2025-02-22 16:09:46 +01:00
3fe8271344 workflow: update SPIFFS binary creation to use correct chip revision (0xEB for Rev 3) 2025-02-22 16:09:41 +01:00
f2bc6eab92 docs: update changelog for version 1.3.51
All checks were successful
Release Workflow / detect-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m35s
2025-02-22 15:50:55 +01:00
37df492339 docs: update webpages for version v1.3.51 2025-02-22 15:50:54 +01:00
c4b425403f config: update platformio.ini to specify correct chip revision and remove unused dependencies 2025-02-22 15:50:49 +01:00
73244689dd docs: update changelog for version 1.3.50
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m31s
2025-02-22 15:16:53 +01:00
27296104d2 docs: update webpages for version v1.3.50 2025-02-22 15:16:53 +01:00
5f99773897 docs: update changelog for version 1.3.49
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m43s
2025-02-22 14:54:23 +01:00
7416285fb9 docs: update webpages for version v1.3.49 2025-02-22 14:54:23 +01:00
85928e358d workflow: update SPIFFS binary header to use correct chip revision 2025-02-22 14:54:19 +01:00
092b4fd8ec docs: update changelog for version 1.3.48
All checks were successful
Release Workflow / detect-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m52s
2025-02-22 14:31:23 +01:00
399645a2b3 docs: update webpages for version v1.3.48 2025-02-22 14:31:23 +01:00
164bb241b7 workflow: update SPIFFS binary header for firmware release 2025-02-22 14:29:33 +01:00
e564c6eeae docs: update changelog for version 1.3.47
All checks were successful
Release Workflow / detect-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m37s
2025-02-22 14:04:37 +01:00
4288dd0cd4 docs: update webpages for version v1.3.47 2025-02-22 14:04:37 +01:00
37d43b2d7d workflow: optimize firmware and SPIFFS update process, improve progress handling and logging 2025-02-22 14:04:34 +01:00
adb354ddcd docs: update changelog for version 1.3.46
All checks were successful
Release Workflow / detect-provider (push) Successful in 4s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m39s
2025-02-22 13:55:42 +01:00
15d5e5edce docs: update webpages for version v1.3.46 2025-02-22 13:55:42 +01:00
c6edf30245 docs: update changelog for version 1.3.45
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m44s
2025-02-22 12:21:35 +01:00
65ac207f36 docs: update webpages for version v1.3.45 2025-02-22 12:21:35 +01:00
698abbd669 workflow: update SPIFFS binary creation to include minimal header and adjust update validation logic 2025-02-22 12:21:33 +01:00
04a7c2cce3 docs: update changelog for version 1.3.44
All checks were successful
Release Workflow / detect-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m48s
2025-02-22 12:11:30 +01:00
78f54b72fd docs: update webpages for version v1.3.44 2025-02-22 12:11:30 +01:00
f4eee9af91 docs: update header title to 'Hollo Lollo Trollo' 2025-02-22 12:11:26 +01:00
cad14b3bc2 docs: update header title to 'Filament Management Tool' and improve update response messages 2025-02-22 12:10:57 +01:00
312f75fc5f docs: update changelog for version 1.3.43
All checks were successful
Release Workflow / detect-provider (push) Successful in 7s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m28s
2025-02-22 12:03:29 +01:00
b8714e93e2 docs: update webpages for version v1.3.43 2025-02-22 12:03:28 +01:00
cd9da0fe4f docs: update header title to 'Hollo Lollo Trollo' 2025-02-22 12:03:25 +01:00
2b620ef5ed docs: update changelog for version 1.3.42
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m27s
2025-02-22 11:52:21 +01:00
3f63a01b8b docs: update webpages for version v1.3.42 2025-02-22 11:52:21 +01:00
22bb16b6a4 fix: correct path for SPIFFS binary creation in Gitea release workflow 2025-02-22 11:52:19 +01:00
53ceee7816 docs: update changelog for version 1.3.41
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m32s
2025-02-22 11:48:12 +01:00
d48b002806 docs: update webpages for version v1.3.41 2025-02-22 11:48:12 +01:00
dd905b6c6e fix: remove redundant buffer size setting in NFC initialization 2025-02-22 11:47:35 +01:00
77b9eda110 fix: update SPIFFS binary creation and enhance NFC buffer size 2025-02-22 11:46:17 +01:00
32a6e9dcd3 docs: update changelog for version 1.3.40
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m30s
2025-02-22 11:31:19 +01:00
6cd5539e60 docs: update webpages for version v1.3.40 2025-02-22 11:31:19 +01:00
903b697912 fix: update SPIFFS binary header and enhance WebSocket error handling 2025-02-22 11:31:15 +01:00
72c2fb70c2 docs: update changelog for version 1.3.39
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m44s
2025-02-22 11:26:27 +01:00
f2f3f0ab9f docs: update webpages for version v1.3.39 2025-02-22 11:26:27 +01:00
c07692c218 workflow: update SPIFFS binary creation to set chip version to max supported 2025-02-22 11:26:24 +01:00
a184903b66 docs: update changelog for version 1.3.38
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m58s
2025-02-22 11:21:08 +01:00
af1640383d docs: update webpages for version v1.3.38 2025-02-22 11:21:08 +01:00
c00e54b145 workflow: update SPIFFS binary creation with minimal ESP32 image header 2025-02-22 11:20:41 +01:00
f6c92c686b docs: update changelog for version 1.3.37
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m46s
2025-02-22 11:13:40 +01:00
b8db01529b docs: update webpages for version v1.3.37 2025-02-22 11:13:40 +01:00
55db6d76ab workflow: update ESP32-WROOM image header for SPIFFS binary creation 2025-02-22 11:13:07 +01:00
a18749a1ff docs: update changelog for version 1.3.36
All checks were successful
Release Workflow / detect-provider (push) Successful in 4s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m40s
2025-02-22 10:54:13 +01:00
1811fd9159 docs: update webpages for version v1.3.36 2025-02-22 10:54:13 +01:00
b550760427 partition: update SPIFFS binary header and offsets in workflow files 2025-02-22 10:53:50 +01:00
c5033acadc docs: update changelog for version 1.3.35
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m50s
2025-02-22 10:48:53 +01:00
7de4189c83 docs: update webpages for version v1.3.35 2025-02-22 10:48:53 +01:00
f43f2a15b2 partition: update SPIFFS binary header and offsets in workflow files 2025-02-22 10:48:44 +01:00
858192c6cb docs: update changelog for version 1.3.34
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m46s
2025-02-22 10:40:03 +01:00
e2bd39922d docs: update webpages for version v1.3.34 2025-02-22 10:40:03 +01:00
c86cc7173e partition: update SPIFFS binary creation and offsets in workflow files 2025-02-22 10:38:34 +01:00
16362e66a3 docs: update changelog for version 1.3.33
All checks were successful
Release Workflow / detect-provider (push) Successful in 5s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m46s
2025-02-22 10:09:03 +01:00
48d9ba8f71 docs: update webpages for version v1.3.33 2025-02-22 10:09:03 +01:00
e2bea5a0c3 partition: update spiffs offset and app sizes in partition files 2025-02-22 10:08:47 +01:00
3e11f65188 partition: update spiffs offset in partition files 2025-02-22 10:06:43 +01:00
df59c42c8a partition: update app sizes and offsets in partitions.csv 2025-02-22 10:04:11 +01:00
abe1d7c930 docs: update changelog for version 1.3.32
All checks were successful
Release Workflow / detect-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m31s
2025-02-22 09:52:44 +01:00
ca614c3cc4 docs: update webpages for version v1.3.32 2025-02-22 09:52:44 +01:00
5153374093 workflow: update magic byte for SPIFFS binary creation 2025-02-22 09:52:39 +01:00
66db4d7a85 docs: update changelog for version 1.3.31
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m58s
2025-02-22 09:47:27 +01:00
90e71922b1 docs: update webpages for version v1.3.31 2025-02-22 09:47:27 +01:00
e8e5c0bd3d workflow: remove unnecessary data and SPIFFS change checks from release workflows 2025-02-22 09:47:15 +01:00
7e53e1ccb0 docs: update changelog for version 1.3.30
Some checks failed
Release Workflow / detect-provider (push) Successful in 4s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m46s
2025-02-22 09:38:14 +01:00
e49e812b13 docs: update webpages for version v1.3.30 2025-02-22 09:38:14 +01:00
b1e0fcfadf workflow: update Gitea and GitHub release workflows to create SPIFFS binary with magic byte 2025-02-22 09:37:59 +01:00
31ef3ac8df docs: update changelog for version 1.3.29
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m45s
2025-02-21 23:44:36 +01:00
8cf3f87c89 docs: update webpages for version v1.3.29 2025-02-21 23:44:35 +01:00
c446188311 workflow: update Gitea release workflow to create release before file uploads 2025-02-21 23:44:30 +01:00
8e2a8d597d docs: update changelog for version 1.3.28
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 3m25s
2025-02-21 23:37:15 +01:00
7d3b1c34f6 docs: update webpages for version v1.3.28 2025-02-21 23:37:15 +01:00
b95c61118b workflow: update Gitea release workflow to use file uploads with curl 2025-02-21 23:37:12 +01:00
0dfb158959 docs: update changelog for version 1.3.27
Some checks failed
Release Workflow / detect-provider (push) Successful in 7s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m29s
2025-02-21 23:23:19 +01:00
75c774bb24 docs: update webpages for version v1.3.27 2025-02-21 23:23:19 +01:00
cf80adb43c workflow: add GITEA_TOKEN secret for Gitea API access in release workflows 2025-02-21 23:23:16 +01:00
36d50cbe7f docs: update changelog for version 1.3.26
Some checks failed
Release Workflow / detect-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m51s
2025-02-21 23:14:34 +01:00
9148d207c7 docs: update webpages for version v1.3.26 2025-02-21 23:14:34 +01:00
5f6fef9448 workflow: improve Gitea release workflow with enhanced error handling and debug outputs 2025-02-21 23:14:30 +01:00
946202de0e docs: update changelog for version 1.3.25
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m33s
2025-02-21 23:07:41 +01:00
41a3717347 docs: update webpages for version v1.3.25 2025-02-21 23:07:41 +01:00
255c820439 workflow: update Gitea release workflow to include RUNNER_NAME and improve error handling 2025-02-21 23:07:38 +01:00
aef3ba77ba docs: update changelog for version 1.3.24
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m33s
2025-02-21 22:59:00 +01:00
2592c3a497 docs: update webpages for version v1.3.24 2025-02-21 22:59:00 +01:00
a48c5dfef0 workflow: rename update files to upgrade in GitHub release workflow 2025-02-21 22:58:54 +01:00
00554d0b09 workflow: update existing changelog entries for already-present versions 2025-02-21 22:58:38 +01:00
05a91cd8d8 workflow: improve Gitea release process with dynamic URL determination and debug outputs 2025-02-21 22:58:24 +01:00
7cf113eaff docs: update changelog for version 1.3.23
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m57s
2025-02-21 22:48:04 +01:00
44d27adab2 docs: update webpages for version v1.3.23 2025-02-21 22:48:04 +01:00
e0a2dff5fe workflow: enhance Gitea release process with debug outputs and API connection checks 2025-02-21 22:47:30 +01:00
519a089684 docs: update changelog for version 1.3.22
Some checks failed
Release Workflow / detect-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m27s
2025-02-21 22:39:28 +01:00
ef053bb2b6 docs: update webpages for version v1.3.22 2025-02-21 22:39:28 +01:00
0a91c7b269 workflow: improve Gitea release process with additional environment variables and error handling 2025-02-21 22:39:24 +01:00
875d9d2b70 docs: update changelog for version 1.3.21
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m53s
2025-02-21 22:34:28 +01:00
52840b9b0b docs: update webpages for version v1.3.21 2025-02-21 22:34:28 +01:00
da1fc7678f workflow: enhance Gitea release process with API integration and token management 2025-02-21 22:34:18 +01:00
982bb5aa21 docs: update changelog for version 1.3.20
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m44s
2025-02-21 22:22:10 +01:00
007737db13 docs: update webpages for version v1.3.20 2025-02-21 22:22:10 +01:00
17e5949201 workflow: enable git tagging and pushing for Gitea releases 2025-02-21 22:22:06 +01:00
6a57186091 docs: update changelog for version 1.3.19
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m42s
2025-02-21 22:17:32 +01:00
babd3f47a0 docs: update webpages for version v1.3.19 2025-02-21 22:17:32 +01:00
5372fe10fe workflow: enable git push for version tagging in Gitea release 2025-02-21 22:17:22 +01:00
e0c9d90892 docs: update changelog for version 1.3.18
All checks were successful
Release Workflow / detect-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m49s
2025-02-21 22:11:13 +01:00
e5f5d1961b docs: update webpages for version v1.3.18 2025-02-21 22:11:13 +01:00
31a960fb9e docs: add note about filaman_full.bin installation in changelog 2025-02-21 22:11:07 +01:00
3c2e75b77a docs: update changelog for version 1.3.18 and enhance update script for existing entries 2025-02-21 22:10:32 +01:00
367143c456 docs: update changelog for version 1.3.17
All checks were successful
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m39s
2025-02-21 21:54:28 +01:00
fbde4b764f docs: update webpages for version v1.3.17 2025-02-21 21:54:28 +01:00
e57f4216d4 ci: comment out git tag and push commands in gitea-release workflow 2025-02-21 21:54:24 +01:00
b8beb992d6 config: update platformio.ini for version 1.3.16
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m32s
2025-02-21 21:49:48 +01:00
4234b2254e docs: update changelog for version 1.3.16 2025-02-21 21:44:40 +01:00
b8faf79163 docs: update webpages for version v1.3.16 2025-02-21 21:44:40 +01:00
d35afaff46 ci: update filenames for firmware and website binaries in release workflows 2025-02-21 21:44:33 +01:00
a8a00372b5 docs: update changelog for version 1.3.15 2025-02-21 21:34:36 +01:00
72f4eab588 docs: update webpages for version v1.3.15 2025-02-21 21:34:36 +01:00
afa4eddc00 ci: fix missing 'fi' in GitHub release workflow script 2025-02-21 21:34:32 +01:00
b0888e7e63 docs: update changelog for version 1.3.14
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m32s
2025-02-21 21:25:29 +01:00
238a84a8a2 docs: update webpages for version v1.3.14 2025-02-21 21:25:29 +01:00
59cc00ca13 ci: update GitHub release workflow to improve file upload handling 2025-02-21 21:25:16 +01:00
ab083f5f57 docs: update changelog for version 1.3.13
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m39s
2025-02-21 21:17:06 +01:00
c111573206 docs: update webpages for version v1.3.13 2025-02-21 21:17:06 +01:00
52b2494e52 ci: update GitHub release workflow to use RELEASE_TOKEN for improved security 2025-02-21 21:17:02 +01:00
069ec2d7a1 docs: update changelog for version 1.3.12
Some checks failed
Release Workflow / detect-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m45s
2025-02-21 21:13:40 +01:00
94e35ae86e docs: update webpages for version v1.3.12 2025-02-21 21:13:40 +01:00
d71e3d8184 ci: enhance GitHub release workflow with token handling and file upload improvements 2025-02-21 21:13:36 +01:00
bb166aa29f docs: update changelog for version 1.3.11
Some checks failed
Release Workflow / detect-provider (push) Successful in 4s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m55s
2025-02-21 21:07:19 +01:00
0d718023f8 docs: update webpages for version v1.3.11 2025-02-21 21:07:19 +01:00
b16781043f ci: refactor Gitea release workflow by simplifying input handling and removing unnecessary checks 2025-02-21 21:07:15 +01:00
dff184ff25 docs: update changelog for version 1.3.10
All checks were successful
Release Workflow / route (push) Successful in 7s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m40s
2025-02-21 21:03:56 +01:00
0ce281221d docs: update webpages for version v1.3.10 2025-02-21 21:03:56 +01:00
bc26c160e8 ci: simplify GitHub release workflow by removing provider verification step 2025-02-21 21:03:52 +01:00
c25f41db75 docs: update changelog for version 1.3.9
All checks were successful
Release Workflow / route (push) Successful in 16s
Release Workflow / verify-provider (push) Successful in 4s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m51s
2025-02-21 21:00:21 +01:00
e107c17f50 docs: update webpages for version v1.3.9 2025-02-21 21:00:21 +01:00
85b9d03ebd ci: comment out permissions for GitHub release workflow 2025-02-21 20:59:37 +01:00
17b188626a docs: update changelog for version 1.3.8
All checks were successful
Release Workflow / route (push) Successful in 7s
Release Workflow / verify-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m35s
2025-02-21 20:56:07 +01:00
a534c5f872 docs: update webpages for version v1.3.8 2025-02-21 20:56:07 +01:00
93f7582790 feat: add Gitea and GitHub release workflows 2025-02-21 20:56:02 +01:00
46acc63756 docs: update changelog for version 1.3.7
Some checks failed
Release Workflow / route (push) Successful in 7s
Release Workflow / verify-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Has been cancelled
2025-02-21 20:54:16 +01:00
67a9e1bdce docs: update webpages for version v1.3.7 2025-02-21 20:54:16 +01:00
2b75b64b4a feat: add GitHub and Gitea release workflows 2025-02-21 20:54:11 +01:00
8d003295e7 docs: update changelog for version 1.3.6
Some checks failed
Release Workflow / route (push) Successful in 7s
Release Workflow / verify-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m46s
2025-02-21 20:45:33 +01:00
f89500946a docs: update webpages for version v1.3.6 2025-02-21 20:45:33 +01:00
14e745ff06 fix: update GitHub token reference and correct file path in release workflow 2025-02-21 20:45:25 +01:00
d058397fa2 docs: update changelog for version 1.3.5
Some checks failed
Release Workflow / route (push) Successful in 6s
Release Workflow / verify-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m24s
2025-02-21 20:22:03 +01:00
622f5403a7 docs: update webpages for version v1.3.5 2025-02-21 20:22:03 +01:00
92b78a86dd feat: enhance release workflow to support Gitea alongside GitHub 2025-02-21 20:21:57 +01:00
ec399390e8 docs: update changelog for version 1.3.4
Some checks failed
Release Workflow / route (push) Successful in 7s
Release Workflow / verify-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m22s
2025-02-21 20:04:05 +01:00
909c4e9b5e docs: update webpages for version v1.3.4 2025-02-21 20:04:05 +01:00
f4b20bfffd Merge branch 'old' 2025-02-21 20:03:45 +01:00
78464215a9 feat: add Gitea and GitHub release workflows 2025-02-21 20:03:21 +01:00
4365f0463a docs: update changelog for version 1.3.3
Some checks failed
Release Workflow / route (push) Successful in 7s
Release Workflow / verify-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m42s
2025-02-21 19:48:48 +01:00
727bc0e760 docs: update webpages for version v1.3.3 2025-02-21 19:48:48 +01:00
04604013eb fix: correct directory path in GitHub workflows for SPIFFS binary 2025-02-21 19:48:45 +01:00
cf5fc5f6f1 docs: update changelog for version 1.3.2
Some checks failed
Release Workflow / route (push) Successful in 6s
Release Workflow / verify-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m35s
2025-02-21 19:33:26 +01:00
945a4ccce6 docs: update webpages for version v1.3.2 2025-02-21 19:33:26 +01:00
7cf9e2d145 fix: add missing conditional exit in release workflow 2025-02-21 19:33:24 +01:00
9db4e338ea docs: update changelog for version 1.3.1
Some checks failed
Release Workflow / route (push) Successful in 6s
Release Workflow / verify-provider (push) Failing after 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Has been skipped
2025-02-21 19:10:52 +01:00
dea6ca2c66 docs: update webpages for version v1.3.1 2025-02-21 19:10:52 +01:00
e224e72e41 feat: enhance GitHub and Gitea release workflows with Python setup and binary preparation 2025-02-21 19:10:48 +01:00
306c517da7 docs: update changelog for version 1.3.0
All checks were successful
Release Workflow / route (push) Successful in 6s
Release Workflow / verify-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m40s
2025-02-21 18:58:35 +01:00
0337bbabe0 bump version to 1.3.0 in platformio.ini 2025-02-21 18:58:32 +01:00
bde14e50e0 docs: update changelog for version 1.2.102
All checks were successful
Release Workflow / route (push) Successful in 7s
Release Workflow / verify-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 3m1s
2025-02-21 18:28:20 +01:00
9c656a9bd0 docs: update webpages for version v1.2.102 2025-02-21 18:28:20 +01:00
eae552017d fix: adjust bootloader offset in binary merge for Gitea and GitHub workflows 2025-02-21 18:28:16 +01:00
a77918da41 docs: update changelog for version 1.2.101
Some checks failed
Release Workflow / route (push) Successful in 6s
Release Workflow / verify-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m38s
2025-02-21 18:07:47 +01:00
262dad38a6 docs: update webpages for version v1.2.101 2025-02-21 18:07:46 +01:00
cfc9f103cf refactor: always create SPIFFS binary in release workflows 2025-02-21 18:07:43 +01:00
0117302672 refactor: migrate calibration value storage from EEPROM to NVS 2025-02-21 18:04:05 +01:00
1de283b62f docs: update changelog for version 1.2.100
All checks were successful
Release Workflow / route (push) Successful in 7s
Release Workflow / verify-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m41s
2025-02-21 17:49:32 +01:00
f1eb78eb38 docs: update webpages for version v1.2.100 2025-02-21 17:49:32 +01:00
8a65b86475 refactor: remove OTA handling and JSON backup/restore functions 2025-02-21 17:47:25 +01:00
a3aef819c8 docs: update changelog for version 1.2.99
Some checks failed
Release Workflow / route (push) Successful in 8s
Release Workflow / verify-provider (push) Successful in 4s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Has been cancelled
2025-02-21 17:42:40 +01:00
a62b5ec933 docs: update webpages for version v1.2.99 2025-02-21 17:42:40 +01:00
1a8cf7a58f docs: add SPIFFS change detection and binary copying to release workflows 2025-02-21 17:42:04 +01:00
b0b3d41c84 docs: add backup and restore functions for JSON configurations during OTA updates 2025-02-21 17:38:20 +01:00
38b68aecfc docs: update JSON field type checks from JsonObject to String for improved validation 2025-02-21 17:11:48 +01:00
4992f5f433 docs: update JSON handling in API and Bambu modules for improved object management 2025-02-21 16:53:45 +01:00
5cbbe1d231 docs: update platformio.ini dependencies and improve version handling in website.cpp 2025-02-21 16:35:19 +01:00
9b29460d64 docs: update Cache-Control header to reflect a 1-week duration 2025-02-21 15:15:01 +01:00
dd14d475b7 docs: remove version definition from website.cpp 2025-02-21 15:14:36 +01:00
9e6cd3b451 docs: optimize WiFi and WebSocket settings; enhance TCP/IP stack configuration 2025-02-21 15:07:16 +01:00
c1be6ca582 docs: update upgrade page title and heading; adjust cache control duration 2025-02-21 14:35:12 +01:00
265ff0c787 docs: update changelog for version 1.2.98
All checks were successful
Release Workflow / route (push) Successful in 6s
Release Workflow / verify-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m28s
2025-02-21 13:52:27 +01:00
67eca82ac5 docs: update webpages for version v1.2.98 2025-02-21 13:52:27 +01:00
568db90db0 docs: update changelog for version 1.2.97
All checks were successful
Release Workflow / route (push) Successful in 7s
Release Workflow / verify-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m37s
2025-02-21 12:34:25 +01:00
2dfd53d64a docs: update webpages for version v1.2.97 2025-02-21 12:34:25 +01:00
262a2fcbd4 refactor: streamline Gitea and GitHub release workflows to check for data changes and update binary handling 2025-02-21 12:34:20 +01:00
3770de15d3 docs: update changelog for version 1.2.96
Some checks failed
Release Workflow / route (push) Successful in 7s
Release Workflow / verify-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m45s
2025-02-21 12:09:40 +01:00
75a74ec9bd docs: update webpages for version v1.2.96 2025-02-21 12:09:40 +01:00
979adcbb14 feat: add SPIFFS build step to Gitea and GitHub release workflows 2025-02-21 12:09:36 +01:00
2dd563a178 Merge branch 'temp-branch'
Some checks failed
Release Workflow / route (push) Successful in 7s
Release Workflow / verify-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 3m3s
2025-02-21 12:03:12 +01:00
767c217c25 docs: update changelog for version 1.2.95 2025-02-21 11:58:00 +01:00
c07689e15a docs: update webpages for version v1.2.95 2025-02-21 11:58:00 +01:00
d6ca69fd19 chore: bump version to 1.2.94 in platformio.ini 2025-02-21 11:57:57 +01:00
60553255b8 feat: enhance update process with separate forms for firmware and webpage uploads, including validation and improved UI 2025-02-21 11:57:19 +01:00
8199b283c0 feat: add API endpoint for version retrieval and update HTML to display dynamic version 2025-02-21 11:53:59 +01:00
d774ce0d09 docs: update changelog for version 1.2.94
All checks were successful
Release Workflow / route (push) Successful in 7s
Release Workflow / verify-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m56s
2025-02-21 10:46:05 +01:00
4a44eda5c4 docs: update webpages for version v1.2.94 2025-02-21 10:46:05 +01:00
c43ca20d8d refactor: remove unused flash size parameters in release workflows 2025-02-21 10:46:00 +01:00
21ba35cd19 docs: update changelog for version 1.2.93
All checks were successful
Release Workflow / route (push) Successful in 6s
Release Workflow / verify-provider (push) Successful in 4s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m47s
2025-02-21 10:35:58 +01:00
62273320e9 docs: update webpages for version v1.2.93 2025-02-21 10:35:58 +01:00
b8e4af4e4d refactor: update SPIFFS initialization and partition sizes; enhance WiFi setup 2025-02-21 10:35:52 +01:00
513d74fdb0 docs: update changelog for version 1.2.92
All checks were successful
Release Workflow / route (push) Successful in 8s
Release Workflow / verify-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m43s
2025-02-21 09:25:00 +01:00
df884e7668 docs: update webpages for version v1.2.92 2025-02-21 09:25:00 +01:00
8182b5f684 feat: enhance OTA upload handling with chunk validation and timeout checks 2025-02-21 09:24:54 +01:00
4477537cec docs: update changelog for version 1.2.91
All checks were successful
Release Workflow / route (push) Successful in 7s
Release Workflow / verify-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m57s
2025-02-20 20:40:20 +01:00
44ba7df34f docs: update webpages for version v1.2.91 2025-02-20 20:40:20 +01:00
54744a06dd fix: add file existence checks before uploading binaries in release workflows 2025-02-20 20:40:14 +01:00
cefa81030b docs: update changelog for version 1.2.90
Some checks failed
Release Workflow / route (push) Successful in 7s
Release Workflow / verify-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m48s
2025-02-20 20:33:25 +01:00
62052927d2 docs: update webpages for version v1.2.90 2025-02-20 20:33:25 +01:00
933a84f8ce refactor: update Gitea and GitHub release workflows to include SPIFFS directory creation and firmware copying 2025-02-20 20:33:19 +01:00
db3c19ff2e docs: update changelog for version 1.2.89
Some checks failed
Release Workflow / route (push) Successful in 6s
Release Workflow / verify-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m46s
2025-02-20 20:20:09 +01:00
ae9eb4cc6b docs: update webpages for version v1.2.89 2025-02-20 20:20:09 +01:00
89d40832c5 refactor: enhance OTA update process with SPIFFS support and improved error handling 2025-02-20 20:19:24 +01:00
c161216c04 docs: update changelog for version 1.2.88
All checks were successful
Release Workflow / route (push) Successful in 6s
Release Workflow / verify-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m42s
2025-02-20 20:05:38 +01:00
6a016b6ac4 docs: update webpages for version v1.2.88 2025-02-20 20:05:38 +01:00
44dd485e17 fix: improve OTA update handling and logging for better error reporting 2025-02-20 20:04:58 +01:00
d41f0f3e67 docs: update changelog for version 1.2.87
All checks were successful
Release Workflow / route (push) Successful in 7s
Release Workflow / verify-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m30s
2025-02-20 19:52:44 +01:00
484058515e docs: update webpages for version v1.2.87 2025-02-20 19:52:44 +01:00
f552b492cf fix: update firmware build process and remove unused OTA environment 2025-02-20 19:52:34 +01:00
c3040b3c29 docs: update changelog for version 1.2.86
All checks were successful
Release Workflow / route (push) Successful in 7s
Release Workflow / verify-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m36s
2025-02-20 19:09:35 +01:00
d7ba67085d docs: update webpages for version v1.2.86 2025-02-20 19:09:35 +01:00
48efb9e21a fix: update SPIFFS offset and size in release workflows and partitions.csv 2025-02-20 19:09:28 +01:00
e983ba6e44 docs: update changelog for version 1.2.85
All checks were successful
Release Workflow / route (push) Successful in 7s
Release Workflow / verify-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m48s
2025-02-20 18:37:57 +01:00
37171d6eca docs: update webpages for version v1.2.85 2025-02-20 18:37:57 +01:00
ebb08a7a66 ci: streamline release workflows by removing unnecessary binary uploads and adding SPIFFS formatting 2025-02-20 18:37:51 +01:00
b5330af351 docs: update changelog for version 1.2.84
All checks were successful
Release Workflow / route (push) Successful in 10s
Release Workflow / verify-provider (push) Successful in 4s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m41s
2025-02-20 18:04:06 +01:00
4919d34484 docs: update webpages for version v1.2.84 2025-02-20 18:04:06 +01:00
2da641d604 ci: update installation steps for xxd to include package list update 2025-02-20 18:04:00 +01:00
ce413965c7 docs: update changelog for version 1.2.83
Some checks failed
Release Workflow / route (push) Successful in 7s
Release Workflow / verify-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 25s
2025-02-20 18:01:22 +01:00
3fafed930e docs: update webpages for version v1.2.83 2025-02-20 18:01:22 +01:00
e1c604ee8d refactor: replace hexdump with xxd for magic byte verification in release workflows 2025-02-20 18:01:16 +01:00
11bbfb7db6 docs: update changelog for version 1.2.82
Some checks failed
Release Workflow / route (push) Successful in 6s
Release Workflow / verify-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m27s
2025-02-20 17:53:00 +01:00
71d8f7ec5a docs: update webpages for version v1.2.82 2025-02-20 17:53:00 +01:00
f4518e4a36 refactor: enhance Gitea and GitHub release workflows to include magic byte handling and improve binary verification 2025-02-20 17:52:18 +01:00
62d9596d08 docs: update changelog for version 1.2.81
All checks were successful
Release Workflow / route (push) Successful in 8s
Release Workflow / verify-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m42s
2025-02-20 17:11:28 +01:00
e27e95d291 docs: update webpages for version v1.2.81 2025-02-20 17:11:28 +01:00
b7651ad50d refactor: update Gitea and GitHub release workflows to use esp32dev_ota for building and uploading firmware and SPIFFS binaries 2025-02-20 17:11:22 +01:00
f1937e2977 docs: update changelog for version 1.2.80
All checks were successful
Release Workflow / route (push) Successful in 7s
Release Workflow / verify-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 3m37s
2025-02-20 17:05:53 +01:00
ad5ddf713c docs: update webpages for version v1.2.80 2025-02-20 17:05:52 +01:00
ccb494f843 bump version to 1.2.79 in platformio.ini 2025-02-20 17:05:47 +01:00
17307d8f03 refactor: update Gitea and GitHub release workflows to include new firmware and SPIFFS binaries; fix version display in HTML files to v1.2.78 2025-02-20 17:04:56 +01:00
e5240a9572 docs: update changelog for version 1.2.79
All checks were successful
Release Workflow / route (push) Successful in 12s
Release Workflow / verify-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m45s
2025-02-20 16:25:02 +01:00
06ebf105cf docs: update webpages for version v1.2.79 2025-02-20 16:25:02 +01:00
118e099fc5 refactor: simplify Gitea release workflow by using esptool for binary creation 2025-02-20 16:24:57 +01:00
8edd50f786 docs: update changelog for version 1.2.78
Some checks failed
Release Workflow / route (push) Successful in 8s
Release Workflow / verify-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m44s
2025-02-20 16:15:59 +01:00
b85325a747 docs: update webpages for version v1.2.78 2025-02-20 16:15:59 +01:00
e1e0352beb refactor: streamline Gitea release workflow and update version to 1.2.77 2025-02-20 16:15:48 +01:00
8a93cccfce refactor: update Gitea and GitHub release workflows to improve binary preparation and verification
fix: correct version number in HTML files and platformio.ini to v1.2.76
enhance: streamline OTA update handling by removing unnecessary magic byte checks
2025-02-20 16:14:49 +01:00
c374069f36 docs: update changelog for version 1.2.77
All checks were successful
Release Workflow / route (push) Successful in 7s
Release Workflow / verify-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m33s
2025-02-20 15:53:26 +01:00
59cd7c177d refactor: optimize Gitea release workflow by simplifying build steps and improving file handling 2025-02-20 15:53:22 +01:00
45088b5838 docs: update webpages for version v1.2.77 2025-02-20 15:53:07 +01:00
1b9c79b559 docs: update changelog for version 1.2.76
Some checks failed
Release Workflow / route (push) Successful in 7s
Release Workflow / verify-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 2m48s
2025-02-20 15:42:44 +01:00
37e1e861d3 docs: update webpages for version v1.2.76 2025-02-20 15:42:44 +01:00
cce39319d9 refactor: streamline Gitea release workflow and remove obsolete OTA data initialization script 2025-02-20 15:41:14 +01:00
6391054c23 feat: enhance OTA update process with improved file verification and new OTA data initialization 2025-02-20 15:07:13 +01:00
52cf46d7f8 docs: update changelog for version 1.2.75
All checks were successful
Release Workflow / route (push) Successful in 7s
Release Workflow / verify-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 2m57s
2025-02-20 14:57:17 +01:00
84b05e48ce docs: update webpages for version v1.2.75 2025-02-20 14:57:16 +01:00
5c41d864c1 refactor: simplify OTA update handling by removing unnecessary variables and improving error reporting 2025-02-20 14:56:31 +01:00
5dc3563da6 docs: update changelog for version 1.2.74
All checks were successful
Release Workflow / route (push) Successful in 7s
Release Workflow / verify-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 3m17s
2025-02-20 14:46:12 +01:00
9e1b2943d6 docs: update webpages for version v1.2.74 2025-02-20 14:46:12 +01:00
7b89b04621 refactor: enhance OTA update process with improved handling of full image updates and SPIFFS data 2025-02-20 14:45:34 +01:00
e140f8e003 docs: update changelog for version 1.2.73
All checks were successful
Release Workflow / route (push) Successful in 6s
Release Workflow / verify-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 3m14s
2025-02-20 14:31:44 +01:00
3d0bdde476 docs: update webpages for version v1.2.73 2025-02-20 14:31:44 +01:00
3ac7d6b4f7 refactor: improve OTA update process with enhanced size checks and progress logging 2025-02-20 14:31:10 +01:00
5f52775984 refactor: enhance OTA update process with improved size checks and debugging output 2025-02-20 14:28:11 +01:00
463eaf4b6f docs: update changelog for version 1.2.72
All checks were successful
Release Workflow / route (push) Successful in 13s
Release Workflow / verify-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 3m16s
2025-02-20 14:17:34 +01:00
4bf6b11d3a docs: update webpages for version v1.2.72 2025-02-20 14:17:34 +01:00
b0c4af7c4e refactor: simplify OTA update process by removing unnecessary buffer and adjusting offsets 2025-02-20 14:16:49 +01:00
249e896ea4 docs: update changelog for version 1.2.71
Some checks failed
Release Workflow / route (push) Successful in 6s
Release Workflow / verify-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Has been cancelled
2025-02-20 14:09:21 +01:00
c74f587fff docs: update webpages for version v1.2.71 2025-02-20 14:09:21 +01:00
7a7ee72585 fix: update version number to v1.2.65 in HTML files and platformio.ini; add script to create full binary 2025-02-20 14:08:17 +01:00
3dd5fbc585 docs: update changelog for version 1.2.70
All checks were successful
Release Workflow / route (push) Successful in 7s
Release Workflow / verify-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 3m23s
2025-02-20 12:01:08 +01:00
ed9c1487ed docs: update webpages for version v1.2.70 2025-02-20 12:01:08 +01:00
d8756421a1 fix: add logging to stopAllTasks for better debugging 2025-02-20 12:00:45 +01:00
d92c78f9d0 docs: update changelog for version 1.2.69
All checks were successful
Release Workflow / route (push) Successful in 11s
Release Workflow / verify-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 3m21s
2025-02-20 11:54:38 +01:00
2d19ea745f docs: update webpages for version v1.2.69 2025-02-20 11:54:37 +01:00
13779cc9d7 docs: update changelog for version 1.2.68
Some checks failed
Release Workflow / route (push) Successful in 6s
Release Workflow / verify-provider (push) Successful in 2s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Has been cancelled
2025-02-20 11:53:15 +01:00
b6d5a8a00b docs: update webpages for version v1.2.68 2025-02-20 11:53:15 +01:00
f6319e79f0 fix: update stopAllTasks to suspend RfidReaderTask instead of NfcTask 2025-02-20 11:53:04 +01:00
6f24630a7d feat: add task handles for BambuMqttTask and ScaleTask; rename stopTasks to stopAllTasks 2025-02-20 11:52:36 +01:00
4475d21218 docs: update changelog for version 1.2.67
All checks were successful
Release Workflow / route (push) Successful in 6s
Release Workflow / verify-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 3m6s
2025-02-20 11:42:41 +01:00
01a926a38d feat: improve OTA upload handling and add SPIFFS update support 2025-02-20 11:42:36 +01:00
6b966c02b3 docs: update webpages for version v1.2.67 2025-02-20 11:42:25 +01:00
1450e1ad2e docs: update changelog for version 1.2.66
All checks were successful
Release Workflow / route (push) Successful in 8s
Release Workflow / verify-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 3m5s
2025-02-20 11:27:18 +01:00
3102a6c217 docs: update webpages for version v1.2.66 2025-02-20 11:27:18 +01:00
d5b2b2746d feat: enhance OTA upload to support SPIFFS updates 2025-02-20 11:27:11 +01:00
7e776d4816 docs: update changelog for version 1.2.65
All checks were successful
Release Workflow / route (push) Successful in 7s
Release Workflow / verify-provider (push) Successful in 5s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 3m8s
2025-02-20 11:16:54 +01:00
e84b2973c5 docs: update webpages for version v1.2.65 2025-02-20 11:16:53 +01:00
5793dc1a1f docs: update changelog for version 1.2.64
Some checks failed
Release Workflow / route (push) Successful in 7s
Release Workflow / verify-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Has been cancelled
2025-02-20 11:13:56 +01:00
1732491c48 docs: update webpages for version v1.2.64 2025-02-20 11:13:56 +01:00
0500bb6951 fix: improve error handling in OTA upload process 2025-02-20 11:13:49 +01:00
ef9ef7257a docs: update changelog for version 1.2.63
All checks were successful
Release Workflow / route (push) Successful in 7s
Release Workflow / verify-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Successful in 3m10s
2025-02-20 10:59:17 +01:00
e86fd229dc docs: update webpages for version v1.2.63 2025-02-20 10:59:17 +01:00
b940a166da feat: update version to 1.2.63 2025-02-20 10:59:10 +01:00
c857e16de2 feat: enhance OTA upload handling with magic byte checks for image types
Some checks failed
Release Workflow / route (push) Successful in 7s
Release Workflow / verify-provider (push) Successful in 3s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Failing after 3m4s
2025-02-20 10:53:23 +01:00
51 changed files with 18556 additions and 1118 deletions

.github/workflows/gitea-release.yml (new file)

@@ -0,0 +1,208 @@
name: Gitea Release
on:
workflow_call:
secrets:
GITEA_TOKEN:
description: 'Token für Gitea API-Zugriff'
required: true
outputs:
version:
description: 'The version that was released'
value: ${{ jobs.create-release.outputs.version }}
jobs:
create-release:
runs-on: ubuntu-latest
outputs:
version: ${{ steps.get_version.outputs.VERSION }}
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: '3.x'
- name: Install PlatformIO
run: |
python -m pip install --upgrade pip
pip install --upgrade platformio esptool
- name: Install xxd
run: |
sudo apt-get update
sudo apt-get install xxd
- name: Build Firmware
run: |
VERSION=$(grep '^version = ' platformio.ini | cut -d'"' -f2)
# Build firmware and LittleFS
echo "Building firmware and LittleFS..."
pio run -e esp32dev
pio run -t buildfs
# Copy firmware binary
cp .pio/build/esp32dev/firmware.bin .pio/build/esp32dev/upgrade_filaman_firmware_v${VERSION}.bin
# Create LittleFS binary - direct copy without header
cp .pio/build/esp32dev/littlefs.bin .pio/build/esp32dev/upgrade_filaman_website_v${VERSION}.bin
# Create full binary
(cd .pio/build/esp32dev &&
esptool.py --chip esp32 merge_bin \
--fill-flash-size 4MB \
--flash_mode dio \
--flash_freq 40m \
--flash_size 4MB \
-o filaman_full_${VERSION}.bin \
0x1000 bootloader.bin \
0x8000 partitions.bin \
0x10000 firmware.bin \
0x3D0000 littlefs.bin)
# Verify file sizes
echo "File sizes:"
(cd .pio/build/esp32dev && ls -lh *.bin)
- name: Get version from platformio.ini
id: get_version
run: |
VERSION=$(grep '^version = ' platformio.ini | cut -d'"' -f2)
echo "VERSION=$VERSION" >> $GITHUB_OUTPUT
- name: Generate Release Notes
id: release_notes
run: |
# Get the latest tag
LATEST_TAG=$(git for-each-ref --sort=-creatordate --format '%(refname:short)' refs/tags | sed -n '2p')
if [ -n "$LATEST_TAG" ]; then
echo "CHANGES<<EOF" >> $GITHUB_OUTPUT
echo "Changes since ${LATEST_TAG}:" >> $GITHUB_OUTPUT
echo "" >> $GITHUB_OUTPUT
# Get all commits since last release with commit hash and author
echo "### Added" >> $GITHUB_OUTPUT
git log ${LATEST_TAG}..HEAD --pretty=format:"%h - %s (%an)" | grep -iE '^[a-f0-9]+ - (feat|add|new)' | sed 's/^[a-f0-9]* - feat: /- /' >> $GITHUB_OUTPUT || true
echo "" >> $GITHUB_OUTPUT
echo "### Fixed" >> $GITHUB_OUTPUT
git log ${LATEST_TAG}..HEAD --pretty=format:"%h - %s (%an)" | grep -iE '^[a-f0-9]+ - fix' | sed 's/^[a-f0-9]* - fix: /- /' >> $GITHUB_OUTPUT || true
echo "" >> $GITHUB_OUTPUT
echo "### Changed" >> $GITHUB_OUTPUT
git log ${LATEST_TAG}..HEAD --pretty=format:"%h - %s (%an)" | grep -ivE '^[a-f0-9]+ - (feat|fix|add|new)' | sed 's/^[a-f0-9]* - /- /' >> $GITHUB_OUTPUT || true
echo "EOF" >> $GITHUB_OUTPUT
else
# First release
echo "CHANGES<<EOF" >> $GITHUB_OUTPUT
echo "Initial Release" >> $GITHUB_OUTPUT
echo "" >> $GITHUB_OUTPUT
# Add all commits for initial release
echo "### Added" >> $GITHUB_OUTPUT
git log --pretty=format:"%h - %s (%an)" | grep -iE '^[a-f0-9]+ - (feat|add|new)' | sed 's/^[a-f0-9]* - feat: /- /' >> $GITHUB_OUTPUT || true
echo "" >> $GITHUB_OUTPUT
echo "### Fixed" >> $GITHUB_OUTPUT
git log --pretty=format:"%h - %s (%an)" | grep -iE '^[a-f0-9]+ - fix' | sed 's/^[a-f0-9]* - fix: /- /' >> $GITHUB_OUTPUT || true
echo "" >> $GITHUB_OUTPUT
echo "### Changed" >> $GITHUB_OUTPUT
git log --pretty=format:"%h - %s (%an)" | grep -ivE '^[a-f0-9]+ - (feat|fix|add|new)' | sed 's/^[a-f0-9]* - /- /' >> $GITHUB_OUTPUT || true
echo "EOF" >> $GITHUB_OUTPUT
fi
- name: Determine Gitea URL
id: gitea_url
run: |
echo "Debug Environment:"
echo "GITHUB_SERVER_URL=${GITHUB_SERVER_URL:-not set}"
echo "GITEA_SERVER_URL=${GITEA_SERVER_URL:-not set}"
echo "GITHUB_REPOSITORY=${GITHUB_REPOSITORY:-not set}"
echo "GITEA_REPOSITORY=${GITEA_REPOSITORY:-not set}"
echo "RUNNER_NAME=${RUNNER_NAME:-not set}"
# Set API URL based on environment
if [ -n "${GITEA_ACTIONS}" ] || [ -n "${GITEA_REPOSITORY}" ] || [[ "${RUNNER_NAME}" == *"gitea"* ]]; then
GITEA_API_URL="${GITHUB_SERVER_URL}"
GITEA_REPO=$(echo "${GITHUB_REPOSITORY}" | cut -d'/' -f2)
GITEA_OWNER=$(echo "${GITHUB_REPOSITORY}" | cut -d'/' -f1)
else
echo "Error: This workflow is only for Gitea"
exit 1
fi
echo "GITEA_API_URL=${GITEA_API_URL}" >> $GITHUB_OUTPUT
echo "GITEA_REPO=${GITEA_REPO}" >> $GITHUB_OUTPUT
echo "GITEA_OWNER=${GITEA_OWNER}" >> $GITHUB_OUTPUT
- name: Create Gitea Release
env:
GITEA_TOKEN: ${{ secrets.GITEA_TOKEN }}
GITEA_API_URL: ${{ steps.gitea_url.outputs.GITEA_API_URL }}
GITEA_REPO: ${{ steps.gitea_url.outputs.GITEA_REPO }}
GITEA_OWNER: ${{ steps.gitea_url.outputs.GITEA_OWNER }}
run: |
# Debug Token (nur Länge ausgeben für Sicherheit)
echo "Debug: Token length: ${#GITEA_TOKEN}"
if [ -z "$GITEA_TOKEN" ]; then
echo "Error: GITEA_TOKEN is empty"
exit 1
fi
VERSION=${{ steps.get_version.outputs.VERSION }}
cd .pio/build/esp32dev
# Debug-Ausgaben
echo "Debug: API URL: ${GITEA_API_URL}"
echo "Debug: Repository: ${GITEA_OWNER}/${GITEA_REPO}"
# Erstelle zuerst den Release ohne Dateien
echo "Debug: Creating release..."
RELEASE_DATA="{\"tag_name\":\"v${VERSION}\",\"name\":\"v${VERSION}\",\"body\":\"${{ steps.release_notes.outputs.CHANGES }}\"}"
RELEASE_RESPONSE=$(curl -s -w "\n%{http_code}" \
-X POST \
-H "Authorization: token ${GITEA_TOKEN}" \
-H "Content-Type: application/json" \
-d "${RELEASE_DATA}" \
"${GITEA_API_URL}/api/v1/repos/${GITEA_OWNER}/${GITEA_REPO}/releases")
RELEASE_STATUS=$(echo "$RELEASE_RESPONSE" | tail -n1)
RELEASE_BODY=$(echo "$RELEASE_RESPONSE" | head -n -1)
if [ "$RELEASE_STATUS" != "201" ]; then
echo "Error: Failed to create release"
echo "Response: $RELEASE_BODY"
exit 1
fi
# Extrahiere die Release-ID aus der Antwort
RELEASE_ID=$(echo "$RELEASE_BODY" | grep -o '"id":[0-9]*' | cut -d':' -f2)
# Lade die Dateien einzeln hoch
for file in upgrade_filaman_firmware_v${VERSION}.bin upgrade_filaman_website_v${VERSION}.bin filaman_full_${VERSION}.bin; do
if [ -f "$file" ]; then
echo "Debug: Uploading $file..."
UPLOAD_RESPONSE=$(curl -s -w "\n%{http_code}" \
-X POST \
-H "Authorization: token ${GITEA_TOKEN}" \
-H "Content-Type: application/octet-stream" \
--data-binary @"$file" \
"${GITEA_API_URL}/api/v1/repos/${GITEA_OWNER}/${GITEA_REPO}/releases/${RELEASE_ID}/assets?name=${file}")
UPLOAD_STATUS=$(echo "$UPLOAD_RESPONSE" | tail -n1)
if [ "$UPLOAD_STATUS" != "201" ]; then
echo "Warning: Failed to upload $file"
echo "Response: $(echo "$UPLOAD_RESPONSE" | head -n -1)"
else
echo "Successfully uploaded $file"
fi
fi
done

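The workflow above publishes three artifacts per tag: an OTA firmware image, a web-interface (LittleFS) image, and a merged full-flash image. As a rough sketch of how that merged image can be flashed outside the workflow — the serial port, baud rate and version below are placeholders, not values taken from the repo:

```bash
# Hedged example: flash the merged full image produced by the release workflow.
# Adjust VERSION and the serial port to your setup.
VERSION=1.3.5
pip install esptool
esptool.py --chip esp32 --port /dev/ttyUSB0 --baud 460800 \
  write_flash 0x0 filaman_full_${VERSION}.bin
```

Because the image is merged with `--fill-flash-size 4MB`, it is a complete flash dump and is written at offset 0x0 rather than at the individual bootloader/partition/firmware offsets.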
.github/workflows/github-release.yml (new file)

@@ -0,0 +1,185 @@
name: GitHub Release
on:
workflow_call:
secrets:
RELEASE_TOKEN:
description: 'GitHub token for release creation'
required: true
permissions:
contents: write
jobs:
create-release:
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: '3.x'
- name: Install PlatformIO
run: |
python -m pip install --upgrade pip
pip install --upgrade platformio esptool
- name: Install xxd
run: |
sudo apt-get update
sudo apt-get install xxd
- name: Build Firmware
run: |
VERSION=$(grep '^version = ' platformio.ini | cut -d'"' -f2)
# Always build firmware and LittleFS
echo "Building firmware and LittleFS..."
pio run -e esp32dev
pio run -t buildfs
# Copy firmware binary
cp .pio/build/esp32dev/firmware.bin .pio/build/esp32dev/upgrade_filaman_firmware_v${VERSION}.bin
# Create LittleFS binary - direct copy without header
cp .pio/build/esp32dev/littlefs.bin .pio/build/esp32dev/upgrade_filaman_website_v${VERSION}.bin
# Create full binary (always)
(cd .pio/build/esp32dev &&
esptool.py --chip esp32 merge_bin \
--fill-flash-size 4MB \
--flash_mode dio \
--flash_freq 40m \
--flash_size 4MB \
-o filaman_full_${VERSION}.bin \
0x1000 bootloader.bin \
0x8000 partitions.bin \
0x10000 firmware.bin \
0x3D0000 littlefs.bin)
# Verify file sizes
echo "File sizes:"
(cd .pio/build/esp32dev && ls -lh *.bin)
- name: Get version from platformio.ini
id: get_version
run: |
VERSION=$(grep '^version = ' platformio.ini | cut -d'"' -f2)
echo "VERSION=$VERSION" >> $GITHUB_OUTPUT
- name: Generate Release Notes
id: release_notes
run: |
# Get the latest tag
LATEST_TAG=$(git for-each-ref --sort=-creatordate --format '%(refname:short)' refs/tags | sed -n '2p')
if [ -n "$LATEST_TAG" ]; then
echo "CHANGES<<EOF" >> $GITHUB_OUTPUT
echo "Changes since ${LATEST_TAG}:" >> $GITHUB_OUTPUT
echo "" >> $GITHUB_OUTPUT
# Get all commits since last release with commit hash and author
echo "### Added" >> $GITHUB_OUTPUT
git log ${LATEST_TAG}..HEAD --pretty=format:"%h - %s (%an)" | grep -iE '^[a-f0-9]+ - (feat|add|new)' | sed 's/^[a-f0-9]* - feat: /- /' >> $GITHUB_OUTPUT || true
echo "" >> $GITHUB_OUTPUT
echo "### Fixed" >> $GITHUB_OUTPUT
git log ${LATEST_TAG}..HEAD --pretty=format:"%h - %s (%an)" | grep -iE '^[a-f0-9]+ - fix' | sed 's/^[a-f0-9]* - fix: /- /' >> $GITHUB_OUTPUT || true
echo "" >> $GITHUB_OUTPUT
echo "### Changed" >> $GITHUB_OUTPUT
git log ${LATEST_TAG}..HEAD --pretty=format:"%h - %s (%an)" | grep -ivE '^[a-f0-9]+ - (feat|fix|add|new)' | sed 's/^[a-f0-9]* - /- /' >> $GITHUB_OUTPUT || true
echo "EOF" >> $GITHUB_OUTPUT
else
# First release
echo "CHANGES<<EOF" >> $GITHUB_OUTPUT
echo "Initial Release" >> $GITHUB_OUTPUT
echo "" >> $GITHUB_OUTPUT
# Add all commits for initial release
echo "### Added" >> $GITHUB_OUTPUT
git log --pretty=format:"%h - %s (%an)" | grep -iE '^[a-f0-9]+ - (feat|add|new)' | sed 's/^[a-f0-9]* - feat: /- /' >> $GITHUB_OUTPUT || true
echo "" >> $GITHUB_OUTPUT
echo "### Fixed" >> $GITHUB_OUTPUT
git log --pretty=format:"%h - %s (%an)" | grep -iE '^[a-f0-9]+ - fix' | sed 's/^[a-f0-9]* - fix: /- /' >> $GITHUB_OUTPUT || true
echo "" >> $GITHUB_OUTPUT
echo "### Changed" >> $GITHUB_OUTPUT
git log --pretty=format:"%h - %s (%an)" | grep -ivE '^[a-f0-9]+ - (feat|fix|add|new)' | sed 's/^[a-f0-9]* - /- /' >> $GITHUB_OUTPUT || true
echo "EOF" >> $GITHUB_OUTPUT
fi
- name: Create GitHub Release
env:
GH_TOKEN: ${{ secrets.RELEASE_TOKEN }}
run: |
VERSION=${{ steps.get_version.outputs.VERSION }}
cd .pio/build/esp32dev
# Create release with available files
FILES_TO_UPLOAD=""
# Always add firmware
if [ -f "upgrade_filaman_firmware_v${VERSION}.bin" ]; then
FILES_TO_UPLOAD="$FILES_TO_UPLOAD upgrade_filaman_firmware_v${VERSION}.bin"
fi
# Add LittleFS and full binary only if they exist
if [ -f "upgrade_filaman_website_v${VERSION}.bin" ]; then
FILES_TO_UPLOAD="$FILES_TO_UPLOAD upgrade_filaman_website_v${VERSION}.bin"
fi
if [ -f "filaman_full_${VERSION}.bin" ]; then
FILES_TO_UPLOAD="$FILES_TO_UPLOAD filaman_full_${VERSION}.bin"
fi
# Create release with available files
if [ -n "$FILES_TO_UPLOAD" ]; then
gh release create "v${VERSION}" \
--title "Release ${VERSION}" \
--notes "${{ steps.release_notes.outputs.CHANGES }}" \
$FILES_TO_UPLOAD
else
echo "Error: No files found to upload"
exit 1
fi
- name: Install lftp
run: sudo apt-get install -y lftp
- name: Upload Firmware via FTP
if: success()
env:
FTP_PASSWORD: ${{ vars.FTP_PASSWORD }}
FTP_USER: ${{ vars.FTP_USER }}
FTP_HOST: ${{ vars.FTP_HOST }}
VERSION: ${{ steps.get_version.outputs.VERSION }}
run: |
echo "Environment variables:"
env | grep -E '^FTP_' | while read -r line; do
var_name=$(echo "$line" | cut -d= -f1)
var_value=$(echo "$line" | cut -d= -f2-)
echo "$var_name is $(if [ -n "$var_value" ]; then echo "set"; else echo "empty"; fi)"
done
cd .pio/build/esp32dev
if [ -n "$FTP_USER" ] && [ -n "$FTP_PASSWORD" ] && [ -n "$FTP_HOST" ]; then
echo "All FTP credentials are present, attempting upload..."
lftp -c "set ssl:verify-certificate no; \
set ftp:ssl-protect-data true; \
set ftp:ssl-force true; \
set ssl:check-hostname false; \
set ftp:ssl-auth TLS; \
open -u $FTP_USER,$FTP_PASSWORD $FTP_HOST; \
put -O / filaman_full_${VERSION}.bin -o filaman_full.bin"
else
echo "Error: Some FTP credentials are missing"
exit 1
fi

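The GitHub variant publishes the same three binaries with `gh release create`. For reference, a hedged example of checking the published assets from a workstation — the tag is only an example, and the repository slug is taken from the clone URL in the README:

```bash
# Inspect a published release and fetch the full-flash image with the GitHub CLI.
gh release view v1.3.5 --repo ManuelW77/Filaman
gh release download v1.3.5 --repo ManuelW77/Filaman --pattern "filaman_full_*.bin"
```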
.github/workflows/providers/gitea-release.yml (deleted file)

@@ -1,134 +0,0 @@
name: Gitea Release
on:
workflow_call:
inputs:
gitea_ref_name:
description: 'Gitea ref name'
required: true
type: string
gitea_server_url:
description: 'Gitea server URL'
required: true
type: string
gitea_repository:
description: 'Gitea repository'
required: true
type: string
secrets:
GITEA_TOKEN:
required: true
jobs:
create-release:
runs-on: ubuntu-latest
steps:
- name: Checkout Repository
uses: actions/checkout@v4
- name: Install System Dependencies
run: |
sudo apt-get update
sudo apt-get install -y python3 python3-venv build-essential curl git
- name: Set up Python Virtual Environment
run: |
python3 -m venv venv
source venv/bin/activate
pip install --upgrade pip
pip install platformio esptool
echo "Verifying installations:"
platformio --version
python3 --version
esptool.py version
- name: Build Firmware
run: |
source venv/bin/activate
echo "Building SPIFFS..."
platformio run -t buildfs
echo "Building firmware..."
platformio run
- name: Create Release Files
run: |
source venv/bin/activate
echo "Creating release files..."
esptool.py --chip esp32 merge_bin \
--flash_mode dio \
--flash_freq 40m \
--flash_size 4MB \
-o .pio/build/esp32dev/filaman_full.bin \
0x1000 .pio/build/esp32dev/bootloader.bin \
0x8000 .pio/build/esp32dev/partitions.bin \
0x10000 .pio/build/esp32dev/firmware.bin \
0x3D0000 .pio/build/esp32dev/spiffs.bin
cp .pio/build/esp32dev/firmware.bin .pio/build/esp32dev/filaman_ota.bin
- name: Read CHANGELOG.md
id: changelog
run: |
VERSION=$(echo "${{ inputs.gitea_ref_name }}" | sed 's/^v//')
CHANGELOG=$(awk "/## \\[$VERSION\\]/{p=1;print;next} /## \\[/ {p=0} p" CHANGELOG.md)
echo "CHANGES<<EOF" >> $GITHUB_OUTPUT
echo "$CHANGELOG" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
echo "CHANGELOG CONTENT:"
echo "$CHANGELOG"
if [ -z "$CHANGELOG" ]; then
echo "No changelog found for version $VERSION"
exit 1
fi
- name: Create Release
env:
TOKEN: ${{ secrets.GITEA_TOKEN }}
GITEA_REF_NAME: ${{ inputs.gitea_ref_name }}
GITEA_SERVER_URL: ${{ inputs.gitea_server_url }}
GITEA_REPOSITORY: ${{ inputs.gitea_repository }}
CHANGELOG: ${{ steps.changelog.outputs.CHANGES }}
run: |
echo "Debug environment:"
echo "GITEA_REF_NAME: ${GITEA_REF_NAME}"
echo "GITEA_SERVER_URL: ${GITEA_SERVER_URL}"
echo "GITEA_REPOSITORY: ${GITEA_REPOSITORY}"
echo "CHANGELOG: ${CHANGELOG}"
TAG="${GITEA_REF_NAME}"
API_URL="${GITEA_SERVER_URL}/api/v1"
REPO="${GITEA_REPOSITORY}"
echo "Creating release for ${TAG} on ${REPO}..."
# Create release
RESPONSE=$(curl -k -s \
-X POST \
-H "Authorization: token ${TOKEN}" \
-H "Content-Type: application/json" \
-d "{\"tag_name\":\"${TAG}\",\"name\":\"Release ${TAG}\",\"body\":\"${CHANGELOG}\"}" \
"${API_URL}/repos/${REPO}/releases")
RELEASE_ID=$(echo "$RESPONSE" | grep -o '"id":[0-9]*' | cut -d':' -f2 | head -n1)
UPLOAD_URL=$(echo "$RESPONSE" | grep -o '"upload_url":"[^"]*' | cut -d':' -f2- | tr -d '"')
if [ -n "$RELEASE_ID" ]; then
echo "Release created with ID: $RELEASE_ID"
# Upload files
for file in "filaman_full.bin" "filaman_ota.bin"; do
echo "Uploading $file..."
curl -k -s \
-X POST \
-H "Authorization: token ${TOKEN}" \
-H "Content-Type: application/octet-stream" \
--data-binary "@.pio/build/esp32dev/$file" \
"${UPLOAD_URL}?name=$file"
done
else
echo "Failed to create release. Response:"
echo "$RESPONSE"
exit 1
fi

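The deleted provider workflow above built its release notes by slicing the matching section out of CHANGELOG.md with an awk one-liner. A self-contained demo of that extraction — the sample changelog content is invented for illustration:

```bash
# Stand-alone demo of the awk extraction used in the deleted workflow.
VERSION=1.3.0
cat > CHANGELOG.md <<'EOF'
# Changelog
## [1.3.0] - 2025-02-21
### Added
- Gitea and GitHub release workflows
## [1.2.102] - 2025-02-21
### Fixed
- Bootloader offset in binary merge
EOF
# Print lines from the "## [1.3.0]" heading until the next "## [" heading.
awk "/## \\[$VERSION\\]/{p=1;print;next} /## \\[/ {p=0} p" CHANGELOG.md
```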
.github/workflows/providers/github-release.yml (deleted file)

@@ -1,71 +0,0 @@
name: GitHub Release
on:
workflow_call:
jobs:
create-release:
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: '3.x'
- name: Install PlatformIO
run: |
python -m pip install --upgrade pip
pip install --upgrade platformio
- name: Build Firmware
run: |
pio run -t buildfs # Build SPIFFS
pio run # Build firmware
- name: Install esptool
run: |
pip install esptool
- name: Merge firmware and SPIFFS
run: |
esptool.py --chip esp32 merge_bin \
--flash_mode dio \
--flash_freq 40m \
--flash_size 4MB \
-o .pio/build/esp32dev/filaman_full.bin \
0x1000 .pio/build/esp32dev/bootloader.bin \
0x8000 .pio/build/esp32dev/partitions.bin \
0x10000 .pio/build/esp32dev/firmware.bin \
0x3D0000 .pio/build/esp32dev/spiffs.bin
- name: Prepare OTA firmware
run: |
cp .pio/build/esp32dev/firmware.bin .pio/build/esp32dev/filaman_ota.bin
- name: Get version from tag
id: get_version
run: |
echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_OUTPUT
- name: Read CHANGELOG.md
id: changelog
run: |
VERSION=${{ steps.get_version.outputs.VERSION }}
CHANGELOG=$(awk "/## \\[$VERSION\\]/{p=1;print;next} /## \\[/{p=0} p" CHANGELOG.md)
echo "CHANGES<<EOF" >> $GITHUB_OUTPUT
echo "$CHANGELOG" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
- name: Create GitHub Release
env:
GH_TOKEN: ${{ github.token }}
run: |
gh release create "${{ github.ref_name }}" \
--title "Release ${{ steps.get_version.outputs.VERSION }}" \
--notes "${{ steps.changelog.outputs.CHANGES }}" \
.pio/build/esp32dev/filaman_full.bin \
.pio/build/esp32dev/filaman_ota.bin


@@ -5,66 +5,37 @@ on:
     tags:
       - 'v*'
+permissions:
+  contents: write
 jobs:
-  route:
+  detect-provider:
     runs-on: ubuntu-latest
     outputs:
       provider: ${{ steps.provider.outputs.provider }}
-      gitea_ref_name: ${{ steps.provider.outputs.gitea_ref_name }}
-      gitea_server_url: ${{ steps.provider.outputs.gitea_server_url }}
-      gitea_repository: ${{ steps.provider.outputs.gitea_repository }}
     steps:
-      - name: Checkout Repository
-        uses: actions/checkout@v3
-      - name: Debug Environment
-        run: |
-          echo "CI Environment Details:"
-          echo "GITHUB_ACTIONS=${GITHUB_ACTIONS:-not set}"
-          echo "GITEA_ACTIONS=${GITEA_ACTIONS:-not set}"
-          echo "GITEA_REPOSITORY=${GITEA_REPOSITORY:-not set}"
-          echo "GITEA_SERVER_URL=${GITEA_SERVER_URL:-not set}"
-          echo "RUNNER_NAME=${RUNNER_NAME:-not set}"
       - name: Determine CI Provider
         id: provider
         shell: bash
         run: |
           if [ -n "${GITEA_ACTIONS}" ] || [ -n "${GITEA_REPOSITORY}" ] || [[ "${RUNNER_NAME}" == *"gitea"* ]]; then
             echo "provider=gitea" >> "$GITHUB_OUTPUT"
-            echo "gitea_ref_name=${GITHUB_REF_NAME}" >> "$GITHUB_OUTPUT"
-            echo "gitea_server_url=${GITHUB_SERVER_URL}" >> "$GITHUB_OUTPUT"
-            echo "gitea_repository=${GITHUB_REPOSITORY}" >> "$GITHUB_OUTPUT"
-          elif [ "${GITHUB_ACTIONS}" = "true" ]; then
-            echo "provider=github" >> "$GITHUB_OUTPUT"
           else
-            echo "provider=unknown" >> "$GITHUB_OUTPUT"
+            echo "provider=github" >> "$GITHUB_OUTPUT"
           fi
-  verify-provider:
-    needs: route
-    runs-on: ubuntu-latest
-    steps:
-      - name: Echo detected provider
-        run: |
-          echo "Detected CI Provider: ${{ needs.route.outputs.provider }}"
-          if [ "${{ needs.route.outputs.provider }}" = "unknown" ]; then
-            echo "::error::Failed to detect CI provider!"
-            exit 1
-          fi
   github-release:
-    needs: [route, verify-provider]
-    if: needs.route.outputs.provider == 'github'
-    uses: ./.github/workflows/providers/github-release.yml
+    needs: detect-provider
+    permissions:
+      contents: write
+    if: needs.detect-provider.outputs.provider == 'github'
+    uses: ./.github/workflows/github-release.yml
+    secrets:
+      RELEASE_TOKEN: ${{ secrets.GITHUB_TOKEN }}
   gitea-release:
-    needs: [route, verify-provider]
-    if: needs.route.outputs.provider == 'gitea'
-    uses: ./.github/workflows/providers/gitea-release.yml
-    with:
-      gitea_ref_name: ${{ needs.route.outputs.gitea_ref_name }}
-      gitea_server_url: ${{ needs.route.outputs.gitea_server_url }}
-      gitea_repository: ${{ needs.route.outputs.gitea_repository }}
+    needs: detect-provider
+    if: needs.detect-provider.outputs.provider == 'gitea'
+    uses: ./.github/workflows/gitea-release.yml
     secrets:
       GITEA_TOKEN: ${{ secrets.GITEA_TOKEN }}

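The reworked routing shown above drops the separate verify step and picks the release job purely from the runner environment. A small bash sketch for dry-running that decision locally — the exported values are made-up examples:

```bash
# Mirrors the detect-provider step: Gitea runners expose GITEA_* variables
# (or a runner name containing "gitea"); everything else is treated as GitHub.
detect_provider() {
  if [ -n "${GITEA_ACTIONS}" ] || [ -n "${GITEA_REPOSITORY}" ] || [[ "${RUNNER_NAME}" == *"gitea"* ]]; then
    echo "gitea"
  else
    echo "github"
  fi
}

GITEA_ACTIONS=true RUNNER_NAME=gitea-runner detect_provider   # prints: gitea
GITEA_ACTIONS= GITEA_REPOSITORY= RUNNER_NAME=hosted detect_provider   # prints: github
```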
.vscode/settings.json (deleted file)

@@ -1,54 +0,0 @@
{
"files.associations": {
"algorithm": "cpp",
"vector": "cpp",
"cmath": "cpp",
"array": "cpp",
"atomic": "cpp",
"*.tcc": "cpp",
"bitset": "cpp",
"cctype": "cpp",
"clocale": "cpp",
"cstdarg": "cpp",
"cstddef": "cpp",
"cstdint": "cpp",
"cstdio": "cpp",
"cstdlib": "cpp",
"cstring": "cpp",
"ctime": "cpp",
"cwchar": "cpp",
"cwctype": "cpp",
"deque": "cpp",
"unordered_map": "cpp",
"unordered_set": "cpp",
"exception": "cpp",
"functional": "cpp",
"iterator": "cpp",
"map": "cpp",
"memory": "cpp",
"memory_resource": "cpp",
"numeric": "cpp",
"optional": "cpp",
"random": "cpp",
"regex": "cpp",
"string": "cpp",
"string_view": "cpp",
"system_error": "cpp",
"tuple": "cpp",
"type_traits": "cpp",
"utility": "cpp",
"fstream": "cpp",
"initializer_list": "cpp",
"iomanip": "cpp",
"iosfwd": "cpp",
"istream": "cpp",
"limits": "cpp",
"new": "cpp",
"ostream": "cpp",
"sstream": "cpp",
"stdexcept": "cpp",
"streambuf": "cpp",
"cinttypes": "cpp",
"typeinfo": "cpp"
}
}

File diff suppressed because it is too large.


@@ -6,9 +6,12 @@ Das System integriert sich nahtlos mit der [Spoolman](https://github.com/Donkie/
 ![Scale](./img/scale_trans.png)
 Weitere Bilder finden Sie im [img Ordner](/img/)
 oder auf meiner Website: [FilaMan Website](https://www.filaman.app)
 Deutsches Erklärvideo: [Youtube](https://youtu.be/uNDe2wh9SS8?si=b-jYx4I1w62zaOHU)
+Discord Server: [https://discord.gg/vMAx2gf5](https://discord.gg/vMAx2gf5)
+### Es gibt jetzt auch ein Wiki, dort sind nochmal alle Funktionen beschrieben: [Wiki](https://github.com/ManuelW77/Filaman/wiki)
 ### ESP32 Hardware-Funktionen
 - **Gewichtsmessung:** Verwendung einer Wägezelle mit HX711-Verstärker für präzise Gewichtsverfolgung.
@@ -16,7 +19,7 @@ Deutsches Erklärvideo: [Youtube](https://youtu.be/uNDe2wh9SS8?si=b-jYx4I1w62zaO
 - **OLED-Display:** Zeigt aktuelles Gewicht, Verbindungsstatus (WiFi, Bambu Lab, Spoolman).
 - **WLAN-Konnektivität:** WiFiManager für einfache Netzwerkkonfiguration.
 - **MQTT-Integration:** Verbindet sich mit Bambu Lab Drucker für AMS-Steuerung.
-- **NFC-Tag NTAG215:** Verwendung von NTAG215 wegen ausreichendem Speicherplatz auf dem Tag
+- **NFC-Tag NTAG213 NTAG215:** Verwendung von NTAG213, besser NTAG215 wegen ausreichendem Speicherplatz auf dem Tag
 ### Weboberflächen-Funktionen
 - **Echtzeit-Updates:** WebSocket-Verbindung für Live-Daten-Updates.
@@ -33,6 +36,7 @@ Deutsches Erklärvideo: [Youtube](https://youtu.be/uNDe2wh9SS8?si=b-jYx4I1w62zaO
 - Filtern und Auswählen von Filamenten.
 - Automatische Aktualisierung der Spulengewichte.
 - Verfolgung von NFC-Tag-Zuweisungen.
+- Unterstützt das Spoolman Octoprint Plugin
 ### Wenn Sie meine Arbeit unterstützen möchten, freue ich mich über einen Kaffee
 <a href="https://www.buymeacoffee.com/manuelw" target="_blank"><img src="https://cdn.buymeacoffee.com/buttons/v2/default-yellow.png" alt="Buy Me A Coffee" style="height: 30px !important;width: 108px !important;" ></a>
@@ -53,14 +57,14 @@ Deutsches Erklärvideo: [Youtube](https://youtu.be/uNDe2wh9SS8?si=b-jYx4I1w62zaO
 ### Komponenten
 - **ESP32 Entwicklungsboard:** Jede ESP32-Variante.
   [Amazon Link](https://amzn.eu/d/aXThslf)
-- **HX711 Wägezellen-Verstärker:** Für Gewichtsmessung.
-  [Amazon Link](https://amzn.eu/d/1wZ4v0x)
-- **OLED Display:** 128x64 SSD1306.
-  [Amazon Link](https://amzn.eu/d/dozAYDU)
-- **PN532 NFC Modul:** Für NFC-Tag-Operationen.
-  [Amazon Link](https://amzn.eu/d/8205DDh)
-- **NFC-Tag:** NTAG215
-  [Amazon Link](https://amzn.eu/d/fywy4c4)
+- **HX711 5kg Wägezellen-Verstärker:** Für Gewichtsmessung.
+  [Amazon Link](https://amzn.eu/d/06A0DLb)
+- **OLED 0.96 Zoll I2C weiß/gelb Display:** 128x64 SSD1306.
+  [Amazon Link](https://amzn.eu/d/0AuBp2c)
+- **PN532 NFC NXP RFID-Modul V3:** Für NFC-Tag-Operationen.
+  [Amazon Link](https://amzn.eu/d/jfIuQXb)
+- **NFC Tags NTAG213 NTA215:** RFID Tag
+  [Amazon Link](https://amzn.eu/d/9Z6mXc1)
 ### Pin-Konfiguration
 | Komponente | ESP32 Pin |
@@ -71,10 +75,15 @@ Deutsches Erklärvideo: [Youtube](https://youtu.be/uNDe2wh9SS8?si=b-jYx4I1w62zaO
 | OLED SCL | 22 |
 | PN532 IRQ | 32 |
 | PN532 RESET | 33 |
-| PN532 SCK | 14 |
-| PN532 MOSI | 13 |
-| PN532 MISO | 12 |
-| PN532 CS/SS | 15 |
+| PN532 SDA | 21 |
+| PN532 SCL | 22 |
+**Achte darauf, dass am PN532 die DIP-Schalter auf I2C gestellt sind**
 ![Wiring](./img/Schaltplan.png)
+![myWiring](./img/IMG_2589.jpeg)
+![myWiring](./img/IMG_2590.jpeg)
 ## Software-Abhängigkeiten
@@ -101,7 +110,31 @@ Deutsches Erklärvideo: [Youtube](https://youtu.be/uNDe2wh9SS8?si=b-jYx4I1w62zaO
 - PN532 NFC Modul
 - Verbindungskabel
-### Schritt-für-Schritt Installation
+## Wichtiger Hinweis
+Du musst Spoolman auf DEBUG Modus setzten, da man bisher in Spoolman keine CORS Domains setzen kann!
+```
+# Enable debug mode
+# If enabled, the client will accept requests from any host
+# This can be useful when developing, but is also a security risk
+# Default: FALSE
+#SPOOLMAN_DEBUG_MODE=TRUE
+```
+## Schritt-für-Schritt Installation
+### Einfache Installation
+1. **Gehe auf [FilaMan Installer](https://www.filaman.app/installer.html)**
+2. **Stecke dein ESP an den Rechner und klicke Connect**
+3. **Wähle dein Device Port und klicke Intall**
+4. **Ersteinrichtung:**
+   - Mit dem "FilaMan" WLAN-Zugangspunkt verbinden.
+   - WLAN-Einstellungen über das Konfigurationsportal vornehmen.
+   - Weboberfläche unter `http://filaman.local` oder der IP-Adresse aufrufen.
+### Compile by yourself
 1. **Repository klonen:**
    ```bash
    git clone https://github.com/ManuelW77/Filaman.git

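The "Wichtiger Hinweis" section added above asks users to run Spoolman in debug mode because CORS origins cannot be configured yet. Where that flag ends up depends on the deployment; for a docker-compose based Spoolman instance it would be a single line in the environment file (the deployment details are an assumption, the variable itself comes from the note above):

```bash
# Uncomment / set in Spoolman's environment (e.g. the .env used by docker-compose).
SPOOLMAN_DEBUG_MODE=TRUE
```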

@@ -6,12 +6,16 @@ FilaMan is a filament management system for 3D printing. It uses ESP32 hardware
 Users can manage filament spools, monitor the status of the Automatic Material System (AMS) and make settings via a web interface.
 The system integrates seamlessly with [Bambulab](https://bambulab.com/en-us) 3D printers and [Spoolman](https://github.com/Donkie/Spoolman) filament management as well as the [Openspool](https://github.com/spuder/OpenSpool) NFC-TAG format.
 ![Scale](./img/scale_trans.png)
 More Images can be found in the [img Folder](/img/)
-or my website:[FilaMan Website](https://www.filaman.app)
+or my website: [FilaMan Website](https://www.filaman.app)
 german explanatory video: [Youtube](https://youtu.be/uNDe2wh9SS8?si=b-jYx4I1w62zaOHU)
+Discord Server: [https://discord.gg/vMAx2gf5](https://discord.gg/vMAx2gf5)
+### Now more detailed informations about the usage: [Wiki](https://github.com/ManuelW77/Filaman/wiki)
 ### ESP32 Hardware Features
 - **Weight Measurement:** Using a load cell with HX711 amplifier for precise weight tracking.
@@ -19,7 +23,7 @@ german explanatory video: [Youtube](https://youtu.be/uNDe2wh9SS8?si=b-jYx4I1w62z
 - **OLED Display:** Shows current weight, connection status (WiFi, Bambu Lab, Spoolman).
 - **WiFi Connectivity:** WiFiManager for easy network configuration.
 - **MQTT Integration:** Connects to Bambu Lab printer for AMS control.
-- **NFC-Tag NTAG215:** Use NTAG215 because of enaught space on the Tag
+- **NFC-Tag NTAG213 NTAG215:** Use NTAG213, better NTAG215 because of enaught space on the Tag
 ### Web Interface Features
 - **Real-time Updates:** WebSocket connection for live data updates.
@@ -36,6 +40,7 @@ german explanatory video: [Youtube](https://youtu.be/uNDe2wh9SS8?si=b-jYx4I1w62z
 - Filter and select filaments.
 - Update spool weights automatically.
 - Track NFC tag assignments.
+- Supports Spoolman Octoprint Plugin
 ### If you want to support my work, i would be happy to get a coffe
 <a href="https://www.buymeacoffee.com/manuelw" target="_blank"><img src="https://cdn.buymeacoffee.com/buttons/v2/default-yellow.png" alt="Buy Me A Coffee" style="height: 30px !important;width: 108px !important;" ></a>
@@ -56,14 +61,14 @@ german explanatory video: [Youtube](https://youtu.be/uNDe2wh9SS8?si=b-jYx4I1w62z
 ### Components
 - **ESP32 Development Board:** Any ESP32 variant.
   [Amazon Link](https://amzn.eu/d/aXThslf)
-- **HX711 Load Cell Amplifier:** For weight measurement.
-  [Amazon Link](https://amzn.eu/d/1wZ4v0x)
-- **OLED Display:** 128x64 SSD1306.
-  [Amazon Link](https://amzn.eu/d/dozAYDU)
-- **PN532 NFC Module:** For NFC tag operations.
-  [Amazon Link](https://amzn.eu/d/8205DDh)
-- **NFC-Tag:** NTAG215
-  [Amazon Link](https://amzn.eu/d/fywy4c4)
+- **HX711 5kg Load Cell Amplifier:** For weight measurement.
+  [Amazon Link](https://amzn.eu/d/06A0DLb)
+- **OLED 0.96 Zoll I2C white/yellow Display:** 128x64 SSD1306.
+  [Amazon Link](https://amzn.eu/d/0AuBp2c)
+- **PN532 NFC NXP RFID-Modul V3:** For NFC tag operations.
+  [Amazon Link](https://amzn.eu/d/jfIuQXb)
+- **NFC Tags NTAG213 NTAG215:** RFID Tag
+  [Amazon Link](https://amzn.eu/d/9Z6mXc1)
 ### Pin Configuration
@@ -75,10 +80,15 @@ german explanatory video: [Youtube](https://youtu.be/uNDe2wh9SS8?si=b-jYx4I1w62z
 | OLED SCL | 22 |
 | PN532 IRQ | 32 |
 | PN532 RESET | 33 |
-| PN532 SCK | 14 |
-| PN532 MOSI | 13 |
-| PN532 MISO | 12 |
-| PN532 CS/SS | 15 |
+| PN532 SDA | 21 |
+| PN532 SCL | 22 |
+**Make sure that the DIP switches on the PN532 are set to I2C**
 ![Wiring](./img/Schaltplan.png)
+![myWiring](./img/IMG_2589.jpeg)
+![myWiring](./img/IMG_2590.jpeg)
 ## Software Dependencies
@@ -91,9 +101,9 @@ german explanatory video: [Youtube](https://youtu.be/uNDe2wh9SS8?si=b-jYx4I1w62z
 - `Adafruit_SSD1306`: OLED display control
 - `HX711`: Load cell communication
-## Installation
+### Installation
-### Prerequisites
+## Prerequisites
 - **Software:**
   - [PlatformIO](https://platformio.org/) in VS Code
   - [Spoolman](https://github.com/Donkie/Spoolman) instance
@@ -105,7 +115,32 @@ german explanatory video: [Youtube](https://youtu.be/uNDe2wh9SS8?si=b-jYx4I1w62z
 - PN532 NFC Module
 - Connecting wires
-### Step-by-Step Installation
+## Important Note
+You have to activate Spoolman in debug mode, because you are not able to set CORS Domains in Spoolman yet.
+```
+# Enable debug mode
+# If enabled, the client will accept requests from any host
+# This can be useful when developing, but is also a security risk
+# Default: FALSE
+#SPOOLMAN_DEBUG_MODE=TRUE
+```
+## Step-by-Step Installation
+### Easy Installation
+1. **Go to [FilaMan Installer](https://www.filaman.app/installer.html)**
+2. **Plug you device in and push Connect button**
+3. **Select your Device Port and push Intall**
+4. **Initial Setup:**
+   - Connect to the "FilaMan" WiFi access point.
+   - Configure WiFi settings through the captive portal.
+   - Access the web interface at `http://filaman.local` or the IP address.
+### Compile by yourself
 1. **Clone the Repository:**
    ```bash
    git clone https://github.com/ManuelW77/Filaman.git
@@ -124,25 +159,6 @@ german explanatory video: [Youtube](https://youtu.be/uNDe2wh9SS8?si=b-jYx4I1w62z
   - Configure WiFi settings through the captive portal.
   - Access the web interface at `http://filaman.local` or the IP address.
-## GitHub Actions Configuration
-### Required Secrets for Gitea Releases
-When using Gitea as your repository host, you need to configure the following secrets in your repository:
-- `GITEA_API_URL`: The base URL of your Gitea instance, including protocol (e.g., `https://git.example.com`)
-- `GITEA_TOKEN`: Your Gitea access token with permissions to create releases
-- `GITEA_REPOSITORY`: The repository name in format `owner/repo` (e.g., `username/filaman`)
-Example values:
-```
-GITEA_API_URL=https://git.example.com
-GITEA_TOKEN=abcdef1234567890
-GITEA_REPOSITORY=username/filaman
-```
-Make sure to set these secrets in your repository settings under Settings > Secrets and Variables > Actions.
 ## Documentation
 ### Relevant Links

File diff suppressed because it is too large

Binary file not shown.

View File

@ -1,7 +1,31 @@
{ {
"GFU99": "Generic TPU", "GFU99": "TPU",
"GFN99": "Generic PA", "GFN99": "PA",
"GFN98": "Generic PA-CF", "GFN98": "PA-CF",
"GFL99": "PLA",
"GFL96": "PLA Silk",
"GFL98": "PLA-CF",
"GFL95": "PLA High Speed",
"GFG99": "PETG",
"GFG98": "PETG-CF",
"GFG97": "PCTG",
"GFB99": "ABS",
"GFC99": "PC",
"GFB98": "ASA",
"GFS99": "PVA",
"GFS98": "HIPS",
"GFT98": "PPS-CF",
"GFT97": "PPS",
"GFN97": "PPA-CF",
"GFN96": "PPA-GF",
"GFP99": "PE",
"GFP98": "PE-CF",
"GFP97": "PP",
"GFP96": "PP-CF",
"GFP95": "PP-GF",
"GFR99": "EVA",
"GFR98": "PHA",
"GFS97": "BVOH",
"GFA01": "Bambu PLA Matte", "GFA01": "Bambu PLA Matte",
"GFA00": "Bambu PLA Basic", "GFA00": "Bambu PLA Basic",
"GFA09": "Bambu PLA Tough", "GFA09": "Bambu PLA Tough",
@ -13,15 +37,11 @@
"GFL03": "eSUN PLA+", "GFL03": "eSUN PLA+",
"GFL01": "PolyTerra PLA", "GFL01": "PolyTerra PLA",
"GFL00": "PolyLite PLA", "GFL00": "PolyLite PLA",
"GFL99": "Generic PLA",
"GFL96": "Generic PLA Silk",
"GFL98": "Generic PLA-CF",
"GFA50": "Bambu PLA-CF", "GFA50": "Bambu PLA-CF",
"GFS02": "Bambu Support For PLA", "GFS02": "Bambu Support For PLA",
"GFA11": "Bambu PLA Aero", "GFA11": "Bambu PLA Aero",
"GFL04": "Overture PLA", "GFL04": "Overture PLA",
"GFL05": "Overture Matte PLA", "GFL05": "Overture Matte PLA",
"GFL95": "Generic PLA High Speed",
"GFA12": "Bambu PLA Glow", "GFA12": "Bambu PLA Glow",
"GFA13": "Bambu PLA Dynamic", "GFA13": "Bambu PLA Dynamic",
"GFA15": "Bambu PLA Galaxy", "GFA15": "Bambu PLA Galaxy",
@ -30,41 +50,21 @@
"GFU00": "Bambu TPU 95A HF", "GFU00": "Bambu TPU 95A HF",
"GFG00": "Bambu PETG Basic", "GFG00": "Bambu PETG Basic",
"GFT01": "Bambu PET-CF", "GFT01": "Bambu PET-CF",
"GFG99": "Generic PETG",
"GFG98": "Generic PETG-CF",
"GFG50": "Bambu PETG-CF", "GFG50": "Bambu PETG-CF",
"GFG60": "PolyLite PETG", "GFG60": "PolyLite PETG",
"GFG01": "Bambu PETG Translucent", "GFG01": "Bambu PETG Translucent",
"GFG97": "Generic PCTG",
"GFB00": "Bambu ABS", "GFB00": "Bambu ABS",
"GFB99": "Generic ABS",
"GFB60": "PolyLite ABS", "GFB60": "PolyLite ABS",
"GFB50": "Bambu ABS-GF", "GFB50": "Bambu ABS-GF",
"GFC00": "Bambu PC", "GFC00": "Bambu PC",
"GFC99": "Generic PC",
"GFB98": "Generic ASA",
"GFB01": "Bambu ASA", "GFB01": "Bambu ASA",
"GFB61": "PolyLite ASA", "GFB61": "PolyLite ASA",
"GFB02": "Bambu ASA-Aero", "GFB02": "Bambu ASA-Aero",
"GFS99": "Generic PVA",
"GFS04": "Bambu PVA", "GFS04": "Bambu PVA",
"GFS01": "Bambu Support G", "GFS01": "Bambu Support G",
"GFN03": "Bambu PA-CF", "GFN03": "Bambu PA-CF",
"GFN04": "Bambu PAHT-CF", "GFN04": "Bambu PAHT-CF",
"GFS03": "Bambu Support For PA/PET", "GFS03": "Bambu Support For PA/PET",
"GFN05": "Bambu PA6-CF", "GFN05": "Bambu PA6-CF",
"GFN08": "Bambu PA6-GF", "GFN08": "Bambu PA6-GF"
"GFS98": "Generic HIPS",
"GFT98": "Generic PPS-CF",
"GFT97": "Generic PPS",
"GFN97": "Generic PPA-CF",
"GFN96": "Generic PPA-GF",
"GFP99": "Generic PE",
"GFP98": "Generic PE-CF",
"GFP97": "Generic PP",
"GFP96": "Generic PP-CF",
"GFP95": "Generic PP-GF",
"GFR99": "Generic EVA",
"GFR98": "Generic PHA",
"GFS97": "Generic BVOH"
} }

View File

@ -6,13 +6,24 @@
<title>FilaMan - Filament Management Tool</title> <title>FilaMan - Filament Management Tool</title>
<link rel="icon" type="image/png" href="/favicon.ico"> <link rel="icon" type="image/png" href="/favicon.ico">
<link rel="stylesheet" href="style.css"> <link rel="stylesheet" href="style.css">
<script>
fetch('/api/version')
.then(response => response.json())
.then(data => {
const versionSpan = document.querySelector('.version');
if (versionSpan) {
versionSpan.textContent = 'v' + data.version;
}
})
.catch(error => console.error('Error fetching version:', error));
</script>
</head> </head>
<body> <body>
<div class="navbar"> <div class="navbar">
<div style="display: flex; align-items: center; gap: 2rem;"> <div style="display: flex; align-items: center; gap: 2rem;">
<img src="/logo.png" alt="FilaMan Logo" class="logo"> <img src="/logo.png" alt="FilaMan Logo" class="logo">
<div class="logo-text"> <div class="logo-text">
<h1>FilaMan<span class="version">v1.2.62</span></h1> <h1>FilaMan<span class="version"></span></h1>
<h4>Filament Management Tool</h4> <h4>Filament Management Tool</h4>
</div> </div>
</div> </div>
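The hard-coded `v1.2.62` in the navbar is gone; each page now asks the firmware for its version via `fetch('/api/version')`. The matching endpoint is not part of this excerpt; a minimal sketch of what it could look like, assuming ESPAsyncWebServer (already listed in `platformio.ini`) and the `VERSION` macro injected through `build_flags`:

```cpp
// Hypothetical /api/version handler (illustration only; the real FilaMan handler may differ).
// Assumes ESPAsyncWebServer and the VERSION string macro from platformio.ini build_flags.
#include <ESPAsyncWebServer.h>

AsyncWebServer server(80);

void setupVersionEndpoint() {
  server.on("/api/version", HTTP_GET, [](AsyncWebServerRequest *request) {
    // The front end only reads data.version, so a single-key JSON object is enough.
    String json = String("{\"version\":\"") + VERSION + "\"}";
    request->send(200, "application/json", json);
  });
}
```

Calling `setupVersionEndpoint()` before `server.begin()` once WiFi is up would be sufficient; no template processing is involved.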

View File

@ -6,13 +6,24 @@
<title>FilaMan - Filament Management Tool</title> <title>FilaMan - Filament Management Tool</title>
<link rel="icon" type="image/png" href="/favicon.ico"> <link rel="icon" type="image/png" href="/favicon.ico">
<link rel="stylesheet" href="style.css"> <link rel="stylesheet" href="style.css">
<script>
fetch('/api/version')
.then(response => response.json())
.then(data => {
const versionSpan = document.querySelector('.version');
if (versionSpan) {
versionSpan.textContent = 'v' + data.version;
}
})
.catch(error => console.error('Error fetching version:', error));
</script>
</head> </head>
<body> <body>
<div class="navbar"> <div class="navbar">
<div style="display: flex; align-items: center; gap: 2rem;"> <div style="display: flex; align-items: center; gap: 2rem;">
<img src="/logo.png" alt="FilaMan Logo" class="logo"> <img src="/logo.png" alt="FilaMan Logo" class="logo">
<div class="logo-text"> <div class="logo-text">
<h1>FilaMan<span class="version">v1.2.62</span></h1> <h1>FilaMan<span class="version"></span></h1>
<h4>Filament Management Tool</h4> <h4>Filament Management Tool</h4>
</div> </div>
</div> </div>
@ -36,7 +47,7 @@
<!-- head --> <!-- head -->
<div class="container"> <div class="content">
<h1>FilaMan</h1> <h1>FilaMan</h1>
<p>Filament Management Tool</p> <p>Filament Management Tool</p>
<p>Your smart solution for <strong>Filament Management</strong> in 3D printing.</p> <p>Your smart solution for <strong>Filament Management</strong> in 3D printing.</p>
@ -44,10 +55,11 @@
<h2>About FilaMan</h2> <h2>About FilaMan</h2>
<p> <p>
FilaMan is a tool designed to simplify filament spool management. It allows you to identify and weigh filament spools, FilaMan is a tool designed to simplify filament spool management. It allows you to identify and weigh filament spools,
automatically sync data with the self-hosted <a href="https://github.com/Donkie/Spoolman" target="_blank">Spoolman</a> platform, automatically sync data with the self-hosted <a href="https://github.com/Donkie/Spoolman" target="_blank">Spoolman</a> platform.
and ensure compatibility with <a href="https://github.com/spuder/OpenSpool" target="_blank">OpenSpool</a> for Bambu printers.
</p> </p>
<p>Get more information at <a href="https://www.filaman.app" target="_blank">https://www.filaman.app</a> and <a href="https://github.com/ManuelW77/Filaman" target="_blank">https://github.com/ManuelW77/Filaman</a>.</p>
<div class="features"> <div class="features">
<div class="feature"> <div class="feature">
<h3>Spool Identification</h3> <h3>Spool Identification</h3>
@ -62,12 +74,6 @@
<p>Works with OpenSpool to recognize and activate spools on Bambu printers.</p> <p>Works with OpenSpool to recognize and activate spools on Bambu printers.</p>
</div> </div>
</div> </div>
<h2>Future Plans</h2>
<p>
We are working on expanding compatibility to support smaller NFC tags like NTag213
and developing custom software to enhance the OpenSpool experience.
</p>
</div> </div>
</body> </body>
</html> </html>

html/own_filaments.json Normal file (31 lines added)
View File

@ -0,0 +1,31 @@
{
"TPU": "GFU99",
"PA": "GFN99",
"PA-CF": "GFN98",
"PLA": "GFL99",
"PLA Silk": "GFL96",
"PLA-CF": "GFL98",
"PLA High Speed": "GFL95",
"PETG": "GFG99",
"PETG-CF": "GFG98",
"PCTG": "GFG97",
"ABS": "GFB99",
"ABS+HS": "GFB99",
"PC": "GFC99",
"PC/ABS": "GFC99",
"ASA": "GFB98",
"PVA": "GFS99",
"HIPS": "GFS98",
"PPS-CF": "GFT98",
"PPS": "GFT97",
"PPA-CF": "GFN97",
"PPA-GF": "GFN96",
"PE": "GFP99",
"PE-CF": "GFP98",
"PP": "GFP97",
"PP-CF": "GFP96",
"PP-GF": "GFP95",
"EVA": "GFR99",
"PHA": "GFR98",
"BVOH": "GFS97"
}
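`own_filaments.json` is the inverse of the `bambu_filaments.json` mapping above: it resolves a plain material name to a Bambu `tray_info_idx` code (note that `ABS+HS` and `PC/ABS` deliberately reuse the generic ABS and PC codes). How the firmware consumes this file is not shown in this diff; a minimal lookup sketch, assuming ArduinoJson and LittleFS as used elsewhere in the project, could look like this:

```cpp
// Illustrative lookup of a Bambu filament code by material name (not actual FilaMan code).
// Assumes the file ships in the LittleFS image as /own_filaments.json.
#include <ArduinoJson.h>
#include <LittleFS.h>

String bambuCodeForMaterial(const String& material) {
  File f = LittleFS.open("/own_filaments.json", "r");
  if (!f) return "";

  JsonDocument doc;                                  // ArduinoJson 7 elastic document
  DeserializationError err = deserializeJson(doc, f);
  f.close();
  if (err) return "";

  // e.g. "PETG" -> "GFG99"; unknown materials yield an empty string
  return doc[material] | "";
}
```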

View File

@ -6,13 +6,24 @@
<title>FilaMan - Filament Management Tool</title> <title>FilaMan - Filament Management Tool</title>
<link rel="icon" type="image/png" href="/favicon.ico"> <link rel="icon" type="image/png" href="/favicon.ico">
<link rel="stylesheet" href="style.css"> <link rel="stylesheet" href="style.css">
<script>
fetch('/api/version')
.then(response => response.json())
.then(data => {
const versionSpan = document.querySelector('.version');
if (versionSpan) {
versionSpan.textContent = 'v' + data.version;
}
})
.catch(error => console.error('Error fetching version:', error));
</script>
</head> </head>
<body> <body>
<div class="navbar"> <div class="navbar">
<div style="display: flex; align-items: center; gap: 2rem;"> <div style="display: flex; align-items: center; gap: 2rem;">
<img src="/logo.png" alt="FilaMan Logo" class="logo"> <img src="/logo.png" alt="FilaMan Logo" class="logo">
<div class="logo-text"> <div class="logo-text">
<h1>FilaMan<span class="version">v1.2.62</span></h1> <h1>FilaMan<span class="version"></span></h1>
<h4>Filament Management Tool</h4> <h4>Filament Management Tool</h4>
</div> </div>
</div> </div>

View File

@ -150,6 +150,13 @@ function initWebSocket() {
ramStatus.textContent = `${data.freeHeap}k`; ramStatus.textContent = `${data.freeHeap}k`;
} }
} }
else if (data.type === 'setSpoolmanSettings') {
if (data.payload == 'success') {
showNotification(`Spoolman Settings set successfully`, true);
} else {
showNotification(`Error setting Spoolman Settings`, false);
}
}
}; };
} catch (error) { } catch (error) {
isConnected = false; isConnected = false;
@ -285,6 +292,14 @@ function displayAmsData(amsData) {
<img src="spool_in.png" alt="Spool In" style="width: 48px; height: 48px; transform: rotate(180deg) scaleX(-1);"> <img src="spool_in.png" alt="Spool In" style="width: 48px; height: 48px; transform: rotate(180deg) scaleX(-1);">
</button>`; </button>`;
const spoolmanButtonHtml = `
<button class="spool-button" onclick="handleSpoolmanSettings('${tray.tray_info_idx}', '${tray.setting_id}', '${tray.cali_idx}', '${tray.nozzle_temp_min}', '${tray.nozzle_temp_max}')"
style="position: absolute; bottom: 0px; right: 0px;
background: none; border: none; padding: 0;
cursor: pointer; display: none;">
<img src="set_spoolman.png" alt="Spool In" style="width: 38px; height: 38px;">
</button>`;
if (!hasAnyContent) { if (!hasAnyContent) {
return ` return `
<div class="tray"> <div class="tray">
@ -348,6 +363,7 @@ function displayAmsData(amsData) {
${trayDetails} ${trayDetails}
${tempHTML} ${tempHTML}
${(ams.ams_id === 255 && tray.tray_type !== '') ? outButtonHtml : ''} ${(ams.ams_id === 255 && tray.tray_type !== '') ? outButtonHtml : ''}
${(tray.setting_id != "" && tray.setting_id != "null") ? spoolmanButtonHtml : ''}
</div> </div>
</div>`; </div>`;
@ -373,6 +389,36 @@ function updateSpoolButtons(show) {
}); });
} }
function handleSpoolmanSettings(tray_info_idx, setting_id, cali_idx, nozzle_temp_min, nozzle_temp_max) {
// Hole das ausgewählte Filament
const selectedText = document.getElementById("selected-filament").textContent;
// Finde die ausgewählte Spule in den Daten
const selectedSpool = spoolsData.find(spool =>
`${spool.id} | ${spool.filament.name} (${spool.filament.material})` === selectedText
);
const payload = {
type: 'setSpoolmanSettings',
payload: {
filament_id: selectedSpool.filament.id,
tray_info_idx: tray_info_idx,
setting_id: setting_id,
cali_idx: cali_idx,
temp_min: nozzle_temp_min,
temp_max: nozzle_temp_max
}
};
try {
socket.send(JSON.stringify(payload));
showNotification(`Settings sent to Spoolman`, true);
} catch (error) {
console.error("Error while sending settings to Spoolman:", error);
showNotification("Error while sending!", false);
}
}
function handleSpoolOut() { function handleSpoolOut() {
// Erstelle Payload // Erstelle Payload
const payload = { const payload = {
@ -594,8 +640,6 @@ function writeNfcTag() {
// Erstelle das NFC-Datenpaket mit korrekten Datentypen // Erstelle das NFC-Datenpaket mit korrekten Datentypen
const nfcData = { const nfcData = {
version: "2.0",
protocol: "openspool",
color_hex: selectedSpool.filament.color_hex || "FFFFFF", color_hex: selectedSpool.filament.color_hex || "FFFFFF",
type: selectedSpool.filament.material, type: selectedSpool.filament.material,
min_temp: minTemp, min_temp: minTemp,

BIN html/set_spoolman.png Normal file (binary not shown, 9.2 KiB)

View File

@ -6,13 +6,24 @@
<title>FilaMan - Filament Management Tool</title> <title>FilaMan - Filament Management Tool</title>
<link rel="icon" type="image/png" href="/favicon.ico"> <link rel="icon" type="image/png" href="/favicon.ico">
<link rel="stylesheet" href="style.css"> <link rel="stylesheet" href="style.css">
<script>
fetch('/api/version')
.then(response => response.json())
.then(data => {
const versionSpan = document.querySelector('.version');
if (versionSpan) {
versionSpan.textContent = 'v' + data.version;
}
})
.catch(error => console.error('Error fetching version:', error));
</script>
</head> </head>
<body> <body>
<div class="navbar"> <div class="navbar">
<div style="display: flex; align-items: center; gap: 2rem;"> <div style="display: flex; align-items: center; gap: 2rem;">
<img src="/logo.png" alt="FilaMan Logo" class="logo"> <img src="/logo.png" alt="FilaMan Logo" class="logo">
<div class="logo-text"> <div class="logo-text">
<h1>FilaMan<span class="version">v1.2.62</span></h1> <h1>FilaMan<span class="version"></span></h1>
<h4>Filament Management Tool</h4> <h4>Filament Management Tool</h4>
</div> </div>
</div> </div>
@ -41,11 +52,18 @@
if (spoolmanUrl && spoolmanUrl.trim() !== "") { if (spoolmanUrl && spoolmanUrl.trim() !== "") {
document.getElementById('spoolmanUrl').value = spoolmanUrl; document.getElementById('spoolmanUrl').value = spoolmanUrl;
} }
// Initialize OctoPrint fields visibility
toggleOctoFields();
}; };
function checkSpoolmanInstance() { function checkSpoolmanInstance() {
const url = document.getElementById('spoolmanUrl').value; const url = document.getElementById('spoolmanUrl').value;
fetch(`/api/checkSpoolman?url=${encodeURIComponent(url)}`) const spoolmanOctoEnabled = document.getElementById('spoolmanOctoEnabled').checked;
const spoolmanOctoUrl = document.getElementById('spoolmanOctoUrl').value;
const spoolmanOctoToken = document.getElementById('spoolmanOctoToken').value;
fetch(`/api/checkSpoolman?url=${encodeURIComponent(url)}&octoEnabled=${spoolmanOctoEnabled}&octoUrl=${spoolmanOctoUrl}&octoToken=${spoolmanOctoToken}`)
.then(response => response.json()) .then(response => response.json())
.then(data => { .then(data => {
if (data.healthy) { if (data.healthy) {
@ -63,8 +81,10 @@
const ip = document.getElementById('bambuIp').value; const ip = document.getElementById('bambuIp').value;
const serial = document.getElementById('bambuSerial').value; const serial = document.getElementById('bambuSerial').value;
const code = document.getElementById('bambuCode').value; const code = document.getElementById('bambuCode').value;
const autoSend = document.getElementById('autoSend').checked;
const autoSendTime = document.getElementById('autoSendTime').value;
fetch(`/api/bambu?bambu_ip=${encodeURIComponent(ip)}&bambu_serialnr=${encodeURIComponent(serial)}&bambu_accesscode=${encodeURIComponent(code)}`) fetch(`/api/bambu?bambu_ip=${encodeURIComponent(ip)}&bambu_serialnr=${encodeURIComponent(serial)}&bambu_accesscode=${encodeURIComponent(code)}&autoSend=${autoSend}&autoSendTime=${autoSendTime}`)
.then(response => response.json()) .then(response => response.json())
.then(data => { .then(data => {
if (data.healthy) { if (data.healthy) {
@ -77,6 +97,15 @@
document.getElementById('bambuStatusMessage').innerText = 'Error while saving: ' + error.message; document.getElementById('bambuStatusMessage').innerText = 'Error while saving: ' + error.message;
}); });
} }
/**
* Controls visibility of OctoPrint configuration fields based on checkbox state
* Called on page load and when checkbox changes
*/
function toggleOctoFields() {
const octoEnabled = document.getElementById('spoolmanOctoEnabled').checked;
document.getElementById('octoFields').style.display = octoEnabled ? 'block' : 'none';
}
</script> </script>
<script> <script>
var spoolmanUrl = "{{spoolmanUrl}}"; var spoolmanUrl = "{{spoolmanUrl}}";
@ -84,27 +113,58 @@
<div class="content"> <div class="content">
<h1>Spoolman API URL / Bambu Credentials</h1> <h1>Spoolman API URL / Bambu Credentials</h1>
<label for="spoolmanUrl">Set URL/IP to your Spoolman-Instanz:</label>
<input type="text" id="spoolmanUrl" placeholder="http://ip-or-url-of-your-spoolman-instanz:port">
<button onclick="checkSpoolmanInstance()">Save Spoolman URL</button>
<p id="statusMessage"></p>
<h2>Bambu Lab Printer Credentials</h2> <div class="card">
<div class="bambu-settings"> <div class="card-body">
<div class="input-group"> <h5 class="card-title">Set URL/IP to your Spoolman-Instanz</h5>
<label for="bambuIp">Bambu Drucker IP-Adresse:</label> <input type="text" id="spoolmanUrl" placeholder="http://ip-or-url-of-your-spoolman-instanz:port">
<input type="text" id="bambuIp" placeholder="192.168.1.xxx" value="{{bambuIp}}"> <h5 class="card-title">If you want to enable sending Spool to Spoolman Octoprint Plugin:</h5>
<p>
<input type="checkbox" id="spoolmanOctoEnabled" {{spoolmanOctoEnabled}} onchange="toggleOctoFields()"> Send to Octo-Plugin
</p>
<div id="octoFields" style="display: none;">
<p>
<input type="text" id="spoolmanOctoUrl" placeholder="http://ip-or-url-of-your-octoprint-instanz:port" value="{{spoolmanOctoUrl}}">
<input type="text" id="spoolmanOctoToken" placeholder="Your Octoprint Token" value="{{spoolmanOctoToken}}">
</p>
</div>
<button onclick="checkSpoolmanInstance()">Save Spoolman URL</button>
<p id="statusMessage"></p>
</div> </div>
<div class="input-group"> </div>
<label for="bambuSerial">Drucker Seriennummer:</label>
<input type="text" id="bambuSerial" placeholder="BBLXXXXXXXX" value="{{bambuSerial}}"> <div class="card">
<div class="card-body">
<h5 class="card-title">Bambu Lab Printer Credentials</h5>
<div class="bambu-settings">
<div class="input-group">
<label for="bambuIp">Bambu Drucker IP-Adresse:</label>
<input type="text" id="bambuIp" placeholder="192.168.1.xxx" value="{{bambuIp}}">
</div>
<div class="input-group">
<label for="bambuSerial">Drucker Seriennummer:</label>
<input type="text" id="bambuSerial" placeholder="BBLXXXXXXXX" value="{{bambuSerial}}">
</div>
<div class="input-group">
<label for="bambuCode">Access Code:</label>
<input type="text" id="bambuCode" placeholder="Access Code vom Drucker" value="{{bambuCode}}">
</div>
<hr>
<p>If activated, FilaMan will automatically update the next filled tray with the last scanned and weighed spool.</p>
<div class="input-group" style="display: flex; margin-bottom: 0;">
<label for="autoSend" style="width: 250px; margin-right: 5px;">Auto Send to Bambu:</label>
<label for="autoSendTime" style="width: 250px; margin-right: 5px;">Wait for Spool in Sec:</label>
</div>
<div class="input-group" style="display: flex;">
<input type="checkbox" id="autoSend" {{autoSendToBambu}} style="width: 190px; margin-right: 10px;">
<input type="number" min="60" id="autoSendTime" placeholder="Time to wait" value="{{autoSendTime}}" style="width: 100px;">
</div>
<button style="margin: 0;" onclick="saveBambuCredentials()">Save Bambu Credentials</button>
<p id="bambuStatusMessage"></p>
</div>
</div> </div>
<div class="input-group">
<label for="bambuCode">Access Code:</label>
<input type="text" id="bambuCode" placeholder="Access Code vom Drucker" value="{{bambuCode}}">
</div>
<button onclick="saveBambuCredentials()">Save Bambu Credentials</button>
<p id="bambuStatusMessage"></p>
</div> </div>
</div> </div>
</body> </body>

View File

@ -86,7 +86,7 @@ function populateVendorDropdown(data, selectedSmId = null) {
}); });
// Nach der Schleife: Formatierung der Gesamtlänge // Nach der Schleife: Formatierung der Gesamtlänge
console.log("Total Lenght: ", totalLength); console.log("Total Length: ", totalLength);
const formattedLength = totalLength > 1000 const formattedLength = totalLength > 1000
? (totalLength / 1000).toFixed(2) + " km" ? (totalLength / 1000).toFixed(2) + " km"
: totalLength.toFixed(2) + " m"; : totalLength.toFixed(2) + " m";
@ -97,13 +97,15 @@ function populateVendorDropdown(data, selectedSmId = null) {
? (weightInKg / 1000).toFixed(2) + " t" ? (weightInKg / 1000).toFixed(2) + " t"
: weightInKg.toFixed(2) + " kg"; : weightInKg.toFixed(2) + " kg";
// Dropdown mit gefilterten Herstellern befüllen // Dropdown mit gefilterten Herstellern befüllen - alphabetisch sortiert
Object.entries(filteredVendors).forEach(([id, name]) => { Object.entries(filteredVendors)
const option = document.createElement("option"); .sort(([, nameA], [, nameB]) => nameA.localeCompare(nameB)) // Sort vendors alphabetically by name
option.value = id; .forEach(([id, name]) => {
option.textContent = name; const option = document.createElement("option");
vendorSelect.appendChild(option); option.value = id;
}); option.textContent = name;
vendorSelect.appendChild(option);
});
document.getElementById("totalSpools").textContent = totalSpools; document.getElementById("totalSpools").textContent = totalSpools;
document.getElementById("spoolsWithoutTag").textContent = spoolsWithoutTag; document.getElementById("spoolsWithoutTag").textContent = spoolsWithoutTag;

View File

@ -188,14 +188,18 @@ label {
font-weight: bold; font-weight: bold;
} }
input[type="text"], input[type="submit"] { input[type="text"], input[type="submit"], input[type="number"] {
padding: 10px; padding: 10px;
border: 1px solid #ccc; border: 1px solid #ccc;
border-radius: 5px; border-radius: 5px;
font-size: 16px; font-size: 16px;
} }
input[type="text"]:focus { input[type="number"] {
width: 108px !important;
}
input[type="text"]:focus, input[type="number"]:focus {
border-color: #007bff; border-color: #007bff;
outline: none; outline: none;
} }
@ -279,9 +283,10 @@ a:hover {
/* Karten-Stil für optische Trennung */ /* Karten-Stil für optische Trennung */
.card { .card {
background: #f9f9f9; background: var(--primary-color);
width: 500px;
padding: 15px; padding: 15px;
margin: 20px 0; margin: 20px auto;
border-radius: 8px; border-radius: 8px;
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1); box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
} }
@ -760,17 +765,19 @@ a:hover {
right: 20px; right: 20px;
padding: 15px 25px; padding: 15px 25px;
border-radius: 4px; border-radius: 4px;
color: white; color: black;
z-index: 1000; z-index: 1000;
animation: slideIn 0.3s ease-out; animation: slideIn 0.3s ease-out;
} }
.notification.success { .notification.success {
background-color: #28a745; background-color: #28a745;
color: black !important;
} }
.notification.error { .notification.error {
background-color: #dc3545; background-color: #dc3545;
color: white !important;
} }
.notification.fade-out { .notification.fade-out {
@ -959,7 +966,6 @@ input[type="submit"]:disabled,
/* Bambu Settings Erweiterung */ /* Bambu Settings Erweiterung */
.bambu-settings { .bambu-settings {
background: white;
padding: 20px; padding: 20px;
border-radius: 8px; border-radius: 8px;
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1); box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
@ -1013,6 +1019,7 @@ input[type="submit"]:disabled,
color: #000; color: #000;
vertical-align: middle; vertical-align: middle;
margin-left: 0.5rem; margin-left: 0.5rem;
text-shadow: none !important;
} }
.progress-container { .progress-container {
@ -1051,9 +1058,10 @@ input[type="submit"]:disabled,
} }
.update-form { .update-form {
background: var(--primary-color); background: var(--primary-color);
box-shadow: 0 4px 8px rgba(0, 0, 0, 0.05);
border: var(--glass-border);
padding: 20px; padding: 20px;
border-radius: 8px; border-radius: 8px;
box-shadow: 0 2px 4px rgba(0,0,0,0.1);
margin: 0 auto; margin: 0 auto;
width: 400px; width: 400px;
text-align: center; text-align: center;
@ -1064,7 +1072,7 @@ input[type="submit"]:disabled,
padding: 8px; padding: 8px;
border: 1px solid #ddd; border: 1px solid #ddd;
border-radius: 4px; border-radius: 4px;
background: white; background-color: #4CAF50;
} }
.update-form input[type="submit"] { .update-form input[type="submit"] {
background-color: #4CAF50; background-color: #4CAF50;
@ -1086,10 +1094,66 @@ input[type="submit"]:disabled,
.warning { .warning {
background-color: var(--primary-color); background-color: var(--primary-color);
border: 1px solid #ffe0b2; border: 1px solid #ffe0b2;
color: white;
padding: 15px;
margin: 20px auto; margin: 20px auto;
border-radius: 4px; border-radius: 4px;
max-width: 600px; max-width: 600px;
text-align: center; text-align: center;
color: #e65100;
padding: 15px;
}
.update-options {
display: flex;
gap: 2rem;
margin: 2rem 0;
}
.update-section {
flex: 1;
background: var(--background-green);
padding: 1.5rem;
border-radius: 8px;
}
.update-section h2 {
margin-top: 0;
color: #333;
}
.update-section p {
color: #666;
margin-bottom: 1rem;
}
.progress-container {
margin: 20px 0;
background: #f0f0f0;
border-radius: 4px;
overflow: hidden;
}
.progress-bar {
width: 0;
height: 20px;
background: #4CAF50;
transition: width 0.3s ease-in-out;
text-align: center;
line-height: 20px;
color: white;
}
.status {
margin-top: 20px;
padding: 10px;
border-radius: 4px;
display: none;
}
.status.success {
background: #e8f5e9;
color: #2e7d32;
}
.status.error {
background: #ffebee;
color: #c62828;
}
.warning {
background: #fff3e0;
color: #e65100;
padding: 15px;
border-radius: 4px;
margin-bottom: 20px;
} }

View File

@ -6,13 +6,24 @@
<title>FilaMan - Filament Management Tool</title> <title>FilaMan - Filament Management Tool</title>
<link rel="icon" type="image/png" href="/favicon.ico"> <link rel="icon" type="image/png" href="/favicon.ico">
<link rel="stylesheet" href="style.css"> <link rel="stylesheet" href="style.css">
<script>
fetch('/api/version')
.then(response => response.json())
.then(data => {
const versionSpan = document.querySelector('.version');
if (versionSpan) {
versionSpan.textContent = 'v' + data.version;
}
})
.catch(error => console.error('Error fetching version:', error));
</script>
</head> </head>
<body> <body>
<div class="navbar"> <div class="navbar">
<div style="display: flex; align-items: center; gap: 2rem;"> <div style="display: flex; align-items: center; gap: 2rem;">
<img src="/logo.png" alt="FilaMan Logo" class="logo"> <img src="/logo.png" alt="FilaMan Logo" class="logo">
<div class="logo-text"> <div class="logo-text">
<h1>FilaMan<span class="version">v1.2.62</span></h1> <h1>FilaMan<span class="version"></span></h1>
<h4>Filament Management Tool</h4> <h4>Filament Management Tool</h4>
</div> </div>
</div> </div>
@ -40,18 +51,34 @@
<h1>Firmware Upgrade</h1> <h1>Firmware Upgrade</h1>
<div class="warning"> <div class="warning">
<strong>Warning:</strong> Please do not turn off or restart the device during the update. <strong>Warning:</strong> Do not power off the device during update.
The device will restart automatically after the update.
</div> </div>
<div class="update-form"> <div class="update-options">
<form id="updateForm" enctype='multipart/form-data'> <div class="update-section">
<input type='file' name='update' accept='.bin' required> <h2>Firmware Update</h2>
<input type='submit' value='Start Firmware Update'> <p>Upload a new firmware file (filaman_*.bin)</p>
</form> <div class="update-form">
<form id="firmwareForm" enctype='multipart/form-data' data-type="firmware">
<input type='file' name='update' accept='.bin' required>
<input type='submit' value='Start Firmware Update'>
</form>
</div>
</div>
<div class="update-section">
<h2>Webpage Update</h2>
<p>Upload a new webpage file (webpage_*.bin)</p>
<div class="update-form">
<form id="webpageForm" enctype='multipart/form-data' data-type="webpage">
<input type='file' name='update' accept='.bin' required>
<input type='submit' value='Start Webpage Update'>
</form>
</div>
</div>
</div> </div>
<div class="progress-container"> <div class="progress-container" style="display: none;">
<div class="progress-bar">0%</div> <div class="progress-bar">0%</div>
</div> </div>
<div class="status"></div> <div class="status"></div>
@ -64,91 +91,163 @@
statusContainer.style.display = 'none'; statusContainer.style.display = 'none';
} }
document.getElementById('updateForm').addEventListener('submit', async (e) => { const progress = document.querySelector('.progress-bar');
const progressContainer = document.querySelector('.progress-container');
const status = document.querySelector('.status');
let updateInProgress = false;
let lastReceivedProgress = 0;
// WebSocket Handling
let ws = null;
let wsReconnectTimer = null;
function connectWebSocket() {
ws = new WebSocket('ws://' + window.location.host + '/ws');
ws.onmessage = function(event) {
try {
const data = JSON.parse(event.data);
if (data.type === "updateProgress" && updateInProgress) {
// Zeige Fortschrittsbalken
progressContainer.style.display = 'block';
// Aktualisiere den Fortschritt nur wenn er größer ist
const newProgress = parseInt(data.progress);
if (!isNaN(newProgress) && newProgress >= lastReceivedProgress) {
progress.style.width = newProgress + '%';
progress.textContent = newProgress + '%';
lastReceivedProgress = newProgress;
}
// Zeige Status-Nachricht
if (data.message || data.status) {
status.textContent = data.message || getStatusMessage(data.status);
status.className = 'status success';
status.style.display = 'block';
// Starte Reload wenn Update erfolgreich
if (data.status === 'success' || lastReceivedProgress >= 98) {
clearTimeout(wsReconnectTimer);
setTimeout(() => {
window.location.href = '/';
}, 30000);
}
}
}
} catch (e) {
console.error('WebSocket message error:', e);
}
};
ws.onclose = function() {
if (updateInProgress) {
// Wenn der Fortschritt hoch genug ist, gehen wir von einem erfolgreichen Update aus
if (lastReceivedProgress >= 85) {
status.textContent = "Update appears successful! Device is restarting... Page will reload in 30 seconds.";
status.className = 'status success';
status.style.display = 'block';
clearTimeout(wsReconnectTimer);
setTimeout(() => {
window.location.href = '/';
}, 30000);
} else {
// Versuche Reconnect bei niedrigem Fortschritt
wsReconnectTimer = setTimeout(connectWebSocket, 1000);
}
}
};
ws.onerror = function(err) {
console.error('WebSocket error:', err);
if (updateInProgress && lastReceivedProgress >= 85) {
status.textContent = "Update appears successful! Device is restarting... Page will reload in 30 seconds.";
status.className = 'status success';
status.style.display = 'block';
setTimeout(() => {
window.location.href = '/';
}, 30000);
}
};
}
// Initial WebSocket connection
connectWebSocket();
function getStatusMessage(status) {
switch(status) {
case 'starting': return 'Starting update...';
case 'uploading': return 'Uploading...';
case 'finalizing': return 'Finalizing update...';
case 'restoring': return 'Restoring configurations...';
case 'preparing': return 'Preparing for restart...';
case 'success': return 'Update successful! Device is restarting... Page will reload in 30 seconds.';
default: return 'Updating...';
}
}
function handleUpdate(e) {
e.preventDefault(); e.preventDefault();
const form = e.target; const form = e.target;
const file = form.update.files[0]; const file = form.update.files[0];
const updateType = form.dataset.type;
if (!file) { if (!file) {
alert('Please select a firmware file.'); alert('Please select a file.');
return; return;
} }
const formData = new FormData();
formData.append('update', file);
const progress = document.querySelector('.progress-bar');
const progressContainer = document.querySelector('.progress-container');
const status = document.querySelector('.status');
// Validate file name pattern
if (updateType === 'firmware' && !file.name.startsWith('upgrade_filaman_firmware_')) {
alert('Please select a valid firmware file (upgrade_filaman_firmware_*.bin)');
return;
}
if (updateType === 'webpage' && !file.name.startsWith('upgrade_filaman_website_')) {
alert('Please select a valid webpage file (upgrade_filaman_website_*.bin)');
return;
}
// Reset UI
updateInProgress = true;
progressContainer.style.display = 'block'; progressContainer.style.display = 'block';
status.style.display = 'none'; status.style.display = 'none';
status.className = 'status'; status.className = 'status';
form.querySelector('input[type=submit]').disabled = true; progress.style.width = '0%';
progress.textContent = '0%';
// Disable submit buttons
document.querySelectorAll('form input[type=submit]').forEach(btn => btn.disabled = true);
// Send update
const xhr = new XMLHttpRequest(); const xhr = new XMLHttpRequest();
xhr.open('POST', '/update', true); xhr.open('POST', '/update', true);
xhr.upload.onprogress = (e) => {
if (e.lengthComputable) {
const percentComplete = (e.loaded / e.total) * 100;
progress.style.width = percentComplete + '%';
progress.textContent = Math.round(percentComplete) + '%';
}
};
xhr.onload = function() { xhr.onload = function() {
try { if (xhr.status !== 200 && !progress.textContent.startsWith('100')) {
let response = this.responseText; status.textContent = "Update failed: " + (xhr.responseText || "Unknown error");
try { status.className = 'status error';
const jsonResponse = JSON.parse(response);
response = jsonResponse.message;
if (jsonResponse.restart) {
status.textContent = response + " Redirecting in 20 seconds...";
let countdown = 20;
const timer = setInterval(() => {
countdown--;
if (countdown <= 0) {
clearInterval(timer);
window.location.href = '/';
} else {
status.textContent = response + ` Redirecting in ${countdown} seconds...`;
}
}, 1000);
}
} catch (e) {
if (!isNaN(response)) {
const percent = parseInt(response);
progress.style.width = percent + '%';
progress.textContent = percent + '%';
return;
}
}
status.textContent = response;
status.classList.add(xhr.status === 200 ? 'success' : 'error');
status.style.display = 'block'; status.style.display = 'block';
updateInProgress = false;
if (xhr.status !== 200) { document.querySelectorAll('form input[type=submit]').forEach(btn => btn.disabled = false);
form.querySelector('input[type=submit]').disabled = false; }
} };
} catch (error) {
status.textContent = 'Error: ' + error.message; xhr.onerror = function() {
status.classList.add('error'); if (!progress.textContent.startsWith('100')) {
status.textContent = "Network error during update";
status.className = 'status error';
status.style.display = 'block'; status.style.display = 'block';
form.querySelector('input[type=submit]').disabled = false; updateInProgress = false;
document.querySelectorAll('form input[type=submit]').forEach(btn => btn.disabled = false);
} }
}; };
xhr.onerror = function() { const formData = new FormData();
status.textContent = 'Update failed: Network error'; formData.append('update', file);
status.classList.add('error');
status.style.display = 'block';
form.querySelector('input[type=submit]').disabled = false;
};
xhr.send(formData); xhr.send(formData);
}); }
document.getElementById('firmwareForm').addEventListener('submit', handleUpdate);
document.getElementById('webpageForm').addEventListener('submit', handleUpdate);
</script> </script>
</body> </body>
</html> </html>

View File

@ -6,13 +6,24 @@
<title>FilaMan - Filament Management Tool</title> <title>FilaMan - Filament Management Tool</title>
<link rel="icon" type="image/png" href="/favicon.ico"> <link rel="icon" type="image/png" href="/favicon.ico">
<link rel="stylesheet" href="style.css"> <link rel="stylesheet" href="style.css">
<script>
fetch('/api/version')
.then(response => response.json())
.then(data => {
const versionSpan = document.querySelector('.version');
if (versionSpan) {
versionSpan.textContent = 'v' + data.version;
}
})
.catch(error => console.error('Error fetching version:', error));
</script>
</head> </head>
<body> <body>
<div class="navbar"> <div class="navbar">
<div style="display: flex; align-items: center; gap: 2rem;"> <div style="display: flex; align-items: center; gap: 2rem;">
<img src="/logo.png" alt="FilaMan Logo" class="logo"> <img src="/logo.png" alt="FilaMan Logo" class="logo">
<div class="logo-text"> <div class="logo-text">
<h1>FilaMan<span class="version">v1.2.62</span></h1> <h1>FilaMan<span class="version"></span></h1>
<h4>Filament Management Tool</h4> <h4>Filament Management Tool</h4>
</div> </div>
</div> </div>

View File

@ -6,13 +6,24 @@
<title>FilaMan - Filament Management Tool</title> <title>FilaMan - Filament Management Tool</title>
<link rel="icon" type="image/png" href="/favicon.ico"> <link rel="icon" type="image/png" href="/favicon.ico">
<link rel="stylesheet" href="style.css"> <link rel="stylesheet" href="style.css">
<script>
fetch('/api/version')
.then(response => response.json())
.then(data => {
const versionSpan = document.querySelector('.version');
if (versionSpan) {
versionSpan.textContent = 'v' + data.version;
}
})
.catch(error => console.error('Error fetching version:', error));
</script>
</head> </head>
<body> <body>
<div class="navbar"> <div class="navbar">
<div style="display: flex; align-items: center; gap: 2rem;"> <div style="display: flex; align-items: center; gap: 2rem;">
<img src="/logo.png" alt="FilaMan Logo" class="logo"> <img src="/logo.png" alt="FilaMan Logo" class="logo">
<div class="logo-text"> <div class="logo-text">
<h1>FilaMan<span class="version">v1.2.62</span></h1> <h1>FilaMan<span class="version"></span></h1>
<h4>Filament Management Tool</h4> <h4>Filament Management Tool</h4>
</div> </div>
</div> </div>

BIN img/7-enable.png Normal file (binary not shown, 52 KiB)

BIN img/ESP32-SPI-Pins.png Normal file (binary not shown, 143 KiB)

BIN img/IMG_2589.jpeg Normal file (binary not shown, 136 KiB)

BIN img/IMG_2590.jpeg Normal file (binary not shown, 143 KiB)

BIN img/Schaltplan.png Normal file (binary not shown, 283 KiB)

View File

@ -9,8 +9,10 @@
; https://docs.platformio.org/page/projectconf.html ; https://docs.platformio.org/page/projectconf.html
[common] [common]
version = "1.2.62" version = "1.4.0"
to_old_version = "1.4.0"
##
[env:esp32dev] [env:esp32dev]
platform = espressif32 platform = espressif32
board = esp32dev board = esp32dev
@ -20,7 +22,10 @@ monitor_speed = 115200
lib_deps = lib_deps =
tzapu/WiFiManager @ ^2.0.17 tzapu/WiFiManager @ ^2.0.17
https://github.com/me-no-dev/ESPAsyncWebServer.git#master https://github.com/me-no-dev/ESPAsyncWebServer.git#master
me-no-dev/AsyncTCP @ ^1.1.1 #me-no-dev/AsyncTCP @ ^1.1.1
https://github.com/esphome/AsyncTCP.git
#mathieucarbou/ESPAsyncWebServer @ ^3.6.0
#esp32async/AsyncTCP @ ^3.3.5
bogde/HX711 @ ^0.7.5 bogde/HX711 @ ^0.7.5
adafruit/Adafruit SSD1306 @ ^2.5.13 adafruit/Adafruit SSD1306 @ ^2.5.13
adafruit/Adafruit GFX Library @ ^1.11.11 adafruit/Adafruit GFX Library @ ^1.11.11
@ -30,7 +35,8 @@ lib_deps =
digitaldragon/SSLClient @ ^1.3.2 digitaldragon/SSLClient @ ^1.3.2
; Enable SPIFFS upload ; Enable SPIFFS upload
board_build.filesystem = spiffs #board_build.filesystem = spiffs
board_build.filesystem = littlefs
; Update partition settings ; Update partition settings
board_build.partitions = partitions.csv board_build.partitions = partitions.csv
board_upload.flash_size = 4MB board_upload.flash_size = 4MB
@ -41,36 +47,29 @@ build_flags =
-Os -Os
-ffunction-sections -ffunction-sections
-fdata-sections -fdata-sections
-DNDEBUG #-DNDEBUG
-mtext-section-literals -mtext-section-literals
'-D VERSION="${common.version}"' -DVERSION=\"${common.version}\"
-DTOOLDVERSION=\"${common.to_old_version}\"
-DASYNCWEBSERVER_REGEX -DASYNCWEBSERVER_REGEX
-DCORE_DEBUG_LEVEL=1 #-DCORE_DEBUG_LEVEL=3
-DCONFIG_ARDUHAL_LOG_COLORS=1 -DCONFIG_ARDUHAL_LOG_COLORS=1
-DOTA_DEBUG=1 #-DOTA_DEBUG=1
-DARDUINO_RUNNING_CORE=1
-DARDUINO_EVENT_RUNNING_CORE=1
-DCONFIG_OPTIMIZATION_LEVEL_DEBUG=1 -DCONFIG_OPTIMIZATION_LEVEL_DEBUG=1
-DCONFIG_ESP32_PANIC_PRINT_REBOOT -DBOOT_APP_PARTITION_OTA_0=1
-DCONFIG_ARDUINO_OTA_READSIZE=1024 -DCONFIG_LWIP_TCP_MSL=60000
-DCONFIG_ASYNC_TCP_RUNNING_CORE=1 -DCONFIG_LWIP_TCP_RCV_BUF_DEFAULT=4096
-DCONFIG_ASYNC_TCP_USE_WDT=0 -DCONFIG_LWIP_MAX_ACTIVE_TCP=16
-DCONFIG_LWIP_TCP_MSS=1460
-DOTA_PARTITION_SUBTYPE=0x10
-DPARTITION_TABLE_OFFSET=0x8000
-DPARTITION_TABLE_SIZE=0x1000
extra_scripts = extra_scripts =
scripts/extra_script.py scripts/extra_script.py
pre:scripts/pre_build.py ; wird zuerst ausgeführt ${env:buildfs.extra_scripts}
pre:scripts/pre_spiffs.py ; wird als zweites ausgeführt
pre:scripts/combine_html.py ; wird als drittes ausgeführt
scripts/gzip_files.py
; Remove or comment out the targets line [env:buildfs]
;targets = buildfs, build extra_scripts =
pre:scripts/combine_html.py ; Combine header with HTML files
scripts/gzip_files.py ; Compress files for SPIFFS
; Add a custom target to build both
[platformio] [platformio]
default_envs = esp32dev default_envs = esp32dev
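Two build-level changes meet here: the filesystem image is now built as LittleFS instead of SPIFFS, and the web assets get their own `[env:buildfs]` environment that runs the combine and gzip scripts. The firmware-side counterpart (not visible in this hunk) is to mount LittleFS rather than SPIFFS at boot; a minimal sketch, assuming the standard Arduino-ESP32 LittleFS API:

```cpp
// Illustrative LittleFS mount replacing the old SPIFFS.begin() call (not verbatim FilaMan code).
#include <LittleFS.h>

bool mountFilesystem() {
  // true = format the partition if it cannot be mounted (first boot after the migration)
  if (!LittleFS.begin(true)) {
    Serial.println("LittleFS mount failed");
    return false;
  }
  Serial.printf("LittleFS mounted, %u of %u bytes used\n",
                (unsigned)LittleFS.usedBytes(), (unsigned)LittleFS.totalBytes());
  return true;
}
```

The filesystem image itself is typically built and flashed with `pio run -t buildfs` and `pio run -t uploadfs`.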

View File

@ -1,7 +1,39 @@
Import("env") Import("env")
board_config = env.BoardConfig()
# Calculate SPIFFS size based on partition table
SPIFFS_START = 0x310000 # From partitions.csv
SPIFFS_SIZE = 0xE0000 # From partitions.csv
SPIFFS_PAGE = 256
SPIFFS_BLOCK = 4096
env.Replace(
MKSPIFFSTOOL="mkspiffs",
SPIFFSBLOCKSZ=SPIFFS_BLOCK,
SPIFFSBLOCKSIZE=SPIFFS_BLOCK,
SPIFFSSTART=SPIFFS_START,
SPIFFSEND=SPIFFS_START + SPIFFS_SIZE,
SPIFFSPAGESZ=SPIFFS_PAGE,
SPIFFSSIZE=SPIFFS_SIZE
)
# Wiederverwendung der replace_version Funktion # Wiederverwendung der replace_version Funktion
exec(open("./scripts/pre_build.py").read()) exec(open("./scripts/pre_build.py").read())
# Bind to SPIFFS build # Bind to SPIFFS build
env.AddPreAction("buildfs", replace_version) env.AddPreAction("buildfs", replace_version)
import os
import shutil
from SCons.Script import DefaultEnvironment
env = DefaultEnvironment()
# Format SPIFFS partition before uploading new files
spiffs_dir = os.path.join(env.subst("$BUILD_DIR"), "spiffs")
if os.path.exists(spiffs_dir):
shutil.rmtree(spiffs_dir)
os.makedirs(spiffs_dir)
print("SPIFFS partition formatted.")

View File

@ -64,29 +64,10 @@ def get_changes_from_git():
return changes return changes
def push_changes(version):
"""Push changes to upstream"""
try:
# Stage the CHANGELOG.md
subprocess.run(['git', 'add', 'CHANGELOG.md'], check=True)
# Commit the changelog
commit_msg = f"docs: update changelog for version {version}"
subprocess.run(['git', 'commit', '-m', commit_msg], check=True)
# Push to origin (local)
subprocess.run(['git', 'push', 'origin'], check=True)
print("Successfully pushed to origin")
except subprocess.CalledProcessError as e:
print(f"Error during git operations: {e}")
return False
return True
def update_changelog(): def update_changelog():
print("Starting changelog update...") # Add this line print("Starting changelog update...")
version = get_version() version = get_version()
print(f"Current version: {version}") # Add this line print(f"Current version: {version}")
today = datetime.now().strftime('%Y-%m-%d') today = datetime.now().strftime('%Y-%m-%d')
script_dir = os.path.dirname(os.path.abspath(__file__)) script_dir = os.path.dirname(os.path.abspath(__file__))
@ -111,7 +92,7 @@ def update_changelog():
if not os.path.exists(changelog_path): if not os.path.exists(changelog_path):
with open(changelog_path, 'w') as f: with open(changelog_path, 'w') as f:
f.write(f"# Changelog\n\n{changelog_entry}") f.write(f"# Changelog\n\n{changelog_entry}")
push_changes(version) print(f"Created new changelog file with version {version}")
else: else:
with open(changelog_path, 'r') as f: with open(changelog_path, 'r') as f:
content = f.read() content = f.read()
@ -120,9 +101,30 @@ def update_changelog():
updated_content = content.replace("# Changelog\n", f"# Changelog\n\n{changelog_entry}") updated_content = content.replace("# Changelog\n", f"# Changelog\n\n{changelog_entry}")
with open(changelog_path, 'w') as f: with open(changelog_path, 'w') as f:
f.write(updated_content) f.write(updated_content)
push_changes(version) print(f"Added new version {version} to changelog")
else: else:
print(f"Version {version} already exists in changelog") # Version existiert bereits, aktualisiere die bestehenden Einträge
version_pattern = f"## \\[{version}\\] - \\d{{4}}-\\d{{2}}-\\d{{2}}"
next_version_pattern = "## \\[.*?\\] - \\d{4}-\\d{2}-\\d{2}"
# Finde den Start der aktuellen Version
version_match = re.search(version_pattern, content)
if version_match:
version_start = version_match.start()
# Suche nach der nächsten Version
next_version_match = re.search(next_version_pattern, content[version_start + 1:])
if next_version_match:
# Ersetze den Inhalt zwischen aktueller und nächster Version
next_version_pos = version_start + 1 + next_version_match.start()
updated_content = content[:version_start] + changelog_entry + content[next_version_pos:]
else:
# Wenn keine nächste Version existiert, ersetze bis zum Ende
updated_content = content[:version_start] + changelog_entry + "\n"
with open(changelog_path, 'w') as f:
f.write(updated_content)
print(f"Updated entries for version {version}")
if __name__ == "__main__": if __name__ == "__main__":
update_changelog() update_changelog()

View File

@ -5,41 +5,20 @@
bool spoolman_connected = false; bool spoolman_connected = false;
String spoolmanUrl = ""; String spoolmanUrl = "";
bool octoEnabled = false;
String octoUrl = "";
String octoToken = "";
struct SendToApiParams { struct SendToApiParams {
String httpType; String httpType;
String spoolsUrl; String spoolsUrl;
String updatePayload; String updatePayload;
String octoToken;
}; };
/* JsonDocument fetchSingleSpoolInfo(int spoolId) {
// Spoolman Data
{
"version":"1.0",
"protocol":"openspool",
"color_hex":"AF7933",
"type":"ABS",
"min_temp":175,
"max_temp":275,
"brand":"Overture"
}
// FilaMan Data
{
"version":"1.0",
"protocol":"openspool",
"color_hex":"AF7933",
"type":"ABS",
"min_temp":175,
"max_temp":275,
"brand":"Overture",
"sm_id":
}
*/
JsonDocument fetchSpoolsForWebsite() {
HTTPClient http; HTTPClient http;
String spoolsUrl = spoolmanUrl + apiUrl + "/spool"; String spoolsUrl = spoolmanUrl + apiUrl + "/spool/" + spoolId;
Serial.print("Rufe Spool-Daten von: "); Serial.print("Rufe Spool-Daten von: ");
Serial.println(spoolsUrl); Serial.println(spoolsUrl);
@ -56,84 +35,45 @@ JsonDocument fetchSpoolsForWebsite() {
Serial.print("Fehler beim Parsen der JSON-Antwort: "); Serial.print("Fehler beim Parsen der JSON-Antwort: ");
Serial.println(error.c_str()); Serial.println(error.c_str());
} else { } else {
JsonArray spools = doc.as<JsonArray>(); String filamentType = doc["filament"]["material"].as<String>();
JsonArray filteredSpools = filteredDoc.to<JsonArray>(); String filamentBrand = doc["filament"]["vendor"]["name"].as<String>();
for (JsonObject spool : spools) { int nozzle_temp_min = 0;
JsonObject filteredSpool = filteredSpools.createNestedObject(); int nozzle_temp_max = 0;
filteredSpool["extra"]["nfc_id"] = spool["extra"]["nfc_id"]; if (doc["filament"]["extra"]["nozzle_temperature"].is<String>()) {
String tempString = doc["filament"]["extra"]["nozzle_temperature"].as<String>();
tempString.replace("[", "");
tempString.replace("]", "");
int commaIndex = tempString.indexOf(',');
if (commaIndex != -1) {
nozzle_temp_min = tempString.substring(0, commaIndex).toInt();
nozzle_temp_max = tempString.substring(commaIndex + 1).toInt();
}
}
JsonObject filament = filteredSpool.createNestedObject("filament"); String filamentColor = doc["filament"]["color_hex"].as<String>();
filament["sm_id"] = spool["id"]; filamentColor.toUpperCase();
filament["id"] = spool["filament"]["id"];
filament["name"] = spool["filament"]["name"];
filament["material"] = spool["filament"]["material"];
filament["color_hex"] = spool["filament"]["color_hex"];
filament["nozzle_temperature"] = spool["filament"]["extra"]["nozzle_temperature"]; // [190,230]
filament["price_meter"] = spool["filament"]["extra"]["price_meter"];
filament["price_gramm"] = spool["filament"]["extra"]["price_gramm"];
JsonObject vendor = filament.createNestedObject("vendor"); String tray_info_idx = doc["filament"]["extra"]["bambu_idx"].as<String>();
vendor["id"] = spool["filament"]["vendor"]["id"]; tray_info_idx.replace("\"", "");
vendor["name"] = spool["filament"]["vendor"]["name"];
} String cali_idx = doc["filament"]["extra"]["bambu_cali_id"].as<String>(); // "\"153\""
} cali_idx.replace("\"", "");
} else {
Serial.print("Fehler beim Abrufen der Spool-Daten. HTTP-Code: "); String bambu_setting_id = doc["filament"]["extra"]["bambu_setting_id"].as<String>(); // "\"PFUSf40e9953b40d3d\""
Serial.println(httpCode); bambu_setting_id.replace("\"", "");
}
http.end(); doc.clear();
return filteredDoc;
}
JsonDocument fetchAllSpoolsInfo() { filteredDoc["color"] = filamentColor;
HTTPClient http; filteredDoc["type"] = filamentType;
String spoolsUrl = spoolmanUrl + apiUrl + "/spool"; filteredDoc["nozzle_temp_min"] = nozzle_temp_min;
filteredDoc["nozzle_temp_max"] = nozzle_temp_max;
Serial.print("Rufe Spool-Daten von: "); filteredDoc["brand"] = filamentBrand;
Serial.println(spoolsUrl); filteredDoc["tray_info_idx"] = tray_info_idx;
filteredDoc["cali_idx"] = cali_idx;
http.begin(spoolsUrl); filteredDoc["bambu_setting_id"] = bambu_setting_id;
int httpCode = http.GET();
JsonDocument filteredDoc;
if (httpCode == HTTP_CODE_OK) {
String payload = http.getString();
JsonDocument doc;
DeserializationError error = deserializeJson(doc, payload);
if (error) {
Serial.print("Fehler beim Parsen der JSON-Antwort: ");
Serial.println(error.c_str());
} else {
JsonArray spools = doc.as<JsonArray>();
JsonArray filteredSpools = filteredDoc.to<JsonArray>();
for (JsonObject spool : spools) {
JsonObject filteredSpool = filteredSpools.createNestedObject();
filteredSpool["price"] = spool["price"];
filteredSpool["remaining_weight"] = spool["remaining_weight"];
filteredSpool["used_weight"] = spool["used_weight"];
filteredSpool["extra"]["nfc_id"] = spool["extra"]["nfc_id"];
JsonObject filament = filteredSpool.createNestedObject("filament");
filament["id"] = spool["filament"]["id"];
filament["name"] = spool["filament"]["name"];
filament["material"] = spool["filament"]["material"];
filament["density"] = spool["filament"]["density"];
filament["diameter"] = spool["filament"]["diameter"];
filament["spool_weight"] = spool["filament"]["spool_weight"];
filament["color_hex"] = spool["filament"]["color_hex"];
JsonObject vendor = filament.createNestedObject("vendor");
vendor["id"] = spool["filament"]["vendor"]["id"];
vendor["name"] = spool["filament"]["vendor"]["name"];
JsonObject extra = filament.createNestedObject("extra");
extra["nozzle_temperature"] = spool["filament"]["extra"]["nozzle_temperature"];
extra["price_gramm"] = spool["filament"]["extra"]["price_gramm"];
extra["price_meter"] = spool["filament"]["extra"]["price_meter"];
}
} }
} else { } else {
Serial.print("Fehler beim Abrufen der Spool-Daten. HTTP-Code: "); Serial.print("Fehler beim Abrufen der Spool-Daten. HTTP-Code: ");
@ -151,19 +91,21 @@ void sendToApi(void *parameter) {
String httpType = params->httpType; String httpType = params->httpType;
String spoolsUrl = params->spoolsUrl; String spoolsUrl = params->spoolsUrl;
String updatePayload = params->updatePayload; String updatePayload = params->updatePayload;
String octoToken = params->octoToken;
HTTPClient http; HTTPClient http;
http.begin(spoolsUrl); http.begin(spoolsUrl);
http.addHeader("Content-Type", "application/json"); http.addHeader("Content-Type", "application/json");
if (octoEnabled && octoToken != "") http.addHeader("X-Api-Key", octoToken);
int httpCode = http.PUT(updatePayload); int httpCode = http.PUT(updatePayload);
if (httpType == "PATCH") httpCode = http.PATCH(updatePayload); if (httpType == "PATCH") httpCode = http.PATCH(updatePayload);
if (httpType == "POST") httpCode = http.POST(updatePayload);
if (httpCode == HTTP_CODE_OK) { if (httpCode == HTTP_CODE_OK) {
Serial.println("Gewicht der Spule erfolgreich aktualisiert"); Serial.println("Spoolman erfolgreich aktualisiert");
} else { } else {
Serial.println("Fehler beim Aktualisieren des Gewichts der Spule"); Serial.println("Fehler beim Senden an Spoolman!");
oledShowMessage("Spoolman update failed"); oledShowMessage("Spoolman update failed");
vTaskDelay(2000 / portTICK_PERIOD_MS); vTaskDelay(2000 / portTICK_PERIOD_MS);
} }
@ -186,7 +128,7 @@ bool updateSpoolTagId(String uidString, const char* payload) {
} }
// Überprüfe, ob die erforderlichen Felder vorhanden sind // Überprüfe, ob die erforderlichen Felder vorhanden sind
if (!doc.containsKey("sm_id") || doc["sm_id"] == "") { if (!doc["sm_id"].is<String>() || doc["sm_id"].as<String>() == "") {
Serial.println("Keine Spoolman-ID gefunden."); Serial.println("Keine Spoolman-ID gefunden.");
return false; return false;
} }
@ -262,6 +204,89 @@ uint8_t updateSpoolWeight(String spoolId, uint16_t weight) {
return 1; return 1;
} }
bool updateSpoolOcto(int spoolId) {
String spoolsUrl = octoUrl + "/plugin/Spoolman/selectSpool";
Serial.print("Update Spule in Octoprint mit URL: ");
Serial.println(spoolsUrl);
JsonDocument updateDoc;
updateDoc["spool_id"] = spoolId;
updateDoc["tool"] = "tool0";
String updatePayload;
serializeJson(updateDoc, updatePayload);
Serial.print("Update Payload: ");
Serial.println(updatePayload);
SendToApiParams* params = new SendToApiParams();
if (params == nullptr) {
Serial.println("Fehler: Kann Speicher für Task-Parameter nicht allokieren.");
return false;
}
params->httpType = "POST";
params->spoolsUrl = spoolsUrl;
params->updatePayload = updatePayload;
params->octoToken = octoToken;
// Erstelle die Task
BaseType_t result = xTaskCreate(
sendToApi, // Task-Funktion
"SendToApiTask", // Task-Name
4096, // Stackgröße in Bytes
(void*)params, // Parameter
0, // Priorität
NULL // Task-Handle (nicht benötigt)
);
return true;
}
bool updateSpoolBambuData(String payload) {
JsonDocument doc;
DeserializationError error = deserializeJson(doc, payload);
if (error) {
Serial.print("Fehler beim JSON-Parsing: ");
Serial.println(error.c_str());
return false;
}
String spoolsUrl = spoolmanUrl + apiUrl + "/filament/" + doc["filament_id"].as<String>();
Serial.print("Update Spule mit URL: ");
Serial.println(spoolsUrl);
JsonDocument updateDoc;
updateDoc["extra"]["bambu_setting_id"] = "\"" + doc["setting_id"].as<String>() + "\"";
updateDoc["extra"]["bambu_cali_id"] = "\"" + doc["cali_idx"].as<String>() + "\"";
updateDoc["extra"]["bambu_idx"] = "\"" + doc["tray_info_idx"].as<String>() + "\"";
updateDoc["extra"]["nozzle_temperature"] = "[" + doc["temp_min"].as<String>() + "," + doc["temp_max"].as<String>() + "]";
String updatePayload;
serializeJson(updateDoc, updatePayload);
Serial.print("Update Payload: ");
Serial.println(updatePayload);
SendToApiParams* params = new SendToApiParams();
if (params == nullptr) {
Serial.println("Fehler: Kann Speicher für Task-Parameter nicht allokieren.");
return false;
}
params->httpType = "PATCH";
params->spoolsUrl = spoolsUrl;
params->updatePayload = updatePayload;
// Erstelle die Task
BaseType_t result = xTaskCreate(
sendToApi, // Task-Funktion
"SendToApiTask", // Task-Name
4096, // Stackgröße in Bytes
(void*)params, // Parameter
0, // Priorität
NULL // Task-Handle (nicht benötigt)
);
return true;
}
// #### Spoolman init // #### Spoolman init
bool checkSpoolmanExtraFields() { bool checkSpoolmanExtraFields() {
HTTPClient http; HTTPClient http;
@ -368,7 +393,7 @@ bool checkSpoolmanExtraFields() {
for (uint8_t s = 0; s < extraLength; s++) { for (uint8_t s = 0; s < extraLength; s++) {
bool found = false; bool found = false;
for (JsonObject field : doc.as<JsonArray>()) { for (JsonObject field : doc.as<JsonArray>()) {
if (field.containsKey("key") && field["key"] == extraFields[s]) { if (field["key"].is<String>() && field["key"] == extraFields[s]) {
Serial.println("Feld gefunden: " + extraFields[s]); Serial.println("Feld gefunden: " + extraFields[s]);
found = true; found = true;
break; break;
@ -403,12 +428,13 @@ bool checkSpoolmanExtraFields() {
} }
} }
} }
http.end();
} }
Serial.println("-------- ENDE Prüfe Felder --------"); Serial.println("-------- ENDE Prüfe Felder --------");
Serial.println(); Serial.println();
http.end();
return true; return true;
} }
@ -430,7 +456,7 @@ bool checkSpoolmanInstance(const String& url) {
String payload = http.getString(); String payload = http.getString();
JsonDocument doc; JsonDocument doc;
DeserializationError error = deserializeJson(doc, payload); DeserializationError error = deserializeJson(doc, payload);
if (!error && doc.containsKey("status")) { if (!error && doc["status"].is<String>()) {
const char* status = doc["status"]; const char* status = doc["status"];
http.end(); http.end();
@ -452,24 +478,38 @@ bool checkSpoolmanInstance(const String& url) {
return false; return false;
} }
-bool saveSpoolmanUrl(const String& url) {
+bool saveSpoolmanUrl(const String& url, bool octoOn, const String& octoWh, const String& octoTk) {
     if (!checkSpoolmanInstance(url)) return false;

     JsonDocument doc;
     doc["url"] = url;
-    Serial.print("Speichere URL in Datei: ");
-    Serial.println(url);
+    doc["octoEnabled"] = octoOn;
+    doc["octoUrl"] = octoWh;
+    doc["octoToken"] = octoTk;
+    Serial.print("Speichere Spoolman Data in Datei: ");
+    Serial.println(doc.as<String>());
     if (!saveJsonValue("/spoolman_url.json", doc)) {
         Serial.println("Fehler beim Speichern der Spoolman-URL.");
+        return false;
     }
     spoolmanUrl = url;
+    octoEnabled = octoOn;
+    octoUrl = octoWh;
+    octoToken = octoTk;
     return true;
 }

 String loadSpoolmanUrl() {
     JsonDocument doc;
-    if (loadJsonValue("/spoolman_url.json", doc) && doc.containsKey("url")) {
+    if (loadJsonValue("/spoolman_url.json", doc) && doc["url"].is<String>()) {
+        octoEnabled = (doc["octoEnabled"].is<bool>()) ? doc["octoEnabled"].as<bool>() : false;
+        if (octoEnabled && doc["octoToken"].is<String>() && doc["octoUrl"].is<String>())
+        {
+            octoUrl = doc["octoUrl"].as<String>();
+            octoToken = doc["octoToken"].as<String>();
+        }
         return doc["url"].as<String>();
     }
     Serial.println("Keine gültige Spoolman-URL gefunden.");
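Note: with this change /spoolman_url.json grows from a single "url" field to four keys; the values below are placeholders only:

    {
      "url": "http://192.168.1.50:7912",
      "octoEnabled": true,
      "octoUrl": "http://octoprint.local",
      "octoToken": "YOUR_OCTOPRINT_API_KEY"
    }

Older files that contain only "url" still load: a missing octoEnabled key defaults to false, so the OctoPrint fields are simply skipped.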
@ -9,16 +9,19 @@
extern bool spoolman_connected; extern bool spoolman_connected;
extern String spoolmanUrl; extern String spoolmanUrl;
extern bool octoEnabled;
extern String octoUrl;
extern String octoToken;
bool checkSpoolmanInstance(const String& url); bool checkSpoolmanInstance(const String& url);
bool saveSpoolmanUrl(const String& url); bool saveSpoolmanUrl(const String& url, bool octoOn, const String& octoWh, const String& octoTk);
String loadSpoolmanUrl(); // Neue Funktion zum Laden der URL String loadSpoolmanUrl(); // Neue Funktion zum Laden der URL
bool checkSpoolmanExtraFields(); // Neue Funktion zum Überprüfen der Extrafelder bool checkSpoolmanExtraFields(); // Neue Funktion zum Überprüfen der Extrafelder
JsonDocument fetchSpoolsForWebsite(); // API-Funktion für die Webseite JsonDocument fetchSingleSpoolInfo(int spoolId); // API-Funktion für die Webseite
JsonDocument fetchAllSpoolsInfo();
void sendAmsData(AsyncWebSocketClient *client); // Neue Funktion zum Senden von AMS-Daten
bool updateSpoolTagId(String uidString, const char* payload); // Neue Funktion zum Aktualisieren eines Spools bool updateSpoolTagId(String uidString, const char* payload); // Neue Funktion zum Aktualisieren eines Spools
uint8_t updateSpoolWeight(String spoolId, uint16_t weight); // Neue Funktion zum Aktualisieren des Gewichts uint8_t updateSpoolWeight(String spoolId, uint16_t weight); // Neue Funktion zum Aktualisieren des Gewichts
bool initSpoolman(); // Neue Funktion zum Initialisieren von Spoolman bool initSpoolman(); // Neue Funktion zum Initialisieren von Spoolman
bool updateSpoolBambuData(String payload); // Neue Funktion zum Aktualisieren der Bambu-Daten
bool updateSpoolOcto(int spoolId); // Neue Funktion zum Aktualisieren der Octo-Daten
#endif #endif
@ -23,14 +23,21 @@ const char* bambu_username = "bblp";
const char* bambu_ip = nullptr; const char* bambu_ip = nullptr;
const char* bambu_accesscode = nullptr; const char* bambu_accesscode = nullptr;
const char* bambu_serialnr = nullptr; const char* bambu_serialnr = nullptr;
String g_bambu_ip = "";
String g_bambu_accesscode = "";
String g_bambu_serialnr = "";
bool bambu_connected = false; bool bambu_connected = false;
bool autoSendToBambu = false;
int autoSetToBambuSpoolId = 0;
// Globale Variablen für AMS-Daten // Globale Variablen für AMS-Daten
int ams_count = 0; int ams_count = 0;
String amsJsonData; // Speichert das fertige JSON für WebSocket-Clients String amsJsonData; // Speichert das fertige JSON für WebSocket-Clients
AMSData ams_data[MAX_AMS]; // Definition des Arrays AMSData ams_data[MAX_AMS]; // Definition des Arrays;
bool saveBambuCredentials(const String& ip, const String& serialnr, const String& accesscode) { bool saveBambuCredentials(const String& ip, const String& serialnr, const String& accesscode, bool autoSend, const String& autoSendTime) {
if (BambuMqttTask) { if (BambuMqttTask) {
vTaskDelete(BambuMqttTask); vTaskDelete(BambuMqttTask);
} }
@ -39,6 +46,8 @@ bool saveBambuCredentials(const String& ip, const String& serialnr, const String
doc["bambu_ip"] = ip; doc["bambu_ip"] = ip;
doc["bambu_accesscode"] = accesscode; doc["bambu_accesscode"] = accesscode;
doc["bambu_serialnr"] = serialnr; doc["bambu_serialnr"] = serialnr;
doc["autoSendToBambu"] = autoSend;
doc["autoSendTime"] = (autoSendTime != "") ? autoSendTime.toInt() : autoSetBambuAmsCounter;
if (!saveJsonValue("/bambu_credentials.json", doc)) { if (!saveJsonValue("/bambu_credentials.json", doc)) {
Serial.println("Fehler beim Speichern der Bambu-Credentials."); Serial.println("Fehler beim Speichern der Bambu-Credentials.");
@ -49,6 +58,8 @@ bool saveBambuCredentials(const String& ip, const String& serialnr, const String
bambu_ip = ip.c_str(); bambu_ip = ip.c_str();
bambu_accesscode = accesscode.c_str(); bambu_accesscode = accesscode.c_str();
bambu_serialnr = serialnr.c_str(); bambu_serialnr = serialnr.c_str();
autoSendToBambu = autoSend;
autoSetBambuAmsCounter = autoSendTime.toInt();
vTaskDelay(100 / portTICK_PERIOD_MS); vTaskDelay(100 / portTICK_PERIOD_MS);
if (!setupMqtt()) return false; if (!setupMqtt()) return false;
@ -58,20 +69,27 @@ bool saveBambuCredentials(const String& ip, const String& serialnr, const String
bool loadBambuCredentials() { bool loadBambuCredentials() {
JsonDocument doc; JsonDocument doc;
if (loadJsonValue("/bambu_credentials.json", doc) && doc.containsKey("bambu_ip")) { if (loadJsonValue("/bambu_credentials.json", doc) && doc["bambu_ip"].is<String>()) {
// Temporäre Strings für die Werte // Temporäre Strings für die Werte
String ip = doc["bambu_ip"].as<String>(); String ip = doc["bambu_ip"].as<String>();
String code = doc["bambu_accesscode"].as<String>(); String code = doc["bambu_accesscode"].as<String>();
String serial = doc["bambu_serialnr"].as<String>(); String serial = doc["bambu_serialnr"].as<String>();
g_bambu_ip = ip;
g_bambu_accesscode = code;
g_bambu_serialnr = serial;
if (doc["autoSendToBambu"].is<bool>()) autoSendToBambu = doc["autoSendToBambu"].as<bool>();
if (doc["autoSendTime"].is<int>()) autoSetBambuAmsCounter = doc["autoSendTime"].as<int>();
ip.trim(); ip.trim();
code.trim(); code.trim();
serial.trim(); serial.trim();
// Dynamische Speicherallokation für die globalen Pointer // Dynamische Speicherallokation für die globalen Pointer
bambu_ip = strdup(ip.c_str()); bambu_ip = g_bambu_ip.c_str();
bambu_accesscode = strdup(code.c_str()); bambu_accesscode = g_bambu_accesscode.c_str();
bambu_serialnr = strdup(serial.c_str()); bambu_serialnr = g_bambu_serialnr.c_str();
report_topic = "device/" + String(bambu_serialnr) + "/report"; report_topic = "device/" + String(bambu_serialnr) + "/report";
//request_topic = "device/" + String(bambu_serialnr) + "/request"; //request_topic = "device/" + String(bambu_serialnr) + "/request";
@ -81,19 +99,49 @@ bool loadBambuCredentials() {
return false; return false;
} }
-String findFilamentIdx(String brand, String type) {
+struct FilamentResult {
+    String key;
+    String type;
+};
+
+FilamentResult findFilamentIdx(String brand, String type) {
     // JSON-Dokument für die Filament-Daten erstellen
     JsonDocument doc;
+    // Laden der own_filaments.json
+    String ownFilament = "";
+    if (!loadJsonValue("/own_filaments.json", doc))
+    {
+        Serial.println("Fehler beim Laden der eigenen Filament-Daten");
+    }
+    else
+    {
+        // Durchsuche direkt nach dem Type als Schlüssel
+        if (doc[type].is<String>()) {
+            ownFilament = doc[type].as<String>();
+        }
+        doc.clear();
+    }
+    doc.clear();
     // Laden der bambu_filaments.json
-    if (!loadJsonValue("/bambu_filaments.json", doc)) {
+    if (!loadJsonValue("/bambu_filaments.json", doc))
+    {
         Serial.println("Fehler beim Laden der Filament-Daten");
-        return "GFL99"; // Fallback auf Generic PLA
+        return {"GFL99", "PLA"}; // Fallback auf Generic PLA
     }
+    // Wenn eigener Typ
+    if (ownFilament != "")
+    {
+        if (doc[ownFilament].is<String>())
+        {
+            return {ownFilament, doc[ownFilament].as<String>()};
+        }
+    }
// 1. Erst versuchen wir die exakte Brand + Type Kombination zu finden
String searchKey; String searchKey;
// 1. Suche nach Brand + Type Kombination
if (brand == "Bambu" || brand == "Bambulab") { if (brand == "Bambu" || brand == "Bambulab") {
searchKey = "Bambu " + type; searchKey = "Bambu " + type;
} else if (brand == "PolyLite") { } else if (brand == "PolyLite") {
@ -109,23 +157,46 @@ String findFilamentIdx(String brand, String type) {
// Durchsuche alle Einträge nach der Brand + Type Kombination // Durchsuche alle Einträge nach der Brand + Type Kombination
for (JsonPair kv : doc.as<JsonObject>()) { for (JsonPair kv : doc.as<JsonObject>()) {
if (kv.value().as<String>() == searchKey) { if (kv.value().as<String>() == searchKey) {
return kv.key().c_str(); return {kv.key().c_str(), kv.value().as<String>()};
} }
} }
// 2. Wenn nicht gefunden, suche nach Generic + Type // 2. Wenn nicht gefunden, zerlege den type String in Wörter und suche nach jedem Wort
searchKey = "Generic " + type; // Sammle alle vorhandenen Filamenttypen aus der JSON
std::vector<String> knownTypes;
for (JsonPair kv : doc.as<JsonObject>()) { for (JsonPair kv : doc.as<JsonObject>()) {
if (kv.value().as<String>() == searchKey) { String value = kv.value().as<String>();
return kv.key().c_str(); // Extrahiere den Typ ohne Markennamen
if (value.indexOf(" ") != -1) {
value = value.substring(value.indexOf(" ") + 1);
}
if (!value.isEmpty()) {
knownTypes.push_back(value);
}
}
// Zerlege den Input-Type in Wörter
String typeStr = type;
typeStr.trim();
// Durchsuche für jedes bekannte Filament, ob es im Input vorkommt
for (const String& knownType : knownTypes) {
if (typeStr.indexOf(knownType) != -1) {
// Suche nach diesem Typ in der Original-JSON
for (JsonPair kv : doc.as<JsonObject>()) {
String value = kv.value().as<String>();
if (value.indexOf(knownType) != -1) {
return {kv.key().c_str(), knownType};
}
}
} }
} }
// 3. Wenn immer noch nichts gefunden, gebe GFL99 zurück (Generic PLA) // 3. Wenn immer noch nichts gefunden, gebe GFL99 zurück (Generic PLA)
return "GFL99"; return {"GFL99", "PLA"};
} }
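Note: the reworked fallback no longer looks for a literal "Generic <type>" entry; it strips the brand prefix from every value in bambu_filaments.json and then picks an entry whose bare type occurs inside the requested type string. A condensed, self-contained sketch of that idea (table keys and names are invented for illustration):

    // Simplified single-pass version of the substring matching used above
    String matchByContainedType(const String& requestedType) {
        const char* keys[]  = { "GFA00",           "GFG99",        "GFL99"       };
        const char* names[] = { "Bambu PLA Basic", "Generic PETG", "Generic PLA" };
        for (int i = 0; i < 3; i++) {
            String bare = String(names[i]);
            int sp = bare.indexOf(' ');
            if (sp != -1) bare = bare.substring(sp + 1);   // "PLA Basic", "PETG", "PLA"
            if (requestedType.indexOf(bare) != -1) return keys[i];
        }
        return "GFL99";                                    // same Generic-PLA fallback as above
    }

    // matchByContainedType("PETG HF") -> "GFG99"; matchByContainedType("ASA") -> "GFL99"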
bool sendMqttMessage(String payload) { bool sendMqttMessage(const String& payload) {
Serial.println("Sending MQTT message"); Serial.println("Sending MQTT message");
Serial.println(payload); Serial.println(payload);
if (client.publish(report_topic.c_str(), payload.c_str())) if (client.publish(report_topic.c_str(), payload.c_str()))
@ -156,15 +227,22 @@ bool setBambuSpool(String payload) {
int minTemp = doc["nozzle_temp_min"]; int minTemp = doc["nozzle_temp_min"];
int maxTemp = doc["nozzle_temp_max"]; int maxTemp = doc["nozzle_temp_max"];
String type = doc["type"].as<String>(); String type = doc["type"].as<String>();
(type == "PLA+") ? type = "PLA" : type;
String brand = doc["brand"].as<String>(); String brand = doc["brand"].as<String>();
String tray_info_idx = (doc["tray_info_idx"].as<String>() != "-1") ? doc["tray_info_idx"].as<String>() : ""; String tray_info_idx = (doc["tray_info_idx"].as<String>() != "-1") ? doc["tray_info_idx"].as<String>() : "";
if (tray_info_idx == "") tray_info_idx = (brand != "" && type != "") ? findFilamentIdx(brand, type) : ""; if (tray_info_idx == "") {
if (brand != "" && type != "") {
FilamentResult result = findFilamentIdx(brand, type);
tray_info_idx = result.key;
type = result.type; // Aktualisiere den type mit dem gefundenen Basistyp
}
}
String setting_id = doc["bambu_setting_id"].as<String>(); String setting_id = doc["bambu_setting_id"].as<String>();
String cali_idx = doc["cali_idx"].as<String>(); String cali_idx = doc["cali_idx"].as<String>();
doc.clear(); doc.clear();
doc["print"]["sequence_id"] = 0; doc["print"]["sequence_id"] = "0";
doc["print"]["command"] = "ams_filament_setting"; doc["print"]["command"] = "ams_filament_setting";
doc["print"]["ams_id"] = amsId < 200 ? amsId : 255; doc["print"]["ams_id"] = amsId < 200 ? amsId : 255;
doc["print"]["tray_id"] = trayId < 200 ? trayId : 254; doc["print"]["tray_id"] = trayId < 200 ? trayId : 254;
@ -172,10 +250,10 @@ bool setBambuSpool(String payload) {
doc["print"]["nozzle_temp_min"] = minTemp; doc["print"]["nozzle_temp_min"] = minTemp;
doc["print"]["nozzle_temp_max"] = maxTemp; doc["print"]["nozzle_temp_max"] = maxTemp;
doc["print"]["tray_type"] = type; doc["print"]["tray_type"] = type;
doc["print"]["cali_idx"] = (cali_idx != "") ? cali_idx : ""; //doc["print"]["cali_idx"] = (cali_idx != "") ? cali_idx : "";
doc["print"]["tray_info_idx"] = tray_info_idx; doc["print"]["tray_info_idx"] = tray_info_idx;
doc["print"]["setting_id"] = setting_id; doc["print"]["setting_id"] = setting_id;
// Serialize the JSON // Serialize the JSON
String output; String output;
serializeJson(doc, output); serializeJson(doc, output);
@ -194,13 +272,13 @@ bool setBambuSpool(String payload) {
if (cali_idx != "") { if (cali_idx != "") {
yield(); yield();
doc["print"]["sequence_id"] = 0; doc["print"]["sequence_id"] = "0";
doc["print"]["command"] = "extrusion_cali_sel"; doc["print"]["command"] = "extrusion_cali_sel";
doc["print"]["filament_id"] = tray_info_idx; doc["print"]["filament_id"] = tray_info_idx;
doc["print"]["nozzle_diameter"] = "0.4"; doc["print"]["nozzle_diameter"] = "0.4";
doc["print"]["cali_idx"] = cali_idx.toInt(); doc["print"]["cali_idx"] = cali_idx.toInt();
doc["print"]["tray_id"] = trayId < 200 ? trayId : 254; doc["print"]["tray_id"] = trayId < 200 ? trayId : 254;
doc["print"]["ams_id"] = amsId < 200 ? amsId : 255; //doc["print"]["ams_id"] = amsId < 200 ? amsId : 255;
// Serialize the JSON // Serialize the JSON
String output; String output;
@ -218,44 +296,120 @@ bool setBambuSpool(String payload) {
doc.clear(); doc.clear();
yield(); yield();
} }
/*
if (setting_id != "") {
yield();
doc["print"]["sequence_id"] = 0;
doc["print"]["command"] = "ams_filament_setting";
doc["print"]["nozzle_temp_min"] = minTemp;
doc["print"]["nozzle_temp_max"] = maxTemp;
doc["print"]["setting_id"] = setting_id;
doc["print"]["tray_color"] = color.length() == 8 ? color : color+"FF";
doc["print"]["ams_id"] = amsId < 200 ? amsId : 255;
doc["print"]["tray_id"] = trayId < 200 ? trayId : 254;
doc["print"]["tray_info_idx"] = tray_info_idx;
doc["print"]["tray_type"] = type;
// Serialize the JSON return true;
String output; }
serializeJson(doc, output);
if (sendMqttMessage(output)) { void autoSetSpool(int spoolId, uint8_t trayId) {
Serial.println("Filament Setting successfully set"); // wenn neue spule erkannt und autoSetToBambu > 0
JsonDocument spoolInfo = fetchSingleSpoolInfo(spoolId);
if (!spoolInfo.isNull())
{
// AMS und TRAY id ergänzen
spoolInfo["amsId"] = 0;
spoolInfo["trayId"] = trayId;
Serial.println("Auto set spool");
Serial.println(spoolInfo.as<String>());
setBambuSpool(spoolInfo.as<String>());
oledShowMessage("Spool set");
}
// id wieder zurücksetzen damit abgeschlossen
autoSetToBambuSpoolId = 0;
}
void updateAmsWsData(JsonDocument& doc, JsonArray& amsArray, int& ams_count, JsonObject& vtTray) {
// Fortfahren mit der bestehenden Verarbeitung, da Änderungen gefunden wurden
ams_count = amsArray.size();
for (int i = 0; i < ams_count && i < 16; i++) {
JsonObject amsObj = amsArray[i];
JsonArray trayArray = amsObj["tray"].as<JsonArray>();
ams_data[i].ams_id = i; // Setze die AMS-ID
for (int j = 0; j < trayArray.size() && j < 4; j++) { // Annahme: Maximal 4 Trays pro AMS
JsonObject trayObj = trayArray[j];
ams_data[i].trays[j].id = trayObj["id"].as<uint8_t>();
ams_data[i].trays[j].tray_info_idx = trayObj["tray_info_idx"].as<String>();
ams_data[i].trays[j].tray_type = trayObj["tray_type"].as<String>();
ams_data[i].trays[j].tray_sub_brands = trayObj["tray_sub_brands"].as<String>();
ams_data[i].trays[j].tray_color = trayObj["tray_color"].as<String>();
ams_data[i].trays[j].nozzle_temp_min = trayObj["nozzle_temp_min"].as<int>();
ams_data[i].trays[j].nozzle_temp_max = trayObj["nozzle_temp_max"].as<int>();
if (trayObj["tray_type"].as<String>() == "") ams_data[i].trays[j].setting_id = "";
ams_data[i].trays[j].cali_idx = trayObj["cali_idx"].as<String>();
}
}
// Setze ams_count auf die Anzahl der normalen AMS
ams_count = amsArray.size();
// Wenn externe Spule vorhanden, füge sie hinzu
if (doc["print"]["vt_tray"].is<JsonObject>()) {
//JsonObject vtTray = doc["print"]["vt_tray"];
int extIdx = ams_count; // Index für externe Spule
ams_data[extIdx].ams_id = 255; // Spezielle ID für externe Spule
ams_data[extIdx].trays[0].id = 254; // Spezielle ID für externes Tray
ams_data[extIdx].trays[0].tray_info_idx = vtTray["tray_info_idx"].as<String>();
ams_data[extIdx].trays[0].tray_type = vtTray["tray_type"].as<String>();
ams_data[extIdx].trays[0].tray_sub_brands = vtTray["tray_sub_brands"].as<String>();
ams_data[extIdx].trays[0].tray_color = vtTray["tray_color"].as<String>();
ams_data[extIdx].trays[0].nozzle_temp_min = vtTray["nozzle_temp_min"].as<int>();
ams_data[extIdx].trays[0].nozzle_temp_max = vtTray["nozzle_temp_max"].as<int>();
if (doc["print"]["vt_tray"]["tray_type"].as<String>() != "")
{
//ams_data[extIdx].trays[0].setting_id = vtTray["setting_id"].as<String>();
ams_data[extIdx].trays[0].cali_idx = vtTray["cali_idx"].as<String>();
} }
else else
{ {
Serial.println("Failed to set Filament setting"); ams_data[extIdx].trays[0].setting_id = "";
return false; ams_data[extIdx].trays[0].cali_idx = "";
} }
ams_count++; // Erhöhe ams_count für die externe Spule
doc.clear();
yield();
} }
*/
return true; // Erstelle JSON für WebSocket-Clients
JsonDocument wsDoc;
JsonArray wsArray = wsDoc.to<JsonArray>();
for (int i = 0; i < ams_count; i++) {
JsonObject amsObj = wsArray.add<JsonObject>();
amsObj["ams_id"] = ams_data[i].ams_id;
JsonArray trays = amsObj["tray"].to<JsonArray>();
int maxTrays = (ams_data[i].ams_id == 255) ? 1 : 4;
for (int j = 0; j < maxTrays; j++) {
JsonObject trayObj = trays.add<JsonObject>();
trayObj["id"] = ams_data[i].trays[j].id;
trayObj["tray_info_idx"] = ams_data[i].trays[j].tray_info_idx;
trayObj["tray_type"] = ams_data[i].trays[j].tray_type;
trayObj["tray_sub_brands"] = ams_data[i].trays[j].tray_sub_brands;
trayObj["tray_color"] = ams_data[i].trays[j].tray_color;
trayObj["nozzle_temp_min"] = ams_data[i].trays[j].nozzle_temp_min;
trayObj["nozzle_temp_max"] = ams_data[i].trays[j].nozzle_temp_max;
trayObj["setting_id"] = ams_data[i].trays[j].setting_id;
trayObj["cali_idx"] = ams_data[i].trays[j].cali_idx;
}
}
serializeJson(wsArray, amsJsonData);
wsDoc.clear();
Serial.println("AMS data updated");
sendAmsData(nullptr);
} }
// init // init
void mqtt_callback(char* topic, byte* payload, unsigned int length) { void mqtt_callback(char* topic, byte* payload, unsigned int length) {
String message; String message;
for (int i = 0; i < length; i++) { for (int i = 0; i < length; i++) {
message += (char)payload[i]; message += (char)payload[i];
} }
@ -263,21 +417,25 @@ void mqtt_callback(char* topic, byte* payload, unsigned int length) {
// JSON-Dokument parsen // JSON-Dokument parsen
JsonDocument doc; JsonDocument doc;
DeserializationError error = deserializeJson(doc, message); DeserializationError error = deserializeJson(doc, message);
if (error) { message = "";
if (error)
{
Serial.print("Fehler beim Parsen des JSON: "); Serial.print("Fehler beim Parsen des JSON: ");
Serial.println(error.c_str()); Serial.println(error.c_str());
return; return;
} }
// Prüfen, ob "print->upgrade_state" und "print.ams.ams" existieren // Prüfen, ob "print->upgrade_state" und "print.ams.ams" existieren
if (doc["print"].containsKey("upgrade_state")) { if (doc["print"]["upgrade_state"].is<JsonObject>() || (doc["print"]["command"].is<String>() && doc["print"]["command"] == "push_status"))
{
// Prüfen ob AMS-Daten vorhanden sind // Prüfen ob AMS-Daten vorhanden sind
if (!doc["print"].containsKey("ams") || !doc["print"]["ams"].containsKey("ams")) { if (!doc["print"]["ams"].is<JsonObject>() || !doc["print"]["ams"]["ams"].is<JsonArray>())
{
return; return;
} }
JsonArray amsArray = doc["print"]["ams"]["ams"].as<JsonArray>(); JsonArray amsArray = doc["print"]["ams"]["ams"].as<JsonArray>();
// Prüfe ob sich die AMS-Daten geändert haben // Prüfe ob sich die AMS-Daten geändert haben
bool hasChanges = false; bool hasChanges = false;
@ -304,154 +462,81 @@ void mqtt_callback(char* topic, byte* payload, unsigned int length) {
// Vergleiche die Trays // Vergleiche die Trays
for (int j = 0; j < trayArray.size() && j < 4 && !hasChanges; j++) { for (int j = 0; j < trayArray.size() && j < 4 && !hasChanges; j++) {
JsonObject trayObj = trayArray[j]; JsonObject trayObj = trayArray[j];
if (trayObj["tray_type"].as<String>() == "") ams_data[storedIndex].trays[j].setting_id = "";
if (trayObj["setting_id"].isNull()) trayObj["setting_id"] = "";
if (trayObj["tray_info_idx"].as<String>() != ams_data[storedIndex].trays[j].tray_info_idx || if (trayObj["tray_info_idx"].as<String>() != ams_data[storedIndex].trays[j].tray_info_idx ||
trayObj["tray_type"].as<String>() != ams_data[storedIndex].trays[j].tray_type || trayObj["tray_type"].as<String>() != ams_data[storedIndex].trays[j].tray_type ||
trayObj["tray_color"].as<String>() != ams_data[storedIndex].trays[j].tray_color || trayObj["tray_color"].as<String>() != ams_data[storedIndex].trays[j].tray_color ||
(trayObj["setting_id"].as<String>() != "" && trayObj["setting_id"].as<String>() != ams_data[storedIndex].trays[j].setting_id) ||
trayObj["cali_idx"].as<String>() != ams_data[storedIndex].trays[j].cali_idx) { trayObj["cali_idx"].as<String>() != ams_data[storedIndex].trays[j].cali_idx) {
hasChanges = true; hasChanges = true;
if (autoSendToBambu && autoSetToBambuSpoolId > 0 && hasChanges)
{
autoSetSpool(autoSetToBambuSpoolId, ams_data[storedIndex].trays[j].id);
}
break; break;
} }
} }
} }
// Prüfe die externe Spule // Prüfe die externe Spule
if (!hasChanges && doc["print"].containsKey("vt_tray")) { JsonObject vtTray = doc["print"]["vt_tray"];
JsonObject vtTray = doc["print"]["vt_tray"]; if (doc["print"]["vt_tray"].is<JsonObject>()) {
bool foundExternal = false;
for (int i = 0; i < ams_count; i++) { for (int i = 0; i < ams_count; i++) {
if (ams_data[i].ams_id == 255) { if (ams_data[i].ams_id == 255) {
foundExternal = true; if (vtTray["tray_type"].as<String>() == "") ams_data[i].trays[0].setting_id = "";
if (vtTray["setting_id"].isNull()) vtTray["setting_id"] = "";
if (vtTray["tray_info_idx"].as<String>() != ams_data[i].trays[0].tray_info_idx || if (vtTray["tray_info_idx"].as<String>() != ams_data[i].trays[0].tray_info_idx ||
vtTray["tray_type"].as<String>() != ams_data[i].trays[0].tray_type || vtTray["tray_type"].as<String>() != ams_data[i].trays[0].tray_type ||
vtTray["tray_color"].as<String>() != ams_data[i].trays[0].tray_color || vtTray["tray_color"].as<String>() != ams_data[i].trays[0].tray_color ||
vtTray["cali_idx"].as<String>() != ams_data[i].trays[0].cali_idx) { (vtTray["setting_id"].as<String>() != "" && vtTray["setting_id"].as<String>() != ams_data[i].trays[0].setting_id) ||
(vtTray["tray_type"].as<String>() != "" && vtTray["cali_idx"].as<String>() != ams_data[i].trays[0].cali_idx)) {
hasChanges = true; hasChanges = true;
if (autoSendToBambu && autoSetToBambuSpoolId > 0 && hasChanges)
{
autoSetSpool(autoSetToBambuSpoolId, 254);
}
} }
break; break;
} }
} }
if (!foundExternal) hasChanges = true;
} }
if (!hasChanges) return; if (!hasChanges) return;
// Fortfahren mit der bestehenden Verarbeitung, da Änderungen gefunden wurden updateAmsWsData(doc, amsArray, ams_count, vtTray);
ams_count = amsArray.size();
for (int i = 0; i < ams_count && i < 16; i++) {
JsonObject amsObj = amsArray[i];
JsonArray trayArray = amsObj["tray"].as<JsonArray>();
ams_data[i].ams_id = i; // Setze die AMS-ID
for (int j = 0; j < trayArray.size() && j < 4; j++) { // Annahme: Maximal 4 Trays pro AMS
JsonObject trayObj = trayArray[j];
ams_data[i].trays[j].id = trayObj["id"].as<uint8_t>();
ams_data[i].trays[j].tray_info_idx = trayObj["tray_info_idx"].as<String>();
ams_data[i].trays[j].tray_type = trayObj["tray_type"].as<String>();
ams_data[i].trays[j].tray_sub_brands = trayObj["tray_sub_brands"].as<String>();
ams_data[i].trays[j].tray_color = trayObj["tray_color"].as<String>();
ams_data[i].trays[j].nozzle_temp_min = trayObj["nozzle_temp_min"].as<int>();
ams_data[i].trays[j].nozzle_temp_max = trayObj["nozzle_temp_max"].as<int>();
ams_data[i].trays[j].setting_id = trayObj["setting_id"].as<String>();
ams_data[i].trays[j].cali_idx = trayObj["cali_idx"].as<String>();
}
}
// Setze ams_count auf die Anzahl der normalen AMS
ams_count = amsArray.size();
// Wenn externe Spule vorhanden, füge sie hinzu
if (doc["print"].containsKey("vt_tray")) {
JsonObject vtTray = doc["print"]["vt_tray"];
int extIdx = ams_count; // Index für externe Spule
ams_data[extIdx].ams_id = 255; // Spezielle ID für externe Spule
ams_data[extIdx].trays[0].id = 254; // Spezielle ID für externes Tray
ams_data[extIdx].trays[0].tray_info_idx = vtTray["tray_info_idx"].as<String>();
ams_data[extIdx].trays[0].tray_type = vtTray["tray_type"].as<String>();
ams_data[extIdx].trays[0].tray_sub_brands = vtTray["tray_sub_brands"].as<String>();
ams_data[extIdx].trays[0].tray_color = vtTray["tray_color"].as<String>();
ams_data[extIdx].trays[0].nozzle_temp_min = vtTray["nozzle_temp_min"].as<int>();
ams_data[extIdx].trays[0].nozzle_temp_max = vtTray["nozzle_temp_max"].as<int>();
ams_data[extIdx].trays[0].setting_id = vtTray["setting_id"].as<String>();
ams_data[extIdx].trays[0].cali_idx = vtTray["cali_idx"].as<String>();
ams_count++; // Erhöhe ams_count für die externe Spule
}
// Sende die aktualisierten AMS-Daten
//sendAmsData(nullptr);
// Erstelle JSON für WebSocket-Clients
JsonDocument wsDoc;
JsonArray wsArray = wsDoc.to<JsonArray>();
for (int i = 0; i < ams_count; i++) {
JsonObject amsObj = wsArray.createNestedObject();
amsObj["ams_id"] = ams_data[i].ams_id;
JsonArray trays = amsObj.createNestedArray("tray");
int maxTrays = (ams_data[i].ams_id == 255) ? 1 : 4;
for (int j = 0; j < maxTrays; j++) {
JsonObject trayObj = trays.createNestedObject();
trayObj["id"] = ams_data[i].trays[j].id;
trayObj["tray_info_idx"] = ams_data[i].trays[j].tray_info_idx;
trayObj["tray_type"] = ams_data[i].trays[j].tray_type;
trayObj["tray_sub_brands"] = ams_data[i].trays[j].tray_sub_brands;
trayObj["tray_color"] = ams_data[i].trays[j].tray_color;
trayObj["nozzle_temp_min"] = ams_data[i].trays[j].nozzle_temp_min;
trayObj["nozzle_temp_max"] = ams_data[i].trays[j].nozzle_temp_max;
trayObj["setting_id"] = ams_data[i].trays[j].setting_id;
trayObj["cali_idx"] = ams_data[i].trays[j].cali_idx;
}
}
serializeJson(wsArray, amsJsonData);
sendAmsData(nullptr);
} }
// Neue Bedingung für ams_filament_setting // Neue Bedingung für ams_filament_setting
else if (doc["print"]["command"] == "ams_filament_setting") { if (doc["print"]["command"] == "ams_filament_setting") {
int amsId = doc["print"]["ams_id"].as<int>(); int amsId = doc["print"]["ams_id"].as<int>();
int trayId = doc["print"]["tray_id"].as<int>(); int trayId = doc["print"]["tray_id"].as<int>();
String settingId = doc["print"]["setting_id"].as<String>(); String settingId = (doc["print"]["setting_id"].is<String>()) ? doc["print"]["setting_id"].as<String>() : "";
// Finde das entsprechende AMS und Tray // Finde das entsprechende AMS und Tray
for (int i = 0; i < ams_count; i++) { for (int i = 0; i < ams_count; i++) {
if (ams_data[i].ams_id == amsId) { if (ams_data[i].ams_id == amsId) {
// Update setting_id im entsprechenden Tray if (trayId == 254)
ams_data[i].trays[trayId].setting_id = settingId; {
// Suche AMS mit ID 255 (externe Spule)
// Erstelle neues JSON für WebSocket-Clients for (int j = 0; j < ams_count; j++) {
JsonDocument wsDoc; if (ams_data[j].ams_id == 255) {
JsonArray wsArray = wsDoc.to<JsonArray>(); ams_data[j].trays[0].setting_id = settingId;
break;
for (int j = 0; j < ams_count; j++) { }
JsonObject amsObj = wsArray.createNestedObject();
amsObj["ams_id"] = ams_data[j].ams_id;
JsonArray trays = amsObj.createNestedArray("tray");
int maxTrays = (ams_data[j].ams_id == 255) ? 1 : 4;
for (int k = 0; k < maxTrays; k++) {
JsonObject trayObj = trays.createNestedObject();
trayObj["id"] = ams_data[j].trays[k].id;
trayObj["tray_info_idx"] = ams_data[j].trays[k].tray_info_idx;
trayObj["tray_type"] = ams_data[j].trays[k].tray_type;
trayObj["tray_sub_brands"] = ams_data[j].trays[k].tray_sub_brands;
trayObj["tray_color"] = ams_data[j].trays[k].tray_color;
trayObj["nozzle_temp_min"] = ams_data[j].trays[k].nozzle_temp_min;
trayObj["nozzle_temp_max"] = ams_data[j].trays[k].nozzle_temp_max;
trayObj["setting_id"] = ams_data[j].trays[k].setting_id;
trayObj["cali_idx"] = ams_data[j].trays[k].cali_idx;
} }
} }
else
// Aktualisiere das globale amsJsonData {
amsJsonData = ""; ams_data[i].trays[trayId].setting_id = settingId;
serializeJson(wsArray, amsJsonData); }
// Sende an WebSocket Clients // Sende an WebSocket Clients
Serial.println("Filament setting updated");
sendAmsData(nullptr); sendAmsData(nullptr);
break; break;
} }
@ -461,15 +546,16 @@ void mqtt_callback(char* topic, byte* payload, unsigned int length) {
void reconnect() { void reconnect() {
// Loop until we're reconnected // Loop until we're reconnected
uint8_t retries = 0;
while (!client.connected()) { while (!client.connected()) {
Serial.print("Attempting MQTT connection..."); Serial.println("Attempting MQTT re/connection...");
bambu_connected = false; bambu_connected = false;
oledShowTopRow(); oledShowTopRow();
// Attempt to connect // Attempt to connect
if (client.connect(bambu_serialnr, bambu_username, bambu_accesscode)) { if (client.connect(bambu_serialnr, bambu_username, bambu_accesscode)) {
Serial.println("... re-connected"); Serial.println("MQTT re/connected");
// ... and resubscribe
client.subscribe(report_topic.c_str()); client.subscribe(report_topic.c_str());
bambu_connected = true; bambu_connected = true;
oledShowTopRow(); oledShowTopRow();
@ -479,14 +565,23 @@ void reconnect() {
Serial.println(" try again in 5 seconds"); Serial.println(" try again in 5 seconds");
bambu_connected = false; bambu_connected = false;
oledShowTopRow(); oledShowTopRow();
// Wait 5 seconds before retrying
yield(); yield();
vTaskDelay(5000 / portTICK_PERIOD_MS); vTaskDelay(5000 / portTICK_PERIOD_MS);
if (retries > 5) {
Serial.println("Disable Bambu MQTT Task after 5 retries");
//vTaskSuspend(BambuMqttTask);
vTaskDelete(BambuMqttTask);
break;
}
retries++;
} }
} }
} }
void mqtt_loop(void * parameter) { void mqtt_loop(void * parameter) {
Serial.println("Bambu MQTT Task gestartet");
for(;;) { for(;;) {
if (pauseBambuMqttTask) { if (pauseBambuMqttTask) {
vTaskDelay(10000); vTaskDelay(10000);
@ -500,6 +595,7 @@ void mqtt_loop(void * parameter) {
} }
client.loop(); client.loop();
yield(); yield();
esp_task_wdt_reset();
vTaskDelay(100); vTaskDelay(100);
} }
} }
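Note: esp_task_wdt_reset() only feeds the watchdog for tasks that are actually subscribed to the task WDT. Whether the MQTT task gets registered elsewhere is not visible in this diff; if it is not, a hypothetical one-line subscription at the top of the task would look like this:

    void mqtt_loop(void * parameter) {
        Serial.println("Bambu MQTT Task gestartet");
        esp_task_wdt_add(NULL);   // NULL = subscribe the calling task to the task watchdog
        for(;;) {
            // ... existing reconnect/yield/loop body ...
        }
    }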
@ -507,7 +603,6 @@ void mqtt_loop(void * parameter) {
bool setupMqtt() { bool setupMqtt() {
// Wenn Bambu Daten vorhanden // Wenn Bambu Daten vorhanden
bool success = loadBambuCredentials(); bool success = loadBambuCredentials();
vTaskDelay(100 / portTICK_PERIOD_MS);
if (!success) { if (!success) {
Serial.println("Failed to load Bambu credentials"); Serial.println("Failed to load Bambu credentials");
@ -540,7 +635,7 @@ bool setupMqtt() {
xTaskCreatePinnedToCore( xTaskCreatePinnedToCore(
mqtt_loop, /* Function to implement the task */ mqtt_loop, /* Function to implement the task */
"BambuMqtt", /* Name of the task */ "BambuMqtt", /* Name of the task */
10000, /* Stack size in words */ 8192, /* Stack size in words */
NULL, /* Task input parameter */ NULL, /* Task input parameter */
mqttTaskPrio, /* Priority of the task */ mqttTaskPrio, /* Priority of the task */
&BambuMqttTask, /* Task handle. */ &BambuMqttTask, /* Task handle. */
@ -571,6 +666,7 @@ bool setupMqtt() {
void bambu_restart() { void bambu_restart() {
if (BambuMqttTask) { if (BambuMqttTask) {
vTaskDelete(BambuMqttTask); vTaskDelete(BambuMqttTask);
delay(10);
} }
setupMqtt(); setupMqtt();
} }
@ -28,12 +28,15 @@ extern bool bambu_connected;
extern int ams_count; extern int ams_count;
extern AMSData ams_data[MAX_AMS]; extern AMSData ams_data[MAX_AMS];
extern bool autoSendToBambu;
extern int autoSetToBambuSpoolId;
bool loadBambuCredentials(); bool loadBambuCredentials();
bool saveBambuCredentials(const String& bambu_ip, const String& bambu_serialnr, const String& bambu_accesscode); bool saveBambuCredentials(const String& bambu_ip, const String& bambu_serialnr, const String& bambu_accesscode, const bool autoSend, const String& autoSendTime);
bool setupMqtt(); bool setupMqtt();
void mqtt_loop(void * parameter); void mqtt_loop(void * parameter);
bool setBambuSpool(String payload); bool setBambuSpool(String payload);
void bambu_restart(); void bambu_restart();
extern TaskHandle_t BambuMqttTask;
#endif #endif
@ -1,7 +1,8 @@
#include "commonFS.h" #include "commonFS.h"
#include <LittleFS.h>
bool saveJsonValue(const char* filename, const JsonDocument& doc) { bool saveJsonValue(const char* filename, const JsonDocument& doc) {
File file = SPIFFS.open(filename, "w"); File file = LittleFS.open(filename, "w");
if (!file) { if (!file) {
Serial.print("Fehler beim Öffnen der Datei zum Schreiben: "); Serial.print("Fehler beim Öffnen der Datei zum Schreiben: ");
Serial.println(filename); Serial.println(filename);
@ -19,7 +20,7 @@ bool saveJsonValue(const char* filename, const JsonDocument& doc) {
} }
bool loadJsonValue(const char* filename, JsonDocument& doc) { bool loadJsonValue(const char* filename, JsonDocument& doc) {
File file = SPIFFS.open(filename, "r"); File file = LittleFS.open(filename, "r");
if (!file) { if (!file) {
Serial.print("Fehler beim Öffnen der Datei zum Lesen: "); Serial.print("Fehler beim Öffnen der Datei zum Lesen: ");
Serial.println(filename); Serial.println(filename);
@ -35,23 +36,12 @@ bool loadJsonValue(const char* filename, JsonDocument& doc) {
return true; return true;
} }
bool initializeSPIFFS() { void initializeFileSystem() {
// Erster Versuch if (!LittleFS.begin(true)) {
if (SPIFFS.begin(true)) { Serial.println("LittleFS Mount Failed");
Serial.println("SPIFFS mounted successfully."); return;
return true;
} }
Serial.printf("LittleFS Total: %u bytes\n", LittleFS.totalBytes());
// Formatierung versuchen Serial.printf("LittleFS Used: %u bytes\n", LittleFS.usedBytes());
Serial.println("Failed to mount SPIFFS. Formatting..."); Serial.printf("LittleFS Free: %u bytes\n", LittleFS.totalBytes() - LittleFS.usedBytes());
SPIFFS.format();
// Zweiter Versuch nach Formatierung
if (SPIFFS.begin(true)) {
Serial.println("SPIFFS formatted and mounted successfully.");
return true;
}
Serial.println("SPIFFS initialization failed completely.");
return false;
} }
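Note: after the LittleFS migration the two JSON helpers keep their signatures, so callers are unaffected as long as initializeFileSystem() has run in setup(). A minimal round-trip sketch (file name and value are illustrative):

    #include "commonFS.h"

    void jsonRoundTripExample() {
        JsonDocument doc;
        doc["url"] = "http://spoolman.local:7912";            // placeholder value
        if (!saveJsonValue("/example.json", doc)) {
            Serial.println("save failed");
            return;
        }
        JsonDocument loaded;
        if (loadJsonValue("/example.json", loaded) && loaded["url"].is<String>()) {
            Serial.println(loaded["url"].as<String>());       // prints the stored URL
        }
    }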
@ -2,11 +2,11 @@
#define COMMONFS_H #define COMMONFS_H
#include <Arduino.h> #include <Arduino.h>
#include <SPIFFS.h>
#include <ArduinoJson.h> #include <ArduinoJson.h>
#include <LittleFS.h>
bool saveJsonValue(const char* filename, const JsonDocument& doc); bool saveJsonValue(const char* filename, const JsonDocument& doc);
bool loadJsonValue(const char* filename, JsonDocument& doc); bool loadJsonValue(const char* filename, JsonDocument& doc);
bool initializeSPIFFS(); void initializeFileSystem();
#endif #endif
@ -40,6 +40,10 @@ const uint8_t webserverPort = 80;
const char* apiUrl = "/api/v1"; const char* apiUrl = "/api/v1";
// ***** API // ***** API
// ***** Bambu Auto Set Spool
uint8_t autoSetBambuAmsCounter = 60;
// ***** Bambu Auto Set Spool
// ***** Task Prios // ***** Task Prios
uint8_t rfidTaskCore = 1; uint8_t rfidTaskCore = 1;
uint8_t rfidTaskPrio = 1; uint8_t rfidTaskPrio = 1;
@ -23,6 +23,8 @@ extern const uint8_t OLED_DATA_END;
extern const char* apiUrl; extern const char* apiUrl;
extern const uint8_t webserverPort; extern const uint8_t webserverPort;
extern uint8_t autoSetBambuAmsCounter;
extern const unsigned char wifi_on[]; extern const unsigned char wifi_on[];
extern const unsigned char wifi_off[]; extern const unsigned char wifi_off[];
extern const unsigned char cloud_on[]; extern const unsigned char cloud_on[];
@ -20,9 +20,9 @@ void setupDisplay() {
// the library initializes this with an Adafruit splash screen. // the library initializes this with an Adafruit splash screen.
display.setTextColor(WHITE); display.setTextColor(WHITE);
display.display(); display.display();
delay(1000); // Pause for 2 seconds
oledShowTopRow(); oledShowTopRow();
delay(2000); oledShowMessage("FilaMan v" + String(VERSION));
vTaskDelay(2000 / portTICK_PERIOD_MS);
} }
void oledclearline() { void oledclearline() {
@ -117,7 +117,6 @@ std::vector<String> splitTextIntoLines(String text, uint8_t textSize) {
lines.push_back(currentLine); lines.push_back(currentLine);
} }
Serial.println(lines.size());
return lines; return lines;
} }
@ -140,8 +139,9 @@ void oledShowMultilineMessage(String message, uint8_t size) {
int totalHeight = lines.size() * lineHeight; int totalHeight = lines.size() * lineHeight;
int startY = OLED_DATA_START + ((OLED_DATA_END - OLED_DATA_START - totalHeight) / 2); int startY = OLED_DATA_START + ((OLED_DATA_END - OLED_DATA_START - totalHeight) / 2);
uint8_t lineDistance = (lines.size() == 2) ? 5 : 0;
for (size_t i = 0; i < lines.size(); i++) { for (size_t i = 0; i < lines.size(); i++) {
display.setCursor(oled_center_h(lines[i]), startY + (i * lineHeight)); display.setCursor(oled_center_h(lines[i]), startY + (i * lineHeight) + (i == 1 ? lineDistance : 0));
display.print(lines[i]); display.print(lines[i]);
} }
@ -1,6 +1,4 @@
#include <Arduino.h> #include <Arduino.h>
#include <DNSServer.h>
#include <ESPmDNS.h>
#include <Wire.h> #include <Wire.h>
#include <WiFi.h> #include <WiFi.h>
@ -19,8 +17,14 @@
void setup() { void setup() {
Serial.begin(115200); Serial.begin(115200);
uint64_t chipid;
chipid = ESP.getEfuseMac(); //The chip ID is essentially its MAC address(length: 6 bytes).
Serial.printf("ESP32 Chip ID = %04X", (uint16_t)(chipid >> 32)); //print High 2 bytes
Serial.printf("%08X\n", (uint32_t)chipid); //print Low 4bytes.
// Initialize SPIFFS // Initialize SPIFFS
initializeSPIFFS(); initializeFileSystem();
// Start Display // Start Display
setupDisplay(); setupDisplay();
@ -29,7 +33,6 @@ void setup() {
initWiFi(); initWiFi();
// Webserver // Webserver
Serial.println("Starte Webserver");
setupWebserver(server); setupWebserver(server);
// Spoolman API // Spoolman API
@ -37,22 +40,27 @@ void setup() {
initSpoolman(); initSpoolman();
// Bambu MQTT // Bambu MQTT
// bambu.cpp
setupMqtt(); setupMqtt();
// mDNS // NFC Reader
Serial.println("Starte MDNS");
if (!MDNS.begin("filaman")) { // Set the hostname to "esp32.local"
Serial.println("Error setting up MDNS responder!");
while(1) {
delay(1000);
}
}
Serial.println("mDNS responder started");
startNfc(); startNfc();
start_scale(); uint8_t scaleCalibrated = start_scale();
if (scaleCalibrated == 3) {
oledShowMessage("Scale not calibrated!");
for (uint16_t i = 0; i < 50000; i++) {
yield();
vTaskDelay(pdMS_TO_TICKS(1));
esp_task_wdt_reset();
}
} else if (scaleCalibrated == 0) {
oledShowMessage("HX711 not found");
for (uint16_t i = 0; i < 50000; i++) {
yield();
vTaskDelay(pdMS_TO_TICKS(1));
esp_task_wdt_reset();
}
}
// WDT initialisieren mit 10 Sekunden Timeout // WDT initialisieren mit 10 Sekunden Timeout
bool panic = true; // Wenn true, löst ein WDT-Timeout einen System-Panik aus bool panic = true; // Wenn true, löst ein WDT-Timeout einen System-Panik aus
@ -66,42 +74,85 @@ void setup() {
} }
/**
* Safe interval check that handles millis() overflow
* @param currentTime Current millis() value
* @param lastTime Last recorded time
* @param interval Desired interval in milliseconds
* @return True if interval has elapsed
*/
bool intervalElapsed(unsigned long currentTime, unsigned long &lastTime, unsigned long interval) {
if (currentTime - lastTime >= interval || currentTime < lastTime) {
lastTime = currentTime;
return true;
}
return false;
}
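Note: because the subtraction is done on unsigned longs, (currentTime - lastTime) already wraps correctly across the millis() overflow; the extra (currentTime < lastTime) test merely forces one immediate trigger right after a rollover. Typical call pattern:

    static unsigned long lastBeat = 0;
    if (intervalElapsed(millis(), lastBeat, 1000UL)) {
        Serial.println("runs about once per second");   // lastBeat is updated inside the helper
    }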
unsigned long lastWeightReadTime = 0; unsigned long lastWeightReadTime = 0;
const unsigned long weightReadInterval = 1000; // 1 second const unsigned long weightReadInterval = 1000; // 1 second
unsigned long lastAmsSendTime = 0; unsigned long lastAutoSetBambuAmsTime = 0;
const unsigned long amsSendInterval = 60000; // 1 minute const unsigned long autoSetBambuAmsInterval = 1000; // 1 second
uint8_t autoAmsCounter = 0;
uint8_t weightSend = 0; uint8_t weightSend = 0;
int16_t lastWeight = 0; int16_t lastWeight = 0;
uint8_t wifiErrorCounter = 0;
unsigned long lastWifiCheckTime = 0;
const unsigned long wifiCheckInterval = 60000; // Überprüfe alle 60 Sekunden (60000 ms)
// ##### PROGRAM START ##### // ##### PROGRAM START #####
void loop() { void loop() {
// Überprüfe den WLAN-Status
if (WiFi.status() != WL_CONNECTED) {
wifiErrorCounter++;
wifiOn = false;
} else {
wifiErrorCounter = 0;
wifiOn = true;
}
if (wifiErrorCounter > 20) ESP.restart();
unsigned long currentMillis = millis(); unsigned long currentMillis = millis();
// Send AMS Data min every Minute // Überprüfe regelmäßig die WLAN-Verbindung
if (currentMillis - lastAmsSendTime >= amsSendInterval) { if (intervalElapsed(currentMillis, lastWifiCheckTime, wifiCheckInterval)) {
lastAmsSendTime = currentMillis; checkWiFiConnection();
sendAmsData(nullptr);
} }
// Ausgabe der Waage auf Display // Wenn Bambu auto set Spool aktiv
if (pauseMainTask == 0 && weight != lastWeight && hasReadRfidTag == 0) if (autoSendToBambu && autoSetToBambuSpoolId > 0) {
{ if (intervalElapsed(currentMillis, lastAutoSetBambuAmsTime, autoSetBambuAmsInterval))
(weight < 0) ? oledShowMessage("!! -1") : oledShowWeight(weight); {
if (hasReadRfidTag == 0)
{
lastAutoSetBambuAmsTime = currentMillis;
oledShowMessage("Auto Set " + String(autoSetBambuAmsCounter - autoAmsCounter) + "s");
autoAmsCounter++;
if (autoAmsCounter >= autoSetBambuAmsCounter)
{
autoSetToBambuSpoolId = 0;
autoAmsCounter = 0;
oledShowWeight(weight);
}
}
else
{
autoAmsCounter = 0;
}
}
} }
// Wenn Waage nicht Kalibriert
if (scaleCalibrated == 3)
{
oledShowMessage("Scale not calibrated!");
vTaskDelay(5000 / portTICK_PERIOD_MS);
yield();
esp_task_wdt_reset();
return;
}
// Ausgabe der Waage auf Display
if (pauseMainTask == 0 && weight != lastWeight && hasReadRfidTag == 0 && (!autoSendToBambu || autoSetToBambuSpoolId == 0))
{
(weight < 2) ? ((weight < -2) ? oledShowMessage("!! -0") : oledShowWeight(0)) : oledShowWeight(weight);
}
// Wenn Timer abgelaufen und nicht gerade ein RFID-Tag geschrieben wird // Wenn Timer abgelaufen und nicht gerade ein RFID-Tag geschrieben wird
if (currentMillis - lastWeightReadTime >= weightReadInterval && hasReadRfidTag < 3) if (currentMillis - lastWeightReadTime >= weightReadInterval && hasReadRfidTag < 3)
{ {
@ -145,6 +196,12 @@ void loop() {
oledShowIcon("success"); oledShowIcon("success");
vTaskDelay(2000 / portTICK_PERIOD_MS); vTaskDelay(2000 / portTICK_PERIOD_MS);
weightSend = 1; weightSend = 1;
autoSetToBambuSpoolId = spoolId.toInt();
if (octoEnabled)
{
updateSpoolOcto(autoSetToBambuSpoolId);
}
} }
else else
{ {
@ -44,8 +44,6 @@ void payloadToJson(uint8_t *data) {
DeserializationError error = deserializeJson(doc, jsonString); DeserializationError error = deserializeJson(doc, jsonString);
if (!error) { if (!error) {
const char* version = doc["version"];
const char* protocol = doc["protocol"];
const char* color_hex = doc["color_hex"]; const char* color_hex = doc["color_hex"];
const char* type = doc["type"]; const char* type = doc["type"];
int min_temp = doc["min_temp"]; int min_temp = doc["min_temp"];
@ -55,8 +53,6 @@ void payloadToJson(uint8_t *data) {
Serial.println(); Serial.println();
Serial.println("-----------------"); Serial.println("-----------------");
Serial.println("JSON-Parsed Data:"); Serial.println("JSON-Parsed Data:");
Serial.println(version);
Serial.println(protocol);
Serial.println(color_hex); Serial.println(color_hex);
Serial.println(type); Serial.println(type);
Serial.println(min_temp); Serial.println(min_temp);
@ -93,8 +89,16 @@ bool formatNdefTag() {
return success; return success;
} }
uint16_t readTagSize()
{
uint8_t buffer[4];
memset(buffer, 0, 4);
nfc.ntag2xx_ReadPage(3, buffer);
return buffer[2]*8;
}
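Note: page 3 of an NTAG21x holds the capability container, and byte 2 encodes the user data area in 8-byte blocks (0x12 -> 144 B on NTAG213, 0x3E -> 496 B on NTAG215, 0x6D -> 872 B on NTAG216). The helper above ignores the return value of ntag2xx_ReadPage(); a hypothetical defensive variant would report 0 explicitly on a failed read instead of relying on the zero-initialised buffer:

    uint16_t readTagSizeChecked()
    {
        uint8_t buffer[4] = {0};
        if (!nfc.ntag2xx_ReadPage(3, buffer)) return 0;   // capability container not readable
        return buffer[2] * 8;                             // CC byte 2 = data area size / 8
    }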
uint8_t ntag2xx_WriteNDEF(const char *payload) { uint8_t ntag2xx_WriteNDEF(const char *payload) {
uint8_t tagSize = 240; // 144 bytes is maximum for NTAG213 uint16_t tagSize = readTagSize();
Serial.print("Tag Size: ");Serial.println(tagSize); Serial.print("Tag Size: ");Serial.println(tagSize);
uint8_t pageBuffer[4] = {0, 0, 0, 0}; uint8_t pageBuffer[4] = {0, 0, 0, 0};
@ -136,6 +140,8 @@ uint8_t ntag2xx_WriteNDEF(const char *payload) {
if (combinedData == NULL) if (combinedData == NULL)
{ {
Serial.println("Fehler: Nicht genug Speicher vorhanden."); Serial.println("Fehler: Nicht genug Speicher vorhanden.");
oledShowMessage("Tag too small");
vTaskDelay(2000 / portTICK_PERIOD_MS);
return 0; return 0;
} }
@ -238,12 +244,14 @@ void writeJsonToTag(void *parameter) {
hasReadRfidTag = 3; hasReadRfidTag = 3;
vTaskSuspend(RfidReaderTask); vTaskSuspend(RfidReaderTask);
vTaskDelay(500 / portTICK_PERIOD_MS); vTaskDelay(50 / portTICK_PERIOD_MS);
//pauseBambuMqttTask = true; //pauseBambuMqttTask = true;
// aktualisieren der Website wenn sich der Status ändert // aktualisieren der Website wenn sich der Status ändert
sendNfcData(nullptr); sendNfcData(nullptr);
vTaskDelay(100 / portTICK_PERIOD_MS);
oledShowMessage("Waiting for NFC-Tag"); oledShowMessage("Waiting for NFC-Tag");
// Wait 10sec for tag // Wait 10sec for tag
uint8_t success = 0; uint8_t success = 0;
String uidString = ""; String uidString = "";
@ -331,7 +339,7 @@ void startWriteJsonToTag(const char* payload) {
xTaskCreate( xTaskCreate(
writeJsonToTag, // Task-Funktion writeJsonToTag, // Task-Funktion
"WriteJsonToTagTask", // Task-Name "WriteJsonToTagTask", // Task-Name
4096, // Stackgröße in Bytes 5115, // Stackgröße in Bytes
(void*)payloadCopy, // Parameter (void*)payloadCopy, // Parameter
rfidWriteTaskPrio, // Priorität rfidWriteTaskPrio, // Priorität
NULL // Task-Handle (nicht benötigt) NULL // Task-Handle (nicht benötigt)
@ -367,46 +375,51 @@ void scanRfidTask(void * parameter) {
if (uidLength == 7) if (uidLength == 7)
{ {
uint8_t data[256]; uint16_t tagSize = readTagSize();
if(tagSize > 0)
// We probably have an NTAG2xx card (though it could be Ultralight as well)
Serial.println("Seems to be an NTAG2xx tag (7 byte UID)");
for (uint8_t i = 0; i < 45; i++) {
/*
if (i < uidLength) {
uidString += String(uid[i], HEX);
if (i < uidLength - 1) {
uidString += ":"; // Optional: Trennzeichen hinzufügen
}
}
*/
if (!nfc.mifareclassic_ReadDataBlock(i, data + (i - 4) * 4))
{
break; // Stop if reading fails
}
// Check for NDEF message end
if (data[(i - 4) * 4] == 0xFE)
{
break; // End of NDEF message
}
yield();
esp_task_wdt_reset();
vTaskDelay(pdMS_TO_TICKS(1));
}
if (!decodeNdefAndReturnJson(data))
{ {
oledShowMessage("NFC-Tag unknown"); // Create a buffer depending on the size of the tag
vTaskDelay(2000 / portTICK_PERIOD_MS); uint8_t* data = (uint8_t*)malloc(tagSize);
memset(data, 0, tagSize);
// We probably have an NTAG2xx card (though it could be Ultralight as well)
Serial.println("Seems to be an NTAG2xx tag (7 byte UID)");
uint8_t numPages = readTagSize()/4;
for (uint8_t i = 4; i < 4+numPages; i++) {
if (!nfc.ntag2xx_ReadPage(i, data+(i-4) * 4))
{
break; // Stop if reading fails
}
// Check for NDEF message end
if (data[(i - 4) * 4] == 0xFE)
{
break; // End of NDEF message
}
yield();
esp_task_wdt_reset();
vTaskDelay(pdMS_TO_TICKS(1));
}
if (!decodeNdefAndReturnJson(data))
{
oledShowMessage("NFC-Tag unknown");
vTaskDelay(2000 / portTICK_PERIOD_MS);
hasReadRfidTag = 2;
}
else
{
hasReadRfidTag = 1;
}
free(data);
}
else
{
oledShowMessage("NFC-Tag read error");
hasReadRfidTag = 2; hasReadRfidTag = 2;
} }
else
{
hasReadRfidTag = 1;
}
} }
else else
{ {
@ -420,7 +433,7 @@ void scanRfidTask(void * parameter) {
//uidString = ""; //uidString = "";
nfcJsonData = ""; nfcJsonData = "";
Serial.println("Tag entfernt"); Serial.println("Tag entfernt");
oledShowWeight(0); if (!autoSendToBambu) oledShowWeight(weight);
} }
// aktualisieren der Website wenn sich der Status ändert // aktualisieren der Website wenn sich der Status ändert
@ -456,7 +469,7 @@ void startNfc() {
BaseType_t result = xTaskCreatePinnedToCore( BaseType_t result = xTaskCreatePinnedToCore(
scanRfidTask, /* Function to implement the task */ scanRfidTask, /* Function to implement the task */
"RfidReader", /* Name of the task */ "RfidReader", /* Name of the task */
10000, /* Stack size in words */ 5115, /* Stack size in words */
NULL, /* Task input parameter */ NULL, /* Task input parameter */
rfidTaskPrio, /* Priority of the task */ rfidTaskPrio, /* Priority of the task */
&RfidReaderTask, /* Task handle. */ &RfidReaderTask, /* Task handle. */
@ -1,62 +1,243 @@
#include <Arduino.h> #include <Arduino.h>
#include "ota.h" #include <website.h>
#include <Update.h> #include <commonFS.h>
#include <SPIFFS.h>
#include "commonFS.h"
void handleOTAUpload(AsyncWebServerRequest *request, String filename, size_t index, uint8_t *data, size_t len, bool final) { // Globale Variablen für Config Backups hinzufügen
static size_t contentLength = 0; String bambuCredentialsBackup;
String spoolmanUrlBackup;
// Globale Variable für den Update-Typ
static int currentUpdateCommand = 0;
// Globale Update-Variablen
static size_t updateTotalSize = 0;
static size_t updateWritten = 0;
static bool isSpiffsUpdate = false;
/**
* Compares two version strings and determines if version1 is less than version2
*
* @param version1 First version string (format: x.y.z)
* @param version2 Second version string (format: x.y.z)
* @return true if version1 is less than version2
*/
bool isVersionLessThan(const String& version1, const String& version2) {
int major1 = 0, minor1 = 0, patch1 = 0;
int major2 = 0, minor2 = 0, patch2 = 0;
if (!index) { // Parse version1
contentLength = request->contentLength(); sscanf(version1.c_str(), "%d.%d.%d", &major1, &minor1, &patch1);
Serial.printf("Update size: %u bytes\n", contentLength);
if (contentLength == 0) {
request->send(400, "application/json", "{\"status\":\"error\",\"message\":\"Invalid file size\"}");
return;
}
// Determine if this is a full image (firmware + SPIFFS) or just firmware
bool isFullImage = (contentLength > 0x3D0000); // SPIFFS starts at 0x3D0000
if (isFullImage) {
// For full images, we need to make sure we have enough space and properly partition it
if (!Update.begin(ESP.getFreeSketchSpace(), U_FLASH)) {
Serial.printf("Not enough space for full image: %u bytes required\n", contentLength);
request->send(400, "application/json", "{\"status\":\"error\",\"message\":\"Full image updates are not supported via OTA. Please use USB update for full images.\"}");
return;
}
} else {
// For firmware-only updates
if (!Update.begin(contentLength, U_FLASH)) {
Serial.printf("Not enough space: %u required\n", contentLength);
request->send(400, "application/json", "{\"status\":\"error\",\"message\":\"Not enough space available for firmware update\"}");
return;
}
}
Serial.println(isFullImage ? "Full image update started" : "Firmware update started");
}
// Write chunk to flash // Parse version2
if (Update.write(data, len) != len) { sscanf(version2.c_str(), "%d.%d.%d", &major2, &minor2, &patch2);
Update.printError(Serial);
String errorMsg = Update.errorString(); // Compare major version
request->send(400, "application/json", "{\"status\":\"error\",\"message\":\"Error writing update: " + errorMsg + "\"}"); if (major1 < major2) return true;
return; if (major1 > major2) return false;
// Major versions equal, compare minor
if (minor1 < minor2) return true;
if (minor1 > minor2) return false;
// Minor versions equal, compare patch
return patch1 < patch2;
}
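Note: sscanf() leaves missing components at 0, so a short string like "1.4" is compared as "1.4.0". Illustrative calls against the gate below:

    isVersionLessThan("1.3.98", "1.4.0");   // true  -> the /update handler refuses the web update
    isVersionLessThan("1.4.0",  "1.4.0");   // false -> the update is allowed to proceed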
void backupJsonConfigs() {
// Bambu Credentials backup
if (LittleFS.exists("/bambu_credentials.json")) {
File file = LittleFS.open("/bambu_credentials.json", "r");
if (file) {
bambuCredentialsBackup = file.readString();
file.close();
Serial.println("Bambu credentials backed up");
}
} }
if (final) { // Spoolman URL backup
if (Update.end(true)) { if (LittleFS.exists("/spoolman_url.json")) {
Serial.println("Update complete"); File file = LittleFS.open("/spoolman_url.json", "r");
request->send(200, "application/json", "{\"status\":\"success\",\"message\":\"Update successful! Device will restart...\",\"restart\":true}"); if (file) {
delay(1000); spoolmanUrlBackup = file.readString();
ESP.restart(); file.close();
} else { Serial.println("Spoolman URL backed up");
String errorMsg = Update.errorString();
Update.printError(Serial);
request->send(400, "application/json", "{\"status\":\"error\",\"message\":\"Update failed: " + errorMsg + "\"}");
} }
} }
} }
void restoreJsonConfigs() {
// Restore Bambu credentials
if (bambuCredentialsBackup.length() > 0) {
File file = LittleFS.open("/bambu_credentials.json", "w");
if (file) {
file.print(bambuCredentialsBackup);
file.close();
Serial.println("Bambu credentials restored");
}
bambuCredentialsBackup = ""; // Clear backup
}
// Restore Spoolman URL
if (spoolmanUrlBackup.length() > 0) {
File file = LittleFS.open("/spoolman_url.json", "w");
if (file) {
file.print(spoolmanUrlBackup);
file.close();
Serial.println("Spoolman URL restored");
}
spoolmanUrlBackup = ""; // Clear backup
}
}
void espRestart() {
yield();
vTaskDelay(5000 / portTICK_PERIOD_MS);
ESP.restart();
}
void sendUpdateProgress(int progress, const char* status = nullptr, const char* message = nullptr) {
static int lastSentProgress = -1;
// Verhindere zu häufige Updates
if (progress == lastSentProgress && !status && !message) {
return;
}
String progressMsg = "{\"type\":\"updateProgress\",\"progress\":" + String(progress);
if (status) {
progressMsg += ",\"status\":\"" + String(status) + "\"";
}
if (message) {
progressMsg += ",\"message\":\"" + String(message) + "\"";
}
progressMsg += "}";
if (progress >= 100) {
// Sende die Nachricht nur einmal für den Abschluss
ws.textAll("{\"type\":\"updateProgress\",\"progress\":100,\"status\":\"success\",\"message\":\"Update successful! Restarting device...\"}");
delay(50);
}
// Sende die Nachricht mehrmals mit Verzögerung für wichtige Updates
if (status || abs(progress - lastSentProgress) >= 10 || progress == 100) {
for (int i = 0; i < 2; i++) {
ws.textAll(progressMsg);
delay(100); // Längerer Delay zwischen Nachrichten
}
} else {
ws.textAll(progressMsg);
delay(50);
}
lastSentProgress = progress;
}
void handleUpdate(AsyncWebServer &server) {
    AsyncCallbackWebHandler* updateHandler = new AsyncCallbackWebHandler();
    updateHandler->setUri("/update");
    updateHandler->setMethod(HTTP_POST);

    // Check if current version is less than defined TOOLVERSION before proceeding with update
    if (isVersionLessThan(VERSION, TOOLDVERSION)) {
        updateHandler->onRequest([](AsyncWebServerRequest *request) {
            request->send(400, "application/json",
                "{\"success\":false,\"message\":\"Your current version is too old. Please perform a full upgrade.\"}");
        });
        server.addHandler(updateHandler);
        return;
    }

    updateHandler->onUpload([](AsyncWebServerRequest *request, String filename,
                               size_t index, uint8_t *data, size_t len, bool final) {
        if (!index) {
            updateTotalSize = request->contentLength();
            updateWritten = 0;
            isSpiffsUpdate = (filename.indexOf("website") > -1);

            if (isSpiffsUpdate) {
                // Back up configurations before the update
                sendUpdateProgress(0, "backup", "Backing up configurations...");
                delay(200);
                backupJsonConfigs();
                delay(200);

                const esp_partition_t *partition = esp_partition_find_first(ESP_PARTITION_TYPE_DATA, ESP_PARTITION_SUBTYPE_DATA_SPIFFS, NULL);
                if (!partition || !Update.begin(partition->size, U_SPIFFS)) {
                    request->send(400, "application/json", "{\"success\":false,\"message\":\"Update initialization failed\"}");
                    return;
                }
                sendUpdateProgress(5, "starting", "Starting SPIFFS update...");
                delay(200);
            } else {
                if (!Update.begin(updateTotalSize)) {
                    request->send(400, "application/json", "{\"success\":false,\"message\":\"Update initialization failed\"}");
                    return;
                }
                sendUpdateProgress(0, "starting", "Starting firmware update...");
                delay(200);
            }
        }

        if (len) {
            if (Update.write(data, len) != len) {
                request->send(400, "application/json", "{\"success\":false,\"message\":\"Write failed\"}");
                return;
            }
            updateWritten += len;

            int currentProgress;
            // Calculate progress based on the update type
            if (isSpiffsUpdate) {
                // SPIFFS: 5-75% for the upload
                currentProgress = 6 + (updateWritten * 100) / updateTotalSize;
            } else {
                // Firmware: 0-100% for the upload
                currentProgress = 1 + (updateWritten * 100) / updateTotalSize;
            }

            static int lastProgress = -1;
            if (currentProgress != lastProgress && (currentProgress % 10 == 0 || final)) {
                sendUpdateProgress(currentProgress, "uploading");
                oledShowMessage("Update: " + String(currentProgress) + "%");
                delay(50);
                lastProgress = currentProgress;
            }
        }

        if (final) {
            if (Update.end(true)) {
                if (isSpiffsUpdate) {
                    restoreJsonConfigs();
                }
            } else {
                request->send(400, "application/json", "{\"success\":false,\"message\":\"Update finalization failed\"}");
            }
        }
    });

    updateHandler->onRequest([](AsyncWebServerRequest *request) {
        if (Update.hasError()) {
            request->send(400, "application/json", "{\"success\":false,\"message\":\"Update failed\"}");
            return;
        }

        // First 100% message
        ws.textAll("{\"type\":\"updateProgress\",\"progress\":100,\"status\":\"success\",\"message\":\"Update successful! Restarting device...\"}");
        vTaskDelay(2000 / portTICK_PERIOD_MS);

        AsyncWebServerResponse *response = request->beginResponse(200, "application/json",
            "{\"success\":true,\"message\":\"Update successful! Restarting device...\"}");
        response->addHeader("Connection", "close");
        request->send(response);

        // Second 100% message, just to be safe
        ws.textAll("{\"type\":\"updateProgress\",\"progress\":100,\"status\":\"success\",\"message\":\"Update successful! Restarting device...\"}");
        espRestart();
    });

    server.addHandler(updateHandler);
}
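A minimal sketch of how this handler gets wired into an AsyncWebServer instance; the port and the bare setup()/loop() frame are assumptions for illustration, since the firmware registers it from its own webserver setup:

// Sketch under assumptions: standalone registration of the /update handler.
#include <ESPAsyncWebServer.h>
#include "ota.h"

AsyncWebServer demoServer(80);  // placeholder port; the firmware uses webserverPort

void setup() {
    Serial.begin(115200);
    // ... filesystem, WiFi and WebSocket setup would go here ...
    handleUpdate(demoServer);   // registers the POST /update upload handler shown above
    demoServer.begin();
}

void loop() {}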

View File

@@ -1,8 +1,9 @@
 #ifndef OTA_H
 #define OTA_H

+#include <ArduinoOTA.h>
 #include <ESPAsyncWebServer.h>

-void handleOTAUpload(AsyncWebServerRequest *request, String filename, size_t index, uint8_t *data, size_t len, bool final);
+void handleUpdate(AsyncWebServer &server);

 #endif

View File

@@ -3,9 +3,9 @@
 #include <ArduinoJson.h>
 #include "config.h"
 #include "HX711.h"
-#include <EEPROM.h>
 #include "display.h"
 #include "esp_task_wdt.h"
+#include <Preferences.h>

 HX711 scale;
@@ -16,6 +16,11 @@ int16_t weight = 0;
 uint8_t weigthCouterToApi = 0;
 uint8_t scale_tare_counter = 0;
 uint8_t pauseMainTask = 0;
+uint8_t scaleCalibrated = 1;
+
+Preferences preferences;
+const char* NVS_NAMESPACE = "scale";
+const char* NVS_KEY_CALIBRATION = "cal_value";

 // ##### Funktionen für Waage #####
 uint8_t tareScale() {
@@ -42,26 +47,28 @@ void scale_loop(void * parameter) {
             weight = round(scale.get_units());
         }
-        vTaskDelay(pdMS_TO_TICKS(100)); // Verzögerung, um die CPU nicht zu überlasten
+        vTaskDelay(pdMS_TO_TICKS(100));
     }
 }

-void start_scale() {
+uint8_t start_scale() {
     Serial.println("Prüfe Calibration Value");
-    long calibrationValue; // calibration value (see example file "Calibration.ino")
-    //calibrationValue = 696.0; // uncomment this if you want to set the calibration value in the sketch
-    EEPROM.begin(512);
-    EEPROM.get(calVal_eepromAdress, calibrationValue); // uncomment this if you want to fetch the calibration value from eeprom
-    //calibrationValue = EEPROM.read(calVal_eepromAdress);
+    long calibrationValue;
+
+    // NVS lesen
+    preferences.begin(NVS_NAMESPACE, true); // true = readonly
+    calibrationValue = preferences.getLong(NVS_KEY_CALIBRATION, defaultScaleCalibrationValue);
+    preferences.end();

     Serial.print("Read Scale Calibration Value ");
     Serial.println(calibrationValue);

     scale.begin(LOADCELL_DOUT_PIN, LOADCELL_SCK_PIN);
-    if (isnan(calibrationValue) || calibrationValue < 1) calibrationValue = defaultScaleCalibrationValue;
+    if (isnan(calibrationValue) || calibrationValue < 1) {
+        calibrationValue = defaultScaleCalibrationValue;
+        scaleCalibrated = 0;
+    }

     oledShowMessage("Scale Tare Please remove all");
     for (uint16_t i = 0; i < 2000; i++) {
@@ -83,7 +90,7 @@ void start_scale() {
     BaseType_t result = xTaskCreatePinnedToCore(
         scale_loop, /* Function to implement the task */
         "ScaleLoop", /* Name of the task */
-        10000, /* Stack size in words */
+        2048, /* Stack size in words */
         NULL, /* Task input parameter */
         scaleTaskPrio, /* Priority of the task */
         &ScaleTask, /* Task handle. */
@@ -94,6 +101,8 @@ void start_scale() {
     } else {
         Serial.println("ScaleLoop-Task erfolgreich erstellt");
     }
+
+    return (scaleCalibrated == 1) ? 1 : 3;
 }

 uint8_t calibrate_scale() {
@@ -101,6 +110,7 @@ uint8_t calibrate_scale() {
     //vTaskSuspend(RfidReaderTask);
     vTaskDelete(RfidReaderTask);
+    vTaskDelete(ScaleTask);

     pauseBambuMqttTask = true;
     pauseMainTask = 1;
@@ -137,18 +147,19 @@ uint8_t calibrate_scale() {
     {
         Serial.print("New calibration value has been set to: ");
         Serial.println(newCalibrationValue);
-        Serial.print("Save this value to EEPROM adress ");
-        Serial.println(calVal_eepromAdress);
-        //EEPROM.put(calVal_eepromAdress, newCalibrationValue);
-        EEPROM.put(calVal_eepromAdress, newCalibrationValue);
-        EEPROM.commit();
-        EEPROM.get(calVal_eepromAdress, newCalibrationValue);
-        //newCalibrationValue = EEPROM.read(calVal_eepromAdress);
-        Serial.print("Read Value ");
-        Serial.println(newCalibrationValue);
+        // Speichern mit NVS
+        preferences.begin(NVS_NAMESPACE, false); // false = readwrite
+        preferences.putLong(NVS_KEY_CALIBRATION, newCalibrationValue);
+        preferences.end();
+
+        // Verifizieren
+        preferences.begin(NVS_NAMESPACE, true);
+        long verifyValue = preferences.getLong(NVS_KEY_CALIBRATION, 0);
+        preferences.end();
+
+        Serial.print("Verified stored value: ");
+        Serial.println(verifyValue);

         Serial.println("End calibration, revome weight");
@@ -167,8 +178,6 @@ uint8_t calibrate_scale() {
             vTaskDelay(pdMS_TO_TICKS(1));
             esp_task_wdt_reset();
         }
-
-        //ESP.restart();
     }
     else
     {
@@ -201,9 +210,8 @@ uint8_t calibrate_scale() {
     }

     oledShowMessage("Scale Ready");
-    Serial.println("starte Scale Task");
+    Serial.println("restart Scale Task");
     start_scale();

     pauseBambuMqttTask = false;
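The EEPROM-to-NVS migration above boils down to a namespaced key/value store; a self-contained sketch of the same read/write pattern (namespace and key copied from the diff, everything else illustrative):

// Illustrative sketch of the NVS pattern used above (not part of the diff).
#include <Preferences.h>

Preferences prefsDemo;

long readCalibration(long fallback) {
    prefsDemo.begin("scale", true);                  // read-only
    long value = prefsDemo.getLong("cal_value", fallback);
    prefsDemo.end();
    return value;
}

void writeCalibration(long value) {
    prefsDemo.begin("scale", false);                 // read/write
    prefsDemo.putLong("cal_value", value);
    prefsDemo.end();
}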

View File

@@ -5,7 +5,7 @@
 #include "HX711.h"

-void start_scale();
+uint8_t start_scale();
 uint8_t calibrate_scale();
 uint8_t tareScale();
@@ -14,5 +14,8 @@ extern int16_t weight;
 extern uint8_t weigthCouterToApi;
 extern uint8_t scale_tare_counter;
 extern uint8_t pauseMainTask;
+extern uint8_t scaleCalibrated;
+extern TaskHandle_t ScaleTask;

 #endif
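start_scale() now reports a status instead of returning void; a hedged sketch of how a caller might react to it (the meaning of the codes is inferred from the diff, where 3 is returned when no stored calibration value was found):

// Sketch only: reacting to the new start_scale() return value.
uint8_t scaleStatus = start_scale();
if (scaleStatus == 3) {                  // no valid calibration in NVS (inferred)
    oledShowMessage("Scale not calibrated");
}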

View File

@@ -7,10 +7,16 @@
 #include "nfc.h"
 #include "scale.h"
 #include "esp_task_wdt.h"
+#include <Update.h>
+#include "display.h"
 #include "ota.h"

+#ifndef VERSION
+#define VERSION "1.1.0"
+#endif
+
 // Cache-Control Header definieren
-#define CACHE_CONTROL "max-age=31536000" // Cache für 1 Jahr
+#define CACHE_CONTROL "max-age=604800" // Cache für 1 Woche

 AsyncWebServer server(webserverPort);
 AsyncWebSocket ws("/ws");
@@ -18,6 +24,7 @@ AsyncWebSocket ws("/ws");
 uint8_t lastSuccess = 0;
 uint8_t lastHasReadRfidTag = 0;

 void onWsEvent(AsyncWebSocket *server, AsyncWebSocketClient *client, AwsEventType type, void *arg, uint8_t *data, size_t len) {
     if (type == WS_EVT_CONNECT) {
         Serial.println("Neuer Client verbunden!");
@@ -28,11 +35,15 @@ void onWsEvent(AsyncWebSocket *server, AsyncWebSocketClient *client, AwsEventTyp
         sendWriteResult(client, 3);
     } else if (type == WS_EVT_DISCONNECT) {
         Serial.println("Client getrennt.");
+    } else if (type == WS_EVT_ERROR) {
+        Serial.printf("WebSocket Client #%u error(%u): %s\n", client->id(), *((uint16_t*)arg), (char*)data);
+    } else if (type == WS_EVT_PONG) {
+        Serial.printf("WebSocket Client #%u pong\n", client->id());
     } else if (type == WS_EVT_DATA) {
         String message = String((char*)data);
         JsonDocument doc;
         deserializeJson(doc, message);

         if (doc["type"] == "heartbeat") {
             // Sende Heartbeat-Antwort
             ws.text(client->id(), "{"
@@ -44,7 +55,7 @@
         }

         else if (doc["type"] == "writeNfcTag") {
-            if (doc.containsKey("payload")) {
+            if (doc["payload"].is<JsonObject>()) {
                 // Versuche NFC-Daten zu schreiben
                 String payloadString;
                 serializeJson(doc["payload"], payloadString);
@@ -84,6 +95,15 @@
             setBambuSpool(doc["payload"]);
         }

+        else if (doc["type"] == "setSpoolmanSettings") {
+            Serial.println(doc["payload"].as<String>());
+            if (updateSpoolBambuData(doc["payload"].as<String>())) {
+                ws.textAll("{\"type\":\"setSpoolmanSettings\",\"payload\":\"success\"}");
+            } else {
+                ws.textAll("{\"type\":\"setSpoolmanSettings\",\"payload\":\"error\"}");
+            }
+        }
+
         else {
             Serial.println("Unbekannter WebSocket-Typ: " + doc["type"].as<String>());
         }
@@ -93,12 +113,12 @@ void onWsEvent(AsyncWebSocket *server, AsyncWebSocketClient *client, AwsEventTyp
 // Funktion zum Laden und Ersetzen des Headers in einer HTML-Datei
 String loadHtmlWithHeader(const char* filename) {
     Serial.println("Lade HTML-Datei: " + String(filename));
-    if (!SPIFFS.exists(filename)) {
+    if (!LittleFS.exists(filename)) {
         Serial.println("Fehler: Datei nicht gefunden!");
         return "Fehler: Datei nicht gefunden!";
     }

-    File file = SPIFFS.open(filename, "r");
+    File file = LittleFS.open(filename, "r");
     String html = file.readString();
     file.close();
@@ -151,11 +171,22 @@ void sendNfcData(AsyncWebSocketClient *client) {
 void sendAmsData(AsyncWebSocketClient *client) {
     if (ams_count > 0) {
-        ws.textAll("{\"type\":\"amsData\", \"payload\":" + amsJsonData + "}");
+        ws.textAll("{\"type\":\"amsData\",\"payload\":" + amsJsonData + "}");
     }
 }

 void setupWebserver(AsyncWebServer &server) {
+    // Deaktiviere alle Debug-Ausgaben
+    Serial.setDebugOutput(false);
+
+    // WebSocket-Optimierungen
+    ws.onEvent(onWsEvent);
+    ws.enable(true);
+
+    // Konfiguriere Server für große Uploads
+    server.onRequestBody([](AsyncWebServerRequest *request, uint8_t *data, size_t len, size_t index, size_t total){});
+    server.onFileUpload([](AsyncWebServerRequest *request, const String& filename, size_t index, uint8_t *data, size_t len, bool final){});
+
     // Lade die Spoolman-URL beim Booten
     spoolmanUrl = loadSpoolmanUrl();
     Serial.print("Geladene Spoolman-URL: ");
@@ -164,7 +195,7 @@ void setupWebserver(AsyncWebServer &server) {
     // Route für about
     server.on("/about", HTTP_GET, [](AsyncWebServerRequest *request){
         Serial.println("Anfrage für /about erhalten");
-        AsyncWebServerResponse *response = request->beginResponse(SPIFFS, "/index.html.gz", "text/html");
+        AsyncWebServerResponse *response = request->beginResponse(LittleFS, "/index.html.gz", "text/html");
         response->addHeader("Content-Encoding", "gzip");
         response->addHeader("Cache-Control", CACHE_CONTROL);
         request->send(response);
@@ -173,7 +204,7 @@ void setupWebserver(AsyncWebServer &server) {
     // Route für Waage
     server.on("/waage", HTTP_GET, [](AsyncWebServerRequest *request){
         Serial.println("Anfrage für /waage erhalten");
-        AsyncWebServerResponse *response = request->beginResponse(SPIFFS, "/waage.html.gz", "text/html");
+        AsyncWebServerResponse *response = request->beginResponse(LittleFS, "/waage.html.gz", "text/html");
         response->addHeader("Content-Encoding", "gzip");
         response->addHeader("Cache-Control", CACHE_CONTROL);
         request->send(response);
@@ -182,24 +213,13 @@ void setupWebserver(AsyncWebServer &server) {
     // Route für RFID
     server.on("/", HTTP_GET, [](AsyncWebServerRequest *request){
         Serial.println("Anfrage für /rfid erhalten");
-        AsyncWebServerResponse *response = request->beginResponse(SPIFFS, "/rfid.html.gz", "text/html");
+        AsyncWebServerResponse *response = request->beginResponse(LittleFS, "/rfid.html.gz", "text/html");
         response->addHeader("Content-Encoding", "gzip");
         response->addHeader("Cache-Control", CACHE_CONTROL);
         request->send(response);
         Serial.println("RFID-Seite gesendet");
     });

-    /*
-    // Neue API-Route für das Abrufen der Spool-Daten
-    server.on("/api/spools", HTTP_GET, [](AsyncWebServerRequest *request){
-        Serial.println("API-Aufruf: /api/spools");
-        JsonDocument spoolsData = fetchSpoolsForWebsite();
-        String response;
-        serializeJson(spoolsData, response);
-        request->send(200, "application/json", response);
-    });
-    */
     server.on("/api/url", HTTP_GET, [](AsyncWebServerRequest *request){
         Serial.println("API-Aufruf: /api/url");
         String jsonResponse = "{\"spoolman_url\": \"" + String(spoolmanUrl) + "\"}";
@@ -209,7 +229,7 @@ void setupWebserver(AsyncWebServer &server) {
     // Route für WiFi
     server.on("/wifi", HTTP_GET, [](AsyncWebServerRequest *request){
         Serial.println("Anfrage für /wifi erhalten");
-        AsyncWebServerResponse *response = request->beginResponse(SPIFFS, "/wifi.html.gz", "text/html");
+        AsyncWebServerResponse *response = request->beginResponse(LittleFS, "/wifi.html.gz", "text/html");
         response->addHeader("Content-Encoding", "gzip");
         response->addHeader("Cache-Control", CACHE_CONTROL);
         request->send(response);
@@ -219,13 +239,18 @@ void setupWebserver(AsyncWebServer &server) {
     server.on("/spoolman", HTTP_GET, [](AsyncWebServerRequest *request){
         Serial.println("Anfrage für /spoolman erhalten");
         String html = loadHtmlWithHeader("/spoolman.html");
-        html.replace("{{spoolmanUrl}}", spoolmanUrl);
+        html.replace("{{spoolmanUrl}}", (spoolmanUrl != "") ? spoolmanUrl : "");
+        html.replace("{{spoolmanOctoEnabled}}", octoEnabled ? "checked" : "");
+        html.replace("{{spoolmanOctoUrl}}", (octoUrl != "") ? octoUrl : "");
+        html.replace("{{spoolmanOctoToken}}", (octoToken != "") ? octoToken : "");

         JsonDocument doc;
-        if (loadJsonValue("/bambu_credentials.json", doc) && doc.containsKey("bambu_ip")) {
+        if (loadJsonValue("/bambu_credentials.json", doc) && doc["bambu_ip"].is<String>())
+        {
             String bambuIp = doc["bambu_ip"].as<String>();
             String bambuSerial = doc["bambu_serialnr"].as<String>();
             String bambuCode = doc["bambu_accesscode"].as<String>();
+            autoSendToBambu = doc["autoSendToBambu"].as<bool>();
             bambuIp.trim();
             bambuSerial.trim();
             bambuCode.trim();
@@ -233,7 +258,17 @@ void setupWebserver(AsyncWebServer &server) {
             html.replace("{{bambuIp}}", bambuIp ? bambuIp : "");
             html.replace("{{bambuSerial}}", bambuSerial ? bambuSerial : "");
             html.replace("{{bambuCode}}", bambuCode ? bambuCode : "");
-        }
+            html.replace("{{autoSendToBambu}}", autoSendToBambu ? "checked" : "");
+            html.replace("{{autoSendTime}}", String(autoSetBambuAmsCounter));
+        }
+        else
+        {
+            html.replace("{{bambuIp}}", "");
+            html.replace("{{bambuSerial}}", "");
+            html.replace("{{bambuCode}}", "");
+            html.replace("{{autoSendToBambu}}", "");
+            html.replace("{{autoSendTime}}", String(autoSetBambuAmsCounter));
+        }

         request->send(200, "text/html", html);
     });
@@ -245,10 +280,21 @@ void setupWebserver(AsyncWebServer &server) {
             return;
         }

+        if (request->getParam("octoEnabled")->value() == "true" && (!request->hasParam("octoUrl") || !request->hasParam("octoToken"))) {
+            request->send(400, "application/json", "{\"success\": false, \"error\": \"Missing OctoPrint URL or Token parameter\"}");
+            return;
+        }
+
         String url = request->getParam("url")->value();
+        bool octoEnabled = (request->getParam("octoEnabled")->value() == "true") ? true : false;
+        String octoUrl = request->getParam("octoUrl")->value();
+        String octoToken = (request->getParam("octoToken")->value() != "") ? request->getParam("octoToken")->value() : "";
         url.trim();
+        octoUrl.trim();
+        octoToken.trim();

-        bool healthy = saveSpoolmanUrl(url);
+        bool healthy = saveSpoolmanUrl(url, octoEnabled, octoUrl, octoToken);
         String jsonResponse = "{\"healthy\": " + String(healthy ? "true" : "false") + "}";
         request->send(200, "application/json", jsonResponse);
@@ -264,16 +310,20 @@ void setupWebserver(AsyncWebServer &server) {
         String bambu_ip = request->getParam("bambu_ip")->value();
         String bambu_serialnr = request->getParam("bambu_serialnr")->value();
         String bambu_accesscode = request->getParam("bambu_accesscode")->value();
+        bool autoSend = (request->getParam("autoSend")->value() == "true") ? true : false;
+        String autoSendTime = request->getParam("autoSendTime")->value();

         bambu_ip.trim();
         bambu_serialnr.trim();
         bambu_accesscode.trim();
+        autoSendTime.trim();

         if (bambu_ip.length() == 0 || bambu_serialnr.length() == 0 || bambu_accesscode.length() == 0) {
             request->send(400, "application/json", "{\"success\": false, \"error\": \"Empty parameter\"}");
             return;
         }

-        bool success = saveBambuCredentials(bambu_ip, bambu_serialnr, bambu_accesscode);
+        bool success = saveBambuCredentials(bambu_ip, bambu_serialnr, bambu_accesscode, autoSend, autoSendTime);
         request->send(200, "application/json", "{\"healthy\": " + String(success ? "true" : "false") + "}");
     });
@@ -286,7 +336,7 @@ void setupWebserver(AsyncWebServer &server) {
     // Route für das Laden der CSS-Datei
     server.on("/style.css", HTTP_GET, [](AsyncWebServerRequest *request){
         Serial.println("Lade style.css");
-        AsyncWebServerResponse *response = request->beginResponse(SPIFFS, "/style.css.gz", "text/css");
+        AsyncWebServerResponse *response = request->beginResponse(LittleFS, "/style.css.gz", "text/css");
         response->addHeader("Content-Encoding", "gzip");
         response->addHeader("Cache-Control", CACHE_CONTROL);
         request->send(response);
@@ -295,7 +345,7 @@ void setupWebserver(AsyncWebServer &server) {
     // Route für das Logo
     server.on("/logo.png", HTTP_GET, [](AsyncWebServerRequest *request){
-        AsyncWebServerResponse *response = request->beginResponse(SPIFFS, "/logo.png.gz", "image/png");
+        AsyncWebServerResponse *response = request->beginResponse(LittleFS, "/logo.png.gz", "image/png");
         response->addHeader("Content-Encoding", "gzip");
         response->addHeader("Cache-Control", CACHE_CONTROL);
         request->send(response);
@@ -304,7 +354,7 @@ void setupWebserver(AsyncWebServer &server) {
     // Route für Favicon
     server.on("/favicon.ico", HTTP_GET, [](AsyncWebServerRequest *request){
-        AsyncWebServerResponse *response = request->beginResponse(SPIFFS, "/favicon.ico", "image/x-icon");
+        AsyncWebServerResponse *response = request->beginResponse(LittleFS, "/favicon.ico", "image/x-icon");
         response->addHeader("Cache-Control", CACHE_CONTROL);
         request->send(response);
         Serial.println("favicon.ico gesendet");
@@ -312,17 +362,26 @@ void setupWebserver(AsyncWebServer &server) {
     // Route für spool_in.png
     server.on("/spool_in.png", HTTP_GET, [](AsyncWebServerRequest *request){
-        AsyncWebServerResponse *response = request->beginResponse(SPIFFS, "/spool_in.png.gz", "image/png");
+        AsyncWebServerResponse *response = request->beginResponse(LittleFS, "/spool_in.png.gz", "image/png");
         response->addHeader("Content-Encoding", "gzip");
         response->addHeader("Cache-Control", CACHE_CONTROL);
         request->send(response);
         Serial.println("spool_in.png gesendet");
     });

+    // Route für set_spoolman.png
+    server.on("/set_spoolman.png", HTTP_GET, [](AsyncWebServerRequest *request){
+        AsyncWebServerResponse *response = request->beginResponse(LittleFS, "/set_spoolman.png.gz", "image/png");
+        response->addHeader("Content-Encoding", "gzip");
+        response->addHeader("Cache-Control", CACHE_CONTROL);
+        request->send(response);
+        Serial.println("set_spoolman.png gesendet");
+    });
+
     // Route für JavaScript Dateien
     server.on("/spoolman.js", HTTP_GET, [](AsyncWebServerRequest *request){
         Serial.println("Anfrage für /spoolman.js erhalten");
-        AsyncWebServerResponse *response = request->beginResponse(SPIFFS, "/spoolman.js.gz", "text/javascript");
+        AsyncWebServerResponse *response = request->beginResponse(LittleFS, "/spoolman.js.gz", "text/javascript");
         response->addHeader("Content-Encoding", "gzip");
         response->addHeader("Cache-Control", CACHE_CONTROL);
         request->send(response);
@@ -331,37 +390,29 @@ void setupWebserver(AsyncWebServer &server) {
     server.on("/rfid.js", HTTP_GET, [](AsyncWebServerRequest *request){
         Serial.println("Anfrage für /rfid.js erhalten");
-        AsyncWebServerResponse *response = request->beginResponse(SPIFFS,"/rfid.js.gz", "text/javascript");
+        AsyncWebServerResponse *response = request->beginResponse(LittleFS,"/rfid.js.gz", "text/javascript");
         response->addHeader("Content-Encoding", "gzip");
         response->addHeader("Cache-Control", CACHE_CONTROL);
         request->send(response);
         Serial.println("RFID.js gesendet");
     });

-    // Route for Firmware Update
+    // Vereinfachter Update-Handler
     server.on("/upgrade", HTTP_GET, [](AsyncWebServerRequest *request) {
-        // During OTA, reduce memory usage
-        ws.enable(false); // Temporarily disable WebSocket
-        ws.cleanupClients();
-        Serial.println("Request for /upgrade received");
-        AsyncWebServerResponse *response = request->beginResponse(SPIFFS, "/upgrade.html.gz", "text/html");
+        AsyncWebServerResponse *response = request->beginResponse(LittleFS, "/upgrade.html.gz", "text/html");
         response->addHeader("Content-Encoding", "gzip");
-        response->addHeader("Cache-Control", CACHE_CONTROL);
+        response->addHeader("Cache-Control", "no-store");
         request->send(response);
     });

-    server.on("/update", HTTP_POST,
-        [](AsyncWebServerRequest *request) {
-            // The response will be sent from handleOTAUpload when the upload is complete
-        },
-        [](AsyncWebServerRequest *request, const String& filename, size_t index, uint8_t *data, size_t len, bool final) {
-            // Free memory before handling update
-            ws.enable(false);
-            ws.cleanupClients();
-            handleOTAUpload(request, filename, index, data, len, final);
-        }
-    );
+    // Update-Handler registrieren
+    handleUpdate(server);
+
+    server.on("/api/version", HTTP_GET, [](AsyncWebServerRequest *request){
+        String fm_version = VERSION;
+        String jsonResponse = "{\"version\": \""+ fm_version +"\"}";
+        request->send(200, "application/json", jsonResponse);
+    });

     // Fehlerbehandlung für nicht gefundene Seiten
     server.onNotFound([](AsyncWebServerRequest *request){
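The static-asset routes above all repeat the same serve-gzip-from-LittleFS pattern; a hypothetical helper, not part of the diff, could express it once:

// Hypothetical helper (not in the repository): registers a gzip-compressed LittleFS asset.
void serveGzipped(AsyncWebServer &srv, const char* uri, const char* fsPath, const char* mime) {
    srv.on(uri, HTTP_GET, [fsPath, mime](AsyncWebServerRequest *request) {
        AsyncWebServerResponse *response = request->beginResponse(LittleFS, fsPath, mime);
        response->addHeader("Content-Encoding", "gzip");
        response->addHeader("Cache-Control", CACHE_CONTROL);
        request->send(response);
    });
}

// Usage, mirroring two of the routes above:
// serveGzipped(server, "/style.css", "/style.css.gz", "text/css");
// serveGzipped(server, "/logo.png", "/logo.png.gz", "image/png");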

View File

@@ -6,8 +6,8 @@
 #include "commonFS.h"
 #include "api.h"
 #include <ArduinoJson.h>
-#include <ESPAsyncWebServer.h>
-#include <AsyncWebSocket.h>
+#include <Update.h>
+#include <AsyncTCP.h>
 #include "bambu.h"
 #include "nfc.h"
 #include "scale.h"
@@ -17,7 +17,12 @@ extern String spoolmanUrl;
 extern AsyncWebServer server;
 extern AsyncWebSocket ws;

+// Server-Initialisierung und Handler
+void initWebServer();
+void handleBody(AsyncWebServerRequest *request, uint8_t *data, size_t len, size_t index, size_t total);
 void setupWebserver(AsyncWebServer &server);
+
+// WebSocket-Funktionen
 void sendAmsData(AsyncWebSocketClient *client);
 void sendNfcData(AsyncWebSocketClient *client);
 void foundNfcTag(AsyncWebSocketClient *client, uint8_t success);

View File

@@ -3,43 +3,127 @@
 #include <WiFi.h>
 #include <esp_wifi.h>
 #include <WiFiManager.h>
+#include <DNSServer.h>
+#include <ESPmDNS.h>
 #include "display.h"
 #include "config.h"

 WiFiManager wm;
 bool wm_nonblocking = false;
+uint8_t wifiErrorCounter = 0;
+
+void wifiSettings() {
+    // Optimierte WiFi-Einstellungen
+    WiFi.mode(WIFI_STA); // explicitly set mode, esp defaults to STA+AP
+    WiFi.setSleep(false); // disable sleep mode
+    WiFi.setHostname("FilaMan");
+    esp_wifi_set_ps(WIFI_PS_NONE);
+    // Maximale Sendeleistung
+    WiFi.setTxPower(WIFI_POWER_19_5dBm); // Set maximum transmit power
+    // Optimiere TCP/IP Stack
+    esp_wifi_set_protocol(WIFI_IF_STA, WIFI_PROTOCOL_11B | WIFI_PROTOCOL_11G | WIFI_PROTOCOL_11N);
+    // Aktiviere WiFi-Roaming für bessere Stabilität
+    esp_wifi_set_rssi_threshold(-80);
+}
+
+void startMDNS() {
+    if (!MDNS.begin("filaman")) {
+        Serial.println("Error setting up MDNS responder!");
+        while(1) {
+            vTaskDelay(1000 / portTICK_PERIOD_MS);
+        }
+    }
+    Serial.println("mDNS responder started");
+}
+
+void configModeCallback (WiFiManager *myWiFiManager) {
+    Serial.println("Entered config mode");
+    oledShowTopRow();
+    oledShowMessage("WiFi Config Mode");
+}

 void initWiFi() {
-    WiFi.mode(WIFI_STA); // explicitly set mode, esp defaults to STA+AP
-    esp_wifi_set_max_tx_power(72); // Setze maximale Sendeleistung auf 20dBm
-    if(wm_nonblocking) wm.setConfigPortalBlocking(false);
-    wm.setConfigPortalTimeout(320); // Portal nach 5min schließen
+    // load Wifi settings
+    wifiSettings();
+
+    wm.setAPCallback(configModeCallback);
+
+    wm.setSaveConfigCallback([]() {
+        Serial.println("Configurations updated");
+        ESP.restart();
+    });
+
+    if(wm_nonblocking) wm.setConfigPortalBlocking(false);
+    //wm.setConfigPortalTimeout(320); // Portal nach 5min schließen
+    wm.setWiFiAutoReconnect(true);
+    wm.setConnectTimeout(5);

     oledShowTopRow();
     oledShowMessage("WiFi Setup");

-    bool res;
-    // res = wm.autoConnect(); // auto generated AP name from chipid
-    res = wm.autoConnect("FilaMan"); // anonymous ap
-    // res = wm.autoConnect("spoolman","password"); // password protected ap
-
-    if(!res) {
+    //bool res = wm.autoConnect("FilaMan"); // anonymous ap
+    if(!wm.autoConnect("FilaMan")) {
         Serial.println("Failed to connect or hit timeout");
         // ESP.restart();
         oledShowTopRow();
         oledShowMessage("WiFi not connected Check Portal");
     }
     else {
         wifiOn = true;
         //if you get here you have connected to the WiFi
         Serial.println("connected...yeey :)");
         Serial.print("IP address: ");
         Serial.println(WiFi.localIP());
         oledShowTopRow();
         display.display();
+
+        vTaskDelay(500 / portTICK_PERIOD_MS);
+        // mDNS
+        startMDNS();
     }
 }
+
+void checkWiFiConnection() {
+    if (WiFi.status() != WL_CONNECTED)
+    {
+        Serial.println("WiFi connection lost. Reconnecting...");
+        wifiOn = false;
+        oledShowTopRow();
+        oledShowMessage("WiFi reconnecting");
+        WiFi.reconnect(); // Versuche, die Verbindung wiederherzustellen
+        vTaskDelay(5000 / portTICK_PERIOD_MS); // Warte 5 Sekunden, bevor erneut geprüft wird
+
+        if (WiFi.status() != WL_CONNECTED)
+        {
+            Serial.println("Failed to reconnect. Restarting WiFi...");
+            WiFi.disconnect();
+            Serial.println("WiFi disconnected.");
+            vTaskDelay(1000 / portTICK_PERIOD_MS);
+            wifiErrorCounter++;
+            //wifiSettings();
+            WiFi.reconnect();
+            Serial.println("WiFi reconnecting...");
+            WiFi.waitForConnectResult();
+        }
+        else
+        {
+            Serial.println("WiFi reconnected.");
+            wifiErrorCounter = 0;
+            wifiOn = true;
+            oledShowTopRow();
+            startMDNS();
+        }
+    }
+
+    if (wifiErrorCounter >= 5)
+    {
+        Serial.println("Too many WiFi errors. Restarting...");
+        ESP.restart();
+    }
+}
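checkWiFiConnection() is clearly meant to be polled; a minimal sketch of a periodic call from the main loop (the 30-second interval is an assumption, not taken from the diff):

// Illustrative only: periodic WiFi watchdog call; the interval is an assumed value.
unsigned long lastWifiCheck = 0;

void loop() {
    if (millis() - lastWifiCheck > 30000) {   // every 30 s (assumption)
        checkWiFiConnection();                // reconnects, restarts after 5 failed attempts
        lastWifiCheck = millis();
    }
}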

View File

@@ -4,5 +4,6 @@
 #include <Arduino.h>

 void initWiFi();
+void checkWiFiConnection();

 #endif