Compare commits
258 Commits
v1.5.9
...
v1.5.12-be
Author | SHA1 | Date | |
---|---|---|---|
69bf5f90fa | |||
382caeaced | |||
47bdf022ec | |||
02febfa943 | |||
257f4df800 | |||
bff6e72219 | |||
26e905050d | |||
046f770a52 | |||
2587227e78 | |||
0f19dc4f46 | |||
721dac1ead | |||
08abd1a37f | |||
da78861613 | |||
9231a303f3 | |||
d12e766cd7 | |||
af7bc23703 | |||
de39892f64 | |||
40cb835e51 | |||
eb9d9e74f4 | |||
d8af3f45e5 | |||
96bb8f9c7c | |||
b8b6893cd0 | |||
0a246c1fe4 | |||
965ea5da1e | |||
b8b6f637f2 | |||
12044b657b | |||
95433b4842 | |||
54275f2ac9 | |||
fbd9cb66f1 | |||
f1cdd3f41d | |||
d897817020 | |||
686eb22232 | |||
a2816da654 | |||
cc8f1cfd7b | |||
d195f76d5e | |||
6bed3b086c | |||
3dd4b82710 | |||
bc41205f15 | |||
f450d1efdf | |||
6e94092a74 | |||
ece510099e | |||
1f01af4da9 | |||
c5d24d5972 | |||
48556b9519 | |||
2ac8effe04 | |||
4e58407af8 | |||
d776956c5e | |||
25233f70d5 | |||
b4584364d6 | |||
33ea062773 | |||
771b0a4839 | |||
c48003e1b2 | |||
83dec4c876 | |||
dca9ef8d08 | |||
513e02b867 | |||
99babe2b4a | |||
c17ab2c434 | |||
ec7386922e | |||
1eb81fad5d | |||
9d406e3428 | |||
5c2db22a90 | |||
164c7b2af5 | |||
cd1c93c485 | |||
15219fa1e4 | |||
206db69e6d | |||
9e67af7343 | |||
9e58b042c8 | |||
55200d31cd | |||
65967ca047 | |||
86e5f7e48a | |||
7ccdde8489 | |||
619979ab14 | |||
174c48f734 | |||
fdeb6d5b61 | |||
fb7dca38f0 | |||
|
69ae5cab5f | ||
|
0e00fd8b91 | ||
|
accb02ab80 | ||
|
d7ee52ba1f | ||
|
0a02912e4a | ||
|
f133a1b321 | ||
876e9c62d8 | |||
765cb5319d | |||
9a9ed175dd | |||
|
5b04c2eb80 | ||
|
b94db80321 | ||
|
d815733550 | ||
|
afef544c66 | ||
|
6b6aec07b3 | ||
|
852a2f4c69 | ||
|
4b81703e38 | ||
7ba0c4f933 | |||
d0b793a300 | |||
7c320a87fe | |||
0777b6371d | |||
fcdf91071c | |||
ffb1117150 | |||
c317610229 | |||
73c3457f40 | |||
cf62e12aa4 | |||
|
b583ef71ad | ||
|
b991f2ee27 | ||
|
e2e0a23f0a | ||
|
537f452601 | ||
|
faaffee391 | ||
|
d536181a73 | ||
e38220739d | |||
fc48d6e67c | |||
aeb61ba462 | |||
|
7f25f3e14f | ||
150a178038 | |||
8b43f34a86 | |||
7a85ce6a04 | |||
68fa1e77a1 | |||
9c06fe6725 | |||
1cf392c1cd | |||
|
69d6ba4bcb | ||
|
21ec4e0ff3 | ||
c2a09b21a0 | |||
0937a9e9f0 | |||
818b8387c0 | |||
3f2beb6f54 | |||
56248ff2cb | |||
6a4945666e | |||
97d1519489 | |||
f608c4a19b | |||
aa2eb91d64 | |||
35d2445c6c | |||
537607ed40 | |||
7e330dca1a | |||
d943d15c0a | |||
a345b76cd2 | |||
836e48bde2 | |||
a6a8c69aee | |||
ddb4cd8e53 | |||
d45313a3ff | |||
70350e19f8 | |||
7613effccf | |||
7280d5be7f | |||
ada4a84942 | |||
e32aa6ec51 | |||
04a18469b5 | |||
1c4d5f3874 | |||
a2eb57cd7a | |||
1c619c5bcb | |||
2e05651f88 | |||
f1b803a3c1 | |||
5c4ba9f0ba | |||
19d70301f5 | |||
4fa21d3c0e | |||
f22a01127c | |||
92d377713d | |||
8732c81bb9 | |||
e7bbf45a9f | |||
a8ce964add | |||
|
69f01d1e57 | ||
|
99231786a5 | ||
8536b4f8fa | |||
c84c5fa734 | |||
2a60e149b9 | |||
7e486191b7 | |||
610479bc5a | |||
b7fa53da7e | |||
629b4276cf | |||
cb15dae87e | |||
|
abed1c9806 | ||
db1f33c2b6 | |||
174a58906c | |||
20cc9b196b | |||
ff80b05502 | |||
edfdef53f4 | |||
89a3fed7a9 | |||
f44173824f | |||
169d73bfc0 | |||
c78f36d21a | |||
054bc43f65 | |||
31c41576ee | |||
3a744bc1e6 | |||
42f76fc20a | |||
|
124f326670 | ||
af34ce45dd | |||
c0cb3ff5c9 | |||
4c754d84ff | |||
d2c85018f5 | |||
8dac49ea9e | |||
5365c0e1b9 | |||
|
4abe9d6d33 | ||
e5d0334714 | |||
16364cbd86 | |||
1b63ab668f | |||
f67ef8e905 | |||
a490b77860 | |||
52d063b619 | |||
d5c005d6f7 | |||
68866f1632 | |||
a4200e469d | |||
e5e14dfc99 | |||
863d591a17 | |||
69675f3c06 | |||
2ae3df1aab | |||
3910da9fb5 | |||
26d53929ac | |||
64e3461264 | |||
bc04db91b8 | |||
f500f8bd11 | |||
84391faffd | |||
aae93de7dd | |||
0f847a2731 | |||
aa7fc7e64b | |||
|
e0f5f48cc4 | ||
|
0b79891f83 | ||
|
11c5ca3383 | ||
|
e3c3b3f42d | ||
|
8db7765e7e | ||
|
dc97740ddc | ||
|
ababe8b842 | ||
|
62bcbb2ae8 | ||
|
62330a3fd8 | ||
|
4556730c6e | ||
|
c92a8b0957 | ||
|
b08da071c2 | ||
|
9c949e74e8 | ||
|
17fcf765fd | ||
|
95a03f92e2 | ||
|
d9e69d8c14 | ||
|
1ec09ebf3a | ||
|
7ef0cc44d5 | ||
|
fe962b2bfa | ||
|
aec07f3c6d | ||
|
b5cb5b17ea | ||
|
c3e7758920 | ||
|
66395028a6 | ||
|
64403b9599 | ||
|
ebf6688701 | ||
|
073a5f4539 | ||
|
69bd5c3eb2 | ||
a328fbc6a6 | |||
6f52cd1686 | |||
c1122ad87d | |||
|
1aeced76a2 | ||
967ec35c6a | |||
f60113aa83 | |||
63a7398979 | |||
40cb504251 | |||
41a4f8af4a | |||
e122224472 | |||
5d3a8d971f | |||
e62e5e7062 | |||
726a60882d | |||
2918b4ca77 | |||
955ba0f001 | |||
8cf7dc0b77 | |||
33e4b371ed | |||
fd832d8808 | |||
c0e213a4ac | |||
bcc00f711b | |||
78f336d5d7 | |||
ee7f8ff517 |
1
.gitignore
vendored
1
.gitignore
vendored
@@ -32,6 +32,7 @@ test/README
|
||||
data/*
|
||||
!data/
|
||||
!data/.gitkeep
|
||||
# important
|
||||
html/bambu_credentials.json
|
||||
html/spoolman_url.json
|
||||
_local/*
|
||||
|
354
CHANGELOG.md
354
CHANGELOG.md
@@ -1,5 +1,359 @@
|
||||
# Changelog
|
||||
|
||||
## [1.5.12-beta13] - 2025-08-29
|
||||
### Changed
|
||||
- update platformio.ini for beta version v1.5.12-beta13
|
||||
- reorganize clearUserDataArea function for improved clarity and safety
|
||||
|
||||
|
||||
## [1.5.12-beta12] - 2025-08-29
|
||||
### Changed
|
||||
- update platformio.ini for beta version v1.5.12-beta12
|
||||
|
||||
### Fixed
|
||||
- reset NFC state on API send failure to allow retry
|
||||
- update createdFilamentId reset value to 65535 for better task handling
|
||||
- update createdVendorId reset value to 65535 for improved API handling
|
||||
|
||||
|
||||
## [1.5.12-beta11] - 2025-08-29
|
||||
### Changed
|
||||
- update platformio.ini for beta version v1.5.12-beta11
|
||||
|
||||
### Fixed
|
||||
- update spoolman ID reset values to 65535 for better API response detection
|
||||
|
||||
|
||||
## [1.5.12-beta10] - 2025-08-29
|
||||
### Changed
|
||||
- update platformio.ini for beta version v1.5.12-beta10
|
||||
- streamline task creation in checkVendor and checkFilament functions
|
||||
|
||||
|
||||
## [1.5.12-beta9] - 2025-08-29
|
||||
### Added
|
||||
- update vendor and filament ID handling to use NULL and add delays for stability
|
||||
|
||||
### Changed
|
||||
- update platformio.ini for beta version v1.5.12-beta9
|
||||
|
||||
|
||||
## [1.5.12-beta8] - 2025-08-29
|
||||
### Added
|
||||
- add delay to ensure proper setting of vendor and filament IDs after API state changes
|
||||
|
||||
### Changed
|
||||
- update platformio.ini for beta version v1.5.12-beta8
|
||||
|
||||
### Fixed
|
||||
- correct color_hex key usage and comment out unused date fields in spool creation
|
||||
|
||||
|
||||
## [1.5.12-beta7] - 2025-08-29
|
||||
### Changed
|
||||
- update platformio.ini for beta version v1.5.12-beta7
|
||||
|
||||
### Fixed
|
||||
- improve API state handling and vendor name formatting
|
||||
|
||||
|
||||
## [1.5.12-beta6] - 2025-08-29
|
||||
### Changed
|
||||
- update platformio.ini for beta version v1.5.12-beta6
|
||||
- improve task synchronization in vendor, filament, and spool creation functions
|
||||
|
||||
|
||||
## [1.5.12-beta5] - 2025-08-29
|
||||
### Added
|
||||
- enhance NDEF decoding with detailed validation and debugging output
|
||||
|
||||
### Changed
|
||||
- update platformio.ini for beta version v1.5.12-beta5
|
||||
|
||||
|
||||
## [1.5.12-beta4] - 2025-08-29
|
||||
### Added
|
||||
- enhance NDEF decoding to validate structure and extract JSON payload
|
||||
|
||||
### Changed
|
||||
- update platformio.ini for beta version v1.5.12-beta4
|
||||
|
||||
|
||||
## [1.5.12-beta3] - 2025-08-29
|
||||
### Added
|
||||
- add logging for decoded JSON data in NFC processing
|
||||
|
||||
### Changed
|
||||
- update platformio.ini for beta version v1.5.12-beta3
|
||||
|
||||
|
||||
## [1.5.12-beta2] - 2025-08-29
|
||||
### Changed
|
||||
- update platformio.ini for beta version v1.5.12-beta2
|
||||
|
||||
### Fixed
|
||||
- enhance filament creation logic to include dynamic comments based on payload
|
||||
|
||||
|
||||
## [1.5.12-beta1] - 2025-08-28
|
||||
### Added
|
||||
- implement filament and spool creation in Spoolman API
|
||||
- Add JSON structure comments for filament and spool creation
|
||||
- Add vendor and filament management to API; implement recycling factory handling in NFC
|
||||
|
||||
### Changed
|
||||
- update platformio.ini for beta version v1.5.12-beta1
|
||||
- Merge branch 'main' into recyclingfabrik
|
||||
- Merge branch 'main' into recyclingfabrik
|
||||
|
||||
|
||||
## [1.5.12] - 2025-08-28
|
||||
### Added
|
||||
- add numbering to update sections in upgrade.html refactor: improve readability of checkSpoolmanInstance function
|
||||
|
||||
### Changed
|
||||
- update platformio.ini for version v1.5.12
|
||||
- clean up library dependencies in platformio.ini
|
||||
|
||||
|
||||
## [1.5.11-beta4] - 2025-08-28
|
||||
### Changed
|
||||
- update platformio.ini for beta version v1.5.11-beta4
|
||||
|
||||
### Fixed
|
||||
- remove debug logging from checkSpoolmanInstance function
|
||||
|
||||
|
||||
## [1.5.11-beta3] - 2025-08-28
|
||||
### Added
|
||||
- add logging for spoolman status in checkSpoolmanInstance function
|
||||
|
||||
### Changed
|
||||
- update platformio.ini for beta version v1.5.11-beta3
|
||||
|
||||
|
||||
## [1.5.11-beta2] - 2025-08-28
|
||||
### Added
|
||||
- add logging for healthy spoolman instance check
|
||||
|
||||
### Changed
|
||||
- update platformio.ini for beta version v1.5.11-beta2
|
||||
|
||||
|
||||
## [1.5.11-beta1] - 2025-08-28
|
||||
### Changed
|
||||
- update platformio.ini for beta version v1.5.11-beta1
|
||||
|
||||
### Fixed
|
||||
- adjust spacing in loop structure and enable tare function in scale loop
|
||||
|
||||
|
||||
## [1.5.11] - 2025-08-27
|
||||
### Changed
|
||||
- update platformio.ini for version v1.5.11
|
||||
|
||||
|
||||
## [1.5.10] - 2025-08-27
|
||||
### Added
|
||||
- improve weight processing logic and add auto-send feature for Bambu spool ID
|
||||
- improve weight processing logic and add auto-send feature for Bambu spool ID
|
||||
- Adds a link to the spool in spoolman when reading a spool tag
|
||||
- Fixes types and some issues in the new graphics
|
||||
- Adds ENABLE_HEAP_DEBUGGING define as comment to the build flags
|
||||
- Adds data directory and further .vscode files to to .gitignore
|
||||
- Introduces new heap debugging feature and fixes some memory leaks in website feature
|
||||
- Fixes some issues with the new location tags
|
||||
- Adds new feature to write and read location tags
|
||||
- Adds slight debouncing to the scale loop weight logic
|
||||
- add loadcell desc.
|
||||
- implement multi-color filament display and styles for dropdown options
|
||||
- add remaining weight logging for PUT requests and improve error reporting in sendToApi function
|
||||
- add remaining weight logging and display after successful spool update
|
||||
- add weight field to update payload in updateSpoolTagId function
|
||||
- add auto-tare functionality and update scale handling based on touch sensor connection
|
||||
- add touch sensor connection check and update logic
|
||||
- add manual tare functionality for scale
|
||||
- add debounce handling for TTP223 touch sensor
|
||||
- add TTP223 touch sensor support and wiring configuration
|
||||
- Renamed states of NFC state machine and introduced new state machine for spoolman API
|
||||
- add forced cache refresh after removing and saving Bambu credentials
|
||||
- add functionality to remove Bambu credentials and update API handling
|
||||
- add rfid_bambu.html and update bambu connection handling
|
||||
- add error handling for missing vendor IDs in filament data
|
||||
- add WiFi connection check and restart Bambu if not connected
|
||||
- added new .step, now with correct individual parts
|
||||
- added changelog
|
||||
- Add files via upload
|
||||
- added .stp files of modifications
|
||||
- added merged picture
|
||||
- added pictures of components bought from AliE
|
||||
- Add files via upload
|
||||
- added pictures for heat insert location
|
||||
- added pictures showing heat insert location
|
||||
- remove unnecessary delay in MQTT setup and add delay before restart
|
||||
- add new 3D print file for Filaman scale
|
||||
- added Discord Server
|
||||
- add support for Spoolman Octoprint Plugin in README files
|
||||
- add OctoPrint integration with configurable fields and update functionality
|
||||
- add version comparison function and check for outdated versions before updates
|
||||
- remove unused version and protocol fields from JSON output; add error message for insufficient memory
|
||||
|
||||
### Changed
|
||||
- update platformio.ini for version v1.5.10
|
||||
- Merge branch 'main' of https://gitlab.fire-devils.org/3D-Druck/Filaman
|
||||
- Changed Amazon Link for PN532
|
||||
- Changed Amazon Link for PN532
|
||||
- update changelog and header for version v1.5.9
|
||||
- update platformio.ini for version v1.5.9
|
||||
- Enhance API to support weight updates after successful spool tag updates
|
||||
- update changelog and header for version v1.5.8
|
||||
- update platformio.ini for version v1.5.8
|
||||
- Merge pull request #45 from janecker/nfc_write_improvements
|
||||
- Introduces periodic Spoolman Healthcheck
|
||||
- Improves init - NFC reading now only starts after boot is finished
|
||||
- Further improvements on NFC writing
|
||||
- Merge pull request #44 from janecker/graphics_rework
|
||||
- Graphic rework of the NFC writing process
|
||||
- Remove unused parameter of sendNfcData()
|
||||
- Reworks startup graphics and timings
|
||||
- update changelog and header for version v1.5.7
|
||||
- update platformio.ini for version v1.5.7
|
||||
- clean up unused variables and improve .gitignore entries
|
||||
- update changelog and header for version v1.5.6
|
||||
- update webpages for version v1.5.6
|
||||
- update platformio.ini for version v1.5.6
|
||||
- Merge pull request #42 from janecker/configuration_nvs_storage
|
||||
- Merge branch 'main' into configuration_nvs_storage
|
||||
- Changes configuration storage of spoolman and bambu values
|
||||
- update changelog and header for version v1.5.5
|
||||
- update platformio.ini for version v1.5.5
|
||||
- update changelog and header for version v1.5.4
|
||||
- update platformio.ini for version v1.5.4
|
||||
- Merge branch 'main' of github.com:ManuelW77/Filaman
|
||||
- Merge pull request #39 from janecker/location_tags
|
||||
- Merge pull request #38 from janecker/scale_debouncing
|
||||
- update changelog and header for version v1.5.3
|
||||
- update platformio.ini for version v1.5.3
|
||||
- Affiliate Links
|
||||
- update changelog and header for version v1.5.2
|
||||
- update platformio.ini for version v1.5.2
|
||||
- update changelog and header for version v1.5.1
|
||||
- update version to 1.5.1 and improve OTA update handling with task management
|
||||
- update changelog and header for version v1.4.14
|
||||
- update platformio.ini for version v1.4.14
|
||||
- update changelog and header for version v1.4.13
|
||||
- update platformio.ini for version v1.4.13
|
||||
- update changelog and header for version v1.4.12
|
||||
- update platformio.ini for version v1.4.12
|
||||
- update README files to clarify PN532 DIP switch settings
|
||||
- update changelog and header for version v1.4.11
|
||||
- update platformio.ini for version v1.4.11
|
||||
- Merge branch 'main' of github.com:ManuelW77/Filaman
|
||||
- update changelog and header for version v1.4.10
|
||||
- update platformio.ini for version v1.4.10
|
||||
- Merge pull request #31 from janecker/nfc_rework
|
||||
- Introducing enum for handling the NFC state to improve code readability
|
||||
- update changelog and header for version v1.4.9
|
||||
- update platformio.ini for version v1.4.9
|
||||
- update changelog and header for version v1.4.8
|
||||
- update platformio.ini for version v1.4.8
|
||||
- Merge pull request #30 from janecker/main
|
||||
- Merge branch 'testing' into main
|
||||
- update changelog and header for version v1.4.7
|
||||
- update platformio.ini for version v1.4.7
|
||||
- Merge branch 'testing'
|
||||
- update remove button for Bambu credentials with red background
|
||||
- Merge pull request #28 from tugsi/main
|
||||
- update changelog and header for version v1.4.6
|
||||
- update platformio.ini for version v1.4.6
|
||||
- update changelog and header for version v1.4.5
|
||||
- update platformio.ini for version v1.4.5
|
||||
- Merge branch 'testing'
|
||||
- remove unused request_topic subscription and reduce MQTT task stack size
|
||||
- Merge pull request #26 from tugsi/main
|
||||
- rename report_topic to topic and update MQTT subscription logic, switched publish topic to request
|
||||
- update changelog and header for version v1.4.4
|
||||
- update platformio.ini for version v1.4.4
|
||||
- update changelog and header for version v1.4.3
|
||||
- update platformio.ini for version v1.4.3
|
||||
- update changelog and header for version v1.4.2
|
||||
- update platformio.ini for version v1.4.2
|
||||
- increase stack size for BambuMqtt task
|
||||
- update Discord Link
|
||||
- update Discord Link
|
||||
- remove commented-out subscription topic in MQTT setup
|
||||
- update changelog and header for version v1.4.1
|
||||
- update platformio.ini for version v1.4.1
|
||||
- refactor length calculation to convert total length to meters before formatting
|
||||
- Merge pull request #16 from spitzbirne32/main
|
||||
- improved housing to show display better
|
||||
- removed CAD, as they were all duplicates
|
||||
- typo in AliE link
|
||||
- Delete usermod/spitzbirne32/STL/README.md
|
||||
- Update README.md
|
||||
- moved pictures of parts into dedicated folders
|
||||
- Update README.md
|
||||
- Update README.md
|
||||
- Update README.md
|
||||
- Delete usermod/spitzbirne32/STL/ScaleTop_Heatinsert_Location_usermod_spitzbirne32_.png
|
||||
- Delete usermod/spitzbirne32/STL/Housing_Heatinsert_Location_usermod_spitzbirne32_.png
|
||||
- created folders
|
||||
- Update README.md
|
||||
- Update README.md
|
||||
- Create README.md
|
||||
- Update README.md
|
||||
- Update README.md
|
||||
- Create README.md
|
||||
- Merge pull request #15 from ManuelW77/main
|
||||
- Merge pull request #14 from janecker/scale-calibration-rework
|
||||
- Reworks the scale calibration handling
|
||||
- remove redundant scale calibration checks and enhance task management
|
||||
- enhance AMS data handling and streamline spool auto-setting logic
|
||||
- adjust stack size and improve scale calibration logic
|
||||
- update labels and input types for better clarity and functionality
|
||||
- update documentation for clarity and accuracy
|
||||
- update changelog and header for version v1.4.0
|
||||
- update NFC tag references to include NTAG213 and clarify storage capacity
|
||||
- bump version to 1.4.0
|
||||
- remove unused version and protocol fields from NFC data packet
|
||||
- sort vendors alphabetically in the dropdown list
|
||||
- Merge pull request #10 from janecker/nfc-improvements
|
||||
|
||||
### Fixed
|
||||
- Fixes issue that scale not calibrated message was not shown
|
||||
- Improves NFC writing workaround and removes debug output
|
||||
- Fixes typos in upgrade page
|
||||
- Reworks graphics of tag reading and some api fixes
|
||||
- Replaces usage of String with const char* in heap debug function
|
||||
- Merge pull request #41 from janecker/memory_leak_fixes
|
||||
- Fixes compiler warnings in nfc
|
||||
- Memory leak fixes in api and nfc, location tag fix
|
||||
- Merge pull request #40 from janecker/location_bambu_fix
|
||||
- uncomment monitor_port configuration in platformio.ini
|
||||
- update spool weight conditionally based on NFC ID
|
||||
- update weight field in update payload to only include values greater than 10
|
||||
- increase stack size for sendToApi task to improve stability
|
||||
- adjust tare weight tolerance to ignore deviations of 2g
|
||||
- improve weight stability check before sending to API
|
||||
- update touch sensor connection logic to correctly identify connection status
|
||||
- update TTP223 pin configuration and adjust touch sensor logic
|
||||
- enhance HTTP method handling in sendToApi function
|
||||
- improve HTTP client configuration and clear update documents after API calls
|
||||
- Fixes memory leak in HTTPClient by disabling connection reuse
|
||||
- update reload logic after removing and saving Bambu credentials for better cache handling
|
||||
- handle Bambu connection state by introducing bambuDisabled flag
|
||||
- handle potential undefined value for tray_info_idx in handleSpoolIn function, by @tugsi
|
||||
- Fix rfid.js-Failure with X1-Series, if you wanna send a Spool to AMS: - Uncaught TypeError: Cannot read properties of undefined (reading 'replace') at handleSpoolIn (rfid.js:493:67) at HTMLButtonElement.onclick ((Index):1:1) handleSpoolIn @ rfid.js:493 onclick @ (Index):1
|
||||
- increase MQTT buffer size and adjust task stack size
|
||||
- Fix BufferSize for larger JSONs from X-Series
|
||||
- adjust weight threshold for tare check to allow negative values
|
||||
- use unique client ID for MQTT connection to avoid conflicts
|
||||
- reload page after firmware update completion
|
||||
- increase WiFi connection timeout from 5 to 10 seconds
|
||||
- ensure valid URL format and remove trailing slash in setupWebserver
|
||||
- correct typo in console log for total length
|
||||
|
||||
|
||||
## [1.5.9] - 2025-08-11
|
||||
### Changed
|
||||
- update platformio.ini for version v1.5.9
|
||||
|
@@ -62,7 +62,7 @@ Discord Server: [https://discord.gg/my7Gvaxj2v](https://discord.gg/my7Gvaxj2v)
|
||||
- **OLED 0.96 Zoll I2C white/yellow Display:** 128x64 SSD1306.
|
||||
[Amazon Link](https://amzn.to/445aaa9)
|
||||
- **PN532 NFC NXP RFID-Modul V3:** For NFC tag operations.
|
||||
[Amazon Link](https://amzn.to/4iO6CO4)
|
||||
[Amazon Link](https://amzn.eu/d/gy9vaBX)
|
||||
- **NFC Tags NTAG213 NTAG215:** RFID Tag
|
||||
[Amazon Link](https://amzn.to/3E071xO)
|
||||
- **TTP223 Touch Sensor (optional):** For reTARE per Button/Touch
|
||||
|
@@ -66,7 +66,7 @@ Discord Server: [https://discord.gg/my7Gvaxj2v](https://discord.gg/my7Gvaxj2v)
|
||||
- **OLED 0.96 Zoll I2C white/yellow Display:** 128x64 SSD1306.
|
||||
[Amazon Link](https://amzn.to/445aaa9)
|
||||
- **PN532 NFC NXP RFID-Modul V3:** For NFC tag operations.
|
||||
[Amazon Link](https://amzn.to/4iO6CO4)
|
||||
[Amazon Link](https://amzn.eu/d/gy9vaBX)
|
||||
- **NFC Tags NTAG213 NTAG215:** RFID Tag
|
||||
[Amazon Link](https://amzn.to/3E071xO)
|
||||
- **TTP223 Touch Sensor (optional):** For reTARE per Button/Touch
|
||||
|
@@ -56,7 +56,7 @@
|
||||
|
||||
<div class="update-options">
|
||||
<div class="update-section">
|
||||
<h2>Firmware Update</h2>
|
||||
<h2>1) Firmware Update</h2>
|
||||
<p>Upload a new firmware file (upgrade_filaman_firmware_*.bin)</p>
|
||||
<div class="update-form">
|
||||
<form id="firmwareForm" enctype='multipart/form-data' data-type="firmware">
|
||||
@@ -67,7 +67,7 @@
|
||||
</div>
|
||||
|
||||
<div class="update-section">
|
||||
<h2>Webpage Update</h2>
|
||||
<h2>2) Webpage Update</h2>
|
||||
<p>Upload a new webpage file (upgrade_filaman_website_*.bin)</p>
|
||||
<div class="update-form">
|
||||
<form id="webpageForm" enctype='multipart/form-data' data-type="webpage">
|
||||
|
@@ -9,7 +9,7 @@
|
||||
; https://docs.platformio.org/page/projectconf.html
|
||||
|
||||
[common]
|
||||
version = "1.5.9"
|
||||
version = "1.5.12-beta13"
|
||||
to_old_version = "1.5.0"
|
||||
|
||||
##
|
||||
@@ -23,10 +23,7 @@ monitor_speed = 115200
|
||||
lib_deps =
|
||||
tzapu/WiFiManager @ ^2.0.17
|
||||
https://github.com/me-no-dev/ESPAsyncWebServer.git#master
|
||||
#me-no-dev/AsyncTCP @ ^1.1.1
|
||||
https://github.com/esphome/AsyncTCP.git
|
||||
#mathieucarbou/ESPAsyncWebServer @ ^3.6.0
|
||||
#esp32async/AsyncTCP @ ^3.3.5
|
||||
bogde/HX711 @ ^0.7.5
|
||||
adafruit/Adafruit SSD1306 @ ^2.5.13
|
||||
adafruit/Adafruit GFX Library @ ^1.11.11
|
||||
@@ -36,7 +33,6 @@ lib_deps =
|
||||
digitaldragon/SSLClient @ ^1.3.2
|
||||
|
||||
; Enable SPIFFS upload
|
||||
#board_build.filesystem = spiffs
|
||||
board_build.filesystem = littlefs
|
||||
; Update partition settings
|
||||
board_build.partitions = partitions.csv
|
||||
|
484
src/api.cpp
484
src/api.cpp
@@ -5,8 +5,22 @@
|
||||
#include <Preferences.h>
|
||||
#include "debug.h"
|
||||
#include "scale.h"
|
||||
|
||||
#include "nfc.h"
|
||||
#include <time.h>
|
||||
volatile spoolmanApiStateType spoolmanApiState = API_IDLE;
|
||||
|
||||
// Returns current date and time in ISO8601 format
|
||||
String getCurrentDateISO8601() {
|
||||
struct tm timeinfo;
|
||||
if(!getLocalTime(&timeinfo)) {
|
||||
Serial.println("Failed to obtain time");
|
||||
return "1970-01-01T00:00:00Z";
|
||||
}
|
||||
char timeStringBuff[25];
|
||||
strftime(timeStringBuff, sizeof(timeStringBuff), "%Y-%m-%dT%H:%M:%SZ", &timeinfo);
|
||||
return String(timeStringBuff);
|
||||
}
|
||||
|
||||
//bool spoolman_connected = false;
|
||||
String spoolmanUrl = "";
|
||||
bool octoEnabled = false;
|
||||
@@ -14,6 +28,11 @@ bool sendOctoUpdate = false;
|
||||
String octoUrl = "";
|
||||
String octoToken = "";
|
||||
uint16_t remainingWeight = 0;
|
||||
uint16_t createdVendorId = 0; // Store ID of newly created vendor
|
||||
uint16_t foundVendorId = 0; // Store ID of found vendor
|
||||
uint16_t foundFilamentId = 0; // Store ID of found filament
|
||||
uint16_t createdFilamentId = 0; // Store ID of newly created filament
|
||||
uint16_t createdSpoolId = 0; // Store ID of newly created spool
|
||||
bool spoolmanConnected = false;
|
||||
bool spoolmanExtraFieldsChecked = false;
|
||||
TaskHandle_t* apiTask;
|
||||
@@ -103,7 +122,7 @@ void sendToApi(void *parameter) {
|
||||
|
||||
// Wait until API is IDLE
|
||||
while(spoolmanApiState != API_IDLE){
|
||||
Serial.println("Waiting!");
|
||||
vTaskDelay(100 / portTICK_PERIOD_MS);
|
||||
yield();
|
||||
}
|
||||
spoolmanApiState = API_TRANSMITTING;
|
||||
@@ -129,10 +148,11 @@ void sendToApi(void *parameter) {
|
||||
int httpCode;
|
||||
if (httpType == "PATCH") httpCode = http.PATCH(updatePayload);
|
||||
else if (httpType == "POST") httpCode = http.POST(updatePayload);
|
||||
else if (httpType == "GET") httpCode = http.GET();
|
||||
else httpCode = http.PUT(updatePayload);
|
||||
|
||||
if (httpCode == HTTP_CODE_OK) {
|
||||
Serial.println("Spoolman erfolgreich aktualisiert");
|
||||
Serial.println("Spoolman Abfrage erfolgreich");
|
||||
|
||||
// Restgewicht der Spule auslesen
|
||||
String payload = http.getString();
|
||||
@@ -168,6 +188,86 @@ void sendToApi(void *parameter) {
|
||||
oledShowProgressBar(5, 5, "Spool Tag", ("Done: " + String(remainingWeight) + " g remain").c_str());
|
||||
remainingWeight = 0;
|
||||
break;
|
||||
case API_REQUEST_VENDOR_CREATE:
|
||||
Serial.println("Vendor successfully created!");
|
||||
createdVendorId = doc["id"].as<uint16_t>();
|
||||
Serial.print("Created Vendor ID: ");
|
||||
Serial.println(createdVendorId);
|
||||
oledShowProgressBar(1, 1, "Vendor", "Created!");
|
||||
break;
|
||||
case API_REQUEST_VENDOR_CHECK:
|
||||
if (doc.isNull() || doc.size() == 0) {
|
||||
Serial.println("Vendor not found in response");
|
||||
foundVendorId = 0;
|
||||
} else {
|
||||
foundVendorId = doc[0]["id"].as<uint16_t>();
|
||||
Serial.print("Found Vendor ID: ");
|
||||
Serial.println(foundVendorId);
|
||||
}
|
||||
break;
|
||||
case API_REQUEST_FILAMENT_CHECK:
|
||||
if (doc.isNull() || doc.size() == 0) {
|
||||
Serial.println("Filament not found in response");
|
||||
foundFilamentId = 0;
|
||||
} else {
|
||||
foundFilamentId = doc[0]["id"].as<uint16_t>();
|
||||
Serial.print("Found Filament ID: ");
|
||||
Serial.println(foundFilamentId);
|
||||
}
|
||||
break;
|
||||
case API_REQUEST_FILAMENT_CREATE:
|
||||
Serial.println("Filament successfully created!");
|
||||
createdFilamentId = doc["id"].as<uint16_t>();
|
||||
Serial.print("Created Filament ID: ");
|
||||
Serial.println(createdFilamentId);
|
||||
oledShowProgressBar(1, 1, "Filament", "Created!");
|
||||
break;
|
||||
case API_REQUEST_SPOOL_CREATE:
|
||||
Serial.println("Spool successfully created!");
|
||||
createdSpoolId = doc["id"].as<uint16_t>();
|
||||
Serial.print("Created Spool ID: ");
|
||||
Serial.println(createdSpoolId);
|
||||
oledShowProgressBar(1, 1, "Spool", "Created!");
|
||||
break;
|
||||
}
|
||||
}
|
||||
doc.clear();
|
||||
} else if (httpCode == HTTP_CODE_CREATED) {
|
||||
Serial.println("Spoolman erfolgreich erstellt");
|
||||
|
||||
// Parse response for created resources
|
||||
String payload = http.getString();
|
||||
JsonDocument doc;
|
||||
DeserializationError error = deserializeJson(doc, payload);
|
||||
if (error) {
|
||||
Serial.print("Fehler beim Parsen der JSON-Antwort: ");
|
||||
Serial.println(error.c_str());
|
||||
} else {
|
||||
switch(requestType){
|
||||
case API_REQUEST_VENDOR_CREATE:
|
||||
Serial.println("Vendor successfully created!");
|
||||
createdVendorId = doc["id"].as<uint16_t>();
|
||||
Serial.print("Created Vendor ID: ");
|
||||
Serial.println(createdVendorId);
|
||||
oledShowProgressBar(1, 1, "Vendor", "Created!");
|
||||
break;
|
||||
case API_REQUEST_FILAMENT_CREATE:
|
||||
Serial.println("Filament successfully created!");
|
||||
createdFilamentId = doc["id"].as<uint16_t>();
|
||||
Serial.print("Created Filament ID: ");
|
||||
Serial.println(createdFilamentId);
|
||||
oledShowProgressBar(1, 1, "Filament", "Created!");
|
||||
break;
|
||||
case API_REQUEST_SPOOL_CREATE:
|
||||
Serial.println("Spool successfully created!");
|
||||
createdSpoolId = doc["id"].as<uint16_t>();
|
||||
Serial.print("Created Spool ID: ");
|
||||
Serial.println(createdSpoolId);
|
||||
oledShowProgressBar(1, 1, "Spool", "Created!");
|
||||
break;
|
||||
default:
|
||||
// Handle other create operations if needed
|
||||
break;
|
||||
}
|
||||
}
|
||||
doc.clear();
|
||||
@@ -235,11 +335,19 @@ void sendToApi(void *parameter) {
|
||||
case API_REQUEST_BAMBU_UPDATE:
|
||||
oledShowProgressBar(1, 1, "Failure!", "Bambu update");
|
||||
break;
|
||||
case API_REQUEST_VENDOR_CREATE:
|
||||
oledShowProgressBar(1, 1, "Failure!", "Vendor create");
|
||||
break;
|
||||
case API_REQUEST_FILAMENT_CREATE:
|
||||
oledShowProgressBar(1, 1, "Failure!", "Filament create");
|
||||
break;
|
||||
case API_REQUEST_SPOOL_CREATE:
|
||||
oledShowProgressBar(1, 1, "Failure!", "Spool create");
|
||||
break;
|
||||
}
|
||||
Serial.println("Fehler beim Senden an Spoolman! HTTP Code: " + String(httpCode));
|
||||
|
||||
// TBD: really required?
|
||||
vTaskDelay(2000 / portTICK_PERIOD_MS);
|
||||
nfcReaderState = NFC_IDLE; // Reset NFC state to allow retry
|
||||
}
|
||||
|
||||
http.end();
|
||||
@@ -504,6 +612,367 @@ bool updateSpoolBambuData(String payload) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// #### Brand Filament
|
||||
uint16_t createVendor(String vendor) {
    // Create a new vendor in the Spoolman database via the async task system.
    // The HTTP POST runs in a separate FreeRTOS task (sendToApi); the created
    // ID is reported back through the global createdVendorId.
    // NOTE: this function assumes the caller has already ensured the API is IDLE.
    // Returns the new vendor ID, or 0 on failure/timeout.
    createdVendorId = 65535; // Sentinel: "no response received yet"

    String spoolsUrl = spoolmanUrl + apiUrl + "/vendor";
    Serial.print("Create vendor with URL: ");
    Serial.println(spoolsUrl);

    // Build the JSON payload for the vendor-creation request.
    JsonDocument vendorDoc;
    vendorDoc["name"] = vendor;
    vendorDoc["comment"] = "automatically generated";
    vendorDoc["empty_spool_weight"] = 180;
    vendorDoc["external_id"] = vendor;

    String vendorPayload;
    serializeJson(vendorDoc, vendorPayload);
    Serial.print("Vendor Payload: ");
    Serial.println(vendorPayload);

    SendToApiParams* params = new SendToApiParams();
    if (params == nullptr) {
        Serial.println("Fehler: Kann Speicher für Task-Parameter nicht allokieren.");
        vendorDoc.clear();
        return 0;
    }
    params->requestType = API_REQUEST_VENDOR_CREATE;
    params->httpType = "POST";
    params->spoolsUrl = spoolsUrl;
    params->updatePayload = vendorPayload;

    // Spawn the worker task; no extra API-state check here because the caller
    // guarantees synchronization.
    BaseType_t result = xTaskCreate(
        sendToApi,        // task function
        "SendToApiTask",  // task name
        6144,             // stack size in bytes
        (void*)params,    // parameter
        0,                // priority
        NULL              // task handle (not needed)
    );

    if (result != pdPASS) {
        Serial.println("Failed to create vendor task!");
        delete params;
        vendorDoc.clear();
        return 0;
    }

    vendorDoc.clear();

    // Wait for the API response; createdVendorId is set by sendToApi.
    // BUGFIX: the original loop waited forever if no response ever arrived
    // (network drop, task crash). Bail out after ~30 s (600 * 50 ms) so the
    // caller can report the failure instead of hanging this task.
    uint16_t waitTicks = 0;
    while (createdVendorId == 65535 && waitTicks < 600) {
        vTaskDelay(50 / portTICK_PERIOD_MS);
        waitTicks++;
    }
    if (createdVendorId == 65535) {
        Serial.println("Timeout waiting for vendor creation response!");
        return 0;
    }

    return createdVendorId;
}
|
||||
|
||||
uint16_t checkVendor(String vendor) {
    // Look up a vendor by name in Spoolman via the async task system;
    // create the vendor if it does not exist yet.
    // Returns the vendor ID, or 0 on failure/timeout.
    foundVendorId = 65535; // Sentinel: reset to detect when the API response arrives

    // Normalize the vendor name for use in the query string (spaces -> '+').
    String vendorName = vendor;
    vendorName.trim();
    vendorName.replace(" ", "+");
    String spoolsUrl = spoolmanUrl + apiUrl + "/vendor?name=" + vendorName;
    Serial.print("Check vendor with URL: ");
    Serial.println(spoolsUrl);

    SendToApiParams* params = new SendToApiParams();
    if (params == nullptr) {
        Serial.println("Fehler: Kann Speicher für Task-Parameter nicht allokieren.");
        return 0;
    }
    params->requestType = API_REQUEST_VENDOR_CHECK;
    params->httpType = "GET";
    params->spoolsUrl = spoolsUrl;
    params->updatePayload = ""; // Empty for GET request

    // Wait until the API is idle before creating the task.
    while (spoolmanApiState != API_IDLE)
    {
        vTaskDelay(100 / portTICK_PERIOD_MS);
    }

    // Spawn the worker task.
    BaseType_t result = xTaskCreate(
        sendToApi,        // task function
        "SendToApiTask",  // task name
        6144,             // stack size in bytes
        (void*)params,    // parameter
        0,                // priority
        NULL              // task handle (not needed)
    );

    // BUGFIX: the original ignored the task-creation result; on failure the
    // params object leaked and the wait loop below never terminated.
    if (result != pdPASS) {
        Serial.println("Failed to create vendor check task!");
        delete params;
        return 0;
    }

    // Wait until foundVendorId is updated by the API response.
    // BUGFIX: bounded wait (~30 s) instead of the original infinite loop.
    uint16_t waitTicks = 0;
    while (foundVendorId == 65535 && waitTicks < 600)
    {
        vTaskDelay(50 / portTICK_PERIOD_MS);
        waitTicks++;
    }
    if (foundVendorId == 65535) {
        Serial.println("Timeout waiting for vendor check response!");
        return 0;
    }

    // foundVendorId == 0 means "no such vendor" -> create it now.
    if (foundVendorId == 0) {
        Serial.println("Vendor not found, creating new vendor...");
        uint16_t vendorId = createVendor(vendor);
        if (vendorId == 0) {
            Serial.println("Failed to create vendor, returning 0.");
            return 0; // Failed to create vendor
        } else {
            Serial.println("Vendor created with ID: " + String(vendorId));
            return vendorId;
        }
    } else {
        Serial.println("Vendor found: " + vendor);
        Serial.print("Vendor ID: ");
        Serial.println(foundVendorId);
        return foundVendorId;
    }
}
|
||||
|
||||
uint16_t createFilament(uint16_t vendorId, const JsonDocument& payload) {
    // Create a new filament in the Spoolman database via the async task system.
    // The created ID is reported back through the global createdFilamentId.
    // NOTE: this function assumes the caller has already ensured the API is IDLE.
    // Returns the new filament ID, or 0 on failure/timeout.
    createdFilamentId = 65535; // Sentinel: "no response received yet"

    String spoolsUrl = spoolmanUrl + apiUrl + "/filament";
    Serial.print("Create filament with URL: ");
    Serial.println(spoolsUrl);

    // Build the JSON payload. Missing/empty optional fields fall back to
    // sensible defaults (density 1.24 g/cm³, diameter 1.75 mm).
    JsonDocument filamentDoc;
    filamentDoc["name"] = payload["color_name"].as<String>();
    filamentDoc["vendor_id"] = String(vendorId);
    filamentDoc["material"] = payload["type"].as<String>();
    filamentDoc["density"] = (payload["density"].is<String>() && payload["density"].as<String>().length() > 0) ? payload["density"].as<String>() : "1.24";
    filamentDoc["diameter"] = (payload["diameter"].is<String>() && payload["diameter"].as<String>().length() > 0) ? payload["diameter"].as<String>() : "1.75";
    filamentDoc["weight"] = String(weight); // current scale reading (global)
    filamentDoc["spool_weight"] = payload["spool_weight"].as<String>();
    filamentDoc["article_number"] = payload["artnr"].as<String>();
    filamentDoc["settings_extruder_temp"] = payload["extruder_temp"].is<String>() ? payload["extruder_temp"].as<String>() : "";
    filamentDoc["settings_bed_temp"] = payload["bed_temp"].is<String>() ? payload["bed_temp"].as<String>() : "";

    // Use the article number as external ID when available; the comment links
    // back to the product URL when one is present.
    if (payload["artnr"].is<String>())
    {
        filamentDoc["external_id"] = payload["artnr"].as<String>();
        filamentDoc["comment"] = payload["url"].is<String>() ? payload["url"].as<String>() + payload["artnr"].as<String>() : "automatically generated";
    }
    else
    {
        filamentDoc["comment"] = payload["url"].is<String>() ? payload["url"].as<String>() : "automatically generated";
    }

    // Multi-color filaments carry a hex list + direction; single-color ones
    // get a plain color_hex (white fallback if missing/too short).
    if (payload["multi_color_hexes"].is<String>()) {
        filamentDoc["multi_color_hexes"] = payload["multi_color_hexes"].as<String>();
        filamentDoc["multi_color_direction"] = payload["multi_color_direction"].is<String>() ? payload["multi_color_direction"].as<String>() : "";
    }
    else
    {
        filamentDoc["color_hex"] = (payload["color_hex"].is<String>() && payload["color_hex"].as<String>().length() >= 6) ? payload["color_hex"].as<String>() : "FFFFFF";
    }

    String filamentPayload;
    serializeJson(filamentDoc, filamentPayload);
    Serial.print("Filament Payload: ");
    Serial.println(filamentPayload);

    SendToApiParams* params = new SendToApiParams();
    if (params == nullptr) {
        Serial.println("Fehler: Kann Speicher für Task-Parameter nicht allokieren.");
        filamentDoc.clear();
        return 0;
    }
    params->requestType = API_REQUEST_FILAMENT_CREATE;
    params->httpType = "POST";
    params->spoolsUrl = spoolsUrl;
    params->updatePayload = filamentPayload;

    // Spawn the worker task; caller guarantees API synchronization.
    BaseType_t result = xTaskCreate(
        sendToApi,        // task function
        "SendToApiTask",  // task name
        6144,             // stack size in bytes
        (void*)params,    // parameter
        0,                // priority
        NULL              // task handle (not needed)
    );

    if (result != pdPASS) {
        Serial.println("Failed to create filament task!");
        delete params;
        filamentDoc.clear();
        return 0;
    }

    filamentDoc.clear();

    // Wait for the API response; createdFilamentId is set by sendToApi.
    // BUGFIX: bounded wait (~30 s) instead of the original infinite loop.
    uint16_t waitTicks = 0;
    while (createdFilamentId == 65535 && waitTicks < 600) {
        vTaskDelay(50 / portTICK_PERIOD_MS);
        waitTicks++;
    }
    if (createdFilamentId == 65535) {
        Serial.println("Timeout waiting for filament creation response!");
        return 0;
    }

    return createdFilamentId;
}
|
||||
|
||||
uint16_t checkFilament(uint16_t vendorId, const JsonDocument& payload) {
    // Look up a filament by vendor ID + external article number in Spoolman;
    // create the filament if it does not exist yet.
    // Returns the filament ID, or 0 on failure/timeout.
    foundFilamentId = 65535; // Sentinel: reset to detect when the API response arrives

    String spoolsUrl = spoolmanUrl + apiUrl + "/filament?vendor.id=" + String(vendorId) + "&external_id=" + String(payload["artnr"].as<String>());
    Serial.print("Check filament with URL: ");
    Serial.println(spoolsUrl);

    SendToApiParams* params = new SendToApiParams();
    if (params == nullptr) {
        Serial.println("Fehler: Kann Speicher für Task-Parameter nicht allokieren.");
        return 0;
    }
    params->requestType = API_REQUEST_FILAMENT_CHECK;
    params->httpType = "GET";
    params->spoolsUrl = spoolsUrl;
    params->updatePayload = ""; // Empty for GET request

    // CONSISTENCY FIX: wait until the API is idle before creating the task,
    // matching checkVendor(); the original spawned the task unconditionally.
    while (spoolmanApiState != API_IDLE)
    {
        vTaskDelay(100 / portTICK_PERIOD_MS);
    }

    // Spawn the worker task.
    BaseType_t result = xTaskCreate(
        sendToApi,        // task function
        "SendToApiTask",  // task name
        6144,             // stack size in bytes
        (void*)params,    // parameter
        0,                // priority
        NULL              // task handle (not needed)
    );

    // BUGFIX: the original ignored the task-creation result; on failure the
    // params object leaked and the wait loop below never terminated.
    if (result != pdPASS) {
        Serial.println("Failed to create filament check task!");
        delete params;
        return 0;
    }

    // Wait until foundFilamentId is updated by the API response.
    // BUGFIX: bounded wait (~30 s) instead of the original infinite loop.
    uint16_t waitTicks = 0;
    while (foundFilamentId == 65535 && waitTicks < 600) {
        vTaskDelay(50 / portTICK_PERIOD_MS);
        waitTicks++;
    }
    if (foundFilamentId == 65535) {
        Serial.println("Timeout waiting for filament check response!");
        return 0;
    }

    // foundFilamentId == 0 means "no such filament" -> create it now.
    if (foundFilamentId == 0) {
        Serial.println("Filament not found, creating new filament...");
        uint16_t filamentId = createFilament(vendorId, payload);
        if (filamentId == 0) {
            Serial.println("Failed to create filament, returning 0.");
            return 0; // Failed to create filament
        } else {
            Serial.println("Filament created with ID: " + String(filamentId));
            return filamentId;
        }
    } else {
        Serial.println("Filament found for vendor ID: " + String(vendorId));
        Serial.print("Filament ID: ");
        Serial.println(foundFilamentId);
        return foundFilamentId;
    }
}
|
||||
|
||||
uint16_t createSpool(uint16_t vendorId, uint16_t filamentId, JsonDocument& payload, String uidString) {
    // Create a new spool in the Spoolman database via the async task system,
    // then write the resulting spool ID back onto the NFC tag.
    // The created ID is reported back through the global createdSpoolId.
    // NOTE: this function assumes the caller has already ensured the API is IDLE.
    // Returns the new spool ID, or 0 on failure/timeout.
    createdSpoolId = 65535; // Sentinel: "no response received yet"

    String spoolsUrl = spoolmanUrl + apiUrl + "/spool";
    Serial.print("Create spool with URL: ");
    Serial.println(spoolsUrl);
    //String currentDate = getCurrentDateISO8601();

    // Build the JSON payload for spool creation. initial_weight is derived
    // from the live scale reading minus the empty-spool weight; if the scale
    // reads <= 10 g (no spool on it) a 1000 g default is assumed.
    JsonDocument spoolDoc;
    //spoolDoc["first_used"] = String(currentDate);
    //spoolDoc["last_used"] = String(currentDate);
    spoolDoc["filament_id"] = String(filamentId);
    spoolDoc["initial_weight"] = weight > 10 ? String(weight-payload["spool_weight"].as<int>()) : "1000";
    spoolDoc["spool_weight"] = (payload["spool_weight"].is<String>() && payload["spool_weight"].as<String>().length() > 0) ? payload["spool_weight"].as<String>() : "180";
    spoolDoc["remaining_weight"] = (payload["weight"].is<String>() && payload["weight"].as<String>().length() > 0) ? payload["weight"].as<String>() : "1000";
    spoolDoc["lot_nr"] = (payload["lotnr"].is<String>() && payload["lotnr"].as<String>().length() > 0) ? payload["lotnr"].as<String>() : "";
    spoolDoc["comment"] = "automatically generated";
    // Spoolman "extra" fields are JSON-encoded strings, hence the inner quotes.
    spoolDoc["extra"]["nfc_id"] = "\"" + uidString + "\"";

    String spoolPayload;
    serializeJson(spoolDoc, spoolPayload);
    Serial.print("Spool Payload: ");
    Serial.println(spoolPayload);
    spoolDoc.clear();

    SendToApiParams* params = new SendToApiParams();
    if (params == nullptr) {
        Serial.println("Fehler: Kann Speicher für Task-Parameter nicht allokieren.");
        // FIX: removed the redundant spoolDoc.clear() here — the document was
        // already cleared right after serialization above.
        return 0;
    }
    params->requestType = API_REQUEST_SPOOL_CREATE;
    params->httpType = "POST";
    params->spoolsUrl = spoolsUrl;
    params->updatePayload = spoolPayload;

    // Spawn the worker task; caller guarantees API synchronization.
    BaseType_t result = xTaskCreate(
        sendToApi,        // task function
        "SendToApiTask",  // task name
        6144,             // stack size in bytes
        (void*)params,    // parameter
        0,                // priority
        NULL              // task handle (not needed)
    );

    if (result != pdPASS) {
        Serial.println("Failed to create spool task!");
        delete params;
        return 0;
    }

    // Wait for the API response; createdSpoolId is set by sendToApi.
    // BUGFIX: bounded wait (~30 s) instead of the original infinite loop, so
    // a lost response cannot hang this task and block the NFC state machine.
    uint16_t waitTicks = 0;
    while (createdSpoolId == 65535 && waitTicks < 600) {
        vTaskDelay(50 / portTICK_PERIOD_MS);
        waitTicks++;
    }
    if (createdSpoolId == 65535) {
        Serial.println("Timeout waiting for spool creation response!");
        return 0;
    }

    // Persist the new Spoolman ID on the tag itself via startWriteJsonToTag:
    // void startWriteJsonToTag(const bool isSpoolTag, const char* payload);
    payload["sm_id"].set(String(createdSpoolId));

    String payloadString;
    serializeJson(payload, payloadString);

    nfcReaderState = NFC_IDLE; // Release the reader before starting the write
    vTaskDelay(50 / portTICK_PERIOD_MS);
    startWriteJsonToTag(true, payloadString.c_str());

    return createdSpoolId;
}
|
||||
|
||||
bool createBrandFilament(JsonDocument& payload, String uidString) {
    // Full provisioning chain for a branded spool: resolve (or create) the
    // vendor, then the filament, then create the spool and tag it.
    // Returns true only when all three steps succeed.
    const uint16_t vid = checkVendor(payload["brand"].as<String>());
    if (vid == 0) {
        Serial.println("ERROR: Failed to create/find vendor");
        return false;
    }

    const uint16_t fid = checkFilament(vid, payload);
    if (fid == 0) {
        Serial.println("ERROR: Failed to create/find filament");
        return false;
    }

    const uint16_t sid = createSpool(vid, fid, payload, uidString);
    if (sid == 0) {
        Serial.println("ERROR: Failed to create spool");
        return false;
    }

    Serial.println("SUCCESS: Brand filament created with Spool ID: " + String(sid));
    return true;
}
|
||||
|
||||
// #### Spoolman init
|
||||
bool checkSpoolmanExtraFields() {
|
||||
// Only check extra fields if they have not been checked before
|
||||
@@ -713,9 +1182,10 @@ bool checkSpoolmanInstance() {
|
||||
Serial.println("Error contacting spoolman instance! HTTP Code: " + String(httpCode));
|
||||
}
|
||||
http.end();
|
||||
returnValue = false;
|
||||
spoolmanApiState = API_IDLE;
|
||||
}else{
|
||||
}
|
||||
else
|
||||
{
|
||||
// If the check is skipped, return the previous status
|
||||
Serial.println("Skipping spoolman healthcheck, API is active.");
|
||||
returnValue = spoolmanConnected;
|
||||
|
@@ -17,7 +17,12 @@ typedef enum {
|
||||
API_REQUEST_BAMBU_UPDATE,
|
||||
API_REQUEST_SPOOL_TAG_ID_UPDATE,
|
||||
API_REQUEST_SPOOL_WEIGHT_UPDATE,
|
||||
API_REQUEST_SPOOL_LOCATION_UPDATE
|
||||
API_REQUEST_SPOOL_LOCATION_UPDATE,
|
||||
API_REQUEST_VENDOR_CREATE,
|
||||
API_REQUEST_VENDOR_CHECK,
|
||||
API_REQUEST_FILAMENT_CHECK,
|
||||
API_REQUEST_FILAMENT_CREATE,
|
||||
API_REQUEST_SPOOL_CREATE
|
||||
} SpoolmanApiRequestType;
|
||||
|
||||
extern volatile spoolmanApiStateType spoolmanApiState;
|
||||
@@ -40,5 +45,6 @@ uint8_t updateSpoolLocation(String spoolId, String location);
|
||||
bool initSpoolman(); // Neue Funktion zum Initialisieren von Spoolman
|
||||
bool updateSpoolBambuData(String payload); // Neue Funktion zum Aktualisieren der Bambu-Daten
|
||||
bool updateSpoolOcto(int spoolId); // Neue Funktion zum Aktualisieren der Octo-Daten
|
||||
bool createBrandFilament(JsonDocument& payload, String uidString);
|
||||
|
||||
#endif
|
||||
|
@@ -33,6 +33,7 @@ AMSData ams_data[MAX_AMS]; // Definition des Arrays;
|
||||
bool removeBambuCredentials() {
|
||||
if (BambuMqttTask) {
|
||||
vTaskDelete(BambuMqttTask);
|
||||
BambuMqttTask = NULL;
|
||||
}
|
||||
|
||||
Preferences preferences;
|
||||
@@ -63,6 +64,7 @@ bool removeBambuCredentials() {
|
||||
bool saveBambuCredentials(const String& ip, const String& serialnr, const String& accesscode, bool autoSend, const String& autoSendTime) {
|
||||
if (BambuMqttTask) {
|
||||
vTaskDelete(BambuMqttTask);
|
||||
BambuMqttTask = NULL;
|
||||
}
|
||||
|
||||
bambuCredentials.ip = ip.c_str();
|
||||
@@ -593,6 +595,7 @@ void reconnect() {
|
||||
Serial.println("Disable Bambu MQTT Task after 5 retries");
|
||||
//vTaskSuspend(BambuMqttTask);
|
||||
vTaskDelete(BambuMqttTask);
|
||||
BambuMqttTask = NULL;
|
||||
break;
|
||||
}
|
||||
|
||||
@@ -681,6 +684,7 @@ void bambu_restart() {
|
||||
|
||||
if (BambuMqttTask) {
|
||||
vTaskDelete(BambuMqttTask);
|
||||
BambuMqttTask = NULL;
|
||||
delay(10);
|
||||
}
|
||||
setupMqtt();
|
||||
|
22
src/main.cpp
22
src/main.cpp
@@ -171,7 +171,9 @@ void loop() {
|
||||
oledShowMessage("Scale not calibrated");
|
||||
vTaskDelay(1000 / portTICK_PERIOD_MS);
|
||||
}
|
||||
}else{
|
||||
}
|
||||
else
|
||||
{
|
||||
// Ausgabe der Waage auf Display
|
||||
if(pauseMainTask == 0)
|
||||
{
|
||||
@@ -225,7 +227,8 @@ void loop() {
|
||||
lastWeight = weight;
|
||||
|
||||
// Wenn ein Tag mit SM id erkannte wurde und der Waage Counter anspricht an SM Senden
|
||||
if (activeSpoolId != "" && weigthCouterToApi > 3 && weightSend == 0 && nfcReaderState == NFC_READ_SUCCESS && tagProcessed == false && spoolmanApiState == API_IDLE) {
|
||||
if (activeSpoolId != "" && weigthCouterToApi > 3 && weightSend == 0 && nfcReaderState == NFC_READ_SUCCESS && tagProcessed == false && spoolmanApiState == API_IDLE)
|
||||
{
|
||||
// set the current tag as processed to prevent it beeing processed again
|
||||
tagProcessed = true;
|
||||
|
||||
@@ -233,6 +236,11 @@ void loop() {
|
||||
{
|
||||
weightSend = 1;
|
||||
|
||||
// Set Bambu spool ID for auto-send if enabled
|
||||
if (bambuCredentials.autosend_enable)
|
||||
{
|
||||
autoSetToBambuSpoolId = activeSpoolId.toInt();
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
@@ -241,13 +249,9 @@ void loop() {
|
||||
}
|
||||
}
|
||||
|
||||
if(sendOctoUpdate && spoolmanApiState == API_IDLE){
|
||||
autoSetToBambuSpoolId = activeSpoolId.toInt();
|
||||
|
||||
if(octoEnabled)
|
||||
{
|
||||
updateSpoolOcto(autoSetToBambuSpoolId);
|
||||
}
|
||||
if(octoEnabled && sendOctoUpdate && spoolmanApiState == API_IDLE)
|
||||
{
|
||||
updateSpoolOcto(autoSetToBambuSpoolId);
|
||||
sendOctoUpdate = false;
|
||||
}
|
||||
}
|
||||
|
673
src/nfc.cpp
673
src/nfc.cpp
@@ -39,7 +39,6 @@ volatile nfcReaderStateType nfcReaderState = NFC_IDLE;
|
||||
// 6 = reading
|
||||
// ***** PN532
|
||||
|
||||
|
||||
// ##### Funktionen für RFID #####
|
||||
void payloadToJson(uint8_t *data) {
|
||||
const char* startJson = strchr((char*)data, '{');
|
||||
@@ -60,7 +59,7 @@ void payloadToJson(uint8_t *data) {
|
||||
int min_temp = doc["min_temp"];
|
||||
int max_temp = doc["max_temp"];
|
||||
const char* brand = doc["brand"];
|
||||
|
||||
|
||||
Serial.println();
|
||||
Serial.println("-----------------");
|
||||
Serial.println("JSON-Parsed Data:");
|
||||
@@ -100,9 +99,7 @@ bool formatNdefTag() {
|
||||
}
|
||||
|
||||
return success;
|
||||
}
|
||||
|
||||
uint16_t readTagSize()
|
||||
}uint16_t readTagSize()
|
||||
{
|
||||
uint8_t buffer[4];
|
||||
memset(buffer, 0, 4);
|
||||
@@ -110,115 +107,596 @@ uint16_t readTagSize()
|
||||
return buffer[2]*8;
|
||||
}
|
||||
|
||||
String detectNtagType()
{
    // Identify the NTAG variant (NTAG213/215/216) of the tag currently in
    // the field. Strategy: read the capability container (page 3) for the
    // declared data-area size, then probe pages that only exist on larger
    // variants (NTAG213 ends at page 39, NTAG215 at page 129).
    // Returns "NTAG213", "NTAG215", "NTAG216" or "UNKNOWN".
    uint8_t ccBuffer[4];
    memset(ccBuffer, 0, 4);

    if (!nfc.ntag2xx_ReadPage(3, ccBuffer)) {
        Serial.println("Failed to read capability container");
        return "UNKNOWN";
    }

    // FIX: removed the unused configBuffer declaration of the original
    // (declared and zeroed but never read or written afterwards).

    Serial.print("Capability Container: ");
    for (int i = 0; i < 4; i++) {
        if (ccBuffer[i] < 0x10) Serial.print("0"); // zero-pad hex output
        Serial.print(ccBuffer[i], HEX);
        Serial.print(" ");
    }
    Serial.println();

    // CC byte 2 encodes the data-area size in units of 8 bytes.
    uint16_t dataAreaSize = ccBuffer[2] * 8;

    Serial.print("Data area size from CC: ");
    Serial.println(dataAreaSize);

    String tagType = "UNKNOWN";

    // Probe pages beyond the smaller variants' limits.
    uint8_t testBuffer[4];
    bool canReadPage41 = nfc.ntag2xx_ReadPage(41, testBuffer);
    bool canReadPage130 = nfc.ntag2xx_ReadPage(130, testBuffer);

    if (dataAreaSize <= 180 && !canReadPage41) {
        tagType = "NTAG213";
        Serial.println("Detected: NTAG213 (cannot read beyond page 39)");
    } else if (dataAreaSize <= 540 && canReadPage41 && !canReadPage130) {
        tagType = "NTAG215";
        Serial.println("Detected: NTAG215 (can read page 41, cannot read page 130)");
    } else if (dataAreaSize <= 928 && canReadPage130) {
        tagType = "NTAG216";
        Serial.println("Detected: NTAG216 (can read page 130)");
    } else {
        // Probes were inconclusive; fall back to the CC-declared size alone.
        if (dataAreaSize <= 180) {
            tagType = "NTAG213";
            Serial.println("Fallback detection: NTAG213 based on data area size");
        } else if (dataAreaSize <= 540) {
            tagType = "NTAG215";
            Serial.println("Fallback detection: NTAG215 based on data area size");
        } else {
            tagType = "NTAG216";
            Serial.println("Fallback detection: NTAG216 based on data area size");
        }
    }

    return tagType;
}
|
||||
|
||||
uint16_t getAvailableUserDataSize()
{
    // Return the number of user-data bytes the current tag can hold,
    // based on the detected NTAG variant.
    String tagType = detectNtagType();
    uint16_t userDataSize = 0;

    if (tagType == "NTAG213") {
        // NTAG213: user data in pages 4-39 (36 pages * 4 bytes = 144 bytes)
        userDataSize = 144;
        Serial.println("NTAG213 confirmed - 144 bytes user data available");
    } else if (tagType == "NTAG215") {
        // NTAG215: user data in pages 4-129 (126 pages * 4 bytes = 504 bytes)
        userDataSize = 504;
        Serial.println("NTAG215 confirmed - 504 bytes user data available");
    } else if (tagType == "NTAG216") {
        // NTAG216: user data in pages 4-225 (222 pages * 4 bytes = 888 bytes)
        userDataSize = 888;
        Serial.println("NTAG216 confirmed - 888 bytes user data available");
    } else {
        // Unknown tag type: derive a conservative estimate from the raw size,
        // reserving 60 bytes for headers/configuration.
        uint16_t tagSize = readTagSize();
        // BUGFIX: guard the subtraction — readTagSize() can report < 60
        // (e.g. an unreadable CC yields 0) and the original "tagSize - 60"
        // underflowed uint16_t to a huge bogus capacity.
        userDataSize = (tagSize > 60) ? (uint16_t)(tagSize - 60) : 0;
        Serial.print("Unknown NTAG type, using conservative estimate: ");
        Serial.println(userDataSize);
    }

    return userDataSize;
}
|
||||
|
||||
uint16_t getMaxUserDataPages()
{
    // Highest page number belonging to the user-data area for the detected
    // tag type; pages above it hold configuration data and must not be
    // written.
    const String tagType = detectNtagType();

    uint16_t maxPages;
    if (tagType == "NTAG216") {
        maxPages = 225; // pages 4-225 are user data
    } else if (tagType == "NTAG215") {
        maxPages = 129; // pages 4-129 are user data
    } else if (tagType == "NTAG213") {
        maxPages = 39;  // pages 4-39 are user data
    } else {
        maxPages = 39;  // conservative fallback for unrecognized tags
        Serial.println("Unknown tag type, using NTAG213 page limit as fallback");
    }

    Serial.print("Maximum writable page: ");
    Serial.println(maxPages);
    return maxPages;
}
|
||||
|
||||
bool initializeNdefStructure() {
|
||||
// Write minimal NDEF structure without destroying the tag
|
||||
// This creates a clean slate while preserving tag functionality
|
||||
|
||||
Serial.println("Initialisiere sichere NDEF-Struktur...");
|
||||
|
||||
// Minimal NDEF structure: TLV with empty message
|
||||
uint8_t minimalNdef[8] = {
|
||||
0x03, // NDEF Message TLV Tag
|
||||
0x03, // Length (3 bytes for minimal empty record)
|
||||
0xD0, // NDEF Record Header (TNF=0x0:Empty + SR + ME + MB)
|
||||
0x00, // Type Length (0 = empty record)
|
||||
0x00, // Payload Length (0 = empty record)
|
||||
0xFE, // Terminator TLV
|
||||
0x00, 0x00 // Padding
|
||||
};
|
||||
|
||||
// Write the minimal structure starting at page 4
|
||||
uint8_t pageBuffer[4];
|
||||
|
||||
for (int i = 0; i < 8; i += 4) {
|
||||
memcpy(pageBuffer, &minimalNdef[i], 4);
|
||||
|
||||
if (!nfc.ntag2xx_WritePage(4 + (i / 4), pageBuffer)) {
|
||||
Serial.print("Fehler beim Initialisieren von Seite ");
|
||||
Serial.println(4 + (i / 4));
|
||||
return false;
|
||||
}
|
||||
|
||||
Serial.print("Seite ");
|
||||
Serial.print(4 + (i / 4));
|
||||
Serial.print(" initialisiert: ");
|
||||
for (int j = 0; j < 4; j++) {
|
||||
if (pageBuffer[j] < 0x10) Serial.print("0");
|
||||
Serial.print(pageBuffer[j], HEX);
|
||||
Serial.print(" ");
|
||||
}
|
||||
Serial.println();
|
||||
}
|
||||
|
||||
Serial.println("✓ Sichere NDEF-Struktur initialisiert");
|
||||
Serial.println("✓ Tag bleibt funktionsfähig und überschreibbar");
|
||||
return true;
|
||||
}
|
||||
|
||||
bool clearUserDataArea() {
|
||||
// IMPORTANT: Only clear user data pages, NOT configuration pages
|
||||
// NTAG layout: Pages 0-3 (header), 4-N (user data), N+1-N+3 (config) - NEVER touch config!
|
||||
String tagType = detectNtagType();
|
||||
|
||||
// Calculate safe user data page ranges (NEVER touch config pages!)
|
||||
uint16_t firstUserPage = 4;
|
||||
uint16_t lastUserPage = 0;
|
||||
|
||||
if (tagType == "NTAG213") {
|
||||
lastUserPage = 39; // Pages 40-42 are config - DO NOT TOUCH!
|
||||
Serial.println("NTAG213: Sichere Löschung Seiten 4-39");
|
||||
} else if (tagType == "NTAG215") {
|
||||
lastUserPage = 129; // Pages 130-132 are config - DO NOT TOUCH!
|
||||
Serial.println("NTAG215: Sichere Löschung Seiten 4-129");
|
||||
} else if (tagType == "NTAG216") {
|
||||
lastUserPage = 225; // Pages 226-228 are config - DO NOT TOUCH!
|
||||
Serial.println("NTAG216: Sichere Löschung Seiten 4-225");
|
||||
} else {
|
||||
// Conservative fallback - only clear a small safe area
|
||||
lastUserPage = 39;
|
||||
Serial.println("UNKNOWN TAG: Konservative Löschung Seiten 4-39");
|
||||
}
|
||||
|
||||
Serial.println("WARNUNG: Vollständiges Löschen kann Tag beschädigen!");
|
||||
Serial.println("Verwende stattdessen selective NDEF-Überschreibung...");
|
||||
|
||||
// Instead of clearing everything, just write a minimal NDEF structure
|
||||
// This is much safer and preserves tag integrity
|
||||
return initializeNdefStructure();
|
||||
}
|
||||
|
||||
uint8_t ntag2xx_WriteNDEF(const char *payload) {
|
||||
// Determine exact tag type and capabilities first
|
||||
String tagType = detectNtagType();
|
||||
uint16_t tagSize = readTagSize();
|
||||
Serial.print("Tag Size: ");Serial.println(tagSize);
|
||||
uint16_t availableUserData = getAvailableUserDataSize();
|
||||
uint16_t maxWritablePage = getMaxUserDataPages();
|
||||
|
||||
Serial.println("=== NFC TAG ANALYSIS ===");
|
||||
Serial.print("Tag Type: ");Serial.println(tagType);
|
||||
Serial.print("Total Tag Size: ");Serial.println(tagSize);
|
||||
Serial.print("Available User Data: ");Serial.println(availableUserData);
|
||||
Serial.print("Max Writable Page: ");Serial.println(maxWritablePage);
|
||||
Serial.println("========================");
|
||||
|
||||
uint8_t pageBuffer[4] = {0, 0, 0, 0};
|
||||
Serial.println("Beginne mit dem Schreiben der NDEF-Nachricht...");
|
||||
|
||||
// Figure out how long the string is
|
||||
uint8_t len = strlen(payload);
|
||||
uint16_t payloadLen = strlen(payload);
|
||||
Serial.print("Länge der Payload: ");
|
||||
Serial.println(len);
|
||||
Serial.println(payloadLen);
|
||||
|
||||
Serial.print("Payload: ");Serial.println(payload);
|
||||
|
||||
// Setup the record header
|
||||
// See NFCForum-TS-Type-2-Tag_1.1.pdf for details
|
||||
uint8_t pageHeader[21] = {
|
||||
/* NDEF Message TLV - JSON Record */
|
||||
0x03, /* Tag Field (0x03 = NDEF Message) */
|
||||
(uint8_t)(len+3+16), /* Payload Length (including NDEF header) */
|
||||
0xD2, /* NDEF Record Header (TNF=0x2:MIME Media + SR + ME + MB) */
|
||||
0x10, /* Type Length for the record type indicator */
|
||||
(uint8_t)(len), /* Payload len */
|
||||
'a', 'p', 'p', 'l', 'i', 'c', 'a', 't', 'i', 'o', 'n', '/', 'j', 's', 'o', 'n'
|
||||
};
|
||||
// MIME type for JSON
|
||||
const char mimeType[] = "application/json";
|
||||
uint8_t mimeTypeLen = strlen(mimeType);
|
||||
|
||||
// Calculate NDEF record size
|
||||
uint8_t ndefRecordHeaderSize = 3; // Header byte + Type Length + Payload Length (short record)
|
||||
uint16_t ndefRecordSize = ndefRecordHeaderSize + mimeTypeLen + payloadLen;
|
||||
|
||||
// Calculate TLV size - need to check if we need extended length format
|
||||
uint8_t tlvHeaderSize;
|
||||
uint16_t totalTlvSize;
|
||||
|
||||
if (ndefRecordSize <= 254) {
|
||||
// Standard TLV format: Tag (1) + Length (1) + Value (ndefRecordSize)
|
||||
tlvHeaderSize = 2;
|
||||
totalTlvSize = tlvHeaderSize + ndefRecordSize + 1; // +1 for terminator TLV
|
||||
} else {
|
||||
// Extended TLV format: Tag (1) + 0xFF + Length (2) + Value (ndefRecordSize)
|
||||
tlvHeaderSize = 4;
|
||||
totalTlvSize = tlvHeaderSize + ndefRecordSize + 1; // +1 for terminator TLV
|
||||
}
|
||||
|
||||
// Make sure the URI payload will fit in dataLen (include 0xFE trailer)
|
||||
if ((len < 1) || (len + 1 > (tagSize - sizeof(pageHeader))))
|
||||
{
|
||||
Serial.print("NDEF Record Size: ");
|
||||
Serial.println(ndefRecordSize);
|
||||
Serial.print("Total TLV Size: ");
|
||||
Serial.println(totalTlvSize);
|
||||
|
||||
// Check if the message fits in the available user data space
|
||||
if (totalTlvSize > availableUserData) {
|
||||
Serial.println();
|
||||
Serial.println("!!!!!!!!!!!!!!!!!!!!!!!!");
|
||||
Serial.println("Fehler: Die Nutzlast passt nicht in die Datenlänge.");
|
||||
Serial.println("FEHLER: Payload zu groß für diesen Tag-Typ!");
|
||||
Serial.print("Tag-Typ: ");Serial.println(tagType);
|
||||
Serial.print("Benötigt: ");Serial.print(totalTlvSize);Serial.println(" Bytes");
|
||||
Serial.print("Verfügbar: ");Serial.print(availableUserData);Serial.println(" Bytes");
|
||||
Serial.print("Überschuss: ");Serial.print(totalTlvSize - availableUserData);Serial.println(" Bytes");
|
||||
|
||||
if (tagType == "NTAG213") {
|
||||
Serial.println("EMPFEHLUNG: Verwenden Sie einen NTAG215 (504 Bytes) oder NTAG216 (888 Bytes) Tag!");
|
||||
Serial.println("Oder kürzen Sie die Payload um mindestens " + String(totalTlvSize - availableUserData) + " Bytes.");
|
||||
}
|
||||
Serial.println("!!!!!!!!!!!!!!!!!!!!!!!!");
|
||||
Serial.println();
|
||||
|
||||
oledShowMessage("Tag zu klein für Payload");
|
||||
vTaskDelay(3000 / portTICK_PERIOD_MS);
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Kombiniere Header und Payload
|
||||
int totalSize = sizeof(pageHeader) + len;
|
||||
uint8_t* combinedData = (uint8_t*) malloc(totalSize);
|
||||
if (combinedData == NULL)
|
||||
{
|
||||
Serial.println("Fehler: Nicht genug Speicher vorhanden.");
|
||||
oledShowMessage("Tag too small");
|
||||
Serial.println("✓ Payload passt in den Tag - Schreibvorgang wird fortgesetzt");
|
||||
|
||||
// IMPORTANT: Use safe NDEF initialization instead of aggressive clearing
|
||||
Serial.println("Schritt 1: Sichere NDEF-Initialisierung...");
|
||||
if (!initializeNdefStructure()) {
|
||||
Serial.println("FEHLER: Konnte NDEF-Struktur nicht initialisieren!");
|
||||
oledShowMessage("NDEF init failed");
|
||||
vTaskDelay(2000 / portTICK_PERIOD_MS);
|
||||
return 0;
|
||||
}
|
||||
Serial.println("✓ NDEF-Struktur sicher initialisiert");
|
||||
|
||||
// Allocate memory for the complete TLV structure
|
||||
uint8_t* tlvData = (uint8_t*) malloc(totalTlvSize);
|
||||
if (tlvData == NULL) {
|
||||
Serial.println("Fehler: Nicht genug Speicher für TLV-Daten vorhanden.");
|
||||
oledShowMessage("Memory error");
|
||||
vTaskDelay(2000 / portTICK_PERIOD_MS);
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Kombiniere Header und Payload
|
||||
memcpy(combinedData, pageHeader, sizeof(pageHeader));
|
||||
memcpy(&combinedData[sizeof(pageHeader)], payload, len);
|
||||
// Build TLV structure
|
||||
uint16_t offset = 0;
|
||||
|
||||
// TLV Header
|
||||
tlvData[offset++] = 0x03; // NDEF Message TLV Tag
|
||||
|
||||
if (ndefRecordSize <= 254) {
|
||||
// Standard length format
|
||||
tlvData[offset++] = (uint8_t)ndefRecordSize;
|
||||
} else {
|
||||
// Extended length format
|
||||
tlvData[offset++] = 0xFF;
|
||||
tlvData[offset++] = (uint8_t)(ndefRecordSize >> 8); // High byte
|
||||
tlvData[offset++] = (uint8_t)(ndefRecordSize & 0xFF); // Low byte
|
||||
}
|
||||
|
||||
// Schreibe die Seiten
|
||||
uint8_t a = 0;
|
||||
uint8_t i = 0;
|
||||
while (totalSize > 0) {
|
||||
// NDEF Record Header
|
||||
tlvData[offset++] = 0xD2; // NDEF Record Header (TNF=0x2:MIME Media + SR + ME + MB)
|
||||
tlvData[offset++] = mimeTypeLen; // Type Length
|
||||
tlvData[offset++] = (uint8_t)payloadLen; // Payload Length (short record format)
|
||||
|
||||
// MIME Type
|
||||
memcpy(&tlvData[offset], mimeType, mimeTypeLen);
|
||||
offset += mimeTypeLen;
|
||||
|
||||
// JSON Payload
|
||||
memcpy(&tlvData[offset], payload, payloadLen);
|
||||
offset += payloadLen;
|
||||
|
||||
// Terminator TLV
|
||||
tlvData[offset] = 0xFE;
|
||||
|
||||
Serial.print("Gesamt-TLV-Länge: ");
|
||||
Serial.println(offset + 1);
|
||||
|
||||
// Debug: Print first 64 bytes of TLV data
|
||||
Serial.println("TLV Daten (erste 64 Bytes):");
|
||||
for (int i = 0; i < min((int)(offset + 1), 64); i++) {
|
||||
if (tlvData[i] < 0x10) Serial.print("0");
|
||||
Serial.print(tlvData[i], HEX);
|
||||
Serial.print(" ");
|
||||
if ((i + 1) % 16 == 0) Serial.println();
|
||||
}
|
||||
Serial.println();
|
||||
|
||||
// Write data to tag pages (starting from page 4)
|
||||
uint16_t bytesWritten = 0;
|
||||
uint8_t pageNumber = 4;
|
||||
uint16_t totalBytes = offset + 1;
|
||||
|
||||
Serial.println("Schritt 2: Schreibe neue NDEF-Daten...");
|
||||
Serial.print("Schreibe ");
|
||||
Serial.print(totalBytes);
|
||||
Serial.print(" Bytes in ");
|
||||
Serial.print((totalBytes + 3) / 4); // Round up division
|
||||
Serial.println(" Seiten...");
|
||||
|
||||
while (bytesWritten < totalBytes && pageNumber <= maxWritablePage) {
|
||||
// Clear page buffer
|
||||
memset(pageBuffer, 0, 4);
|
||||
int bytesToWrite = (totalSize < 4) ? totalSize : 4;
|
||||
memcpy(pageBuffer, combinedData + a, bytesToWrite);
|
||||
|
||||
// Calculate how many bytes to write to this page
|
||||
uint16_t bytesToWrite = min(4, (int)(totalBytes - bytesWritten));
|
||||
|
||||
// Copy data to page buffer
|
||||
memcpy(pageBuffer, &tlvData[bytesWritten], bytesToWrite);
|
||||
|
||||
//uint8_t uid[] = { 0, 0, 0, 0, 0, 0, 0 }; // Buffer to store the returned UID
|
||||
//uint8_t uidLength;
|
||||
//nfc.readPassiveTargetID(PN532_MIFARE_ISO14443A, uid, &uidLength, 100);
|
||||
|
||||
if (!(nfc.ntag2xx_WritePage(4+i, pageBuffer)))
|
||||
{
|
||||
Serial.println("Fehler beim Schreiben der Seite.");
|
||||
free(combinedData);
|
||||
// Write page to tag
|
||||
if (!nfc.ntag2xx_WritePage(pageNumber, pageBuffer)) {
|
||||
Serial.print("FEHLER beim Schreiben der Seite ");
|
||||
Serial.println(pageNumber);
|
||||
Serial.print("Möglicherweise Page-Limit erreicht für ");
|
||||
Serial.println(tagType);
|
||||
free(tlvData);
|
||||
return 0;
|
||||
}
|
||||
|
||||
yield();
|
||||
//esp_task_wdt_reset();
|
||||
Serial.print("Seite ");
|
||||
Serial.print(pageNumber);
|
||||
Serial.print(" ✓: ");
|
||||
for (int i = 0; i < 4; i++) {
|
||||
if (pageBuffer[i] < 0x10) Serial.print("0");
|
||||
Serial.print(pageBuffer[i], HEX);
|
||||
Serial.print(" ");
|
||||
}
|
||||
Serial.println();
|
||||
|
||||
i++;
|
||||
a += 4;
|
||||
totalSize -= bytesToWrite;
|
||||
bytesWritten += bytesToWrite;
|
||||
pageNumber++;
|
||||
|
||||
yield();
|
||||
vTaskDelay(5 / portTICK_PERIOD_MS); // Small delay between page writes
|
||||
}
|
||||
|
||||
// Ensure the NDEF message is properly terminated
|
||||
memset(pageBuffer, 0, 4);
|
||||
pageBuffer[0] = 0xFE; // NDEF record footer
|
||||
if (!(nfc.ntag2xx_WritePage(4+i, pageBuffer)))
|
||||
{
|
||||
Serial.println("Fehler beim Schreiben des End-Bits.");
|
||||
free(combinedData);
|
||||
free(tlvData);
|
||||
|
||||
if (bytesWritten < totalBytes) {
|
||||
Serial.println("WARNUNG: Nicht alle Daten konnten geschrieben werden!");
|
||||
Serial.print("Geschrieben: ");
|
||||
Serial.print(bytesWritten);
|
||||
Serial.print(" von ");
|
||||
Serial.print(totalBytes);
|
||||
Serial.println(" Bytes");
|
||||
Serial.print("Gestoppt bei Seite: ");
|
||||
Serial.println(pageNumber - 1);
|
||||
return 0;
|
||||
}
|
||||
|
||||
Serial.println("NDEF-Nachricht erfolgreich geschrieben.");
|
||||
free(combinedData);
|
||||
|
||||
Serial.println();
|
||||
Serial.println("✓ NDEF-Nachricht erfolgreich geschrieben!");
|
||||
Serial.print("✓ Tag-Typ: ");Serial.println(tagType);
|
||||
Serial.print("✓ Insgesamt ");Serial.print(bytesWritten);Serial.println(" Bytes geschrieben");
|
||||
Serial.print("✓ Verwendete Seiten: 4-");Serial.println(pageNumber - 1);
|
||||
Serial.print("✓ Speicher-Auslastung: ");
|
||||
Serial.print((bytesWritten * 100) / availableUserData);
|
||||
Serial.println("%");
|
||||
Serial.println("✓ Bestehende Daten wurden überschrieben");
|
||||
Serial.println();
|
||||
|
||||
return 1;
|
||||
}
|
||||
|
||||
bool decodeNdefAndReturnJson(const byte* encodedMessage) {
|
||||
bool decodeNdefAndReturnJson(const byte* encodedMessage, String uidString) {
|
||||
oledShowProgressBar(1, octoEnabled?5:4, "Reading", "Decoding data");
|
||||
|
||||
byte typeLength = encodedMessage[3];
|
||||
byte payloadLength = encodedMessage[4];
|
||||
// Debug: Print first 32 bytes of the raw data
|
||||
Serial.println("Raw NDEF data (first 32 bytes):");
|
||||
for (int i = 0; i < 32; i++) {
|
||||
if (encodedMessage[i] < 0x10) Serial.print("0");
|
||||
Serial.print(encodedMessage[i], HEX);
|
||||
Serial.print(" ");
|
||||
if ((i + 1) % 16 == 0) Serial.println();
|
||||
}
|
||||
Serial.println();
|
||||
|
||||
// Look for the NDEF TLV structure starting from the beginning
|
||||
int tlvOffset = 0;
|
||||
bool foundNdefTlv = false;
|
||||
|
||||
// Search for NDEF TLV (0x03) in the first few bytes
|
||||
for (int i = 0; i < 16; i++) {
|
||||
if (encodedMessage[i] == 0x03) {
|
||||
tlvOffset = i;
|
||||
foundNdefTlv = true;
|
||||
Serial.print("Found NDEF TLV at offset: ");
|
||||
Serial.println(tlvOffset);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (!foundNdefTlv) {
|
||||
Serial.println("No NDEF TLV found in tag data");
|
||||
return false;
|
||||
}
|
||||
|
||||
// Get the NDEF message length from TLV
|
||||
uint16_t ndefMessageLength = 0;
|
||||
int ndefRecordOffset = 0;
|
||||
|
||||
if (encodedMessage[tlvOffset + 1] == 0xFF) {
|
||||
// Extended length format: next 2 bytes contain the actual length
|
||||
ndefMessageLength = (encodedMessage[tlvOffset + 2] << 8) | encodedMessage[tlvOffset + 3];
|
||||
ndefRecordOffset = tlvOffset + 4; // Skip TLV tag + 0xFF + 2 length bytes
|
||||
Serial.print("NDEF Message Length (extended): ");
|
||||
} else {
|
||||
// Standard length format: single byte contains the length
|
||||
ndefMessageLength = encodedMessage[tlvOffset + 1];
|
||||
ndefRecordOffset = tlvOffset + 2; // Skip TLV tag + 1 length byte
|
||||
Serial.print("NDEF Message Length (standard): ");
|
||||
}
|
||||
Serial.println(ndefMessageLength);
|
||||
|
||||
// Get pointer to NDEF record
|
||||
const byte* ndefRecord = &encodedMessage[ndefRecordOffset];
|
||||
|
||||
// Parse NDEF record header
|
||||
byte recordHeader = ndefRecord[0];
|
||||
byte typeLength = ndefRecord[1];
|
||||
|
||||
Serial.print("NDEF Record Header: 0x");
|
||||
Serial.println(recordHeader, HEX);
|
||||
Serial.print("Type Length: ");
|
||||
Serial.println(typeLength);
|
||||
|
||||
// Determine payload length (can be 1 or 4 bytes depending on SR flag)
|
||||
uint32_t payloadLength = 0;
|
||||
byte payloadLengthBytes = 1;
|
||||
byte payloadLengthOffset = 2;
|
||||
|
||||
// Check if Short Record (SR) flag is set (bit 4)
|
||||
if (recordHeader & 0x10) { // SR flag
|
||||
payloadLength = ndefRecord[2];
|
||||
payloadLengthBytes = 1;
|
||||
payloadLengthOffset = 2;
|
||||
} else {
|
||||
// Long record format (4 bytes for payload length)
|
||||
payloadLength = (ndefRecord[2] << 24) | (ndefRecord[3] << 16) |
|
||||
(ndefRecord[4] << 8) | ndefRecord[5];
|
||||
payloadLengthBytes = 4;
|
||||
payloadLengthOffset = 2;
|
||||
}
|
||||
|
||||
Serial.print("Payload Length: ");
|
||||
Serial.println(payloadLength);
|
||||
Serial.print("Payload Length Bytes: ");
|
||||
Serial.println(payloadLengthBytes);
|
||||
|
||||
// Check for ID field (if IL flag is set)
|
||||
byte idLength = 0;
|
||||
if (recordHeader & 0x08) { // IL flag
|
||||
idLength = ndefRecord[payloadLengthOffset + payloadLengthBytes];
|
||||
Serial.print("ID Length: ");
|
||||
Serial.println(idLength);
|
||||
}
|
||||
|
||||
// Calculate offset to payload
|
||||
byte payloadOffset = 1 + 1 + payloadLengthBytes + typeLength + idLength;
|
||||
|
||||
Serial.print("Calculated payload offset: ");
|
||||
Serial.println(payloadOffset);
|
||||
|
||||
// Verify we have enough data
|
||||
if (payloadOffset + payloadLength > ndefMessageLength) {
|
||||
Serial.println("Invalid NDEF structure - payload extends beyond message");
|
||||
Serial.print("Payload offset + length: ");
|
||||
Serial.print(payloadOffset + payloadLength);
|
||||
Serial.print(", NDEF message length: ");
|
||||
Serial.println(ndefMessageLength);
|
||||
return false;
|
||||
}
|
||||
|
||||
// Print the record type for debugging
|
||||
Serial.print("Record Type: ");
|
||||
for (int i = 0; i < typeLength; i++) {
|
||||
Serial.print((char)ndefRecord[1 + 1 + payloadLengthBytes + i]);
|
||||
}
|
||||
Serial.println();
|
||||
|
||||
nfcJsonData = "";
|
||||
|
||||
for (int i = 2; i < payloadLength+2; i++)
|
||||
{
|
||||
nfcJsonData += (char)encodedMessage[3 + typeLength + i];
|
||||
// Extract JSON payload with validation
|
||||
uint32_t actualJsonLength = 0;
|
||||
for (uint32_t i = 0; i < payloadLength; i++) {
|
||||
byte currentByte = ndefRecord[payloadOffset + i];
|
||||
|
||||
// Stop at null terminator or if we find the end of JSON
|
||||
if (currentByte == 0x00) {
|
||||
Serial.print("Found null terminator at position: ");
|
||||
Serial.println(i);
|
||||
break;
|
||||
}
|
||||
|
||||
// Only add printable characters and common JSON characters
|
||||
if (currentByte >= 32 && currentByte <= 126) {
|
||||
nfcJsonData += (char)currentByte;
|
||||
actualJsonLength++;
|
||||
} else {
|
||||
Serial.print("Skipping non-printable byte at position ");
|
||||
Serial.print(i);
|
||||
Serial.print(": 0x");
|
||||
Serial.println(currentByte, HEX);
|
||||
}
|
||||
|
||||
// Check if we've reached the end of a JSON object
|
||||
if (currentByte == '}') {
|
||||
// Count opening and closing braces to detect complete JSON
|
||||
int braceCount = 0;
|
||||
for (uint32_t j = 0; j <= i; j++) {
|
||||
if (ndefRecord[payloadOffset + j] == '{') braceCount++;
|
||||
else if (ndefRecord[payloadOffset + j] == '}') braceCount--;
|
||||
}
|
||||
|
||||
if (braceCount == 0) {
|
||||
Serial.print("Found complete JSON object at position: ");
|
||||
Serial.println(i);
|
||||
actualJsonLength = i + 1;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Serial.print("Actual JSON length extracted: ");
|
||||
Serial.println(actualJsonLength);
|
||||
Serial.print("Total nfcJsonData length: ");
|
||||
Serial.println(nfcJsonData.length());
|
||||
Serial.println("=== DECODED JSON DATA START ===");
|
||||
Serial.println(nfcJsonData);
|
||||
Serial.println("=== DECODED JSON DATA END ===");
|
||||
|
||||
// Check if JSON was truncated
|
||||
if (nfcJsonData.length() < payloadLength && !nfcJsonData.endsWith("}")) {
|
||||
Serial.println("WARNING: JSON payload appears to be truncated!");
|
||||
Serial.print("Expected payload length: ");
|
||||
Serial.println(payloadLength);
|
||||
Serial.print("Actual extracted length: ");
|
||||
Serial.println(nfcJsonData.length());
|
||||
}
|
||||
|
||||
// Trim any trailing whitespace or invalid characters
|
||||
nfcJsonData.trim();
|
||||
|
||||
// JSON-Dokument verarbeiten
|
||||
JsonDocument doc; // Passen Sie die Größe an den JSON-Inhalt an
|
||||
JsonDocument doc;
|
||||
DeserializationError error = deserializeJson(doc, nfcJsonData);
|
||||
if (error)
|
||||
{
|
||||
@@ -235,7 +713,7 @@ bool decodeNdefAndReturnJson(const byte* encodedMessage) {
|
||||
// Sende die aktualisierten AMS-Daten an alle WebSocket-Clients
|
||||
Serial.println("JSON-Dokument erfolgreich verarbeitet");
|
||||
Serial.println(doc.as<String>());
|
||||
if (doc["sm_id"].is<String>() && doc["sm_id"] != "")
|
||||
if (doc["sm_id"].is<String>() && doc["sm_id"] != "" && doc["sm_id"] != "0")
|
||||
{
|
||||
oledShowProgressBar(2, octoEnabled?5:4, "Spool Tag", "Weighing");
|
||||
Serial.println("SPOOL-ID gefunden: " + doc["sm_id"].as<String>());
|
||||
@@ -255,6 +733,16 @@ bool decodeNdefAndReturnJson(const byte* encodedMessage) {
|
||||
oledShowProgressBar(1, 1, "Failure", "Scan spool first");
|
||||
}
|
||||
}
|
||||
// Brand Filament not registered to Spoolman
|
||||
else if ((!doc["sm_id"].is<String>() || (doc["sm_id"].is<String>() && (doc["sm_id"] == "0" || doc["sm_id"] == "")))
|
||||
&& doc["brand"].is<String>() && doc["artnr"].is<String>())
|
||||
{
|
||||
doc["sm_id"] = "0"; // Ensure sm_id is set to 0
|
||||
// If no sm_id is present but the brand is Brand Filament then
|
||||
// create a new spool, maybe brand too, in Spoolman
|
||||
Serial.println("New Brand Filament Tag found!");
|
||||
createBrandFilament(doc, uidString);
|
||||
}
|
||||
else
|
||||
{
|
||||
Serial.println("Keine SPOOL-ID gefunden.");
|
||||
@@ -375,6 +863,9 @@ void writeJsonToTag(void *parameter) {
|
||||
nfcReadingTaskSuspendRequest = false;
|
||||
pauseBambuMqttTask = false;
|
||||
|
||||
free(params->payload);
|
||||
delete params;
|
||||
|
||||
vTaskDelete(NULL);
|
||||
}
|
||||
|
||||
@@ -431,7 +922,19 @@ void scanRfidTask(void * parameter) {
|
||||
|
||||
oledShowProgressBar(0, octoEnabled?5:4, "Reading", "Detecting tag");
|
||||
|
||||
//vTaskDelay(500 / portTICK_PERIOD_MS);
|
||||
// Wait 1 second after tag detection to stabilize connection
|
||||
Serial.println("Tag detected, waiting 1 second for stabilization...");
|
||||
vTaskDelay(1000 / portTICK_PERIOD_MS);
|
||||
|
||||
// create Tag UID string
|
||||
String uidString = "";
|
||||
for (uint8_t i = 0; i < uidLength; i++) {
|
||||
//TBD: Rework to remove all the string operations
|
||||
uidString += String(uid[i], HEX);
|
||||
if (i < uidLength - 1) {
|
||||
uidString += ":"; // Optional: Trennzeichen hinzufügen
|
||||
}
|
||||
}
|
||||
|
||||
if (uidLength == 7)
|
||||
{
|
||||
@@ -444,25 +947,35 @@ void scanRfidTask(void * parameter) {
|
||||
|
||||
// We probably have an NTAG2xx card (though it could be Ultralight as well)
|
||||
Serial.println("Seems to be an NTAG2xx tag (7 byte UID)");
|
||||
Serial.print("Tag size: ");
|
||||
Serial.print(tagSize);
|
||||
Serial.println(" bytes");
|
||||
|
||||
uint8_t numPages = readTagSize()/4;
|
||||
|
||||
for (uint8_t i = 4; i < 4+numPages; i++) {
|
||||
|
||||
if (!nfc.ntag2xx_ReadPage(i, data+(i-4) * 4))
|
||||
{
|
||||
break; // Stop if reading fails
|
||||
}
|
||||
|
||||
// Check for NDEF message end
|
||||
if (data[(i - 4) * 4] == 0xFE)
|
||||
{
|
||||
Serial.println("Found NDEF message end marker");
|
||||
break; // End of NDEF message
|
||||
}
|
||||
|
||||
yield();
|
||||
esp_task_wdt_reset();
|
||||
vTaskDelay(pdMS_TO_TICKS(1));
|
||||
// Increased delay to ensure stable reading
|
||||
vTaskDelay(pdMS_TO_TICKS(5)); // Increased from 1ms to 5ms
|
||||
}
|
||||
|
||||
if (!decodeNdefAndReturnJson(data))
|
||||
|
||||
Serial.println("Tag reading completed, starting NDEF decode...");
|
||||
|
||||
if (!decodeNdefAndReturnJson(data, uidString))
|
||||
{
|
||||
oledShowProgressBar(1, 1, "Failure", "Unknown tag");
|
||||
nfcReaderState = NFC_READ_ERROR;
|
||||
@@ -497,6 +1010,18 @@ void scanRfidTask(void * parameter) {
|
||||
Serial.println("Tag entfernt");
|
||||
if (!bambuCredentials.autosend_enable) oledShowWeight(weight);
|
||||
}
|
||||
// Reset state after successful read when tag is removed
|
||||
else if (!success && nfcReaderState == NFC_READ_SUCCESS)
|
||||
{
|
||||
nfcReaderState = NFC_IDLE;
|
||||
Serial.println("Tag nach erfolgreichem Lesen entfernt - bereit für nächsten Tag");
|
||||
}
|
||||
|
||||
// Add a longer pause after successful reading to prevent immediate re-reading
|
||||
if (nfcReaderState == NFC_READ_SUCCESS) {
|
||||
Serial.println("Tag erfolgreich gelesen - warte 5 Sekunden vor nächstem Scan");
|
||||
vTaskDelay(5000 / portTICK_PERIOD_MS); // 5 second pause
|
||||
}
|
||||
|
||||
// aktualisieren der Website wenn sich der Status ändert
|
||||
sendNfcData();
|
||||
|
@@ -48,6 +48,9 @@ void scale_loop(void * parameter) {
|
||||
Serial.println("Scale Loop started");
|
||||
Serial.println("++++++++++++++++++++++++++++++");
|
||||
|
||||
vTaskDelay(pdMS_TO_TICKS(500));
|
||||
scale_tare_counter = 10; // damit beim Starten der Waage automatisch getart wird
|
||||
|
||||
for(;;) {
|
||||
if (scale.is_ready())
|
||||
{
|
||||
@@ -120,7 +123,7 @@ void start_scale(bool touchSensorConnected) {
|
||||
if (scale.wait_ready_timeout(1000))
|
||||
{
|
||||
scale.set_scale(calibrationValue); // this value is obtained by calibrating the scale with known weights; see the README for details
|
||||
scale.tare();
|
||||
//scale.tare();
|
||||
}
|
||||
|
||||
// Display Gewicht
|
||||
|
@@ -48,9 +48,15 @@ void onWsEvent(AsyncWebSocket *server, AsyncWebSocketClient *client, AwsEventTyp
|
||||
} else if (type == WS_EVT_PONG) {
|
||||
Serial.printf("WebSocket Client #%u pong\n", client->id());
|
||||
} else if (type == WS_EVT_DATA) {
|
||||
String message = String((char*)data);
|
||||
JsonDocument doc;
|
||||
deserializeJson(doc, message);
|
||||
DeserializationError error = deserializeJson(doc, (char*)data, len);
|
||||
//String message = String((char*)data);
|
||||
//deserializeJson(doc, message);
|
||||
|
||||
if (error) {
|
||||
Serial.println("JSON deserialization failed: " + String(error.c_str()));
|
||||
return;
|
||||
}
|
||||
|
||||
if (doc["type"] == "heartbeat") {
|
||||
// Sende Heartbeat-Antwort
|
||||
|
Reference in New Issue
Block a user