Compare commits

...

11 Commits

Author SHA1 Message Date
a3aef819c8 docs: update changelog for version 1.2.99
Some checks failed
Release Workflow / route (push) Successful in 8s
Release Workflow / verify-provider (push) Successful in 4s
Release Workflow / github-release (push) Has been skipped
Release Workflow / gitea-release (push) Has been cancelled
2025-02-21 17:42:40 +01:00
a62b5ec933 docs: update webpages for version v1.2.99 2025-02-21 17:42:40 +01:00
1a8cf7a58f docs: add SPIFFS change detection and binary copying to release workflows 2025-02-21 17:42:04 +01:00
b0b3d41c84 docs: add backup and restore functions for JSON configurations during OTA updates 2025-02-21 17:38:20 +01:00
38b68aecfc docs: update JSON field type checks from JsonObject to String for improved validation 2025-02-21 17:11:48 +01:00
4992f5f433 docs: update JSON handling in API and Bambu modules for improved object management 2025-02-21 16:53:45 +01:00
5cbbe1d231 docs: update platformio.ini dependencies and improve version handling in website.cpp 2025-02-21 16:35:19 +01:00
9b29460d64 docs: update Cache-Control header to reflect a 1-week duration 2025-02-21 15:15:01 +01:00
dd14d475b7 docs: remove version definition from website.cpp 2025-02-21 15:14:36 +01:00
9e6cd3b451 docs: optimize WiFi and WebSocket settings; enhance TCP/IP stack configuration 2025-02-21 15:07:16 +01:00
c1be6ca582 docs: update upgrade page title and heading; adjust cache control duration 2025-02-21 14:35:12 +01:00
11 changed files with 166 additions and 49 deletions

View File

@@ -74,6 +74,12 @@ jobs:
# Copy firmware binary
cp .pio/build/esp32dev/firmware.bin .pio/build/esp32dev/filaman_${VERSION}.bin
# Copy SPIFFS binary if SPIFFS changed
if [[ "${{ steps.check_spiffs.outputs.SPIFFS_CHANGED }}" == "true" ]]; then
echo "SPIFFS changes detected, copying SPIFFS binary..."
cp .pio/build/esp32dev/spiffs.bin .pio/build/esp32dev/webpage_${VERSION}.bin
fi
# Create full binary (always)
(cd .pio/build/esp32dev && \
esptool.py --chip esp32 merge_bin \
@@ -87,12 +93,6 @@ jobs:
0x10000 firmware.bin \
0x390000 spiffs.bin)
# Only copy SPIFFS binary if data changed
if [[ "${{ steps.check_data.outputs.DATA_CHANGED }}" == "true" ]]; then
echo "Data changes detected, copying SPIFFS binary..."
cp .pio/build/esp32dev/spiffs.bin .pio/build/esp32dev/webpage_${VERSION}.bin
fi
# Verify file sizes
echo "File sizes:"
(cd .pio/build/esp32dev && ls -lh *.bin)

View File

@@ -36,6 +36,17 @@ jobs:
else
echo "DATA_CHANGED=false" >> $GITHUB_OUTPUT
fi
- name: Check for SPIFFS changes
id: check_spiffs
run: |
git fetch --unshallow || true
CHANGED_FILES=$(git diff --name-only HEAD^..HEAD)
if echo "$CHANGED_FILES" | grep -q "^data/\|^html/"; then
echo "SPIFFS_CHANGED=true" >> $GITHUB_OUTPUT
else
echo "SPIFFS_CHANGED=false" >> $GITHUB_OUTPUT
fi
- name: Build Firmware
run: |
@@ -49,6 +60,12 @@
# Copy firmware binary
cp .pio/build/esp32dev/firmware.bin .pio/build/esp32dev/filaman_${VERSION}.bin
# Copy SPIFFS binary if SPIFFS changed
if [[ "${{ steps.check_spiffs.outputs.SPIFFS_CHANGED }}" == "true" ]]; then
echo "SPIFFS changes detected, copying SPIFFS binary..."
cp .pio/build/esp32dev/spiffs.bin .pio/build/esp32dev/webpage_${VERSION}.bin
fi
# Create full binary (always)
(cd .pio/build/esp32dev && \
esptool.py --chip esp32 merge_bin \

View File

@@ -1,5 +1,21 @@
# Changelog
## [1.2.99] - 2025-02-21
### Added
- add SPIFFS change detection and binary copying to release workflows
- add backup and restore functions for JSON configurations during OTA updates
### Changed
- update webpages for version v1.2.99
- update JSON field type checks from JsonObject to String for improved validation
- update JSON handling in API and Bambu modules for improved object management
- update platformio.ini dependencies and improve version handling in website.cpp
- update Cache-Control header to reflect a 1-week duration
- remove version definition from website.cpp
- optimize WiFi and WebSocket settings; enhance TCP/IP stack configuration
- update upgrade page title and heading; adjust cache control duration
## [1.2.98] - 2025-02-21
### Changed
- update webpages for version v1.2.98

View File

@@ -1,9 +1,10 @@
<!DOCTYPE html>
<!-- head --><!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>FilaMan - Firmware Update</title>
<title>FilaMan - Filament Management Tool</title>
<link rel="icon" type="image/png" href="/favicon.ico">
<link rel="stylesheet" href="style.css">
<script>
fetch('/api/version')
@@ -49,7 +50,8 @@
<!-- head -->
<div class="content">
<h1>System Update</h1>
<h1>Firmware Upgrade</h1>
<div class="warning">
<strong>Warning:</strong> Do not power off the device during update.
</div>

View File

@@ -9,7 +9,7 @@
; https://docs.platformio.org/page/projectconf.html
[common]
version = "1.2.98"
version = "1.2.99"
[env:esp32dev]
platform = espressif32
@@ -19,10 +19,11 @@ monitor_speed = 115200
lib_deps =
tzapu/WiFiManager @ ^2.0.17
#https://github.com/me-no-dev/ESPAsyncWebServer.git#master
https://github.com/me-no-dev/ESPAsyncWebServer.git#master
#me-no-dev/AsyncTCP @ ^1.1.1
mathieucarbou/ESPAsyncWebServer @ ^3.6.0
esp32async/AsyncTCP @ ^3.3.5
https://github.com/esphome/AsyncTCP.git
#mathieucarbou/ESPAsyncWebServer @ ^3.6.0
#esp32async/AsyncTCP @ ^3.3.5
bogde/HX711 @ ^0.7.5
adafruit/Adafruit SSD1306 @ ^2.5.13
adafruit/Adafruit GFX Library @ ^1.11.11
@@ -45,7 +46,7 @@ build_flags =
-fdata-sections
-DNDEBUG
-mtext-section-literals
'-D VERSION="${common.version}"'
-DVERSION=\"${common.version}\"
-DASYNCWEBSERVER_REGEX
-DCORE_DEBUG_LEVEL=3
-DCONFIG_ARDUHAL_LOG_COLORS=1
@@ -54,6 +55,11 @@ build_flags =
-DCONFIG_ESP32_PANIC_PRINT_REBOOT
-DBOOT_APP_PARTITION_OTA_0=1
-DCONFIG_LOG_DEFAULT_LEVEL=3
-DCONFIG_LWIP_TCP_MSL=60000
-DCONFIG_LWIP_TCP_WND_DEFAULT=8192
-DCONFIG_LWIP_TCP_SND_BUF_DEFAULT=4096
-DCONFIG_LWIP_TCP_RCV_BUF_DEFAULT=4096
-DCONFIG_LWIP_MAX_ACTIVE_TCP=16
extra_scripts =
scripts/extra_script.py

View File

@@ -60,10 +60,10 @@ JsonDocument fetchSpoolsForWebsite() {
JsonArray filteredSpools = filteredDoc.to<JsonArray>();
for (JsonObject spool : spools) {
JsonObject filteredSpool = filteredSpools.createNestedObject();
JsonObject filteredSpool = filteredSpools.add<JsonObject>();
filteredSpool["extra"]["nfc_id"] = spool["extra"]["nfc_id"];
JsonObject filament = filteredSpool.createNestedObject("filament");
JsonObject filament = filteredSpool["filament"].to<JsonObject>();
filament["sm_id"] = spool["id"];
filament["id"] = spool["filament"]["id"];
filament["name"] = spool["filament"]["name"];
@@ -73,7 +73,7 @@ JsonDocument fetchSpoolsForWebsite() {
filament["price_meter"] = spool["filament"]["extra"]["price_meter"];
filament["price_gramm"] = spool["filament"]["extra"]["price_gramm"];
JsonObject vendor = filament.createNestedObject("vendor");
JsonObject vendor = filament["vendor"].to<JsonObject>();
vendor["id"] = spool["filament"]["vendor"]["id"];
vendor["name"] = spool["filament"]["vendor"]["name"];
}
@@ -110,13 +110,13 @@ JsonDocument fetchAllSpoolsInfo() {
JsonArray filteredSpools = filteredDoc.to<JsonArray>();
for (JsonObject spool : spools) {
JsonObject filteredSpool = filteredSpools.createNestedObject();
JsonObject filteredSpool = filteredSpools.add<JsonObject>();
filteredSpool["price"] = spool["price"];
filteredSpool["remaining_weight"] = spool["remaining_weight"];
filteredSpool["used_weight"] = spool["used_weight"];
filteredSpool["extra"]["nfc_id"] = spool["extra"]["nfc_id"];
JsonObject filament = filteredSpool.createNestedObject("filament");
JsonObject filament = filteredSpool["filament"].to<JsonObject>();
filament["id"] = spool["filament"]["id"];
filament["name"] = spool["filament"]["name"];
filament["material"] = spool["filament"]["material"];
@@ -125,11 +125,11 @@ JsonDocument fetchAllSpoolsInfo() {
filament["spool_weight"] = spool["filament"]["spool_weight"];
filament["color_hex"] = spool["filament"]["color_hex"];
JsonObject vendor = filament.createNestedObject("vendor");
JsonObject vendor = filament["vendor"].to<JsonObject>();
vendor["id"] = spool["filament"]["vendor"]["id"];
vendor["name"] = spool["filament"]["vendor"]["name"];
JsonObject extra = filament.createNestedObject("extra");
JsonObject extra = filament["extra"].to<JsonObject>();
extra["nozzle_temperature"] = spool["filament"]["extra"]["nozzle_temperature"];
extra["price_gramm"] = spool["filament"]["extra"]["price_gramm"];
extra["price_meter"] = spool["filament"]["extra"]["price_meter"];
@@ -186,7 +186,7 @@ bool updateSpoolTagId(String uidString, const char* payload) {
}
// Überprüfe, ob die erforderlichen Felder vorhanden sind
if (!doc.containsKey("sm_id") || doc["sm_id"] == "") {
if (!doc["sm_id"].is<String>() || doc["sm_id"].as<String>() == "") {
Serial.println("Keine Spoolman-ID gefunden.");
return false;
}
@@ -368,7 +368,7 @@ bool checkSpoolmanExtraFields() {
for (uint8_t s = 0; s < extraLength; s++) {
bool found = false;
for (JsonObject field : doc.as<JsonArray>()) {
if (field.containsKey("key") && field["key"] == extraFields[s]) {
if (field["key"].is<String>() && field["key"] == extraFields[s]) {
Serial.println("Feld gefunden: " + extraFields[s]);
found = true;
break;
@@ -430,7 +430,7 @@ bool checkSpoolmanInstance(const String& url) {
String payload = http.getString();
JsonDocument doc;
DeserializationError error = deserializeJson(doc, payload);
if (!error && doc.containsKey("status")) {
if (!error && doc["status"].is<String>()) {
const char* status = doc["status"];
http.end();
@@ -469,7 +469,7 @@ bool saveSpoolmanUrl(const String& url) {
String loadSpoolmanUrl() {
JsonDocument doc;
if (loadJsonValue("/spoolman_url.json", doc) && doc.containsKey("url")) {
if (loadJsonValue("/spoolman_url.json", doc) && doc["url"].is<String>()) {
return doc["url"].as<String>();
}
Serial.println("Keine gültige Spoolman-URL gefunden.");

View File

@@ -58,7 +58,7 @@ bool saveBambuCredentials(const String& ip, const String& serialnr, const String
bool loadBambuCredentials() {
JsonDocument doc;
if (loadJsonValue("/bambu_credentials.json", doc) && doc.containsKey("bambu_ip")) {
if (loadJsonValue("/bambu_credentials.json", doc) && doc["bambu_ip"].is<String>()) {
// Temporäre Strings für die Werte
String ip = doc["bambu_ip"].as<String>();
String code = doc["bambu_accesscode"].as<String>();
@@ -270,9 +270,9 @@ void mqtt_callback(char* topic, byte* payload, unsigned int length) {
}
// Prüfen, ob "print->upgrade_state" und "print.ams.ams" existieren
if (doc["print"].containsKey("upgrade_state")) {
if (doc["print"]["upgrade_state"].is<String>()) {
// Prüfen ob AMS-Daten vorhanden sind
if (!doc["print"].containsKey("ams") || !doc["print"]["ams"].containsKey("ams")) {
if (!doc["print"]["ams"].is<String>() || !doc["print"]["ams"]["ams"].is<String>()) {
return;
}
@@ -315,7 +315,7 @@ void mqtt_callback(char* topic, byte* payload, unsigned int length) {
}
// Prüfe die externe Spule
if (!hasChanges && doc["print"].containsKey("vt_tray")) {
if (!hasChanges && doc["print"]["vt_tray"].is<String>()) {
JsonObject vtTray = doc["print"]["vt_tray"];
bool foundExternal = false;
@@ -363,7 +363,7 @@ void mqtt_callback(char* topic, byte* payload, unsigned int length) {
ams_count = amsArray.size();
// Wenn externe Spule vorhanden, füge sie hinzu
if (doc["print"].containsKey("vt_tray")) {
if (doc["print"]["vt_tray"].is<String>()) {
JsonObject vtTray = doc["print"]["vt_tray"];
int extIdx = ams_count; // Index für externe Spule
ams_data[extIdx].ams_id = 255; // Spezielle ID für externe Spule
@@ -387,14 +387,14 @@ void mqtt_callback(char* topic, byte* payload, unsigned int length) {
JsonArray wsArray = wsDoc.to<JsonArray>();
for (int i = 0; i < ams_count; i++) {
JsonObject amsObj = wsArray.createNestedObject();
JsonObject amsObj = wsArray.add<JsonObject>();
amsObj["ams_id"] = ams_data[i].ams_id;
JsonArray trays = amsObj.createNestedArray("tray");
JsonArray trays = amsObj["tray"].to<JsonArray>();
int maxTrays = (ams_data[i].ams_id == 255) ? 1 : 4;
for (int j = 0; j < maxTrays; j++) {
JsonObject trayObj = trays.createNestedObject();
JsonObject trayObj = trays.add<JsonObject>();
trayObj["id"] = ams_data[i].trays[j].id;
trayObj["tray_info_idx"] = ams_data[i].trays[j].tray_info_idx;
trayObj["tray_type"] = ams_data[i].trays[j].tray_type;
@@ -427,14 +427,14 @@ void mqtt_callback(char* topic, byte* payload, unsigned int length) {
JsonArray wsArray = wsDoc.to<JsonArray>();
for (int j = 0; j < ams_count; j++) {
JsonObject amsObj = wsArray.createNestedObject();
JsonObject amsObj = wsArray.add<JsonObject>();
amsObj["ams_id"] = ams_data[j].ams_id;
JsonArray trays = amsObj.createNestedArray("tray");
JsonArray trays = amsObj["tray"].to<JsonArray>();
int maxTrays = (ams_data[j].ams_id == 255) ? 1 : 4;
for (int k = 0; k < maxTrays; k++) {
JsonObject trayObj = trays.createNestedObject();
JsonObject trayObj = trays.add<JsonObject>();
trayObj["id"] = ams_data[j].trays[k].id;
trayObj["tray_info_idx"] = ams_data[j].trays[k].tray_info_idx;
trayObj["tray_type"] = ams_data[j].trays[k].tray_type;

View File

@@ -29,12 +29,37 @@ void stopAllTasks() {
Serial.println("All tasks stopped");
}
void backupJsonConfigs() {
const char* configs[] = {"/bambu_credentials.json", "/spoolman_url.json"};
for (const char* config : configs) {
if (SPIFFS.exists(config)) {
String backupPath = String(config) + ".bak";
SPIFFS.remove(backupPath);
SPIFFS.rename(config, backupPath);
}
}
}
void restoreJsonConfigs() {
const char* configs[] = {"/bambu_credentials.json", "/spoolman_url.json"};
for (const char* config : configs) {
String backupPath = String(config) + ".bak";
if (SPIFFS.exists(backupPath)) {
SPIFFS.remove(config);
SPIFFS.rename(backupPath, config);
}
}
}
void performStageTwo() {
if (!SPIFFS.begin(true)) {
Serial.println("Error: Could not mount SPIFFS for stage 2");
return;
}
// Backup JSON configs before update
backupJsonConfigs();
File firmwareFile = SPIFFS.open("/firmware.bin", "r");
if (!firmwareFile) {
Serial.println("Error: Could not open firmware.bin from SPIFFS");
@@ -71,6 +96,10 @@ void performStageTwo() {
firmwareFile.close();
SPIFFS.remove("/firmware.bin"); // Cleanup
// Restore JSON configs after update
restoreJsonConfigs();
Serial.println("Stage 2 update successful, restarting...");
delay(500);
ESP.restart();

View File

@@ -8,8 +8,11 @@
#define UPDATE_SIZE_UNKNOWN 0xFFFFFFFF
#endif
void stopAllTasks();
void handleOTAUpload(AsyncWebServerRequest *request, String filename, size_t index, uint8_t *data, size_t len, bool final);
void handleOTAUpload(AsyncWebServerRequest *request, const String& filename, size_t index, uint8_t *data, size_t len, bool final);
void checkForStagedUpdate();
void performStageTwo();
void stopAllTasks();
void backupJsonConfigs();
void restoreJsonConfigs();
#endif

View File

@@ -7,12 +7,14 @@
#include "nfc.h"
#include "scale.h"
#include "esp_task_wdt.h"
#include "ota.h"
#include <Update.h>
#ifndef VERSION
#define VERSION "1.1.0"
#endif
// Cache-Control Header definieren
#define CACHE_CONTROL "max-age=31536000" // Cache für 1 Jahr
#define VERSION "1.0.0"
#define CACHE_CONTROL "max-age=604800" // Cache für 1 Woche
AsyncWebServer server(webserverPort);
AsyncWebSocket ws("/ws");
@@ -46,7 +48,7 @@ void onWsEvent(AsyncWebSocket *server, AsyncWebSocketClient *client, AwsEventTyp
}
else if (doc["type"] == "writeNfcTag") {
if (doc.containsKey("payload")) {
if (doc["payload"].is<String>()) {
// Versuche NFC-Daten zu schreiben
String payloadString;
serializeJson(doc["payload"], payloadString);
@@ -153,11 +155,15 @@ void sendNfcData(AsyncWebSocketClient *client) {
void sendAmsData(AsyncWebSocketClient *client) {
if (ams_count > 0) {
ws.textAll("{\"type\":\"amsData\", \"payload\":" + amsJsonData + "}");
ws.textAll("{\"type\":\"amsData\",\"payload\":" + amsJsonData + "}");
}
}
void setupWebserver(AsyncWebServer &server) {
// WebSocket-Optimierungen
ws.onEvent(onWsEvent);
ws.enable(true);
// Konfiguriere Server für große Uploads
server.onRequestBody([](AsyncWebServerRequest *request, uint8_t *data, size_t len, size_t index, size_t total){});
server.onFileUpload([](AsyncWebServerRequest *request, const String& filename, size_t index, uint8_t *data, size_t len, bool final){});
@@ -228,7 +234,7 @@ void setupWebserver(AsyncWebServer &server) {
html.replace("{{spoolmanUrl}}", spoolmanUrl);
JsonDocument doc;
if (loadJsonValue("/bambu_credentials.json", doc) && doc.containsKey("bambu_ip")) {
if (loadJsonValue("/bambu_credentials.json", doc) && doc["bambu_ip"].is<String>()) {
String bambuIp = doc["bambu_ip"].as<String>();
String bambuSerial = doc["bambu_serialnr"].as<String>();
String bambuCode = doc["bambu_accesscode"].as<String>();
@@ -414,7 +420,8 @@ void setupWebserver(AsyncWebServer &server) {
);
server.on("/api/version", HTTP_GET, [](AsyncWebServerRequest *request){
String jsonResponse = "{\"version\": \"" VERSION "\"}";
String fm_version = VERSION;
String jsonResponse = "{\"version\": \""+ fm_version +"\"}";
request->send(200, "application/json", jsonResponse);
});
@@ -436,26 +443,29 @@ void setupWebserver(AsyncWebServer &server) {
}
void handleOTAUpload(AsyncWebServerRequest *request, const String& filename, size_t index, uint8_t *data, size_t len, bool final) {
static bool isSpiffsUpdate = false;
if (!index) {
// Start eines neuen Uploads
Serial.println("Update Start: " + filename);
// Überprüfe den Dateityp basierend auf dem Dateinamen
bool isFirmware = filename.startsWith("filaman_");
bool isWebpage = filename.startsWith("webpage_");
isSpiffsUpdate = filename.startsWith("webpage_");
if (!isFirmware && !isWebpage) {
if (!isFirmware && !isSpiffsUpdate) {
request->send(400, "application/json", "{\"message\":\"Invalid file type. File must start with 'filaman_' or 'webpage_'\"}");
return;
}
// Wähle den Update-Typ basierend auf dem Dateinamen
if (isWebpage) {
if (isSpiffsUpdate) {
if (!Update.begin(SPIFFS.totalBytes(), U_SPIFFS)) {
Update.printError(Serial);
request->send(400, "application/json", "{\"message\":\"SPIFFS Update failed: " + String(Update.errorString()) + "\"}");
return;
}
// Backup JSON configs before SPIFFS update
backupJsonConfigs();
} else {
if (!Update.begin(UPDATE_SIZE_UNKNOWN, U_FLASH)) {
Update.printError(Serial);
@@ -477,8 +487,34 @@ void handleOTAUpload(AsyncWebServerRequest *request, const String& filename, siz
request->send(400, "application/json", "{\"message\":\"Update failed: " + String(Update.errorString()) + "\"}");
return;
}
if (isSpiffsUpdate) {
// Restore JSON configs after SPIFFS update
restoreJsonConfigs();
}
request->send(200, "application/json", "{\"message\":\"Update successful!\", \"restart\": true}");
delay(500);
ESP.restart();
}
}
void backupJsonConfigs() {
const char* configs[] = {"/bambu_credentials.json", "/spoolman_url.json"};
for (const char* config : configs) {
if (SPIFFS.exists(config)) {
String backupPath = String(config) + ".bak";
SPIFFS.remove(backupPath);
SPIFFS.rename(config, backupPath);
}
}
}
void restoreJsonConfigs() {
const char* configs[] = {"/bambu_credentials.json", "/spoolman_url.json"};
for (const char* config : configs) {
String backupPath = String(config) + ".bak";
if (SPIFFS.exists(backupPath)) {
SPIFFS.remove(config);
SPIFFS.rename(backupPath, config);
}
}
}

View File

@@ -10,11 +10,19 @@ WiFiManager wm;
bool wm_nonblocking = false;
void initWiFi() {
// Optimierte WiFi-Einstellungen
WiFi.mode(WIFI_STA); // explicitly set mode, esp defaults to STA+AP
WiFi.setSleep(false); // disable sleep mode
esp_wifi_set_ps(WIFI_PS_NONE);
// Maximale Sendeleistung
WiFi.setTxPower(WIFI_POWER_19_5dBm); // Set maximum transmit power
//esp_wifi_set_max_tx_power(72); // Setze maximale Sendeleistung auf 20dBm
// Optimiere TCP/IP Stack
esp_wifi_set_protocol(WIFI_IF_STA, WIFI_PROTOCOL_11B | WIFI_PROTOCOL_11G | WIFI_PROTOCOL_11N);
// Aktiviere WiFi-Roaming für bessere Stabilität
esp_wifi_set_rssi_threshold(-80);
if(wm_nonblocking) wm.setConfigPortalBlocking(false);
wm.setConfigPortalTimeout(320); // Portal nach 5min schließen