Compare commits

22 Commits

v1.2.95...1de283b62f
| SHA1 |
|---|
| 1de283b62f |
| f1eb78eb38 |
| 8a65b86475 |
| a3aef819c8 |
| a62b5ec933 |
| 1a8cf7a58f |
| b0b3d41c84 |
| 38b68aecfc |
| 4992f5f433 |
| 5cbbe1d231 |
| 9b29460d64 |
| dd14d475b7 |
| 9e6cd3b451 |
| c1be6ca582 |
| 265ff0c787 |
| 67eca82ac5 |
| 568db90db0 |
| 2dfd53d64a |
| 262a2fcbd4 |
| 3770de15d3 |
| 75a74ec9bd |
| 979adcbb14 |
							
								
								
									
.github/workflows/providers/gitea-release.yml (102 changed lines, vendored)

							| @@ -3,9 +3,6 @@ name: Gitea Release | ||||
| on: | ||||
|   workflow_call: | ||||
|     inputs: | ||||
|       gitea_ref_name: | ||||
|         required: true | ||||
|         type: string | ||||
|       gitea_server_url: | ||||
|         required: true | ||||
|         type: string | ||||
| @@ -48,29 +45,66 @@ jobs: | ||||
|           echo "SPIFFS_CHANGED=false" >> $GITHUB_OUTPUT | ||||
|         fi | ||||
|      | ||||
|     - name: Check for Data changes | ||||
|       id: check_data | ||||
|       run: | | ||||
|         git fetch --unshallow || true | ||||
|         CHANGED_FILES=$(git diff --name-only HEAD^..HEAD) | ||||
|         if echo "$CHANGED_FILES" | grep -q "^data/"; then | ||||
|           echo "DATA_CHANGED=true" >> $GITHUB_OUTPUT | ||||
|         else | ||||
|           echo "DATA_CHANGED=false" >> $GITHUB_OUTPUT | ||||
|         fi | ||||
|      | ||||
|     - name: Get version from platformio.ini | ||||
|       id: get_version | ||||
|       run: | | ||||
|         VERSION=$(grep '^version = ' platformio.ini | cut -d'"' -f2) | ||||
|         echo "VERSION=$VERSION" >> $GITHUB_OUTPUT | ||||
|  | ||||
|     - name: Build Firmware | ||||
|       run: | | ||||
|         # Get version from platformio.ini | ||||
|         VERSION=$(grep '^version = ' platformio.ini | cut -d'"' -f2) | ||||
|         VERSION=${{ steps.get_version.outputs.VERSION }} | ||||
|          | ||||
|         # Always build firmware | ||||
|         # Always build firmware and SPIFFS | ||||
|         echo "Building firmware and SPIFFS..." | ||||
|         pio run -e esp32dev | ||||
|         pio run -t buildfs | ||||
|          | ||||
|         # Copy firmware binary | ||||
|         cp .pio/build/esp32dev/firmware.bin .pio/build/esp32dev/filaman_${VERSION}.bin | ||||
|          | ||||
|         # Only build SPIFFS if changed | ||||
|         # Copy SPIFFS binary if SPIFFS changed | ||||
|         if [[ "${{ steps.check_spiffs.outputs.SPIFFS_CHANGED }}" == "true" ]]; then | ||||
|           echo "Building SPIFFS due to changes..." | ||||
|           echo "SPIFFS changes detected, copying SPIFFS binary..." | ||||
|           cp .pio/build/esp32dev/spiffs.bin .pio/build/esp32dev/webpage_${VERSION}.bin | ||||
|         fi | ||||
|          | ||||
|         # Create full binary (always) | ||||
|         (cd .pio/build/esp32dev && \ | ||||
|         esptool.py --chip esp32 merge_bin \ | ||||
|           --fill-flash-size 4MB \ | ||||
|           --flash_mode dio \ | ||||
|           --flash_freq 40m \ | ||||
|           --flash_size 4MB \ | ||||
|           -o filaman_full_${VERSION}.bin \ | ||||
|           0x0000 bootloader.bin \ | ||||
|           0x8000 partitions.bin \ | ||||
|           0x10000 firmware.bin \ | ||||
|           0x390000 spiffs.bin) | ||||
|          | ||||
|         # Verify file sizes | ||||
|         echo "File sizes:" | ||||
|         (cd .pio/build/esp32dev && ls -lh *.bin) | ||||
|          | ||||
|     - name: Prepare binaries | ||||
|       run: | | ||||
|         cd .pio/build/esp32dev | ||||
|         VERSION=$(grep '^version = ' ../../platformio.ini | cut -d'"' -f2) | ||||
|         VERSION=${{ steps.get_version.outputs.VERSION }} | ||||
|          | ||||
|         # Create full binary only if SPIFFS changed | ||||
|         if [[ "${{ steps.check_spiffs.outputs.SPIFFS_CHANGED }}" == "true" ]]; then | ||||
|           echo "Creating full binary..." | ||||
|           cd .pio/build/esp32dev && \ | ||||
|           esptool.py --chip esp32 merge_bin \ | ||||
|             --fill-flash-size 4MB \ | ||||
|             --flash_mode dio \ | ||||
| @@ -85,16 +119,24 @@ jobs: | ||||
|          | ||||
|         # Verify file sizes | ||||
|         echo "File sizes:" | ||||
|         ls -lh *.bin | ||||
|         cd .pio/build/esp32dev && ls -lh *.bin | ||||
|  | ||||
|     - name: Read CHANGELOG.md | ||||
|       id: changelog | ||||
|       run: | | ||||
|         VERSION=${{ steps.get_version.outputs.VERSION }} | ||||
|         CHANGELOG=$(awk "/## \\[$VERSION\\]/{p=1;print;next} /## \\[/{p=0} p" CHANGELOG.md) | ||||
|         echo "CHANGES<<EOF" >> $GITHUB_OUTPUT | ||||
|         echo "$CHANGELOG" >> $GITHUB_OUTPUT | ||||
|         echo "EOF" >> $GITHUB_OUTPUT | ||||
|          | ||||
|     - name: Create Release | ||||
|       env: | ||||
|         TOKEN: ${{ secrets.GITEA_TOKEN }} | ||||
|       run: | | ||||
|         TAG="${{ inputs.gitea_ref_name }}" | ||||
|         API_URL="${{ inputs.gitea_server_url }}/api/v1" | ||||
|         REPO="${{ inputs.gitea_repository }}" | ||||
|         VERSION=$(grep '^version = ' platformio.ini | cut -d'"' -f2) | ||||
|         VERSION=${{ steps.get_version.outputs.VERSION }} | ||||
|          | ||||
|         # Create release | ||||
|         RESPONSE=$(curl -k -s \ | ||||
| @@ -102,8 +144,8 @@ jobs: | ||||
|           -H "Authorization: token ${TOKEN}" \ | ||||
|           -H "Content-Type: application/json" \ | ||||
|           -d "{ | ||||
|             \"tag_name\":\"${TAG}\", | ||||
|             \"name\":\"Release ${TAG}\", | ||||
|             \"tag_name\":\"v${VERSION}\", | ||||
|             \"name\":\"Release ${VERSION}\", | ||||
|             \"body\":\"${{ steps.changelog.outputs.CHANGES }}\" | ||||
|           }" \ | ||||
|           "${API_URL}/repos/${REPO}/releases") | ||||
| @@ -112,30 +154,30 @@ jobs: | ||||
|          | ||||
|         if [ -n "$RELEASE_ID" ]; then | ||||
|           echo "Release created with ID: $RELEASE_ID" | ||||
|           cd .pio/build/esp32dev | ||||
|            | ||||
|           # Always upload firmware | ||||
|           if [ -f "filaman_${VERSION}.bin" ]; then | ||||
|             curl -k -s \ | ||||
|               -X POST \ | ||||
|               -H "Authorization: token ${TOKEN}" \ | ||||
|               -H "Content-Type: application/octet-stream" \ | ||||
|               --data-binary "@filaman_${VERSION}.bin" \ | ||||
|               "${API_URL}/repos/${REPO}/releases/${RELEASE_ID}/assets?name=filaman_${VERSION}.bin" | ||||
|           fi | ||||
|            | ||||
|           # Upload SPIFFS and full binary only if they exist | ||||
|           for file in webpage_${VERSION}.bin filaman_full_${VERSION}.bin; do | ||||
|             if [ -f "$file" ]; then | ||||
|           # Always upload firmware and full binary | ||||
|           for file in filaman_${VERSION}.bin filaman_full_${VERSION}.bin; do | ||||
|             if [ -f ".pio/build/esp32dev/$file" ]; then | ||||
|               echo "Uploading $file..." | ||||
|               curl -k -s \ | ||||
|                 -X POST \ | ||||
|                 -H "Authorization: token ${TOKEN}" \ | ||||
|                 -H "Content-Type: application/octet-stream" \ | ||||
|                 --data-binary "@$file" \ | ||||
|                 --data-binary "@.pio/build/esp32dev/$file" \ | ||||
|                 "${API_URL}/repos/${REPO}/releases/${RELEASE_ID}/assets?name=$file" | ||||
|             fi | ||||
|           done | ||||
|            | ||||
|           # Upload SPIFFS binary only if it exists (data changes) | ||||
|           if [ -f ".pio/build/esp32dev/webpage_${VERSION}.bin" ]; then | ||||
|             echo "Uploading webpage binary..." | ||||
|             curl -k -s \ | ||||
|               -X POST \ | ||||
|               -H "Authorization: token ${TOKEN}" \ | ||||
|               -H "Content-Type: application/octet-stream" \ | ||||
|               --data-binary "@.pio/build/esp32dev/webpage_${VERSION}.bin" \ | ||||
|               "${API_URL}/repos/${REPO}/releases/${RELEASE_ID}/assets?name=webpage_${VERSION}.bin" | ||||
|           fi | ||||
|         else | ||||
|           echo "Failed to create release. Response:" | ||||
|           echo "$RESPONSE" | ||||
|   | ||||
							
								
								
									
.github/workflows/providers/github-release.yml (54 changed lines, vendored)

							| @@ -26,6 +26,17 @@ jobs: | ||||
|         sudo apt-get update | ||||
|         sudo apt-get install xxd | ||||
|      | ||||
|     - name: Check for Data changes | ||||
|       id: check_data | ||||
|       run: | | ||||
|         git fetch --unshallow || true | ||||
|         CHANGED_FILES=$(git diff --name-only HEAD^..HEAD) | ||||
|         if echo "$CHANGED_FILES" | grep -q "^data/"; then | ||||
|           echo "DATA_CHANGED=true" >> $GITHUB_OUTPUT | ||||
|         else | ||||
|           echo "DATA_CHANGED=false" >> $GITHUB_OUTPUT | ||||
|         fi | ||||
|  | ||||
|     - name: Check for SPIFFS changes | ||||
|       id: check_spiffs | ||||
|       run: | | ||||
| @@ -39,27 +50,24 @@ jobs: | ||||
|      | ||||
|     - name: Build Firmware | ||||
|       run: | | ||||
|         # Get version from platformio.ini | ||||
|         VERSION=$(grep '^version = ' platformio.ini | cut -d'"' -f2) | ||||
|          | ||||
|         # Always build firmware | ||||
|         # Always build firmware and SPIFFS | ||||
|         echo "Building firmware and SPIFFS..." | ||||
|         pio run -e esp32dev | ||||
|         pio run -t buildfs | ||||
|          | ||||
|         # Copy firmware binary | ||||
|         cp .pio/build/esp32dev/firmware.bin .pio/build/esp32dev/filaman_${VERSION}.bin | ||||
|          | ||||
|         # Only build SPIFFS if changed | ||||
|         # Copy SPIFFS binary if SPIFFS changed | ||||
|         if [[ "${{ steps.check_spiffs.outputs.SPIFFS_CHANGED }}" == "true" ]]; then | ||||
|           echo "Building SPIFFS due to changes..." | ||||
|           echo "SPIFFS changes detected, copying SPIFFS binary..." | ||||
|           cp .pio/build/esp32dev/spiffs.bin .pio/build/esp32dev/webpage_${VERSION}.bin | ||||
|         fi | ||||
|          | ||||
|     - name: Prepare binaries | ||||
|       run: | | ||||
|         cd .pio/build/esp32dev | ||||
|         VERSION=$(grep '^version = ' ../../platformio.ini | cut -d'"' -f2) | ||||
|          | ||||
|         # Create full binary only if SPIFFS changed | ||||
|         if [[ "${{ steps.check_spiffs.outputs.SPIFFS_CHANGED }}" == "true" ]]; then | ||||
|           echo "Creating full binary..." | ||||
|         # Create full binary (always) | ||||
|         (cd .pio/build/esp32dev && \ | ||||
|         esptool.py --chip esp32 merge_bin \ | ||||
|           --fill-flash-size 4MB \ | ||||
|           --flash_mode dio \ | ||||
| @@ -69,17 +77,23 @@ jobs: | ||||
|           0x0000 bootloader.bin \ | ||||
|           0x8000 partitions.bin \ | ||||
|           0x10000 firmware.bin \ | ||||
|             0x390000 spiffs.bin | ||||
|           0x390000 spiffs.bin) | ||||
|          | ||||
|         # Only copy SPIFFS binary if data changed | ||||
|         if [[ "${{ steps.check_data.outputs.DATA_CHANGED }}" == "true" ]]; then | ||||
|           echo "Data changes detected, copying SPIFFS binary..." | ||||
|           cp .pio/build/esp32dev/spiffs.bin .pio/build/esp32dev/webpage_${VERSION}.bin | ||||
|         fi | ||||
|          | ||||
|         # Verify file sizes | ||||
|         echo "File sizes:" | ||||
|         ls -lh *.bin | ||||
|         (cd .pio/build/esp32dev && ls -lh *.bin) | ||||
|      | ||||
|     - name: Get version from tag | ||||
|     - name: Get version from platformio.ini | ||||
|       id: get_version | ||||
|       run: | | ||||
|         echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_OUTPUT | ||||
|         VERSION=$(grep '^version = ' platformio.ini | cut -d'"' -f2) | ||||
|         echo "VERSION=$VERSION" >> $GITHUB_OUTPUT | ||||
|        | ||||
|     - name: Read CHANGELOG.md | ||||
|       id: changelog | ||||
| @@ -94,8 +108,10 @@ jobs: | ||||
|       env: | ||||
|         GH_TOKEN: ${{ github.token }} | ||||
|       run: | | ||||
|         VERSION=${{ steps.get_version.outputs.VERSION }} | ||||
|          | ||||
|         # Create release with available files | ||||
|         cd .pio/build/esp32dev | ||||
|         VERSION=$(grep '^version = ' ../../platformio.ini | cut -d'"' -f2) | ||||
|         FILES_TO_UPLOAD="" | ||||
|          | ||||
|         # Always add firmware | ||||
| @@ -114,8 +130,8 @@ jobs: | ||||
|          | ||||
|         # Create release with available files | ||||
|         if [ -n "$FILES_TO_UPLOAD" ]; then | ||||
|           gh release create "${{ github.ref_name }}" \ | ||||
|             --title "Release ${{ steps.get_version.outputs.VERSION }}" \ | ||||
|           gh release create "v${VERSION}" \ | ||||
|             --title "Release ${VERSION}" \ | ||||
|             --notes "${{ steps.changelog.outputs.CHANGES }}" \ | ||||
|             $FILES_TO_UPLOAD | ||||
|         else | ||||
|   | ||||
							
								
								
									
CHANGELOG.md (41 changed lines)

							| @@ -1,5 +1,46 @@ | ||||
| # Changelog | ||||
|  | ||||
| ## [1.2.100] - 2025-02-21 | ||||
| ### Changed | ||||
| - update webpages for version v1.2.100 | ||||
| - remove OTA handling and JSON backup/restore functions | ||||
|  | ||||
|  | ||||
| ## [1.2.99] - 2025-02-21 | ||||
| ### Added | ||||
| - add SPIFFS change detection and binary copying to release workflows | ||||
| - add backup and restore functions for JSON configurations during OTA updates | ||||
|  | ||||
| ### Changed | ||||
| - update webpages for version v1.2.99 | ||||
| - update JSON field type checks from JsonObject to String for improved validation | ||||
| - update JSON handling in API and Bambu modules for improved object management | ||||
| - update platformio.ini dependencies and improve version handling in website.cpp | ||||
| - update Cache-Control header to reflect a 1-week duration | ||||
| - remove version definition from website.cpp | ||||
| - optimize WiFi and WebSocket settings; enhance TCP/IP stack configuration | ||||
| - update upgrade page title and heading; adjust cache control duration | ||||
|  | ||||
|  | ||||
| ## [1.2.98] - 2025-02-21 | ||||
| ### Changed | ||||
| - update webpages for version v1.2.98 | ||||
|  | ||||
|  | ||||
| ## [1.2.97] - 2025-02-21 | ||||
| ### Changed | ||||
| - update webpages for version v1.2.97 | ||||
| - streamline Gitea and GitHub release workflows to check for data changes and update binary handling | ||||
|  | ||||
|  | ||||
| ## [1.2.96] - 2025-02-21 | ||||
| ### Added | ||||
| - add SPIFFS build step to Gitea and GitHub release workflows | ||||
|  | ||||
| ### Changed | ||||
| - update webpages for version v1.2.96 | ||||
|  | ||||
|  | ||||
| ## [1.2.95] - 2025-02-21 | ||||
| ### Added | ||||
| - enhance update process with separate forms for firmware and webpage uploads, including validation and improved UI | ||||
|   | ||||
| @@ -1,9 +1,10 @@ | ||||
| <!DOCTYPE html> | ||||
| <!-- head --><!DOCTYPE html> | ||||
| <html lang="en"> | ||||
| <head> | ||||
|     <meta charset="UTF-8"> | ||||
|     <meta name="viewport" content="width=device-width, initial-scale=1.0"> | ||||
|     <title>FilaMan - Firmware Update</title> | ||||
|     <title>FilaMan - Filament Management Tool</title> | ||||
|     <link rel="icon" type="image/png" href="/favicon.ico"> | ||||
|     <link rel="stylesheet" href="style.css"> | ||||
|     <script> | ||||
|         fetch('/api/version') | ||||
| @@ -49,7 +50,8 @@ | ||||
| <!-- head --> | ||||
|      | ||||
|     <div class="content"> | ||||
|         <h1>System Update</h1> | ||||
|         <h1>Firmware Upgrade</h1> | ||||
|  | ||||
|         <div class="warning"> | ||||
|             <strong>Warning:</strong> Do not power off the device during update. | ||||
|         </div> | ||||
| @@ -79,7 +81,7 @@ | ||||
|         </div> | ||||
|  | ||||
|         <div class="progress-container" style="display: none;"> | ||||
|             <div class="progress-bar">0%</</div> | ||||
|             <div class="progress-bar">0%</div> | ||||
|         </div> | ||||
|         <div class="status"></div> | ||||
|     </div> | ||||
| @@ -104,6 +106,42 @@ | ||||
|             color: #666; | ||||
|             margin-bottom: 1rem; | ||||
|         } | ||||
|         .progress-container { | ||||
|             margin: 20px 0; | ||||
|             background: #f0f0f0; | ||||
|             border-radius: 4px; | ||||
|             overflow: hidden; | ||||
|         } | ||||
|         .progress-bar { | ||||
|             width: 0; | ||||
|             height: 20px; | ||||
|             background: #4CAF50; | ||||
|             transition: width 0.3s ease-in-out; | ||||
|             text-align: center; | ||||
|             line-height: 20px; | ||||
|             color: white; | ||||
|         } | ||||
|         .status { | ||||
|             margin-top: 20px; | ||||
|             padding: 10px; | ||||
|             border-radius: 4px; | ||||
|             display: none; | ||||
|         } | ||||
|         .status.success { | ||||
|             background: #e8f5e9; | ||||
|             color: #2e7d32; | ||||
|         } | ||||
|         .status.error { | ||||
|             background: #ffebee; | ||||
|             color: #c62828; | ||||
|         } | ||||
|         .warning { | ||||
|             background: #fff3e0; | ||||
|             color: #e65100; | ||||
|             padding: 15px; | ||||
|             border-radius: 4px; | ||||
|             margin-bottom: 20px; | ||||
|         } | ||||
|     </style> | ||||
|  | ||||
|     <script> | ||||
| @@ -113,8 +151,13 @@ | ||||
|             statusContainer.style.display = 'none'; | ||||
|         } | ||||
|  | ||||
|         const progress = document.querySelector('.progress-bar'); | ||||
|         const progressContainer = document.querySelector('.progress-container'); | ||||
|         const status = document.querySelector('.status'); | ||||
|  | ||||
|         function handleUpdate(e) { | ||||
|             e.preventDefault(); | ||||
|             const form = e.target; | ||||
|             const file = form.update.files[0]; | ||||
|             const updateType = form.dataset.type; | ||||
|  | ||||
| @@ -133,24 +176,14 @@ | ||||
|                 return; | ||||
|             } | ||||
|              | ||||
|             log(`Selected file: ${file.name} (${file.size} bytes)`); | ||||
|              | ||||
|             // Aktiviere Fortschrittsanzeige | ||||
|             progress.style.display = 'block'; | ||||
|             form.style.display = 'none'; | ||||
|  | ||||
|             // Erstelle FormData für den Upload | ||||
|             const formData = new FormData(); | ||||
|             formData.append('update', file); | ||||
|              | ||||
|             const progress = document.querySelector('.progress-bar'); | ||||
|             const progressContainer = document.querySelector('.progress-container'); | ||||
|             const status = document.querySelector('.status'); | ||||
|              | ||||
|             progressContainer.style.display = 'block'; | ||||
|             status.style.display = 'none'; | ||||
|             status.className = 'status'; | ||||
|  | ||||
|             // Reset progress bar | ||||
|             progress.style.width = '0%'; | ||||
|             progress.textContent = '0%'; | ||||
|  | ||||
|             // Disable both forms during update | ||||
|             document.querySelectorAll('form input[type=submit]').forEach(btn => btn.disabled = true); | ||||
|  | ||||
| @@ -215,6 +248,8 @@ | ||||
|                 document.querySelectorAll('form input[type=submit]').forEach(btn => btn.disabled = false); | ||||
|             }; | ||||
|  | ||||
|             const formData = new FormData(); | ||||
|             formData.append('update', file); | ||||
|             xhr.send(formData); | ||||
|         } | ||||
|  | ||||
|   | ||||
| @@ -9,7 +9,7 @@ | ||||
| ; https://docs.platformio.org/page/projectconf.html | ||||
|  | ||||
| [common] | ||||
| version = "1.2.95" | ||||
| version = "1.2.100" | ||||
|  | ||||
| [env:esp32dev] | ||||
| platform = espressif32 | ||||
| @@ -19,10 +19,11 @@ monitor_speed = 115200 | ||||
|  | ||||
| lib_deps = | ||||
|     tzapu/WiFiManager @ ^2.0.17 | ||||
|     #https://github.com/me-no-dev/ESPAsyncWebServer.git#master | ||||
|     https://github.com/me-no-dev/ESPAsyncWebServer.git#master | ||||
|     #me-no-dev/AsyncTCP @ ^1.1.1 | ||||
|     mathieucarbou/ESPAsyncWebServer @ ^3.6.0 | ||||
|     esp32async/AsyncTCP @ ^3.3.5 | ||||
|     https://github.com/esphome/AsyncTCP.git | ||||
|     #mathieucarbou/ESPAsyncWebServer @ ^3.6.0 | ||||
|     #esp32async/AsyncTCP @ ^3.3.5 | ||||
|     bogde/HX711 @ ^0.7.5 | ||||
|     adafruit/Adafruit SSD1306 @ ^2.5.13 | ||||
|     adafruit/Adafruit GFX Library @ ^1.11.11 | ||||
| @@ -45,7 +46,7 @@ build_flags = | ||||
|     -fdata-sections | ||||
|     -DNDEBUG | ||||
|     -mtext-section-literals | ||||
|     '-D VERSION="${common.version}"' | ||||
|     -DVERSION=\"${common.version}\" | ||||
|     -DASYNCWEBSERVER_REGEX | ||||
|     -DCORE_DEBUG_LEVEL=3 | ||||
|     -DCONFIG_ARDUHAL_LOG_COLORS=1 | ||||
| @@ -54,6 +55,11 @@ build_flags = | ||||
|     -DCONFIG_ESP32_PANIC_PRINT_REBOOT | ||||
|     -DBOOT_APP_PARTITION_OTA_0=1 | ||||
|     -DCONFIG_LOG_DEFAULT_LEVEL=3 | ||||
|     -DCONFIG_LWIP_TCP_MSL=60000 | ||||
|     -DCONFIG_LWIP_TCP_WND_DEFAULT=8192 | ||||
|     -DCONFIG_LWIP_TCP_SND_BUF_DEFAULT=4096 | ||||
|     -DCONFIG_LWIP_TCP_RCV_BUF_DEFAULT=4096 | ||||
|     -DCONFIG_LWIP_MAX_ACTIVE_TCP=16 | ||||
|      | ||||
| extra_scripts =  | ||||
|     scripts/extra_script.py | ||||
|   | ||||
							
								
								
									
src/api.cpp (22 changed lines)

							| @@ -60,10 +60,10 @@ JsonDocument fetchSpoolsForWebsite() { | ||||
|             JsonArray filteredSpools = filteredDoc.to<JsonArray>(); | ||||
|  | ||||
|             for (JsonObject spool : spools) { | ||||
|                 JsonObject filteredSpool = filteredSpools.createNestedObject(); | ||||
|                 JsonObject filteredSpool = filteredSpools.add<JsonObject>(); | ||||
|                 filteredSpool["extra"]["nfc_id"] = spool["extra"]["nfc_id"]; | ||||
|  | ||||
|                 JsonObject filament = filteredSpool.createNestedObject("filament"); | ||||
|                 JsonObject filament = filteredSpool["filament"].to<JsonObject>(); | ||||
|                 filament["sm_id"] = spool["id"]; | ||||
|                 filament["id"] = spool["filament"]["id"]; | ||||
|                 filament["name"] = spool["filament"]["name"]; | ||||
| @@ -73,7 +73,7 @@ JsonDocument fetchSpoolsForWebsite() { | ||||
|                 filament["price_meter"] = spool["filament"]["extra"]["price_meter"]; | ||||
|                 filament["price_gramm"] = spool["filament"]["extra"]["price_gramm"]; | ||||
|  | ||||
|                 JsonObject vendor = filament.createNestedObject("vendor"); | ||||
|                 JsonObject vendor = filament["vendor"].to<JsonObject>(); | ||||
|                 vendor["id"] = spool["filament"]["vendor"]["id"]; | ||||
|                 vendor["name"] = spool["filament"]["vendor"]["name"]; | ||||
|             } | ||||
| @@ -110,13 +110,13 @@ JsonDocument fetchAllSpoolsInfo() { | ||||
|             JsonArray filteredSpools = filteredDoc.to<JsonArray>(); | ||||
|  | ||||
|             for (JsonObject spool : spools) { | ||||
|                 JsonObject filteredSpool = filteredSpools.createNestedObject(); | ||||
|                 JsonObject filteredSpool = filteredSpools.add<JsonObject>(); | ||||
|                 filteredSpool["price"] = spool["price"]; | ||||
|                 filteredSpool["remaining_weight"] = spool["remaining_weight"]; | ||||
|                 filteredSpool["used_weight"] = spool["used_weight"]; | ||||
|                 filteredSpool["extra"]["nfc_id"] = spool["extra"]["nfc_id"]; | ||||
|  | ||||
|                 JsonObject filament = filteredSpool.createNestedObject("filament"); | ||||
|                 JsonObject filament = filteredSpool["filament"].to<JsonObject>(); | ||||
|                 filament["id"] = spool["filament"]["id"]; | ||||
|                 filament["name"] = spool["filament"]["name"]; | ||||
|                 filament["material"] = spool["filament"]["material"]; | ||||
| @@ -125,11 +125,11 @@ JsonDocument fetchAllSpoolsInfo() { | ||||
|                 filament["spool_weight"] = spool["filament"]["spool_weight"]; | ||||
|                 filament["color_hex"] = spool["filament"]["color_hex"]; | ||||
|  | ||||
|                 JsonObject vendor = filament.createNestedObject("vendor"); | ||||
|                 JsonObject vendor = filament["vendor"].to<JsonObject>(); | ||||
|                 vendor["id"] = spool["filament"]["vendor"]["id"]; | ||||
|                 vendor["name"] = spool["filament"]["vendor"]["name"]; | ||||
|  | ||||
|                 JsonObject extra = filament.createNestedObject("extra"); | ||||
|                 JsonObject extra = filament["extra"].to<JsonObject>(); | ||||
|                 extra["nozzle_temperature"] = spool["filament"]["extra"]["nozzle_temperature"]; | ||||
|                 extra["price_gramm"] = spool["filament"]["extra"]["price_gramm"]; | ||||
|                 extra["price_meter"] = spool["filament"]["extra"]["price_meter"]; | ||||
| @@ -186,7 +186,7 @@ bool updateSpoolTagId(String uidString, const char* payload) { | ||||
|     } | ||||
|      | ||||
|     // Überprüfe, ob die erforderlichen Felder vorhanden sind | ||||
|     if (!doc.containsKey("sm_id") || doc["sm_id"] == "") { | ||||
|     if (!doc["sm_id"].is<String>() || doc["sm_id"].as<String>() == "") { | ||||
|         Serial.println("Keine Spoolman-ID gefunden."); | ||||
|         return false; | ||||
|     } | ||||
| @@ -368,7 +368,7 @@ bool checkSpoolmanExtraFields() { | ||||
|                 for (uint8_t s = 0; s < extraLength; s++) { | ||||
|                     bool found = false; | ||||
|                     for (JsonObject field : doc.as<JsonArray>()) { | ||||
|                         if (field.containsKey("key") && field["key"] == extraFields[s]) { | ||||
|                         if (field["key"].is<String>() && field["key"] == extraFields[s]) { | ||||
|                             Serial.println("Feld gefunden: " + extraFields[s]); | ||||
|                             found = true; | ||||
|                             break; | ||||
| @@ -430,7 +430,7 @@ bool checkSpoolmanInstance(const String& url) { | ||||
|             String payload = http.getString(); | ||||
|             JsonDocument doc; | ||||
|             DeserializationError error = deserializeJson(doc, payload); | ||||
|             if (!error && doc.containsKey("status")) { | ||||
|             if (!error && doc["status"].is<String>()) { | ||||
|                 const char* status = doc["status"]; | ||||
|                 http.end(); | ||||
|  | ||||
| @@ -469,7 +469,7 @@ bool saveSpoolmanUrl(const String& url) { | ||||
|  | ||||
| String loadSpoolmanUrl() { | ||||
|     JsonDocument doc; | ||||
|     if (loadJsonValue("/spoolman_url.json", doc) && doc.containsKey("url")) { | ||||
|     if (loadJsonValue("/spoolman_url.json", doc) && doc["url"].is<String>()) { | ||||
|         return doc["url"].as<String>(); | ||||
|     } | ||||
|     Serial.println("Keine gültige Spoolman-URL gefunden."); | ||||
|   | ||||
| @@ -58,7 +58,7 @@ bool saveBambuCredentials(const String& ip, const String& serialnr, const String | ||||
|  | ||||
| bool loadBambuCredentials() { | ||||
|     JsonDocument doc; | ||||
|     if (loadJsonValue("/bambu_credentials.json", doc) && doc.containsKey("bambu_ip")) { | ||||
|     if (loadJsonValue("/bambu_credentials.json", doc) && doc["bambu_ip"].is<String>()) { | ||||
|         // Temporäre Strings für die Werte | ||||
|         String ip = doc["bambu_ip"].as<String>(); | ||||
|         String code = doc["bambu_accesscode"].as<String>(); | ||||
| @@ -270,9 +270,9 @@ void mqtt_callback(char* topic, byte* payload, unsigned int length) { | ||||
|     } | ||||
|  | ||||
|     // Prüfen, ob "print->upgrade_state" und "print.ams.ams" existieren | ||||
|     if (doc["print"].containsKey("upgrade_state")) { | ||||
|     if (doc["print"]["upgrade_state"].is<String>()) { | ||||
|         // Prüfen ob AMS-Daten vorhanden sind | ||||
|         if (!doc["print"].containsKey("ams") || !doc["print"]["ams"].containsKey("ams")) { | ||||
|         if (!doc["print"]["ams"].is<String>() || !doc["print"]["ams"]["ams"].is<String>()) { | ||||
|             return; | ||||
|         } | ||||
|  | ||||
| @@ -315,7 +315,7 @@ void mqtt_callback(char* topic, byte* payload, unsigned int length) { | ||||
|         } | ||||
|  | ||||
|         // Prüfe die externe Spule | ||||
|         if (!hasChanges && doc["print"].containsKey("vt_tray")) { | ||||
|         if (!hasChanges && doc["print"]["vt_tray"].is<String>()) { | ||||
|             JsonObject vtTray = doc["print"]["vt_tray"]; | ||||
|             bool foundExternal = false; | ||||
|              | ||||
| @@ -363,7 +363,7 @@ void mqtt_callback(char* topic, byte* payload, unsigned int length) { | ||||
|         ams_count = amsArray.size(); | ||||
|  | ||||
|         // Wenn externe Spule vorhanden, füge sie hinzu | ||||
|         if (doc["print"].containsKey("vt_tray")) { | ||||
|         if (doc["print"]["vt_tray"].is<String>()) { | ||||
|             JsonObject vtTray = doc["print"]["vt_tray"]; | ||||
|             int extIdx = ams_count;  // Index für externe Spule | ||||
|             ams_data[extIdx].ams_id = 255;  // Spezielle ID für externe Spule | ||||
| @@ -387,14 +387,14 @@ void mqtt_callback(char* topic, byte* payload, unsigned int length) { | ||||
|         JsonArray wsArray = wsDoc.to<JsonArray>(); | ||||
|  | ||||
|         for (int i = 0; i < ams_count; i++) { | ||||
|             JsonObject amsObj = wsArray.createNestedObject(); | ||||
|             JsonObject amsObj = wsArray.add<JsonObject>(); | ||||
|             amsObj["ams_id"] = ams_data[i].ams_id; | ||||
|  | ||||
|             JsonArray trays = amsObj.createNestedArray("tray"); | ||||
|             JsonArray trays = amsObj["tray"].to<JsonArray>(); | ||||
|             int maxTrays = (ams_data[i].ams_id == 255) ? 1 : 4; | ||||
|              | ||||
|             for (int j = 0; j < maxTrays; j++) { | ||||
|                 JsonObject trayObj = trays.createNestedObject(); | ||||
|                 JsonObject trayObj = trays.add<JsonObject>(); | ||||
|                 trayObj["id"] = ams_data[i].trays[j].id; | ||||
|                 trayObj["tray_info_idx"] = ams_data[i].trays[j].tray_info_idx; | ||||
|                 trayObj["tray_type"] = ams_data[i].trays[j].tray_type; | ||||
| @@ -427,14 +427,14 @@ void mqtt_callback(char* topic, byte* payload, unsigned int length) { | ||||
|                 JsonArray wsArray = wsDoc.to<JsonArray>(); | ||||
|  | ||||
|                 for (int j = 0; j < ams_count; j++) { | ||||
|                     JsonObject amsObj = wsArray.createNestedObject(); | ||||
|                     JsonObject amsObj = wsArray.add<JsonObject>(); | ||||
|                     amsObj["ams_id"] = ams_data[j].ams_id; | ||||
|  | ||||
|                     JsonArray trays = amsObj.createNestedArray("tray"); | ||||
|                     JsonArray trays = amsObj["tray"].to<JsonArray>(); | ||||
|                     int maxTrays = (ams_data[j].ams_id == 255) ? 1 : 4; | ||||
|                      | ||||
|                     for (int k = 0; k < maxTrays; k++) { | ||||
|                         JsonObject trayObj = trays.createNestedObject(); | ||||
|                         JsonObject trayObj = trays.add<JsonObject>(); | ||||
|                         trayObj["id"] = ams_data[j].trays[k].id; | ||||
|                         trayObj["tray_info_idx"] = ams_data[j].trays[k].tray_info_idx; | ||||
|                         trayObj["tray_type"] = ams_data[j].trays[k].tray_type; | ||||
|   | ||||
							
								
								
									
src/ota.cpp (240 changed lines)

							| @@ -1,240 +0,0 @@ | ||||
| #include <Arduino.h> | ||||
| #include "ota.h" | ||||
| #include <Update.h> | ||||
| #include <SPIFFS.h> | ||||
| #include "commonFS.h" | ||||
| #include "bambu.h" | ||||
| #include "scale.h" | ||||
| #include "nfc.h" | ||||
|  | ||||
| #define UPLOAD_TIMEOUT_MS 60000  // 60 Sekunden Timeout für den gesamten Upload | ||||
| #define CHUNK_RESPONSE_TIMEOUT_MS 10000  // 10 Sekunden Timeout pro Chunk | ||||
| #define MAX_FAILED_CHUNKS 3  // Maximale Anzahl fehlgeschlagener Chunks bevor Abbruch | ||||
| #define MAX_FILE_SIZE 4194304    // 4MB Limit | ||||
|  | ||||
| static bool tasksAreStopped = false; | ||||
| static uint32_t lastChunkTime = 0; | ||||
| static size_t failedChunks = 0; | ||||
| static size_t expectedOffset = 0; | ||||
| static size_t totalSize = 0; | ||||
|  | ||||
| void stopAllTasks() { | ||||
|     Serial.println("Stopping RFID Reader"); | ||||
|     if (RfidReaderTask) vTaskSuspend(RfidReaderTask); | ||||
|     Serial.println("Stopping Bambu"); | ||||
|     if (BambuMqttTask) vTaskSuspend(BambuMqttTask); | ||||
|     Serial.println("Stopping Scale"); | ||||
|     if (ScaleTask) vTaskSuspend(ScaleTask); | ||||
|     vTaskDelay(100 / portTICK_PERIOD_MS); | ||||
|     Serial.println("All tasks stopped"); | ||||
| } | ||||
|  | ||||
| void performStageTwo() { | ||||
|     if (!SPIFFS.begin(true)) { | ||||
|         Serial.println("Error: Could not mount SPIFFS for stage 2"); | ||||
|         return; | ||||
|     } | ||||
|  | ||||
|     File firmwareFile = SPIFFS.open("/firmware.bin", "r"); | ||||
|     if (!firmwareFile) { | ||||
|         Serial.println("Error: Could not open firmware.bin from SPIFFS"); | ||||
|         return; | ||||
|     } | ||||
|  | ||||
|     size_t firmwareSize = firmwareFile.size(); | ||||
|     size_t maxAppSpace = (ESP.getFreeSketchSpace() - 0x1000) & 0xFFFFF000; | ||||
|  | ||||
|     Serial.printf("Stage 2 - Firmware size: %u bytes\n", firmwareSize); | ||||
|     Serial.printf("Available space: %u bytes\n", maxAppSpace); | ||||
|  | ||||
|     if (firmwareSize > maxAppSpace) { | ||||
|         Serial.printf("Error: Not enough space for firmware. Need %u bytes but only have %u bytes\n",  | ||||
|                     firmwareSize, maxAppSpace); | ||||
|         return; | ||||
|     } | ||||
|  | ||||
|     if (!Update.begin(firmwareSize)) { | ||||
|         Update.printError(Serial); | ||||
|         return; | ||||
|     } | ||||
|  | ||||
|     size_t written = Update.writeStream(firmwareFile); | ||||
|     if (written != firmwareSize) { | ||||
|         Update.printError(Serial); | ||||
|         return; | ||||
|     } | ||||
|  | ||||
|     if (!Update.end(true)) { | ||||
|         Update.printError(Serial); | ||||
|         return; | ||||
|     } | ||||
|  | ||||
|     firmwareFile.close(); | ||||
|     SPIFFS.remove("/firmware.bin"); // Cleanup | ||||
|     Serial.println("Stage 2 update successful, restarting..."); | ||||
|     delay(500); | ||||
|     ESP.restart(); | ||||
| } | ||||
|  | ||||
| void checkForStagedUpdate() { | ||||
|     if (!SPIFFS.begin(true)) { | ||||
|         return; | ||||
|     } | ||||
|  | ||||
|     if (SPIFFS.exists("/firmware.bin")) { | ||||
|         Serial.println("Found staged firmware update, initiating stage 2..."); | ||||
|         performStageTwo(); | ||||
|     } | ||||
| } | ||||
|  | ||||
| void handleOTAUpload(AsyncWebServerRequest *request, String filename, size_t index, uint8_t *data, size_t len, bool final) { | ||||
|     static File stagingFile; | ||||
|     static uint32_t uploadStartTime = 0; | ||||
|      | ||||
|     if (!index) { | ||||
|         // Überprüfe Gesamtgröße im Header | ||||
|         if (request->hasHeader("X-Total-Size")) { | ||||
|             totalSize = request->header("X-Total-Size").toInt(); | ||||
|             if (totalSize > MAX_FILE_SIZE) { | ||||
|                 request->send(413, "application/json",  | ||||
|                             "{\"status\":\"error\",\"message\":\"File too large\"}"); | ||||
|                 return; | ||||
|             } | ||||
|         } | ||||
|  | ||||
|         uploadStartTime = millis(); | ||||
|         lastChunkTime = millis(); | ||||
|         expectedOffset = 0; | ||||
|         failedChunks = 0; | ||||
|          | ||||
|         bool isSpiffsUpdate = filename.endsWith("_spiffs.bin"); | ||||
|         Serial.printf("Update Start: %s (type: %s)\n", filename.c_str(), isSpiffsUpdate ? "SPIFFS" : "OTA"); | ||||
|         Serial.printf("Total size: %u bytes\n", totalSize); | ||||
|          | ||||
|         // Überprüfe Header für Chunk-Informationen | ||||
|         if (request->hasHeader("X-Chunk-Offset")) { | ||||
|             String offsetStr = request->header("X-Chunk-Offset"); | ||||
|             expectedOffset = offsetStr.toInt(); | ||||
|         } | ||||
|  | ||||
|         if (request->contentLength() == 0) { | ||||
|             request->send(400, "application/json", "{\"status\":\"error\",\"message\":\"Invalid file size\"}"); | ||||
|             return; | ||||
|         } | ||||
|  | ||||
|         if (!tasksAreStopped) { | ||||
|             stopAllTasks(); | ||||
|             tasksAreStopped = true; | ||||
|         } | ||||
|  | ||||
|         if (isSpiffsUpdate) { | ||||
|             if (!SPIFFS.begin(true)) { | ||||
|                 request->send(400, "application/json",  | ||||
|                             "{\"status\":\"error\",\"message\":\"Could not mount SPIFFS\"}"); | ||||
|                 return; | ||||
|             } | ||||
|              | ||||
|             if (!Update.begin(totalSize > 0 ? totalSize : request->contentLength(), U_SPIFFS)) { | ||||
|                 Update.printError(Serial); | ||||
|                 request->send(400, "application/json",  | ||||
|                             "{\"status\":\"error\",\"message\":\"SPIFFS update initialization failed\"}"); | ||||
|                 return; | ||||
|             } | ||||
|         } else { | ||||
|             stagingFile = SPIFFS.open("/firmware.bin", "w"); | ||||
|             if (!stagingFile) { | ||||
|                 request->send(400, "application/json",  | ||||
|                             "{\"status\":\"error\",\"message\":\"Could not create staging file\"}"); | ||||
|                 return; | ||||
|             } | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     // Chunk Validierung | ||||
|     if (request->hasHeader("X-Chunk-Offset")) { | ||||
|         size_t chunkOffset = request->header("X-Chunk-Offset").toInt(); | ||||
|         if (chunkOffset != expectedOffset) { | ||||
|             failedChunks++; | ||||
|             if (failedChunks >= MAX_FAILED_CHUNKS) { | ||||
|                 if (stagingFile) { | ||||
|                     stagingFile.close(); | ||||
|                     SPIFFS.remove("/firmware.bin"); | ||||
|                 } | ||||
|                 Update.abort(); | ||||
|                 request->send(400, "application/json",  | ||||
|                             "{\"status\":\"error\",\"message\":\"Too many failed chunks\"}"); | ||||
|                 return; | ||||
|             } | ||||
|             request->send(400, "application/json",  | ||||
|                         "{\"status\":\"error\",\"message\":\"Invalid chunk offset\"}"); | ||||
|             return; | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     // Timeout Überprüfungen | ||||
|     uint32_t currentTime = millis(); | ||||
|     if (currentTime - uploadStartTime > UPLOAD_TIMEOUT_MS) { | ||||
|         if (stagingFile) { | ||||
|             stagingFile.close(); | ||||
|             SPIFFS.remove("/firmware.bin"); | ||||
|         } | ||||
|         Update.abort(); | ||||
|         request->send(408, "application/json", "{\"status\":\"error\",\"message\":\"Upload timeout\"}"); | ||||
|         return; | ||||
|     } | ||||
|  | ||||
|     if (currentTime - lastChunkTime > CHUNK_RESPONSE_TIMEOUT_MS) { | ||||
|         if (stagingFile) { | ||||
|             stagingFile.close(); | ||||
|             SPIFFS.remove("/firmware.bin"); | ||||
|         } | ||||
|         Update.abort(); | ||||
|         request->send(408, "application/json", "{\"status\":\"error\",\"message\":\"Chunk timeout\"}"); | ||||
|         return; | ||||
|     } | ||||
|     lastChunkTime = currentTime; | ||||
|  | ||||
|     if (stagingFile) { | ||||
|         size_t written = stagingFile.write(data, len); | ||||
|         if (written != len) { | ||||
|             stagingFile.close(); | ||||
|             SPIFFS.remove("/firmware.bin"); | ||||
|             request->send(400, "application/json",  | ||||
|                         "{\"status\":\"error\",\"message\":\"Write to SPIFFS failed\"}"); | ||||
|             return; | ||||
|         } | ||||
|     } else { | ||||
|         if (Update.write(data, len) != len) { | ||||
|             Update.printError(Serial); | ||||
|             request->send(400, "application/json",  | ||||
|                         "{\"status\":\"error\",\"message\":\"Write failed\"}"); | ||||
|             return; | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     expectedOffset += len; | ||||
|  | ||||
|     if (final) { | ||||
|         if (stagingFile) { | ||||
|             stagingFile.close(); | ||||
|             Serial.println("Stage 1 complete - firmware staged in SPIFFS"); | ||||
|             request->send(200, "application/json",  | ||||
|                         "{\"status\":\"success\",\"message\":\"Update staged successfully! Starting stage 2...\"}"); | ||||
|             delay(100); | ||||
|             performStageTwo(); | ||||
|         } else { | ||||
|             if (!Update.end(true)) { | ||||
|                 Update.printError(Serial); | ||||
|                 request->send(400, "application/json",  | ||||
|                             "{\"status\":\"error\",\"message\":\"Update failed\"}"); | ||||
|                 return; | ||||
|             } | ||||
|             Serial.println("SPIFFS update successful, restarting..."); | ||||
|             request->send(200, "application/json",  | ||||
|                         "{\"status\":\"success\",\"message\":\"SPIFFS update successful! Device will restart...\",\"restart\":true}"); | ||||
|             delay(500); | ||||
|             ESP.restart(); | ||||
|         } | ||||
|     } | ||||
| } | ||||
|  | ||||
							
								
								
									
src/ota.h (15 changed lines)

							| @@ -1,15 +0,0 @@ | ||||
| #ifndef OTA_H | ||||
| #define OTA_H | ||||
|  | ||||
| #include <ESPAsyncWebServer.h> | ||||
|  | ||||
| // Update size unknown constant, falls nicht bereits definiert | ||||
| #ifndef UPDATE_SIZE_UNKNOWN | ||||
| #define UPDATE_SIZE_UNKNOWN 0xFFFFFFFF | ||||
| #endif | ||||
|  | ||||
| void stopAllTasks(); | ||||
| void handleOTAUpload(AsyncWebServerRequest *request, String filename, size_t index, uint8_t *data, size_t len, bool final); | ||||
| void checkForStagedUpdate(); | ||||
|  | ||||
| #endif | ||||
| @@ -7,12 +7,14 @@ | ||||
| #include "nfc.h" | ||||
| #include "scale.h" | ||||
| #include "esp_task_wdt.h" | ||||
| #include "ota.h" | ||||
| #include <Update.h> | ||||
|  | ||||
| #ifndef VERSION | ||||
|   #define VERSION "1.1.0" | ||||
| #endif | ||||
|  | ||||
| // Cache-Control Header definieren | ||||
| #define CACHE_CONTROL "max-age=31536000" // Cache für 1 Jahr | ||||
| #define VERSION "1.0.0" | ||||
| #define CACHE_CONTROL "max-age=604800" // Cache für 1 Woche | ||||
|  | ||||
| AsyncWebServer server(webserverPort); | ||||
| AsyncWebSocket ws("/ws"); | ||||
| @@ -46,7 +48,7 @@ void onWsEvent(AsyncWebSocket *server, AsyncWebSocketClient *client, AwsEventTyp | ||||
|         } | ||||
|  | ||||
|         else if (doc["type"] == "writeNfcTag") { | ||||
|             if (doc.containsKey("payload")) { | ||||
|             if (doc["payload"].is<String>()) { | ||||
|                 // Versuche NFC-Daten zu schreiben | ||||
|                 String payloadString; | ||||
|                 serializeJson(doc["payload"], payloadString); | ||||
| @@ -158,6 +160,10 @@ void sendAmsData(AsyncWebSocketClient *client) { | ||||
| } | ||||
|  | ||||
| void setupWebserver(AsyncWebServer &server) { | ||||
|     // WebSocket-Optimierungen | ||||
|     ws.onEvent(onWsEvent); | ||||
|     ws.enable(true); | ||||
|  | ||||
|     // Konfiguriere Server für große Uploads | ||||
|     server.onRequestBody([](AsyncWebServerRequest *request, uint8_t *data, size_t len, size_t index, size_t total){}); | ||||
|     server.onFileUpload([](AsyncWebServerRequest *request, const String& filename, size_t index, uint8_t *data, size_t len, bool final){}); | ||||
| @@ -228,7 +234,7 @@ void setupWebserver(AsyncWebServer &server) { | ||||
|         html.replace("{{spoolmanUrl}}", spoolmanUrl); | ||||
|  | ||||
|         JsonDocument doc; | ||||
|         if (loadJsonValue("/bambu_credentials.json", doc) && doc.containsKey("bambu_ip")) { | ||||
|         if (loadJsonValue("/bambu_credentials.json", doc) && doc["bambu_ip"].is<String>()) { | ||||
|             String bambuIp = doc["bambu_ip"].as<String>(); | ||||
|             String bambuSerial = doc["bambu_serialnr"].as<String>(); | ||||
|             String bambuCode = doc["bambu_accesscode"].as<String>(); | ||||
| @@ -414,7 +420,8 @@ void setupWebserver(AsyncWebServer &server) { | ||||
|     ); | ||||
|  | ||||
|     server.on("/api/version", HTTP_GET, [](AsyncWebServerRequest *request){ | ||||
|         String jsonResponse = "{\"version\": \"" VERSION "\"}"; | ||||
|         String fm_version = VERSION; | ||||
|         String jsonResponse = "{\"version\": \""+ fm_version +"\"}"; | ||||
|         request->send(200, "application/json", jsonResponse); | ||||
|     }); | ||||
|  | ||||
| @@ -436,26 +443,29 @@ void setupWebserver(AsyncWebServer &server) { | ||||
| } | ||||
|  | ||||
| void handleOTAUpload(AsyncWebServerRequest *request, const String& filename, size_t index, uint8_t *data, size_t len, bool final) { | ||||
|     static bool isSpiffsUpdate = false; | ||||
|     if (!index) { | ||||
|         // Start eines neuen Uploads | ||||
|         Serial.println("Update Start: " + filename); | ||||
|          | ||||
|         // Überprüfe den Dateityp basierend auf dem Dateinamen | ||||
|         bool isFirmware = filename.startsWith("filaman_"); | ||||
|         bool isWebpage = filename.startsWith("webpage_"); | ||||
|         isSpiffsUpdate = filename.startsWith("webpage_"); | ||||
|          | ||||
|         if (!isFirmware && !isWebpage) { | ||||
|         if (!isFirmware && !isSpiffsUpdate) { | ||||
|             request->send(400, "application/json", "{\"message\":\"Invalid file type. File must start with 'filaman_' or 'webpage_'\"}"); | ||||
|             return; | ||||
|         } | ||||
|  | ||||
|         // Wähle den Update-Typ basierend auf dem Dateinamen | ||||
|         if (isWebpage) { | ||||
|         if (isSpiffsUpdate) { | ||||
|             if (!Update.begin(SPIFFS.totalBytes(), U_SPIFFS)) { | ||||
|                 Update.printError(Serial); | ||||
|                 request->send(400, "application/json", "{\"message\":\"SPIFFS Update failed: " + String(Update.errorString()) + "\"}"); | ||||
|                 return; | ||||
|             } | ||||
|             // Backup JSON configs before SPIFFS update | ||||
|             backupJsonConfigs(); | ||||
|         } else { | ||||
|             if (!Update.begin(UPDATE_SIZE_UNKNOWN, U_FLASH)) { | ||||
|                 Update.printError(Serial); | ||||
| @@ -477,8 +487,34 @@ void handleOTAUpload(AsyncWebServerRequest *request, const String& filename, siz | ||||
|             request->send(400, "application/json", "{\"message\":\"Update failed: " + String(Update.errorString()) + "\"}"); | ||||
|             return; | ||||
|         } | ||||
|         if (isSpiffsUpdate) { | ||||
|             // Restore JSON configs after SPIFFS update | ||||
|             restoreJsonConfigs(); | ||||
|         } | ||||
|         request->send(200, "application/json", "{\"message\":\"Update successful!\", \"restart\": true}"); | ||||
|         delay(500); | ||||
|         ESP.restart(); | ||||
|     } | ||||
| } | ||||
|  | ||||
| void backupJsonConfigs() { | ||||
|     const char* configs[] = {"/bambu_credentials.json", "/spoolman_url.json"}; | ||||
|     for (const char* config : configs) { | ||||
|         if (SPIFFS.exists(config)) { | ||||
|             String backupPath = String(config) + ".bak"; | ||||
|             SPIFFS.remove(backupPath); | ||||
|             SPIFFS.rename(config, backupPath); | ||||
|         } | ||||
|     } | ||||
| } | ||||
|  | ||||
| void restoreJsonConfigs() { | ||||
|     const char* configs[] = {"/bambu_credentials.json", "/spoolman_url.json"}; | ||||
|     for (const char* config : configs) { | ||||
|         String backupPath = String(config) + ".bak"; | ||||
|         if (SPIFFS.exists(backupPath)) { | ||||
|             SPIFFS.remove(config); | ||||
|             SPIFFS.rename(backupPath, config); | ||||
|         } | ||||
|     } | ||||
| } | ||||
|   | ||||
| @@ -29,4 +29,8 @@ void sendNfcData(AsyncWebSocketClient *client); | ||||
| void foundNfcTag(AsyncWebSocketClient *client, uint8_t success); | ||||
| void sendWriteResult(AsyncWebSocketClient *client, uint8_t success); | ||||
|  | ||||
| // Upgrade-Funktionen | ||||
| void backupJsonConfigs(); | ||||
| void restoreJsonConfigs(); | ||||
|  | ||||
| #endif | ||||
|   | ||||
							
								
								
									
src/wlan.cpp (10 changed lines)

							| @@ -10,11 +10,19 @@ WiFiManager wm; | ||||
| bool wm_nonblocking = false; | ||||
|  | ||||
| void initWiFi() { | ||||
|     // Optimierte WiFi-Einstellungen | ||||
|     WiFi.mode(WIFI_STA); // explicitly set mode, esp defaults to STA+AP | ||||
|     WiFi.setSleep(false); // disable sleep mode | ||||
|     esp_wifi_set_ps(WIFI_PS_NONE); | ||||
|      | ||||
|     //esp_wifi_set_max_tx_power(72); // Setze maximale Sendeleistung auf 20dBm | ||||
|     // Maximale Sendeleistung | ||||
|     WiFi.setTxPower(WIFI_POWER_19_5dBm); // Set maximum transmit power | ||||
|    | ||||
|     // Optimiere TCP/IP Stack | ||||
|     esp_wifi_set_protocol(WIFI_IF_STA, WIFI_PROTOCOL_11B | WIFI_PROTOCOL_11G | WIFI_PROTOCOL_11N); | ||||
|      | ||||
|     // Aktiviere WiFi-Roaming für bessere Stabilität | ||||
|     esp_wifi_set_rssi_threshold(-80); | ||||
|    | ||||
|     if(wm_nonblocking) wm.setConfigPortalBlocking(false); | ||||
|     wm.setConfigPortalTimeout(320); // Portal nach 5min schließen | ||||
|   | ||||