Compare commits

114 Commits

v1.2.95 ... 36d50cbe7f

| SHA1 | Author | Date | |
|---|---|---|---|
| 36d50cbe7f | |||
| 9148d207c7 | |||
| 5f6fef9448 | |||
| 946202de0e | |||
| 41a3717347 | |||
| 255c820439 | |||
| aef3ba77ba | |||
| 2592c3a497 | |||
| a48c5dfef0 | |||
| 00554d0b09 | |||
| 05a91cd8d8 | |||
| 7cf113eaff | |||
| 44d27adab2 | |||
| e0a2dff5fe | |||
| 519a089684 | |||
| ef053bb2b6 | |||
| 0a91c7b269 | |||
| 875d9d2b70 | |||
| 52840b9b0b | |||
| da1fc7678f | |||
| 982bb5aa21 | |||
| 007737db13 | |||
| 17e5949201 | |||
| 6a57186091 | |||
| babd3f47a0 | |||
| 5372fe10fe | |||
| e0c9d90892 | |||
| e5f5d1961b | |||
| 31a960fb9e | |||
| 3c2e75b77a | |||
| 367143c456 | |||
| fbde4b764f | |||
| e57f4216d4 | |||
| b8beb992d6 | |||
| 4234b2254e | |||
| b8faf79163 | |||
| d35afaff46 | |||
| a8a00372b5 | |||
| 72f4eab588 | |||
| afa4eddc00 | |||
| b0888e7e63 | |||
| 238a84a8a2 | |||
| 59cc00ca13 | |||
| ab083f5f57 | |||
| c111573206 | |||
| 52b2494e52 | |||
| 069ec2d7a1 | |||
| 94e35ae86e | |||
| d71e3d8184 | |||
| bb166aa29f | |||
| 0d718023f8 | |||
| b16781043f | |||
| dff184ff25 | |||
| 0ce281221d | |||
| bc26c160e8 | |||
| c25f41db75 | |||
| e107c17f50 | |||
| 85b9d03ebd | |||
| 17b188626a | |||
| a534c5f872 | |||
| 93f7582790 | |||
| 46acc63756 | |||
| 67a9e1bdce | |||
| 2b75b64b4a | |||
| 8d003295e7 | |||
| f89500946a | |||
| 14e745ff06 | |||
| d058397fa2 | |||
| 622f5403a7 | |||
| 92b78a86dd | |||
| ec399390e8 | |||
| 909c4e9b5e | |||
| f4b20bfffd | |||
| 78464215a9 | |||
| 4365f0463a | |||
| 727bc0e760 | |||
| 04604013eb | |||
| cf5fc5f6f1 | |||
| 945a4ccce6 | |||
| 7cf9e2d145 | |||
| 9db4e338ea | |||
| dea6ca2c66 | |||
| e224e72e41 | |||
| 306c517da7 | |||
| 0337bbabe0 | |||
| bde14e50e0 | |||
| 9c656a9bd0 | |||
| eae552017d | |||
| a77918da41 | |||
| 262dad38a6 | |||
| cfc9f103cf | |||
| 0117302672 | |||
| 1de283b62f | |||
| f1eb78eb38 | |||
| 8a65b86475 | |||
| a3aef819c8 | |||
| a62b5ec933 | |||
| 1a8cf7a58f | |||
| b0b3d41c84 | |||
| 38b68aecfc | |||
| 4992f5f433 | |||
| 5cbbe1d231 | |||
| 9b29460d64 | |||
| dd14d475b7 | |||
| 9e6cd3b451 | |||
| c1be6ca582 | |||
| 265ff0c787 | |||
| 67eca82ac5 | |||
| 568db90db0 | |||
| 2dfd53d64a | |||
| 262a2fcbd4 | |||
| 3770de15d3 | |||
| 75a74ec9bd | |||
| 979adcbb14 | |||

.github/workflows/gitea-release.yml | 163 (vendored, normal file)

							| @@ -0,0 +1,163 @@ | ||||
| name: Gitea Release | ||||
|  | ||||
| on: | ||||
|   workflow_call: | ||||
|  | ||||
| jobs: | ||||
|   create-release: | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|     - uses: actions/checkout@v4 | ||||
|      | ||||
|     - name: Set up Python | ||||
|       uses: actions/setup-python@v4 | ||||
|       with: | ||||
|         python-version: '3.x' | ||||
|      | ||||
|     - name: Install PlatformIO | ||||
|       run: | | ||||
|         python -m pip install --upgrade pip | ||||
|         pip install --upgrade platformio esptool | ||||
|      | ||||
|     - name: Install xxd | ||||
|       run: | | ||||
|         sudo apt-get update | ||||
|         sudo apt-get install xxd | ||||
|      | ||||
|     - name: Build Firmware | ||||
|       run: | | ||||
|         VERSION=$(grep '^version = ' platformio.ini | cut -d'"' -f2) | ||||
|          | ||||
|         # Build firmware and SPIFFS | ||||
|         echo "Building firmware and SPIFFS..." | ||||
|         pio run -e esp32dev | ||||
|         pio run -t buildfs | ||||
|          | ||||
|         # Copy firmware binary | ||||
|         cp .pio/build/esp32dev/firmware.bin .pio/build/esp32dev/upgrade_filaman_firmware_v${VERSION}.bin | ||||
|          | ||||
|         # Create SPIFFS binary | ||||
|         cp .pio/build/esp32dev/spiffs.bin .pio/build/esp32dev/upgrade_filaman_website_v${VERSION}.bin | ||||
|          | ||||
|         # Create full binary | ||||
|         (cd .pio/build/esp32dev &&  | ||||
|         esptool.py --chip esp32 merge_bin \ | ||||
|           --fill-flash-size 4MB \ | ||||
|           --flash_mode dio \ | ||||
|           --flash_freq 40m \ | ||||
|           --flash_size 4MB \ | ||||
|           -o filaman_full_${VERSION}.bin \ | ||||
|           0x1000 bootloader.bin \ | ||||
|           0x8000 partitions.bin \ | ||||
|           0x10000 firmware.bin \ | ||||
|           0x390000 spiffs.bin) | ||||
|          | ||||
|         # Verify file sizes | ||||
|         echo "File sizes:" | ||||
|         (cd .pio/build/esp32dev && ls -lh *.bin) | ||||
|      | ||||
|     - name: Get version from platformio.ini | ||||
|       id: get_version | ||||
|       run: | | ||||
|         VERSION=$(grep '^version = ' platformio.ini | cut -d'"' -f2) | ||||
|         echo "VERSION=$VERSION" >> $GITHUB_OUTPUT | ||||
|        | ||||
|     - name: Read CHANGELOG.md | ||||
|       id: changelog | ||||
|       run: | | ||||
|         VERSION=${{ steps.get_version.outputs.VERSION }} | ||||
|         CHANGELOG=$(awk "/## \\[$VERSION\\]/{p=1;print;next} /## \\[/{p=0} p" CHANGELOG.md) | ||||
|         echo "CHANGES<<EOF" >> $GITHUB_OUTPUT | ||||
|         echo "$CHANGELOG" >> $GITHUB_OUTPUT | ||||
|         echo "EOF" >> $GITHUB_OUTPUT | ||||
|  | ||||
|     - name: Determine Gitea URL | ||||
|       id: gitea_url | ||||
|       run: | | ||||
|         echo "Debug Environment:" | ||||
|         echo "GITHUB_SERVER_URL=${GITHUB_SERVER_URL:-not set}" | ||||
|         echo "GITEA_SERVER_URL=${GITEA_SERVER_URL:-not set}" | ||||
|         echo "GITHUB_REPOSITORY=${GITHUB_REPOSITORY:-not set}" | ||||
|         echo "GITEA_REPOSITORY=${GITEA_REPOSITORY:-not set}" | ||||
|         echo "RUNNER_NAME=${RUNNER_NAME:-not set}" | ||||
|          | ||||
|         # Set API URL based on environment | ||||
|         if [ -n "${GITEA_ACTIONS}" ] || [ -n "${GITEA_REPOSITORY}" ] || [[ "${RUNNER_NAME}" == *"gitea"* ]]; then | ||||
|           GITEA_API_URL="${GITHUB_SERVER_URL}" | ||||
|           GITEA_REPO=$(echo "${GITHUB_REPOSITORY}" | cut -d'/' -f2) | ||||
|           GITEA_OWNER=$(echo "${GITHUB_REPOSITORY}" | cut -d'/' -f1) | ||||
|         else | ||||
|           echo "Error: This workflow is only for Gitea" | ||||
|           exit 1 | ||||
|         fi | ||||
|          | ||||
|         echo "GITEA_API_URL=${GITEA_API_URL}" >> $GITHUB_OUTPUT | ||||
|         echo "GITEA_REPO=${GITEA_REPO}" >> $GITHUB_OUTPUT | ||||
|         echo "GITEA_OWNER=${GITEA_OWNER}" >> $GITHUB_OUTPUT | ||||
|  | ||||
|     - name: Create Gitea Release | ||||
|       env: | ||||
|         GITEA_TOKEN: ${{ secrets.GITEA_TOKEN }} | ||||
|         GITEA_API_URL: ${{ steps.gitea_url.outputs.GITEA_API_URL }} | ||||
|         GITEA_REPO: ${{ steps.gitea_url.outputs.GITEA_REPO }} | ||||
|         GITEA_OWNER: ${{ steps.gitea_url.outputs.GITEA_OWNER }} | ||||
|       run: | | ||||
|         # Debug token (only print the length, for security) | ||||
|         echo "Debug: Token length: ${#GITEA_TOKEN}" | ||||
|         if [ -z "$GITEA_TOKEN" ]; then | ||||
|           echo "Error: GITEA_TOKEN is empty" | ||||
|           exit 1 | ||||
|         fi | ||||
|  | ||||
|         VERSION=${{ steps.get_version.outputs.VERSION }} | ||||
|         cd .pio/build/esp32dev | ||||
|          | ||||
|         # Debug output | ||||
|         echo "Debug: API URL: ${GITEA_API_URL}" | ||||
|         echo "Debug: Repository: ${GITEA_OWNER}/${GITEA_REPO}" | ||||
|          | ||||
|         # Prepare files for upload | ||||
|         FILES="" | ||||
|         for file in upgrade_filaman_firmware_v${VERSION}.bin upgrade_filaman_website_v${VERSION}.bin filaman_full_${VERSION}.bin; do | ||||
|           if [ -f "$file" ]; then | ||||
|             FILES="$FILES -a $file" | ||||
|             echo "Debug: Found file: $file" | ||||
|           fi | ||||
|         done | ||||
|  | ||||
|         # Test API connection with explicit token header | ||||
|         echo "Debug: Testing API connection..." | ||||
|         TEST_RESPONSE=$(curl -s -w "\n%{http_code}" \ | ||||
|           -H "Authorization: token ${GITEA_TOKEN}" \ | ||||
|           "${GITEA_API_URL}/api/v1/version") | ||||
|         TEST_STATUS=$(echo "$TEST_RESPONSE" | tail -n1) | ||||
|         if [ "$TEST_STATUS" != "200" ]; then | ||||
|           echo "Error: Cannot connect to Gitea API" | ||||
|           echo "Response: $TEST_RESPONSE" | ||||
|           exit 1 | ||||
|         fi | ||||
|  | ||||
|         # Create release using API | ||||
|         echo "Debug: Creating release..." | ||||
|         RELEASE_DATA="{ | ||||
|           \"tag_name\": \"v${VERSION}\", | ||||
|           \"name\": \"v${VERSION}\", | ||||
|           \"body\": \"${{ steps.changelog.outputs.CHANGES }}\" | ||||
|         }" | ||||
|  | ||||
|         # Create release with explicit token header | ||||
|         RESPONSE=$(curl -s -w "\n%{http_code}" \ | ||||
|           -X POST \ | ||||
|           -H "Authorization: token ${GITEA_TOKEN}" \ | ||||
|           -H "Content-Type: application/json" \ | ||||
|           -d "$RELEASE_DATA" \ | ||||
|           "${GITEA_API_URL}/api/v1/repos/${GITEA_OWNER}/${GITEA_REPO}/releases") | ||||
|          | ||||
|         HTTP_STATUS=$(echo "$RESPONSE" | tail -n1) | ||||
|         RESPONSE_BODY=$(echo "$RESPONSE" | head -n -1) | ||||
|  | ||||
|         echo "Debug: HTTP Status: $HTTP_STATUS" | ||||
|         if [ "$HTTP_STATUS" != "201" ]; then | ||||
|           echo "Error: Failed to create release" | ||||
|           echo "Response: $RESPONSE_BODY" | ||||
|           exit 1 | ||||
| @@ -2,6 +2,13 @@ name: GitHub Release | ||||
|  | ||||
| on: | ||||
|   workflow_call: | ||||
|     secrets: | ||||
|       RELEASE_TOKEN: | ||||
|         description: 'GitHub token for release creation' | ||||
|         required: true | ||||
|  | ||||
| permissions: | ||||
|   contents: write | ||||
|  | ||||
| jobs: | ||||
|   create-release: | ||||
| @@ -10,6 +17,8 @@ jobs: | ||||
|       contents: write | ||||
|     steps: | ||||
|     - uses: actions/checkout@v4 | ||||
|       with: | ||||
|         fetch-depth: 0 | ||||
|      | ||||
|     - name: Set up Python | ||||
|       uses: actions/setup-python@v4 | ||||
| @@ -26,6 +35,17 @@ jobs: | ||||
|         sudo apt-get update | ||||
|         sudo apt-get install xxd | ||||
|      | ||||
|     - name: Check for Data changes | ||||
|       id: check_data | ||||
|       run: | | ||||
|         git fetch --unshallow || true | ||||
|         CHANGED_FILES=$(git diff --name-only HEAD^..HEAD) | ||||
|         if echo "$CHANGED_FILES" | grep -q "^data/"; then | ||||
|           echo "DATA_CHANGED=true" >> $GITHUB_OUTPUT | ||||
|         else | ||||
|           echo "DATA_CHANGED=false" >> $GITHUB_OUTPUT | ||||
|         fi | ||||
|  | ||||
|     - name: Check for SPIFFS changes | ||||
|       id: check_spiffs | ||||
|       run: | | ||||
| @@ -39,47 +59,41 @@ jobs: | ||||
|      | ||||
|     - name: Build Firmware | ||||
|       run: | | ||||
|         # Get version from platformio.ini | ||||
|         VERSION=$(grep '^version = ' platformio.ini | cut -d'"' -f2) | ||||
|          | ||||
|         # Always build firmware | ||||
|         # Always build firmware and SPIFFS | ||||
|         echo "Building firmware and SPIFFS..." | ||||
|         pio run -e esp32dev | ||||
|         cp .pio/build/esp32dev/firmware.bin .pio/build/esp32dev/filaman_${VERSION}.bin | ||||
|         pio run -t buildfs | ||||
|          | ||||
|         # Only build SPIFFS if changed | ||||
|         if [[ "${{ steps.check_spiffs.outputs.SPIFFS_CHANGED }}" == "true" ]]; then | ||||
|           echo "Building SPIFFS due to changes..." | ||||
|           cp .pio/build/esp32dev/spiffs.bin .pio/build/esp32dev/webpage_${VERSION}.bin | ||||
|         fi | ||||
|         # Copy firmware binary | ||||
|         cp .pio/build/esp32dev/firmware.bin .pio/build/esp32dev/upgrade_filaman_firmware_v${VERSION}.bin | ||||
|          | ||||
|     - name: Prepare binaries | ||||
|       run: | | ||||
|         cd .pio/build/esp32dev | ||||
|         VERSION=$(grep '^version = ' ../../platformio.ini | cut -d'"' -f2) | ||||
|         # Always create SPIFFS binary | ||||
|         cp .pio/build/esp32dev/spiffs.bin .pio/build/esp32dev/upgrade_filaman_website_v${VERSION}.bin | ||||
|          | ||||
|         # Create full binary only if SPIFFS changed | ||||
|         if [[ "${{ steps.check_spiffs.outputs.SPIFFS_CHANGED }}" == "true" ]]; then | ||||
|           echo "Creating full binary..." | ||||
|           esptool.py --chip esp32 merge_bin \ | ||||
|             --fill-flash-size 4MB \ | ||||
|             --flash_mode dio \ | ||||
|             --flash_freq 40m \ | ||||
|             --flash_size 4MB \ | ||||
|             -o filaman_full_${VERSION}.bin \ | ||||
|             0x0000 bootloader.bin \ | ||||
|             0x8000 partitions.bin \ | ||||
|             0x10000 firmware.bin \ | ||||
|             0x390000 spiffs.bin | ||||
|         fi | ||||
|         # Create full binary (always) | ||||
|         (cd .pio/build/esp32dev &&  | ||||
|         esptool.py --chip esp32 merge_bin \ | ||||
|           --fill-flash-size 4MB \ | ||||
|           --flash_mode dio \ | ||||
|           --flash_freq 40m \ | ||||
|           --flash_size 4MB \ | ||||
|           -o filaman_full_${VERSION}.bin \ | ||||
|           0x1000 bootloader.bin \ | ||||
|           0x8000 partitions.bin \ | ||||
|           0x10000 firmware.bin \ | ||||
|           0x390000 spiffs.bin) | ||||
|          | ||||
|         # Verify file sizes | ||||
|         echo "File sizes:" | ||||
|         ls -lh *.bin | ||||
|         (cd .pio/build/esp32dev && ls -lh *.bin) | ||||
|      | ||||
|     - name: Get version from tag | ||||
|     - name: Get version from platformio.ini | ||||
|       id: get_version | ||||
|       run: | | ||||
|         echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_OUTPUT | ||||
|         VERSION=$(grep '^version = ' platformio.ini | cut -d'"' -f2) | ||||
|         echo "VERSION=$VERSION" >> $GITHUB_OUTPUT | ||||
|        | ||||
|     - name: Read CHANGELOG.md | ||||
|       id: changelog | ||||
| @@ -92,20 +106,22 @@ jobs: | ||||
|  | ||||
|     - name: Create GitHub Release | ||||
|       env: | ||||
|         GH_TOKEN: ${{ github.token }} | ||||
|         GH_TOKEN: ${{ secrets.RELEASE_TOKEN }} | ||||
|       run: | | ||||
|         VERSION=${{ steps.get_version.outputs.VERSION }} | ||||
|         cd .pio/build/esp32dev | ||||
|         VERSION=$(grep '^version = ' ../../platformio.ini | cut -d'"' -f2) | ||||
|          | ||||
|         # Create release with available files | ||||
|         FILES_TO_UPLOAD="" | ||||
|          | ||||
|         # Always add firmware | ||||
|         if [ -f "filaman_${VERSION}.bin" ]; then | ||||
|           FILES_TO_UPLOAD="$FILES_TO_UPLOAD filaman_${VERSION}.bin" | ||||
|         if [ -f "upgrade_filaman_firmware_v${VERSION}.bin" ]; then | ||||
|           FILES_TO_UPLOAD="$FILES_TO_UPLOAD upgrade_filaman_firmware_v${VERSION}.bin" | ||||
|         fi | ||||
|          | ||||
|         # Add SPIFFS and full binary only if they exist | ||||
|         if [ -f "webpage_${VERSION}.bin" ]; then | ||||
|           FILES_TO_UPLOAD="$FILES_TO_UPLOAD webpage_${VERSION}.bin" | ||||
|         if [ -f "upgrade_filaman_website_v${VERSION}.bin" ]; then | ||||
|           FILES_TO_UPLOAD="$FILES_TO_UPLOAD upgrade_filaman_website_v${VERSION}.bin" | ||||
|         fi | ||||
|          | ||||
|         if [ -f "filaman_full_${VERSION}.bin" ]; then | ||||
| @@ -114,8 +130,8 @@ jobs: | ||||
|          | ||||
|         # Create release with available files | ||||
|         if [ -n "$FILES_TO_UPLOAD" ]; then | ||||
|           gh release create "${{ github.ref_name }}" \ | ||||
|             --title "Release ${{ steps.get_version.outputs.VERSION }}" \ | ||||
|           gh release create "v${VERSION}" \ | ||||
|             --title "Release ${VERSION}" \ | ||||
|             --notes "${{ steps.changelog.outputs.CHANGES }}" \ | ||||
|             $FILES_TO_UPLOAD | ||||
|         else | ||||
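
Both release workflows assemble the 4 MB image with esptool's `merge_bin` and then only list the resulting file sizes. For local testing, the same assembly (plus a flash of the merged image) can be reproduced outside CI. A minimal sketch, assuming the PlatformIO artifacts already exist under `.pio/build/esp32dev`; the version string and serial port are placeholders:

```python
#!/usr/bin/env python3
"""Reproduce the CI merge_bin step locally and flash the merged image.

Assumes a prior `pio run -e esp32dev && pio run -t buildfs`, so that
bootloader.bin, partitions.bin, firmware.bin and spiffs.bin already exist.
"""
import subprocess

BUILD_DIR = ".pio/build/esp32dev"
VERSION = "1.3.26"        # placeholder, normally read from platformio.ini
PORT = "/dev/ttyUSB0"     # placeholder serial port

merged = f"filaman_full_{VERSION}.bin"

# Same offsets as the workflow: bootloader at 0x1000, partition table at
# 0x8000, application at 0x10000, SPIFFS image at 0x390000.
subprocess.run(
    ["esptool.py", "--chip", "esp32", "merge_bin",
     "--fill-flash-size", "4MB",
     "--flash_mode", "dio", "--flash_freq", "40m", "--flash_size", "4MB",
     "-o", merged,
     "0x1000", "bootloader.bin",
     "0x8000", "partitions.bin",
     "0x10000", "firmware.bin",
     "0x390000", "spiffs.bin"],
    cwd=BUILD_DIR, check=True)

# --fill-flash-size pads the image from offset 0x0, so the merged binary is
# written to the very start of flash rather than to the individual offsets.
subprocess.run(
    ["esptool.py", "--chip", "esp32", "--port", PORT,
     "write_flash", "0x0", merged],
    cwd=BUILD_DIR, check=True)
```

Note that the new workflow places the bootloader at 0x1000, while the removed provider workflow below still used 0x0000; the 1.2.102 changelog entry ("adjust bootloader offset in binary merge") refers to that change.
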
							
								
								
									
.github/workflows/providers/gitea-release.yml | 143 (vendored)

							| @@ -1,143 +0,0 @@ | ||||
| name: Gitea Release | ||||
|  | ||||
| on: | ||||
|   workflow_call: | ||||
|     inputs: | ||||
|       gitea_ref_name: | ||||
|         required: true | ||||
|         type: string | ||||
|       gitea_server_url: | ||||
|         required: true | ||||
|         type: string | ||||
|       gitea_repository: | ||||
|         required: true | ||||
|         type: string | ||||
|     secrets: | ||||
|       GITEA_TOKEN: | ||||
|         required: true | ||||
|  | ||||
| jobs: | ||||
|   create-release: | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|     - uses: actions/checkout@v4 | ||||
|      | ||||
|     - name: Set up Python | ||||
|       uses: actions/setup-python@v4 | ||||
|       with: | ||||
|         python-version: '3.x' | ||||
|      | ||||
|     - name: Install PlatformIO | ||||
|       run: | | ||||
|         python -m pip install --upgrade pip | ||||
|         pip install --upgrade platformio esptool | ||||
|      | ||||
|     - name: Install xxd | ||||
|       run: | | ||||
|         sudo apt-get update | ||||
|         sudo apt-get install xxd | ||||
|      | ||||
|     - name: Check for SPIFFS changes | ||||
|       id: check_spiffs | ||||
|       run: | | ||||
|         git fetch --unshallow || true | ||||
|         CHANGED_FILES=$(git diff --name-only HEAD^..HEAD) | ||||
|         if echo "$CHANGED_FILES" | grep -q "^data/\|^html/"; then | ||||
|           echo "SPIFFS_CHANGED=true" >> $GITHUB_OUTPUT | ||||
|         else | ||||
|           echo "SPIFFS_CHANGED=false" >> $GITHUB_OUTPUT | ||||
|         fi | ||||
|      | ||||
|     - name: Build Firmware | ||||
|       run: | | ||||
|         # Get version from platformio.ini | ||||
|         VERSION=$(grep '^version = ' platformio.ini | cut -d'"' -f2) | ||||
|          | ||||
|         # Always build firmware | ||||
|         pio run -e esp32dev | ||||
|         cp .pio/build/esp32dev/firmware.bin .pio/build/esp32dev/filaman_${VERSION}.bin | ||||
|          | ||||
|         # Only build SPIFFS if changed | ||||
|         if [[ "${{ steps.check_spiffs.outputs.SPIFFS_CHANGED }}" == "true" ]]; then | ||||
|           echo "Building SPIFFS due to changes..." | ||||
|           cp .pio/build/esp32dev/spiffs.bin .pio/build/esp32dev/webpage_${VERSION}.bin | ||||
|         fi | ||||
|          | ||||
|     - name: Prepare binaries | ||||
|       run: | | ||||
|         cd .pio/build/esp32dev | ||||
|         VERSION=$(grep '^version = ' ../../platformio.ini | cut -d'"' -f2) | ||||
|          | ||||
|         # Create full binary only if SPIFFS changed | ||||
|         if [[ "${{ steps.check_spiffs.outputs.SPIFFS_CHANGED }}" == "true" ]]; then | ||||
|           echo "Creating full binary..." | ||||
|           esptool.py --chip esp32 merge_bin \ | ||||
|             --fill-flash-size 4MB \ | ||||
|             --flash_mode dio \ | ||||
|             --flash_freq 40m \ | ||||
|             --flash_size 4MB \ | ||||
|             -o filaman_full_${VERSION}.bin \ | ||||
|             0x0000 bootloader.bin \ | ||||
|             0x8000 partitions.bin \ | ||||
|             0x10000 firmware.bin \ | ||||
|             0x390000 spiffs.bin | ||||
|         fi | ||||
|          | ||||
|         # Verify file sizes | ||||
|         echo "File sizes:" | ||||
|         ls -lh *.bin | ||||
|          | ||||
|     - name: Create Release | ||||
|       env: | ||||
|         TOKEN: ${{ secrets.GITEA_TOKEN }} | ||||
|       run: | | ||||
|         TAG="${{ inputs.gitea_ref_name }}" | ||||
|         API_URL="${{ inputs.gitea_server_url }}/api/v1" | ||||
|         REPO="${{ inputs.gitea_repository }}" | ||||
|         VERSION=$(grep '^version = ' platformio.ini | cut -d'"' -f2) | ||||
|          | ||||
|         # Create release | ||||
|         RESPONSE=$(curl -k -s \ | ||||
|           -X POST \ | ||||
|           -H "Authorization: token ${TOKEN}" \ | ||||
|           -H "Content-Type: application/json" \ | ||||
|           -d "{ | ||||
|             \"tag_name\":\"${TAG}\", | ||||
|             \"name\":\"Release ${TAG}\", | ||||
|             \"body\":\"${{ steps.changelog.outputs.CHANGES }}\" | ||||
|           }" \ | ||||
|           "${API_URL}/repos/${REPO}/releases") | ||||
|          | ||||
|         RELEASE_ID=$(echo "$RESPONSE" | grep -o '"id":[0-9]*' | cut -d':' -f2 | head -n1) | ||||
|          | ||||
|         if [ -n "$RELEASE_ID" ]; then | ||||
|           echo "Release created with ID: $RELEASE_ID" | ||||
|           cd .pio/build/esp32dev | ||||
|            | ||||
|           # Always upload firmware | ||||
|           if [ -f "filaman_${VERSION}.bin" ]; then | ||||
|             curl -k -s \ | ||||
|               -X POST \ | ||||
|               -H "Authorization: token ${TOKEN}" \ | ||||
|               -H "Content-Type: application/octet-stream" \ | ||||
|               --data-binary "@filaman_${VERSION}.bin" \ | ||||
|               "${API_URL}/repos/${REPO}/releases/${RELEASE_ID}/assets?name=filaman_${VERSION}.bin" | ||||
|           fi | ||||
|            | ||||
|           # Upload SPIFFS and full binary only if they exist | ||||
|           for file in webpage_${VERSION}.bin filaman_full_${VERSION}.bin; do | ||||
|             if [ -f "$file" ]; then | ||||
|               echo "Uploading $file..." | ||||
|               curl -k -s \ | ||||
|                 -X POST \ | ||||
|                 -H "Authorization: token ${TOKEN}" \ | ||||
|                 -H "Content-Type: application/octet-stream" \ | ||||
|                 --data-binary "@$file" \ | ||||
|                 "${API_URL}/repos/${REPO}/releases/${RELEASE_ID}/assets?name=$file" | ||||
|             fi | ||||
|           done | ||||
|         else | ||||
|           echo "Failed to create release. Response:" | ||||
|           echo "$RESPONSE" | ||||
|           exit 1 | ||||
|         fi | ||||
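
The removed provider workflow above talks to the same Gitea v1 REST endpoints that the new `.github/workflows/gitea-release.yml` uses with curl: one call to create the release, then one upload per binary. A minimal Python sketch of those calls, with `requests` assumed to be installed and the instance URL, repository, version and file names as placeholders:

```python
"""Create a Gitea release and attach build artifacts.

Mirrors the curl calls in the workflows shown here; values are placeholders.
"""
import os
import requests

API = "https://git.example.com/api/v1"   # placeholder Gitea instance
REPO = "owner/FilaMan"                   # placeholder owner/repo
TOKEN = os.environ["GITEA_TOKEN"]
VERSION = "1.3.26"                       # placeholder

headers = {"Authorization": f"token {TOKEN}"}

# 1. Create the release for the tag.
resp = requests.post(
    f"{API}/repos/{REPO}/releases",
    headers=headers,
    json={"tag_name": f"v{VERSION}", "name": f"v{VERSION}", "body": "..."},
)
resp.raise_for_status()
release_id = resp.json()["id"]

# 2. Upload each binary as a release asset, as a raw octet-stream body
#    (the same approach as curl --data-binary in the workflow).
for name in (f"upgrade_filaman_firmware_v{VERSION}.bin",
             f"upgrade_filaman_website_v{VERSION}.bin",
             f"filaman_full_{VERSION}.bin"):
    with open(f".pio/build/esp32dev/{name}", "rb") as fh:
        requests.post(
            f"{API}/repos/{REPO}/releases/{release_id}/assets",
            headers={**headers, "Content-Type": "application/octet-stream"},
            params={"name": name},
            data=fh,
        ).raise_for_status()
```
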
							
								
								
									
.github/workflows/release.yml | 61 (vendored)

							| @@ -5,66 +5,35 @@ on: | ||||
|     tags: | ||||
|       - 'v*' | ||||
|  | ||||
| permissions: | ||||
|   contents: write | ||||
|  | ||||
| jobs: | ||||
|   route: | ||||
|   detect-provider: | ||||
|     runs-on: ubuntu-latest | ||||
|     outputs: | ||||
|       provider: ${{ steps.provider.outputs.provider }} | ||||
|       gitea_ref_name: ${{ steps.provider.outputs.gitea_ref_name }} | ||||
|       gitea_server_url: ${{ steps.provider.outputs.gitea_server_url }} | ||||
|       gitea_repository: ${{ steps.provider.outputs.gitea_repository }} | ||||
|     steps: | ||||
|       - name: Checkout Repository | ||||
|         uses: actions/checkout@v3 | ||||
|  | ||||
|       - name: Debug Environment | ||||
|         run: | | ||||
|           echo "CI Environment Details:" | ||||
|           echo "GITHUB_ACTIONS=${GITHUB_ACTIONS:-not set}" | ||||
|           echo "GITEA_ACTIONS=${GITEA_ACTIONS:-not set}" | ||||
|           echo "GITEA_REPOSITORY=${GITEA_REPOSITORY:-not set}" | ||||
|           echo "GITEA_SERVER_URL=${GITEA_SERVER_URL:-not set}" | ||||
|           echo "RUNNER_NAME=${RUNNER_NAME:-not set}" | ||||
|  | ||||
|       - name: Determine CI Provider | ||||
|         id: provider | ||||
|         shell: bash | ||||
|         run: | | ||||
|           if [ -n "${GITEA_ACTIONS}" ] || [ -n "${GITEA_REPOSITORY}" ] || [[ "${RUNNER_NAME}" == *"gitea"* ]]; then | ||||
|             echo "provider=gitea" >> "$GITHUB_OUTPUT" | ||||
|             echo "gitea_ref_name=${GITHUB_REF_NAME}" >> "$GITHUB_OUTPUT" | ||||
|             echo "gitea_server_url=${GITHUB_SERVER_URL}" >> "$GITHUB_OUTPUT" | ||||
|             echo "gitea_repository=${GITHUB_REPOSITORY}" >> "$GITHUB_OUTPUT" | ||||
|           elif [ "${GITHUB_ACTIONS}" = "true" ]; then | ||||
|             echo "provider=github" >> "$GITHUB_OUTPUT" | ||||
|           else | ||||
|             echo "provider=unknown" >> "$GITHUB_OUTPUT" | ||||
|           fi | ||||
|  | ||||
|   verify-provider: | ||||
|     needs: route | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - name: Echo detected provider | ||||
|         run: | | ||||
|           echo "Detected CI Provider: ${{ needs.route.outputs.provider }}" | ||||
|           if [ "${{ needs.route.outputs.provider }}" = "unknown" ]; then | ||||
|             echo "::error::Failed to detect CI provider!" | ||||
|             exit 1 | ||||
|             echo "provider=github" >> "$GITHUB_OUTPUT" | ||||
|           fi | ||||
|  | ||||
|   github-release: | ||||
|     needs: [route, verify-provider] | ||||
|     if: needs.route.outputs.provider == 'github' | ||||
|     uses: ./.github/workflows/providers/github-release.yml | ||||
|     needs: detect-provider | ||||
|     permissions: | ||||
|       contents: write | ||||
|     if: needs.detect-provider.outputs.provider == 'github' | ||||
|     uses: ./.github/workflows/github-release.yml | ||||
|     secrets: | ||||
|       RELEASE_TOKEN: ${{ secrets.GITHUB_TOKEN }} | ||||
|  | ||||
|   gitea-release: | ||||
|     needs: [route, verify-provider] | ||||
|     if: needs.route.outputs.provider == 'gitea' | ||||
|     uses: ./.github/workflows/providers/gitea-release.yml | ||||
|     with: | ||||
|       gitea_ref_name: ${{ needs.route.outputs.gitea_ref_name }} | ||||
|       gitea_server_url: ${{ needs.route.outputs.gitea_server_url }} | ||||
|       gitea_repository: ${{ needs.route.outputs.gitea_repository }} | ||||
|     secrets: | ||||
|       GITEA_TOKEN: ${{ secrets.GITEA_TOKEN }} | ||||
|     needs: detect-provider | ||||
|     if: needs.detect-provider.outputs.provider == 'gitea' | ||||
|     uses: ./.github/workflows/gitea-release.yml | ||||
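
The `detect-provider` job keys entirely off environment variables exposed by the runner. The same decision logic, sketched as a small Python function for clarity (variable names are taken from the workflow; behaviour on runners other than these two is not covered by the source):

```python
import os

def detect_provider(env=os.environ) -> str:
    """Replicate the shell logic in release.yml: Gitea runners either set
    GITEA_ACTIONS/GITEA_REPOSITORY or carry 'gitea' in the runner name."""
    if (env.get("GITEA_ACTIONS")
            or env.get("GITEA_REPOSITORY")
            or "gitea" in env.get("RUNNER_NAME", "")):
        return "gitea"
    if env.get("GITHUB_ACTIONS") == "true":
        return "github"
    return "unknown"

if __name__ == "__main__":
    print(detect_provider())
```
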
							
								
								
									
CHANGELOG.md | 255

							| @@ -1,5 +1,260 @@ | ||||
| # Changelog | ||||
|  | ||||
| ## [1.3.26] - 2025-02-21 | ||||
| ### Changed | ||||
| - update webpages for version v1.3.26 | ||||
|  | ||||
| ### Fixed | ||||
| - workflow: improve Gitea release workflow with enhanced error handling and debug outputs | ||||
|  | ||||
|  | ||||
| ## [1.3.25] - 2025-02-21 | ||||
| ### Changed | ||||
| - update webpages for version v1.3.25 | ||||
| - workflow: update Gitea release workflow to include RUNNER_NAME and improve error handling | ||||
|  | ||||
|  | ||||
| ## [1.3.24] - 2025-02-21 | ||||
| ### Changed | ||||
| - update webpages for version v1.3.24 | ||||
| - workflow: rename update files to upgrade in GitHub release workflow | ||||
| - workflow: update existing changelog entries for versions already present | ||||
|  | ||||
| ### Fixed | ||||
| - workflow: improve Gitea release process with dynamic URL determination and debug outputs | ||||
|  | ||||
|  | ||||
| ## [1.3.23] - 2025-02-21 | ||||
| ### Changed | ||||
| - update webpages for version v1.3.23 | ||||
|  | ||||
| ### Fixed | ||||
| - workflow: enhance Gitea release process with debug outputs and API connection checks | ||||
|  | ||||
|  | ||||
| ## [1.3.22] - 2025-02-21 | ||||
| ### Added | ||||
| - workflow: improve Gitea release process with additional environment variables and error handling | ||||
|  | ||||
| ### Changed | ||||
| - update webpages for version v1.3.22 | ||||
|  | ||||
|  | ||||
| ## [1.3.21] - 2025-02-21 | ||||
| ### Changed | ||||
| - update webpages for version v1.3.21 | ||||
| - workflow: enhance Gitea release process with API integration and token management | ||||
|  | ||||
|  | ||||
| ## [1.3.20] - 2025-02-21 | ||||
| ### Changed | ||||
| - update webpages for version v1.3.20 | ||||
| - workflow: enable git tagging and pushing for Gitea releases | ||||
|  | ||||
|  | ||||
| ## [1.3.19] - 2025-02-21 | ||||
| ### Changed | ||||
| - update webpages for version v1.3.19 | ||||
| - workflow: enable git push for version tagging in Gitea release | ||||
|  | ||||
|  | ||||
| ## [1.3.18] - 2025-02-21 | ||||
| ### Changed | ||||
| - ACHTUNG: Installiere einmal das filaman_full.bin danach kannst du über die upgrade Files aktualisieren und deine Settings bleiben auch erhalten. | ||||
| - ATTENTION: Install the filaman_full.bin once, then you can update via the upgrade files and your settings will also be retained. | ||||
|  | ||||
|  | ||||
|  | ||||
| ## [1.3.18] - 2025-02-21 | ||||
| ### Added | ||||
| - add note about filaman_full.bin installation in changelog | ||||
|  | ||||
| ### Changed | ||||
| - update webpages for version v1.3.18 | ||||
| - update changelog for version 1.3.18 and enhance update script for existing entries | ||||
|  | ||||
| ## [1.3.17] - 2025-02-21 | ||||
| ### Changed | ||||
| - update webpages for version v1.3.17 | ||||
| - ci: comment out git tag and push commands in gitea-release workflow | ||||
|  | ||||
|  | ||||
| ## [1.3.16] - 2025-02-21 | ||||
| ### Changed | ||||
| - update webpages for version v1.3.16 | ||||
| - ci: update filenames for firmware and website binaries in release workflows | ||||
|  | ||||
|  | ||||
| ## [1.3.15] - 2025-02-21 | ||||
| ### Changed | ||||
| - update webpages for version v1.3.15 | ||||
|  | ||||
| ### Fixed | ||||
| - ci: fix missing 'fi' in GitHub release workflow script | ||||
|  | ||||
|  | ||||
| ## [1.3.14] - 2025-02-21 | ||||
| ### Changed | ||||
| - update webpages for version v1.3.14 | ||||
| - ci: update GitHub release workflow to improve file upload handling | ||||
|  | ||||
|  | ||||
| ## [1.3.13] - 2025-02-21 | ||||
| ### Changed | ||||
| - update webpages for version v1.3.13 | ||||
| - ci: update GitHub release workflow to use RELEASE_TOKEN for improved security | ||||
|  | ||||
|  | ||||
| ## [1.3.12] - 2025-02-21 | ||||
| ### Changed | ||||
| - update webpages for version v1.3.12 | ||||
| - ci: enhance GitHub release workflow with token handling and file upload improvements | ||||
|  | ||||
|  | ||||
| ## [1.3.11] - 2025-02-21 | ||||
| ### Changed | ||||
| - update webpages for version v1.3.11 | ||||
| - ci: refactor Gitea release workflow by simplifying input handling and removing unnecessary checks | ||||
|  | ||||
|  | ||||
| ## [1.3.10] - 2025-02-21 | ||||
| ### Changed | ||||
| - update webpages for version v1.3.10 | ||||
| - ci: simplify GitHub release workflow by removing provider verification step | ||||
|  | ||||
|  | ||||
| ## [1.3.9] - 2025-02-21 | ||||
| ### Changed | ||||
| - update webpages for version v1.3.9 | ||||
| - ci: comment out permissions for GitHub release workflow | ||||
|  | ||||
|  | ||||
| ## [1.3.8] - 2025-02-21 | ||||
| ### Added | ||||
| - add Gitea and GitHub release workflows | ||||
|  | ||||
| ### Changed | ||||
| - update webpages for version v1.3.8 | ||||
|  | ||||
|  | ||||
| ## [1.3.7] - 2025-02-21 | ||||
| ### Added | ||||
| - add GitHub and Gitea release workflows | ||||
|  | ||||
| ### Changed | ||||
| - update webpages for version v1.3.7 | ||||
|  | ||||
|  | ||||
| ## [1.3.6] - 2025-02-21 | ||||
| ### Changed | ||||
| - update webpages for version v1.3.6 | ||||
|  | ||||
| ### Fixed | ||||
| - update GitHub token reference and correct file path in release workflow | ||||
|  | ||||
|  | ||||
| ## [1.3.5] - 2025-02-21 | ||||
| ### Added | ||||
| - enhance release workflow to support Gitea alongside GitHub | ||||
|  | ||||
| ### Changed | ||||
| - update webpages for version v1.3.5 | ||||
|  | ||||
|  | ||||
| ## [1.3.4] - 2025-02-21 | ||||
| ### Added | ||||
| - add Gitea and GitHub release workflows | ||||
|  | ||||
| ### Changed | ||||
| - update webpages for version v1.3.4 | ||||
| - Merge branch 'old' | ||||
|  | ||||
|  | ||||
| ## [1.3.3] - 2025-02-21 | ||||
| ### Changed | ||||
| - update webpages for version v1.3.3 | ||||
|  | ||||
| ### Fixed | ||||
| - correct directory path in GitHub workflows for SPIFFS binary | ||||
|  | ||||
|  | ||||
| ## [1.3.2] - 2025-02-21 | ||||
| ### Added | ||||
| - add missing conditional exit in release workflow | ||||
|  | ||||
| ### Changed | ||||
| - update webpages for version v1.3.2 | ||||
|  | ||||
|  | ||||
| ## [1.3.1] - 2025-02-21 | ||||
| ### Added | ||||
| - enhance GitHub and Gitea release workflows with Python setup and binary preparation | ||||
|  | ||||
| ### Changed | ||||
| - update webpages for version v1.3.1 | ||||
|  | ||||
|  | ||||
| ## [1.3.0] - 2025-02-21 | ||||
| ### Changed | ||||
| - bump version to 1.3.0 in platformio.ini | ||||
|  | ||||
|  | ||||
| ## [1.2.102] - 2025-02-21 | ||||
| ### Changed | ||||
| - update webpages for version v1.2.102 | ||||
|  | ||||
| ### Fixed | ||||
| - adjust bootloader offset in binary merge for Gitea and GitHub workflows | ||||
|  | ||||
|  | ||||
| ## [1.2.101] - 2025-02-21 | ||||
| ### Changed | ||||
| - update webpages for version v1.2.101 | ||||
| - always create SPIFFS binary in release workflows | ||||
| - migrate calibration value storage from EEPROM to NVS | ||||
|  | ||||
|  | ||||
| ## [1.2.100] - 2025-02-21 | ||||
| ### Changed | ||||
| - update webpages for version v1.2.100 | ||||
| - remove OTA handling and JSON backup/restore functions | ||||
|  | ||||
|  | ||||
| ## [1.2.99] - 2025-02-21 | ||||
| ### Added | ||||
| - add SPIFFS change detection and binary copying to release workflows | ||||
| - add backup and restore functions for JSON configurations during OTA updates | ||||
|  | ||||
| ### Changed | ||||
| - update webpages for version v1.2.99 | ||||
| - update JSON field type checks from JsonObject to String for improved validation | ||||
| - update JSON handling in API and Bambu modules for improved object management | ||||
| - update platformio.ini dependencies and improve version handling in website.cpp | ||||
| - update Cache-Control header to reflect a 1-week duration | ||||
| - remove version definition from website.cpp | ||||
| - optimize WiFi and WebSocket settings; enhance TCP/IP stack configuration | ||||
| - update upgrade page title and heading; adjust cache control duration | ||||
|  | ||||
|  | ||||
| ## [1.2.98] - 2025-02-21 | ||||
| ### Changed | ||||
| - update webpages for version v1.2.98 | ||||
|  | ||||
|  | ||||
| ## [1.2.97] - 2025-02-21 | ||||
| ### Changed | ||||
| - update webpages for version v1.2.97 | ||||
| - streamline Gitea and GitHub release workflows to check for data changes and update binary handling | ||||
|  | ||||
|  | ||||
| ## [1.2.96] - 2025-02-21 | ||||
| ### Added | ||||
| - add SPIFFS build step to Gitea and GitHub release workflows | ||||
|  | ||||
| ### Changed | ||||
| - update webpages for version v1.2.96 | ||||
|  | ||||
|  | ||||
| ## [1.2.95] - 2025-02-21 | ||||
| ### Added | ||||
| - enhance update process with separate forms for firmware and webpage uploads, including validation and improved UI | ||||
|   | ||||
| @@ -1,9 +1,10 @@ | ||||
| <!DOCTYPE html> | ||||
| <!-- head --><!DOCTYPE html> | ||||
| <html lang="en"> | ||||
| <head> | ||||
|     <meta charset="UTF-8"> | ||||
|     <meta name="viewport" content="width=device-width, initial-scale=1.0"> | ||||
|     <title>FilaMan - Firmware Update</title> | ||||
|     <title>FilaMan - Filament Management Tool</title> | ||||
|     <link rel="icon" type="image/png" href="/favicon.ico"> | ||||
|     <link rel="stylesheet" href="style.css"> | ||||
|     <script> | ||||
|         fetch('/api/version') | ||||
| @@ -49,7 +50,8 @@ | ||||
| <!-- head --> | ||||
|      | ||||
|     <div class="content"> | ||||
|         <h1>System Update</h1> | ||||
|         <h1>Firmware Upgrade</h1> | ||||
|  | ||||
|         <div class="warning"> | ||||
|             <strong>Warning:</strong> Do not power off the device during update. | ||||
|         </div> | ||||
| @@ -79,7 +81,7 @@ | ||||
|         </div> | ||||
|  | ||||
|         <div class="progress-container" style="display: none;"> | ||||
|             <div class="progress-bar">0%</</div> | ||||
|             <div class="progress-bar">0%</div> | ||||
|         </div> | ||||
|         <div class="status"></div> | ||||
|     </div> | ||||
| @@ -104,6 +106,42 @@ | ||||
|             color: #666; | ||||
|             margin-bottom: 1rem; | ||||
|         } | ||||
|         .progress-container { | ||||
|             margin: 20px 0; | ||||
|             background: #f0f0f0; | ||||
|             border-radius: 4px; | ||||
|             overflow: hidden; | ||||
|         } | ||||
|         .progress-bar { | ||||
|             width: 0; | ||||
|             height: 20px; | ||||
|             background: #4CAF50; | ||||
|             transition: width 0.3s ease-in-out; | ||||
|             text-align: center; | ||||
|             line-height: 20px; | ||||
|             color: white; | ||||
|         } | ||||
|         .status { | ||||
|             margin-top: 20px; | ||||
|             padding: 10px; | ||||
|             border-radius: 4px; | ||||
|             display: none; | ||||
|         } | ||||
|         .status.success { | ||||
|             background: #e8f5e9; | ||||
|             color: #2e7d32; | ||||
|         } | ||||
|         .status.error { | ||||
|             background: #ffebee; | ||||
|             color: #c62828; | ||||
|         } | ||||
|         .warning { | ||||
|             background: #fff3e0; | ||||
|             color: #e65100; | ||||
|             padding: 15px; | ||||
|             border-radius: 4px; | ||||
|             margin-bottom: 20px; | ||||
|         } | ||||
|     </style> | ||||
|  | ||||
|     <script> | ||||
| @@ -113,8 +151,13 @@ | ||||
|             statusContainer.style.display = 'none'; | ||||
|         } | ||||
|  | ||||
|         const progress = document.querySelector('.progress-bar'); | ||||
|         const progressContainer = document.querySelector('.progress-container'); | ||||
|         const status = document.querySelector('.status'); | ||||
|  | ||||
|         function handleUpdate(e) { | ||||
|             e.preventDefault(); | ||||
|             const form = e.target; | ||||
|             const file = form.update.files[0]; | ||||
|             const updateType = form.dataset.type; | ||||
|  | ||||
| @@ -132,25 +175,15 @@ | ||||
|                 alert('Please select a valid webpage file (webpage_*.bin)'); | ||||
|                 return; | ||||
|             } | ||||
|  | ||||
|             log(`Selected file: ${file.name} (${file.size} bytes)`); | ||||
|              | ||||
|             // Show the progress display | ||||
|             progress.style.display = 'block'; | ||||
|             form.style.display = 'none'; | ||||
|  | ||||
|             // Create FormData for the upload | ||||
|             const formData = new FormData(); | ||||
|             formData.append('update', file); | ||||
|              | ||||
|             const progress = document.querySelector('.progress-bar'); | ||||
|             const progressContainer = document.querySelector('.progress-container'); | ||||
|             const status = document.querySelector('.status'); | ||||
|              | ||||
|             progressContainer.style.display = 'block'; | ||||
|             status.style.display = 'none'; | ||||
|             status.className = 'status'; | ||||
|  | ||||
|             // Reset progress bar | ||||
|             progress.style.width = '0%'; | ||||
|             progress.textContent = '0%'; | ||||
|  | ||||
|             // Disable both forms during update | ||||
|             document.querySelectorAll('form input[type=submit]').forEach(btn => btn.disabled = true); | ||||
|  | ||||
| @@ -215,6 +248,8 @@ | ||||
|                 document.querySelectorAll('form input[type=submit]').forEach(btn => btn.disabled = false); | ||||
|             }; | ||||
|  | ||||
|             const formData = new FormData(); | ||||
|             formData.append('update', file); | ||||
|             xhr.send(formData); | ||||
|         } | ||||
|  | ||||
|   | ||||
| @@ -9,7 +9,9 @@ | ||||
| ; https://docs.platformio.org/page/projectconf.html | ||||
|  | ||||
| [common] | ||||
| version = "1.2.95" | ||||
| version = "1.3.26" | ||||
|  | ||||
| #test | ||||
|  | ||||
| [env:esp32dev] | ||||
| platform = espressif32 | ||||
| @@ -19,10 +21,11 @@ monitor_speed = 115200 | ||||
|  | ||||
| lib_deps = | ||||
|     tzapu/WiFiManager @ ^2.0.17 | ||||
|     #https://github.com/me-no-dev/ESPAsyncWebServer.git#master | ||||
|     https://github.com/me-no-dev/ESPAsyncWebServer.git#master | ||||
|     #me-no-dev/AsyncTCP @ ^1.1.1 | ||||
|     mathieucarbou/ESPAsyncWebServer @ ^3.6.0 | ||||
|     esp32async/AsyncTCP @ ^3.3.5 | ||||
|     https://github.com/esphome/AsyncTCP.git | ||||
|     #mathieucarbou/ESPAsyncWebServer @ ^3.6.0 | ||||
|     #esp32async/AsyncTCP @ ^3.3.5 | ||||
|     bogde/HX711 @ ^0.7.5 | ||||
|     adafruit/Adafruit SSD1306 @ ^2.5.13 | ||||
|     adafruit/Adafruit GFX Library @ ^1.11.11 | ||||
| @@ -45,7 +48,7 @@ build_flags = | ||||
|     -fdata-sections | ||||
|     -DNDEBUG | ||||
|     -mtext-section-literals | ||||
|     '-D VERSION="${common.version}"' | ||||
|     -DVERSION=\"${common.version}\" | ||||
|     -DASYNCWEBSERVER_REGEX | ||||
|     -DCORE_DEBUG_LEVEL=3 | ||||
|     -DCONFIG_ARDUHAL_LOG_COLORS=1 | ||||
| @@ -54,6 +57,11 @@ build_flags = | ||||
|     -DCONFIG_ESP32_PANIC_PRINT_REBOOT | ||||
|     -DBOOT_APP_PARTITION_OTA_0=1 | ||||
|     -DCONFIG_LOG_DEFAULT_LEVEL=3 | ||||
|     -DCONFIG_LWIP_TCP_MSL=60000 | ||||
|     -DCONFIG_LWIP_TCP_WND_DEFAULT=8192 | ||||
|     -DCONFIG_LWIP_TCP_SND_BUF_DEFAULT=4096 | ||||
|     -DCONFIG_LWIP_TCP_RCV_BUF_DEFAULT=4096 | ||||
|     -DCONFIG_LWIP_MAX_ACTIVE_TCP=16 | ||||
|      | ||||
| extra_scripts =  | ||||
|     scripts/extra_script.py | ||||
|   | ||||
| @@ -122,7 +122,29 @@ def update_changelog(): | ||||
|                 f.write(updated_content) | ||||
|             push_changes(version) | ||||
|         else: | ||||
|             print(f"Version {version} already exists in changelog") | ||||
|             # Version already exists, update the existing entries | ||||
|             version_pattern = f"## \\[{version}\\] - \\d{{4}}-\\d{{2}}-\\d{{2}}" | ||||
|             next_version_pattern = "## \\[.*?\\] - \\d{4}-\\d{2}-\\d{2}" | ||||
|              | ||||
|             # Find the start of the current version | ||||
|             version_match = re.search(version_pattern, content) | ||||
|             if version_match: | ||||
|                 version_start = version_match.start() | ||||
|                 # Look for the next version | ||||
|                 next_version_match = re.search(next_version_pattern, content[version_start + 1:]) | ||||
|                  | ||||
|                 if next_version_match: | ||||
|                     # Replace the content between the current and the next version | ||||
|                     next_version_pos = version_start + 1 + next_version_match.start() | ||||
|                     updated_content = content[:version_start] + changelog_entry + content[next_version_pos:] | ||||
|                 else: | ||||
|                     # If no next version exists, replace through to the end | ||||
|                     updated_content = content[:version_start] + changelog_entry + "\n" | ||||
|                  | ||||
|                 with open(changelog_path, 'w') as f: | ||||
|                     f.write(updated_content) | ||||
|                 push_changes(version) | ||||
|                 print(f"Updated entries for version {version}") | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     update_changelog() | ||||
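
Both the awk one-liner in the release workflows and the updater script above slice CHANGELOG.md on its `## [version]` headings. An equivalent extraction, sketched with Python's `re` module under the same heading convention:

```python
import re

def changelog_section(text: str, version: str) -> str:
    """Return the block starting at '## [<version>]' and ending just
    before the next '## [' heading (or at the end of the file)."""
    pattern = re.compile(
        r"^## \[" + re.escape(version) + r"\].*?(?=^## \[|\Z)",
        re.DOTALL | re.MULTILINE,
    )
    match = pattern.search(text)
    return match.group(0).rstrip() if match else ""

if __name__ == "__main__":
    with open("CHANGELOG.md") as fh:
        print(changelog_section(fh.read(), "1.3.26"))
```
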
							
								
								
									
src/api.cpp | 22

							| @@ -60,10 +60,10 @@ JsonDocument fetchSpoolsForWebsite() { | ||||
|             JsonArray filteredSpools = filteredDoc.to<JsonArray>(); | ||||
|  | ||||
|             for (JsonObject spool : spools) { | ||||
|                 JsonObject filteredSpool = filteredSpools.createNestedObject(); | ||||
|                 JsonObject filteredSpool = filteredSpools.add<JsonObject>(); | ||||
|                 filteredSpool["extra"]["nfc_id"] = spool["extra"]["nfc_id"]; | ||||
|  | ||||
|                 JsonObject filament = filteredSpool.createNestedObject("filament"); | ||||
|                 JsonObject filament = filteredSpool["filament"].to<JsonObject>(); | ||||
|                 filament["sm_id"] = spool["id"]; | ||||
|                 filament["id"] = spool["filament"]["id"]; | ||||
|                 filament["name"] = spool["filament"]["name"]; | ||||
| @@ -73,7 +73,7 @@ JsonDocument fetchSpoolsForWebsite() { | ||||
|                 filament["price_meter"] = spool["filament"]["extra"]["price_meter"]; | ||||
|                 filament["price_gramm"] = spool["filament"]["extra"]["price_gramm"]; | ||||
|  | ||||
|                 JsonObject vendor = filament.createNestedObject("vendor"); | ||||
|                 JsonObject vendor = filament["vendor"].to<JsonObject>(); | ||||
|                 vendor["id"] = spool["filament"]["vendor"]["id"]; | ||||
|                 vendor["name"] = spool["filament"]["vendor"]["name"]; | ||||
|             } | ||||
| @@ -110,13 +110,13 @@ JsonDocument fetchAllSpoolsInfo() { | ||||
|             JsonArray filteredSpools = filteredDoc.to<JsonArray>(); | ||||
|  | ||||
|             for (JsonObject spool : spools) { | ||||
|                 JsonObject filteredSpool = filteredSpools.createNestedObject(); | ||||
|                 JsonObject filteredSpool = filteredSpools.add<JsonObject>(); | ||||
|                 filteredSpool["price"] = spool["price"]; | ||||
|                 filteredSpool["remaining_weight"] = spool["remaining_weight"]; | ||||
|                 filteredSpool["used_weight"] = spool["used_weight"]; | ||||
|                 filteredSpool["extra"]["nfc_id"] = spool["extra"]["nfc_id"]; | ||||
|  | ||||
|                 JsonObject filament = filteredSpool.createNestedObject("filament"); | ||||
|                 JsonObject filament = filteredSpool["filament"].to<JsonObject>(); | ||||
|                 filament["id"] = spool["filament"]["id"]; | ||||
|                 filament["name"] = spool["filament"]["name"]; | ||||
|                 filament["material"] = spool["filament"]["material"]; | ||||
| @@ -125,11 +125,11 @@ JsonDocument fetchAllSpoolsInfo() { | ||||
|                 filament["spool_weight"] = spool["filament"]["spool_weight"]; | ||||
|                 filament["color_hex"] = spool["filament"]["color_hex"]; | ||||
|  | ||||
|                 JsonObject vendor = filament.createNestedObject("vendor"); | ||||
|                 JsonObject vendor = filament["vendor"].to<JsonObject>(); | ||||
|                 vendor["id"] = spool["filament"]["vendor"]["id"]; | ||||
|                 vendor["name"] = spool["filament"]["vendor"]["name"]; | ||||
|  | ||||
|                 JsonObject extra = filament.createNestedObject("extra"); | ||||
|                 JsonObject extra = filament["extra"].to<JsonObject>(); | ||||
|                 extra["nozzle_temperature"] = spool["filament"]["extra"]["nozzle_temperature"]; | ||||
|                 extra["price_gramm"] = spool["filament"]["extra"]["price_gramm"]; | ||||
|                 extra["price_meter"] = spool["filament"]["extra"]["price_meter"]; | ||||
| @@ -186,7 +186,7 @@ bool updateSpoolTagId(String uidString, const char* payload) { | ||||
|     } | ||||
|      | ||||
|     // Check whether the required fields are present | ||||
|     if (!doc.containsKey("sm_id") || doc["sm_id"] == "") { | ||||
|     if (!doc["sm_id"].is<String>() || doc["sm_id"].as<String>() == "") { | ||||
|         Serial.println("Keine Spoolman-ID gefunden."); | ||||
|         return false; | ||||
|     } | ||||
| @@ -368,7 +368,7 @@ bool checkSpoolmanExtraFields() { | ||||
|                 for (uint8_t s = 0; s < extraLength; s++) { | ||||
|                     bool found = false; | ||||
|                     for (JsonObject field : doc.as<JsonArray>()) { | ||||
|                         if (field.containsKey("key") && field["key"] == extraFields[s]) { | ||||
|                         if (field["key"].is<String>() && field["key"] == extraFields[s]) { | ||||
|                             Serial.println("Feld gefunden: " + extraFields[s]); | ||||
|                             found = true; | ||||
|                             break; | ||||
| @@ -430,7 +430,7 @@ bool checkSpoolmanInstance(const String& url) { | ||||
|             String payload = http.getString(); | ||||
|             JsonDocument doc; | ||||
|             DeserializationError error = deserializeJson(doc, payload); | ||||
|             if (!error && doc.containsKey("status")) { | ||||
|             if (!error && doc["status"].is<String>()) { | ||||
|                 const char* status = doc["status"]; | ||||
|                 http.end(); | ||||
|  | ||||
| @@ -469,7 +469,7 @@ bool saveSpoolmanUrl(const String& url) { | ||||
|  | ||||
| String loadSpoolmanUrl() { | ||||
|     JsonDocument doc; | ||||
|     if (loadJsonValue("/spoolman_url.json", doc) && doc.containsKey("url")) { | ||||
|     if (loadJsonValue("/spoolman_url.json", doc) && doc["url"].is<String>()) { | ||||
|         return doc["url"].as<String>(); | ||||
|     } | ||||
|     Serial.println("Keine gültige Spoolman-URL gefunden."); | ||||
|   | ||||
| @@ -58,7 +58,7 @@ bool saveBambuCredentials(const String& ip, const String& serialnr, const String | ||||
|  | ||||
| bool loadBambuCredentials() { | ||||
|     JsonDocument doc; | ||||
|     if (loadJsonValue("/bambu_credentials.json", doc) && doc.containsKey("bambu_ip")) { | ||||
|     if (loadJsonValue("/bambu_credentials.json", doc) && doc["bambu_ip"].is<String>()) { | ||||
|         // Temporary strings for the values | ||||
|         String ip = doc["bambu_ip"].as<String>(); | ||||
|         String code = doc["bambu_accesscode"].as<String>(); | ||||
| @@ -270,9 +270,9 @@ void mqtt_callback(char* topic, byte* payload, unsigned int length) { | ||||
|     } | ||||
|  | ||||
|     // Check whether "print->upgrade_state" and "print.ams.ams" exist | ||||
|     if (doc["print"].containsKey("upgrade_state")) { | ||||
|     if (doc["print"]["upgrade_state"].is<String>()) { | ||||
|         // Check whether AMS data is present | ||||
|         if (!doc["print"].containsKey("ams") || !doc["print"]["ams"].containsKey("ams")) { | ||||
|         if (!doc["print"]["ams"].is<String>() || !doc["print"]["ams"]["ams"].is<String>()) { | ||||
|             return; | ||||
|         } | ||||
|  | ||||
| @@ -315,7 +315,7 @@ void mqtt_callback(char* topic, byte* payload, unsigned int length) { | ||||
|         } | ||||
|  | ||||
|         // Check the external spool | ||||
|         if (!hasChanges && doc["print"].containsKey("vt_tray")) { | ||||
|         if (!hasChanges && doc["print"]["vt_tray"].is<String>()) { | ||||
|             JsonObject vtTray = doc["print"]["vt_tray"]; | ||||
|             bool foundExternal = false; | ||||
|              | ||||
| @@ -363,7 +363,7 @@ void mqtt_callback(char* topic, byte* payload, unsigned int length) { | ||||
|         ams_count = amsArray.size(); | ||||
|  | ||||
|         // If an external spool is present, add it | ||||
|         if (doc["print"].containsKey("vt_tray")) { | ||||
|         if (doc["print"]["vt_tray"].is<String>()) { | ||||
|             int extIdx = ams_count;  // Index for the external spool | ||||
|             ams_data[extIdx].ams_id = 255;  // Special ID for the external spool | ||||
|             ams_data[extIdx].ams_id = 255;  // Spezielle ID für externe Spule | ||||
| @@ -387,14 +387,14 @@ void mqtt_callback(char* topic, byte* payload, unsigned int length) { | ||||
|         JsonArray wsArray = wsDoc.to<JsonArray>(); | ||||
|  | ||||
|         for (int i = 0; i < ams_count; i++) { | ||||
|             JsonObject amsObj = wsArray.createNestedObject(); | ||||
|             JsonObject amsObj = wsArray.add<JsonObject>(); | ||||
|             amsObj["ams_id"] = ams_data[i].ams_id; | ||||
|  | ||||
|             JsonArray trays = amsObj.createNestedArray("tray"); | ||||
|             JsonArray trays = amsObj["tray"].to<JsonArray>(); | ||||
|             int maxTrays = (ams_data[i].ams_id == 255) ? 1 : 4; | ||||
|              | ||||
|             for (int j = 0; j < maxTrays; j++) { | ||||
|                 JsonObject trayObj = trays.createNestedObject(); | ||||
|                 JsonObject trayObj = trays.add<JsonObject>(); | ||||
|                 trayObj["id"] = ams_data[i].trays[j].id; | ||||
|                 trayObj["tray_info_idx"] = ams_data[i].trays[j].tray_info_idx; | ||||
|                 trayObj["tray_type"] = ams_data[i].trays[j].tray_type; | ||||
| @@ -427,14 +427,14 @@ void mqtt_callback(char* topic, byte* payload, unsigned int length) { | ||||
|                 JsonArray wsArray = wsDoc.to<JsonArray>(); | ||||
|  | ||||
|                 for (int j = 0; j < ams_count; j++) { | ||||
|                     JsonObject amsObj = wsArray.createNestedObject(); | ||||
|                     JsonObject amsObj = wsArray.add<JsonObject>(); | ||||
|                     amsObj["ams_id"] = ams_data[j].ams_id; | ||||
|  | ||||
|                     JsonArray trays = amsObj.createNestedArray("tray"); | ||||
|                     JsonArray trays = amsObj["tray"].to<JsonArray>(); | ||||
|                     int maxTrays = (ams_data[j].ams_id == 255) ? 1 : 4; | ||||
|                      | ||||
|                     for (int k = 0; k < maxTrays; k++) { | ||||
|                         JsonObject trayObj = trays.createNestedObject(); | ||||
|                         JsonObject trayObj = trays.add<JsonObject>(); | ||||
|                         trayObj["id"] = ams_data[j].trays[k].id; | ||||
|                         trayObj["tray_info_idx"] = ams_data[j].trays[k].tray_info_idx; | ||||
|                         trayObj["tray_type"] = ams_data[j].trays[k].tray_type; | ||||
|   | ||||
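
The src/api.cpp and src/bambu.cpp changes above migrate from the deprecated `createNestedObject()`/`createNestedArray()` calls to ArduinoJson 7's `add<JsonObject>()` and `["key"].to<JsonObject>()`/`to<JsonArray>()` without changing the payload itself. For reference, the shape of one entry built by `fetchSpoolsForWebsite()`, sketched as a Python dict; only fields visible in the diff are listed and every value is an invented example:

```python
# Illustrative shape of one element in the array sent to the web UI.
# Field names come from the diff above; the values are made up.
filtered_spool = {
    "extra": {"nfc_id": "04A2241B6E6180"},       # hypothetical NFC tag UID
    "filament": {
        "sm_id": 12,                             # Spoolman spool id (spool["id"])
        "id": 7,                                 # filament id
        "name": "Example PLA",
        "price_meter": 0.05,
        "price_gramm": 0.02,
        "vendor": {"id": 3, "name": "Example Vendor"},
    },
}
```
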
							
								
								
									
src/ota.cpp | 240

							| @@ -1,240 +0,0 @@ | ||||
| #include <Arduino.h> | ||||
| #include "ota.h" | ||||
| #include <Update.h> | ||||
| #include <SPIFFS.h> | ||||
| #include "commonFS.h" | ||||
| #include "bambu.h" | ||||
| #include "scale.h" | ||||
| #include "nfc.h" | ||||
|  | ||||
| #define UPLOAD_TIMEOUT_MS 60000  // 60-second timeout for the entire upload | ||||
| #define CHUNK_RESPONSE_TIMEOUT_MS 10000  // 10-second timeout per chunk | ||||
| #define MAX_FAILED_CHUNKS 3  // Maximum number of failed chunks before aborting | ||||
| #define MAX_FILE_SIZE 4194304    // 4MB Limit | ||||
|  | ||||
| static bool tasksAreStopped = false; | ||||
| static uint32_t lastChunkTime = 0; | ||||
| static size_t failedChunks = 0; | ||||
| static size_t expectedOffset = 0; | ||||
| static size_t totalSize = 0; | ||||
|  | ||||
| void stopAllTasks() { | ||||
|     Serial.println("Stopping RFID Reader"); | ||||
|     if (RfidReaderTask) vTaskSuspend(RfidReaderTask); | ||||
|     Serial.println("Stopping Bambu"); | ||||
|     if (BambuMqttTask) vTaskSuspend(BambuMqttTask); | ||||
|     Serial.println("Stopping Scale"); | ||||
|     if (ScaleTask) vTaskSuspend(ScaleTask); | ||||
|     vTaskDelay(100 / portTICK_PERIOD_MS); | ||||
|     Serial.println("All tasks stopped"); | ||||
| } | ||||
|  | ||||
| void performStageTwo() { | ||||
|     if (!SPIFFS.begin(true)) { | ||||
|         Serial.println("Error: Could not mount SPIFFS for stage 2"); | ||||
|         return; | ||||
|     } | ||||
|  | ||||
|     File firmwareFile = SPIFFS.open("/firmware.bin", "r"); | ||||
|     if (!firmwareFile) { | ||||
|         Serial.println("Error: Could not open firmware.bin from SPIFFS"); | ||||
|         return; | ||||
|     } | ||||
|  | ||||
|     size_t firmwareSize = firmwareFile.size(); | ||||
|     size_t maxAppSpace = (ESP.getFreeSketchSpace() - 0x1000) & 0xFFFFF000; | ||||
|  | ||||
|     Serial.printf("Stage 2 - Firmware size: %u bytes\n", firmwareSize); | ||||
|     Serial.printf("Available space: %u bytes\n", maxAppSpace); | ||||
|  | ||||
|     if (firmwareSize > maxAppSpace) { | ||||
|         Serial.printf("Error: Not enough space for firmware. Need %u bytes but only have %u bytes\n",  | ||||
|                     firmwareSize, maxAppSpace); | ||||
|         return; | ||||
|     } | ||||
|  | ||||
|     if (!Update.begin(firmwareSize)) { | ||||
|         Update.printError(Serial); | ||||
|         return; | ||||
|     } | ||||
|  | ||||
|     size_t written = Update.writeStream(firmwareFile); | ||||
|     if (written != firmwareSize) { | ||||
|         Update.printError(Serial); | ||||
|         return; | ||||
|     } | ||||
|  | ||||
|     if (!Update.end(true)) { | ||||
|         Update.printError(Serial); | ||||
|         return; | ||||
|     } | ||||
|  | ||||
|     firmwareFile.close(); | ||||
|     SPIFFS.remove("/firmware.bin"); // Cleanup | ||||
|     Serial.println("Stage 2 update successful, restarting..."); | ||||
|     delay(500); | ||||
|     ESP.restart(); | ||||
| } | ||||
|  | ||||
| void checkForStagedUpdate() { | ||||
|     if (!SPIFFS.begin(true)) { | ||||
|         return; | ||||
|     } | ||||
|  | ||||
|     if (SPIFFS.exists("/firmware.bin")) { | ||||
|         Serial.println("Found staged firmware update, initiating stage 2..."); | ||||
|         performStageTwo(); | ||||
|     } | ||||
| } | ||||
|  | ||||
| void handleOTAUpload(AsyncWebServerRequest *request, String filename, size_t index, uint8_t *data, size_t len, bool final) { | ||||
|     static File stagingFile; | ||||
|     static uint32_t uploadStartTime = 0; | ||||
|      | ||||
|     if (!index) { | ||||
|         // Check the total size from the header | ||||
|         if (request->hasHeader("X-Total-Size")) { | ||||
|             totalSize = request->header("X-Total-Size").toInt(); | ||||
|             if (totalSize > MAX_FILE_SIZE) { | ||||
|                 request->send(413, "application/json",  | ||||
|                             "{\"status\":\"error\",\"message\":\"File too large\"}"); | ||||
|                 return; | ||||
|             } | ||||
|         } | ||||
|  | ||||
|         uploadStartTime = millis(); | ||||
|         lastChunkTime = millis(); | ||||
|         expectedOffset = 0; | ||||
|         failedChunks = 0; | ||||
|          | ||||
|         bool isSpiffsUpdate = filename.endsWith("_spiffs.bin"); | ||||
|         Serial.printf("Update Start: %s (type: %s)\n", filename.c_str(), isSpiffsUpdate ? "SPIFFS" : "OTA"); | ||||
|         Serial.printf("Total size: %u bytes\n", totalSize); | ||||
|          | ||||
|         // Check headers for chunk information | ||||
|         if (request->hasHeader("X-Chunk-Offset")) { | ||||
|             String offsetStr = request->header("X-Chunk-Offset"); | ||||
|             expectedOffset = offsetStr.toInt(); | ||||
|         } | ||||
|  | ||||
|         if (request->contentLength() == 0) { | ||||
|             request->send(400, "application/json", "{\"status\":\"error\",\"message\":\"Invalid file size\"}"); | ||||
|             return; | ||||
|         } | ||||
|  | ||||
|         if (!tasksAreStopped) { | ||||
|             stopAllTasks(); | ||||
|             tasksAreStopped = true; | ||||
|         } | ||||
|  | ||||
|         if (isSpiffsUpdate) { | ||||
|             if (!SPIFFS.begin(true)) { | ||||
|                 request->send(400, "application/json",  | ||||
|                             "{\"status\":\"error\",\"message\":\"Could not mount SPIFFS\"}"); | ||||
|                 return; | ||||
|             } | ||||
|              | ||||
|             if (!Update.begin(totalSize > 0 ? totalSize : request->contentLength(), U_SPIFFS)) { | ||||
|                 Update.printError(Serial); | ||||
|                 request->send(400, "application/json",  | ||||
|                             "{\"status\":\"error\",\"message\":\"SPIFFS update initialization failed\"}"); | ||||
|                 return; | ||||
|             } | ||||
|         } else { | ||||
|             stagingFile = SPIFFS.open("/firmware.bin", "w"); | ||||
|             if (!stagingFile) { | ||||
|                 request->send(400, "application/json",  | ||||
|                             "{\"status\":\"error\",\"message\":\"Could not create staging file\"}"); | ||||
|                 return; | ||||
|             } | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     // Chunk validation | ||||
|     if (request->hasHeader("X-Chunk-Offset")) { | ||||
|         size_t chunkOffset = request->header("X-Chunk-Offset").toInt(); | ||||
|         if (chunkOffset != expectedOffset) { | ||||
|             failedChunks++; | ||||
|             if (failedChunks >= MAX_FAILED_CHUNKS) { | ||||
|                 if (stagingFile) { | ||||
|                     stagingFile.close(); | ||||
|                     SPIFFS.remove("/firmware.bin"); | ||||
|                 } | ||||
|                 Update.abort(); | ||||
|                 request->send(400, "application/json",  | ||||
|                             "{\"status\":\"error\",\"message\":\"Too many failed chunks\"}"); | ||||
|                 return; | ||||
|             } | ||||
|             request->send(400, "application/json",  | ||||
|                         "{\"status\":\"error\",\"message\":\"Invalid chunk offset\"}"); | ||||
|             return; | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     // Timeout checks | ||||
|     uint32_t currentTime = millis(); | ||||
|     if (currentTime - uploadStartTime > UPLOAD_TIMEOUT_MS) { | ||||
|         if (stagingFile) { | ||||
|             stagingFile.close(); | ||||
|             SPIFFS.remove("/firmware.bin"); | ||||
|         } | ||||
|         Update.abort(); | ||||
|         request->send(408, "application/json", "{\"status\":\"error\",\"message\":\"Upload timeout\"}"); | ||||
|         return; | ||||
|     } | ||||
|  | ||||
|     if (currentTime - lastChunkTime > CHUNK_RESPONSE_TIMEOUT_MS) { | ||||
|         if (stagingFile) { | ||||
|             stagingFile.close(); | ||||
|             SPIFFS.remove("/firmware.bin"); | ||||
|         } | ||||
|         Update.abort(); | ||||
|         request->send(408, "application/json", "{\"status\":\"error\",\"message\":\"Chunk timeout\"}"); | ||||
|         return; | ||||
|     } | ||||
|     lastChunkTime = currentTime; | ||||
|  | ||||
|     if (stagingFile) { | ||||
|         size_t written = stagingFile.write(data, len); | ||||
|         if (written != len) { | ||||
|             stagingFile.close(); | ||||
|             SPIFFS.remove("/firmware.bin"); | ||||
|             request->send(400, "application/json",  | ||||
|                         "{\"status\":\"error\",\"message\":\"Write to SPIFFS failed\"}"); | ||||
|             return; | ||||
|         } | ||||
|     } else { | ||||
|         if (Update.write(data, len) != len) { | ||||
|             Update.printError(Serial); | ||||
|             request->send(400, "application/json",  | ||||
|                         "{\"status\":\"error\",\"message\":\"Write failed\"}"); | ||||
|             return; | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     expectedOffset += len; | ||||
|  | ||||
|     if (final) { | ||||
|         if (stagingFile) { | ||||
|             stagingFile.close(); | ||||
|             Serial.println("Stage 1 complete - firmware staged in SPIFFS"); | ||||
|             request->send(200, "application/json",  | ||||
|                         "{\"status\":\"success\",\"message\":\"Update staged successfully! Starting stage 2...\"}"); | ||||
|             delay(100); | ||||
|             performStageTwo(); | ||||
|         } else { | ||||
|             if (!Update.end(true)) { | ||||
|                 Update.printError(Serial); | ||||
|                 request->send(400, "application/json",  | ||||
|                             "{\"status\":\"error\",\"message\":\"Update failed\"}"); | ||||
|                 return; | ||||
|             } | ||||
|             Serial.println("SPIFFS update successful, restarting..."); | ||||
|             request->send(200, "application/json",  | ||||
|                         "{\"status\":\"success\",\"message\":\"SPIFFS update successful! Device will restart...\",\"restart\":true}"); | ||||
|             delay(500); | ||||
|             ESP.restart(); | ||||
|         } | ||||
|     } | ||||
| } | ||||
|  | ||||
							
								
								
									
15  src/ota.h
							| @@ -1,15 +0,0 @@ | ||||
| #ifndef OTA_H | ||||
| #define OTA_H | ||||
|  | ||||
| #include <ESPAsyncWebServer.h> | ||||
|  | ||||
| // Update size unknown constant, in case it is not already defined | ||||
| #ifndef UPDATE_SIZE_UNKNOWN | ||||
| #define UPDATE_SIZE_UNKNOWN 0xFFFFFFFF | ||||
| #endif | ||||
|  | ||||
| void stopAllTasks(); | ||||
| void handleOTAUpload(AsyncWebServerRequest *request, String filename, size_t index, uint8_t *data, size_t len, bool final); | ||||
| void checkForStagedUpdate(); | ||||
|  | ||||
| #endif | ||||
| @@ -3,9 +3,9 @@ | ||||
| #include <ArduinoJson.h> | ||||
| #include "config.h" | ||||
| #include "HX711.h" | ||||
| #include <EEPROM.h> | ||||
| #include "display.h" | ||||
| #include "esp_task_wdt.h" | ||||
| #include <Preferences.h> | ||||
|  | ||||
| HX711 scale; | ||||
|  | ||||
| @@ -17,6 +17,10 @@ uint8_t weigthCouterToApi = 0; | ||||
| uint8_t scale_tare_counter = 0; | ||||
| uint8_t pauseMainTask = 0; | ||||
|  | ||||
| Preferences preferences; | ||||
| const char* NVS_NAMESPACE = "scale"; | ||||
| const char* NVS_KEY_CALIBRATION = "cal_value"; | ||||
|  | ||||
| // ##### Scale functions ##### | ||||
| uint8_t tareScale() { | ||||
|   Serial.println("Tare scale"); | ||||
| @@ -48,13 +52,12 @@ void scale_loop(void * parameter) { | ||||
|  | ||||
| void start_scale() { | ||||
|   Serial.println("Prüfe Calibration Value"); | ||||
|   long calibrationValue; // calibration value (see example file "Calibration.ino") | ||||
|   //calibrationValue = 696.0; // uncomment this if you want to set the calibration value in the sketch | ||||
|   long calibrationValue; | ||||
|  | ||||
|   EEPROM.begin(512); | ||||
|   EEPROM.get(calVal_eepromAdress, calibrationValue); // uncomment this if you want to fetch the calibration value from eeprom | ||||
|  | ||||
|   //calibrationValue = EEPROM.read(calVal_eepromAdress); | ||||
|   // NVS | ||||
|   preferences.begin(NVS_NAMESPACE, true); // true = readonly | ||||
|   calibrationValue = preferences.getLong(NVS_KEY_CALIBRATION, defaultScaleCalibrationValue); | ||||
|   preferences.end(); | ||||
|  | ||||
|   Serial.print("Read Scale Calibration Value "); | ||||
|   Serial.println(calibrationValue); | ||||
| @@ -137,18 +140,19 @@ uint8_t calibrate_scale() { | ||||
|     { | ||||
|       Serial.print("New calibration value has been set to: "); | ||||
|       Serial.println(newCalibrationValue); | ||||
|       Serial.print("Save this value to EEPROM adress "); | ||||
|       Serial.println(calVal_eepromAdress); | ||||
|  | ||||
|       //EEPROM.put(calVal_eepromAdress, newCalibrationValue); | ||||
|       EEPROM.put(calVal_eepromAdress, newCalibrationValue); | ||||
|       EEPROM.commit(); | ||||
|       // Save with NVS | ||||
|       preferences.begin(NVS_NAMESPACE, false); // false = readwrite | ||||
|       preferences.putLong(NVS_KEY_CALIBRATION, newCalibrationValue); | ||||
|       preferences.end(); | ||||
|  | ||||
|       EEPROM.get(calVal_eepromAdress, newCalibrationValue); | ||||
|       //newCalibrationValue = EEPROM.read(calVal_eepromAdress); | ||||
|       // Verify the stored value | ||||
|       preferences.begin(NVS_NAMESPACE, true); | ||||
|       long verifyValue = preferences.getLong(NVS_KEY_CALIBRATION, 0); | ||||
|       preferences.end(); | ||||
|  | ||||
|       Serial.print("Read Value "); | ||||
|       Serial.println(newCalibrationValue); | ||||
|       Serial.print("Verified stored value: "); | ||||
|       Serial.println(verifyValue); | ||||
|  | ||||
|       Serial.println("End calibration, remove weight"); | ||||
|  | ||||
|   | ||||
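The scale changes above replace EEPROM storage of the calibration value with ESP32 NVS via the Preferences library ("scale" namespace, "cal_value" key). A minimal sketch of the same store/read pattern; the helpers saveCalibration()/loadCalibration() are illustrative names, not code from the repository:

    #include <Preferences.h>

    // Same namespace/key as in the hunk above.
    static const char* NVS_NAMESPACE = "scale";
    static const char* NVS_KEY_CALIBRATION = "cal_value";

    // Persist a new calibration value to NVS (read-write session).
    void saveCalibration(long value) {
        Preferences prefs;
        prefs.begin(NVS_NAMESPACE, false);   // false = read/write
        prefs.putLong(NVS_KEY_CALIBRATION, value);
        prefs.end();
    }

    // Read the calibration value back, falling back to a default if unset.
    long loadCalibration(long fallback) {
        Preferences prefs;
        prefs.begin(NVS_NAMESPACE, true);    // true = read-only
        long value = prefs.getLong(NVS_KEY_CALIBRATION, fallback);
        prefs.end();
        return value;
    }

A calibration routine would call saveCalibration(newCalibrationValue) after the HX711 calibration step and loadCalibration(defaultScaleCalibrationValue) at startup, mirroring start_scale() and calibrate_scale() above.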
| @@ -7,12 +7,14 @@ | ||||
| #include "nfc.h" | ||||
| #include "scale.h" | ||||
| #include "esp_task_wdt.h" | ||||
| #include "ota.h" | ||||
| #include <Update.h> | ||||
|  | ||||
| #ifndef VERSION | ||||
|   #define VERSION "1.1.0" | ||||
| #endif | ||||
|  | ||||
| // Define Cache-Control header | ||||
| #define CACHE_CONTROL "max-age=31536000" // cache for 1 year | ||||
| #define VERSION "1.0.0" | ||||
| #define CACHE_CONTROL "max-age=604800" // cache for 1 week | ||||
|  | ||||
| AsyncWebServer server(webserverPort); | ||||
| AsyncWebSocket ws("/ws"); | ||||
| @@ -46,7 +48,7 @@ void onWsEvent(AsyncWebSocket *server, AsyncWebSocketClient *client, AwsEventTyp | ||||
|         } | ||||
|  | ||||
|         else if (doc["type"] == "writeNfcTag") { | ||||
|             if (doc.containsKey("payload")) { | ||||
|             if (doc["payload"].is<String>()) { | ||||
|                 // Try to write the NFC data | ||||
|                 String payloadString; | ||||
|                 serializeJson(doc["payload"], payloadString); | ||||
| @@ -153,11 +155,15 @@ void sendNfcData(AsyncWebSocketClient *client) { | ||||
|  | ||||
| void sendAmsData(AsyncWebSocketClient *client) { | ||||
|     if (ams_count > 0) { | ||||
|         ws.textAll("{\"type\":\"amsData\", \"payload\":" + amsJsonData + "}"); | ||||
|         ws.textAll("{\"type\":\"amsData\",\"payload\":" + amsJsonData + "}"); | ||||
|     } | ||||
| } | ||||
|  | ||||
| void setupWebserver(AsyncWebServer &server) { | ||||
|     // WebSocket optimizations | ||||
|     ws.onEvent(onWsEvent); | ||||
|     ws.enable(true); | ||||
|  | ||||
|     // Configure the server for large uploads | ||||
|     server.onRequestBody([](AsyncWebServerRequest *request, uint8_t *data, size_t len, size_t index, size_t total){}); | ||||
|     server.onFileUpload([](AsyncWebServerRequest *request, const String& filename, size_t index, uint8_t *data, size_t len, bool final){}); | ||||
| @@ -228,7 +234,7 @@ void setupWebserver(AsyncWebServer &server) { | ||||
|         html.replace("{{spoolmanUrl}}", spoolmanUrl); | ||||
|  | ||||
|         JsonDocument doc; | ||||
|         if (loadJsonValue("/bambu_credentials.json", doc) && doc.containsKey("bambu_ip")) { | ||||
|         if (loadJsonValue("/bambu_credentials.json", doc) && doc["bambu_ip"].is<String>()) { | ||||
|             String bambuIp = doc["bambu_ip"].as<String>(); | ||||
|             String bambuSerial = doc["bambu_serialnr"].as<String>(); | ||||
|             String bambuCode = doc["bambu_accesscode"].as<String>(); | ||||
| @@ -414,7 +420,8 @@ void setupWebserver(AsyncWebServer &server) { | ||||
|     ); | ||||
|  | ||||
|     server.on("/api/version", HTTP_GET, [](AsyncWebServerRequest *request){ | ||||
|         String jsonResponse = "{\"version\": \"" VERSION "\"}"; | ||||
|         String fm_version = VERSION; | ||||
|         String jsonResponse = "{\"version\": \""+ fm_version +"\"}"; | ||||
|         request->send(200, "application/json", jsonResponse); | ||||
|     }); | ||||
|  | ||||
| @@ -436,26 +443,29 @@ void setupWebserver(AsyncWebServer &server) { | ||||
| } | ||||
|  | ||||
| void handleOTAUpload(AsyncWebServerRequest *request, const String& filename, size_t index, uint8_t *data, size_t len, bool final) { | ||||
|     static bool isSpiffsUpdate = false; | ||||
|     if (!index) { | ||||
|         // Start of a new upload | ||||
|         Serial.println("Update Start: " + filename); | ||||
|          | ||||
|         // Check the file type based on the filename | ||||
|         bool isFirmware = filename.startsWith("filaman_"); | ||||
|         bool isWebpage = filename.startsWith("webpage_"); | ||||
|         isSpiffsUpdate = filename.startsWith("webpage_"); | ||||
|          | ||||
|         if (!isFirmware && !isWebpage) { | ||||
|         if (!isFirmware && !isSpiffsUpdate) { | ||||
|             request->send(400, "application/json", "{\"message\":\"Invalid file type. File must start with 'filaman_' or 'webpage_'\"}"); | ||||
|             return; | ||||
|         } | ||||
|  | ||||
|         // Choose the update type based on the filename | ||||
|         if (isWebpage) { | ||||
|         if (isSpiffsUpdate) { | ||||
|             if (!Update.begin(SPIFFS.totalBytes(), U_SPIFFS)) { | ||||
|                 Update.printError(Serial); | ||||
|                 request->send(400, "application/json", "{\"message\":\"SPIFFS Update failed: " + String(Update.errorString()) + "\"}"); | ||||
|                 return; | ||||
|             } | ||||
|             // Backup JSON configs before SPIFFS update | ||||
|             backupJsonConfigs(); | ||||
|         } else { | ||||
|             if (!Update.begin(UPDATE_SIZE_UNKNOWN, U_FLASH)) { | ||||
|                 Update.printError(Serial); | ||||
| @@ -477,8 +487,34 @@ void handleOTAUpload(AsyncWebServerRequest *request, const String& filename, siz | ||||
|             request->send(400, "application/json", "{\"message\":\"Update failed: " + String(Update.errorString()) + "\"}"); | ||||
|             return; | ||||
|         } | ||||
|         if (isSpiffsUpdate) { | ||||
|             // Restore JSON configs after SPIFFS update | ||||
|             restoreJsonConfigs(); | ||||
|         } | ||||
|         request->send(200, "application/json", "{\"message\":\"Update successful!\", \"restart\": true}"); | ||||
|         delay(500); | ||||
|         ESP.restart(); | ||||
|     } | ||||
| } | ||||
|  | ||||
| void backupJsonConfigs() { | ||||
|     const char* configs[] = {"/bambu_credentials.json", "/spoolman_url.json"}; | ||||
|     for (const char* config : configs) { | ||||
|         if (SPIFFS.exists(config)) { | ||||
|             String backupPath = String(config) + ".bak"; | ||||
|             SPIFFS.remove(backupPath); | ||||
|             SPIFFS.rename(config, backupPath); | ||||
|         } | ||||
|     } | ||||
| } | ||||
|  | ||||
| void restoreJsonConfigs() { | ||||
|     const char* configs[] = {"/bambu_credentials.json", "/spoolman_url.json"}; | ||||
|     for (const char* config : configs) { | ||||
|         String backupPath = String(config) + ".bak"; | ||||
|         if (SPIFFS.exists(backupPath)) { | ||||
|             SPIFFS.remove(config); | ||||
|             SPIFFS.rename(backupPath, config); | ||||
|         } | ||||
|     } | ||||
| } | ||||
|   | ||||
| @@ -29,4 +29,8 @@ void sendNfcData(AsyncWebSocketClient *client); | ||||
| void foundNfcTag(AsyncWebSocketClient *client, uint8_t success); | ||||
| void sendWriteResult(AsyncWebSocketClient *client, uint8_t success); | ||||
|  | ||||
| // Upgrade functions | ||||
| void backupJsonConfigs(); | ||||
| void restoreJsonConfigs(); | ||||
|  | ||||
| #endif | ||||
|   | ||||
							
								
								
									
10  src/wlan.cpp
							| @@ -10,11 +10,19 @@ WiFiManager wm; | ||||
| bool wm_nonblocking = false; | ||||
|  | ||||
| void initWiFi() { | ||||
|     // Optimized WiFi settings | ||||
|     WiFi.mode(WIFI_STA); // explicitly set mode, esp defaults to STA+AP | ||||
|     WiFi.setSleep(false); // disable sleep mode | ||||
|     esp_wifi_set_ps(WIFI_PS_NONE); | ||||
|      | ||||
|     // Maximum transmit power | ||||
|     WiFi.setTxPower(WIFI_POWER_19_5dBm); // Set maximum transmit power | ||||
|    | ||||
|     //esp_wifi_set_max_tx_power(72); // set maximum transmit power to 20 dBm | ||||
|     // Optimize the TCP/IP stack | ||||
|     esp_wifi_set_protocol(WIFI_IF_STA, WIFI_PROTOCOL_11B | WIFI_PROTOCOL_11G | WIFI_PROTOCOL_11N); | ||||
|      | ||||
|     // Enable WiFi roaming for better stability | ||||
|     esp_wifi_set_rssi_threshold(-80); | ||||
|    | ||||
|     if(wm_nonblocking) wm.setConfigPortalBlocking(false); | ||||
|     wm.setConfigPortalTimeout(320); // close the portal after ~5 minutes | ||||
|   | ||||
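The wlan.cpp hunk above adds several station-mode tuning calls around WiFiManager. A consolidated sketch of those calls, assuming the Arduino-ESP32 WiFi and esp_wifi APIs; the wrapper initWiFiTuning() is an illustrative name only:

    #include <WiFi.h>
    #include <esp_wifi.h>

    // Station-mode tuning as applied in the hunk above, gathered in one place.
    void initWiFiTuning() {
        WiFi.mode(WIFI_STA);                  // station only; the ESP32 default is STA+AP
        WiFi.setSleep(false);                 // keep the radio awake for lower latency
        esp_wifi_set_ps(WIFI_PS_NONE);        // disable power save at the IDF level

        WiFi.setTxPower(WIFI_POWER_19_5dBm);  // maximum transmit power

        // Limit the station interface to 802.11 b/g/n.
        esp_wifi_set_protocol(WIFI_IF_STA,
                              WIFI_PROTOCOL_11B | WIFI_PROTOCOL_11G | WIFI_PROTOCOL_11N);

        // Raise an RSSI-low event below -80 dBm so a reconnect/roam can be triggered.
        esp_wifi_set_rssi_threshold(-80);
    }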