Compare commits: v1.3.83 ... 920e4cfb69 (2 commits)

| Author | SHA1 | Date |
|---|---|---|
|  | 920e4cfb69 |  |
|  | e646edd6f4 |  |
.github/workflows/gitea-release.yml (vendored) — 226 lines removed
@@ -1,226 +0,0 @@
```yaml
name: Gitea Release

on:
  workflow_call:
    secrets:
      GITEA_TOKEN:
        description: 'Token for Gitea API access'
        required: true
      FTP_PASSWORD:
        description: 'FTP Password for firmware upload'
        required: true
      FTP_USER:
        description: 'FTP User for firmware upload'
        required: true

jobs:
  create-release:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.x'

      - name: Install PlatformIO
        run: |
          python -m pip install --upgrade pip
          pip install --upgrade platformio esptool

      - name: Install xxd
        run: |
          sudo apt-get update
          sudo apt-get install xxd

      - name: Build Firmware
        run: |
          VERSION=$(grep '^version = ' platformio.ini | cut -d'"' -f2)

          # Build firmware and SPIFFS
          echo "Building firmware and SPIFFS..."
          pio run -e esp32dev
          pio run -t buildfs

          # Copy firmware binary
          cp .pio/build/esp32dev/firmware.bin .pio/build/esp32dev/upgrade_filaman_firmware_v${VERSION}.bin

          # Create SPIFFS binary - direct copy without header
          cp .pio/build/esp32dev/spiffs.bin .pio/build/esp32dev/upgrade_filaman_website_v${VERSION}.bin

          # Create full binary
          (cd .pio/build/esp32dev &&
          esptool.py --chip esp32 merge_bin \
            --fill-flash-size 4MB \
            --flash_mode dio \
            --flash_freq 40m \
            --flash_size 4MB \
            -o filaman_full_${VERSION}.bin \
            0x1000 bootloader.bin \
            0x8000 partitions.bin \
            0x10000 firmware.bin \
            0x3D0000 spiffs.bin)

          # Verify file sizes
          echo "File sizes:"
          (cd .pio/build/esp32dev && ls -lh *.bin)

      - name: Get version from platformio.ini
        id: get_version
        run: |
          VERSION=$(grep '^version = ' platformio.ini | cut -d'"' -f2)
          echo "VERSION=$VERSION" >> $GITHUB_OUTPUT

      - name: Generate Release Notes
        id: release_notes
        run: |
          # Get the latest tag
          LATEST_TAG=$(git for-each-ref --sort=-creatordate --format '%(refname:short)' refs/tags | sed -n '2p')

          if [ -n "$LATEST_TAG" ]; then
            echo "CHANGES<<EOF" >> $GITHUB_OUTPUT
            echo "Changes since ${LATEST_TAG}:" >> $GITHUB_OUTPUT
            echo "" >> $GITHUB_OUTPUT

            # Get all commits since last release with commit hash and author
            echo "### Added" >> $GITHUB_OUTPUT
            git log ${LATEST_TAG}..HEAD --pretty=format:"%h - %s (%an)" | grep -iE '^[a-f0-9]+ - (feat|add|new)' | sed 's/^[a-f0-9]* - feat: /- /' >> $GITHUB_OUTPUT || true
            echo "" >> $GITHUB_OUTPUT

            echo "### Fixed" >> $GITHUB_OUTPUT
            git log ${LATEST_TAG}..HEAD --pretty=format:"%h - %s (%an)" | grep -iE '^[a-f0-9]+ - fix' | sed 's/^[a-f0-9]* - fix: /- /' >> $GITHUB_OUTPUT || true
            echo "" >> $GITHUB_OUTPUT

            echo "### Changed" >> $GITHUB_OUTPUT
            git log ${LATEST_TAG}..HEAD --pretty=format:"%h - %s (%an)" | grep -ivE '^[a-f0-9]+ - (feat|fix|add|new)' | sed 's/^[a-f0-9]* - /- /' >> $GITHUB_OUTPUT || true
            echo "EOF" >> $GITHUB_OUTPUT
          else
            # First release
            echo "CHANGES<<EOF" >> $GITHUB_OUTPUT
            echo "Initial Release" >> $GITHUB_OUTPUT
            echo "" >> $GITHUB_OUTPUT

            # Add all commits for initial release
            echo "### Added" >> $GITHUB_OUTPUT
            git log --pretty=format:"%h - %s (%an)" | grep -iE '^[a-f0-9]+ - (feat|add|new)' | sed 's/^[a-f0-9]* - feat: /- /' >> $GITHUB_OUTPUT || true
            echo "" >> $GITHUB_OUTPUT

            echo "### Fixed" >> $GITHUB_OUTPUT
            git log --pretty=format:"%h - %s (%an)" | grep -iE '^[a-f0-9]+ - fix' | sed 's/^[a-f0-9]* - fix: /- /' >> $GITHUB_OUTPUT || true
            echo "" >> $GITHUB_OUTPUT

            echo "### Changed" >> $GITHUB_OUTPUT
            git log --pretty=format:"%h - %s (%an)" | grep -ivE '^[a-f0-9]+ - (feat|fix|add|new)' | sed 's/^[a-f0-9]* - /- /' >> $GITHUB_OUTPUT || true
            echo "EOF" >> $GITHUB_OUTPUT
          fi

      - name: Determine Gitea URL
        id: gitea_url
        run: |
          echo "Debug Environment:"
          echo "GITHUB_SERVER_URL=${GITHUB_SERVER_URL:-not set}"
          echo "GITEA_SERVER_URL=${GITEA_SERVER_URL:-not set}"
          echo "GITHUB_REPOSITORY=${GITHUB_REPOSITORY:-not set}"
          echo "GITEA_REPOSITORY=${GITEA_REPOSITORY:-not set}"
          echo "RUNNER_NAME=${RUNNER_NAME:-not set}"

          # Set API URL based on environment
          if [ -n "${GITEA_ACTIONS}" ] || [ -n "${GITEA_REPOSITORY}" ] || [[ "${RUNNER_NAME}" == *"gitea"* ]]; then
            GITEA_API_URL="${GITHUB_SERVER_URL}"
            GITEA_REPO=$(echo "${GITHUB_REPOSITORY}" | cut -d'/' -f2)
            GITEA_OWNER=$(echo "${GITHUB_REPOSITORY}" | cut -d'/' -f1)
          else
            echo "Error: This workflow is only for Gitea"
            exit 1
          fi

          echo "GITEA_API_URL=${GITEA_API_URL}" >> $GITHUB_OUTPUT
          echo "GITEA_REPO=${GITEA_REPO}" >> $GITHUB_OUTPUT
          echo "GITEA_OWNER=${GITEA_OWNER}" >> $GITHUB_OUTPUT

      - name: Create Gitea Release
        env:
          GITEA_TOKEN: ${{ secrets.GITEA_TOKEN }}
          GITEA_API_URL: ${{ steps.gitea_url.outputs.GITEA_API_URL }}
          GITEA_REPO: ${{ steps.gitea_url.outputs.GITEA_REPO }}
          GITEA_OWNER: ${{ steps.gitea_url.outputs.GITEA_OWNER }}
        run: |
          # Debug token (print only its length, for safety)
          echo "Debug: Token length: ${#GITEA_TOKEN}"
          if [ -z "$GITEA_TOKEN" ]; then
            echo "Error: GITEA_TOKEN is empty"
            exit 1
          fi

          VERSION=${{ steps.get_version.outputs.VERSION }}
          cd .pio/build/esp32dev

          # Debug output
          echo "Debug: API URL: ${GITEA_API_URL}"
          echo "Debug: Repository: ${GITEA_OWNER}/${GITEA_REPO}"

          # First create the release without any files
          echo "Debug: Creating release..."
          RELEASE_DATA="{\"tag_name\":\"v${VERSION}\",\"name\":\"v${VERSION}\",\"body\":\"${{ steps.release_notes.outputs.CHANGES }}\"}"

          RELEASE_RESPONSE=$(curl -s -w "\n%{http_code}" \
            -X POST \
            -H "Authorization: token ${GITEA_TOKEN}" \
            -H "Content-Type: application/json" \
            -d "${RELEASE_DATA}" \
            "${GITEA_API_URL}/api/v1/repos/${GITEA_OWNER}/${GITEA_REPO}/releases")

          RELEASE_STATUS=$(echo "$RELEASE_RESPONSE" | tail -n1)
          RELEASE_BODY=$(echo "$RELEASE_RESPONSE" | head -n -1)

          if [ "$RELEASE_STATUS" != "201" ]; then
            echo "Error: Failed to create release"
            echo "Response: $RELEASE_BODY"
            exit 1
          fi

          # Extract the release ID from the response
          RELEASE_ID=$(echo "$RELEASE_BODY" | grep -o '"id":[0-9]*' | cut -d':' -f2)

          # Upload the files one by one
          for file in upgrade_filaman_firmware_v${VERSION}.bin upgrade_filaman_website_v${VERSION}.bin filaman_full_${VERSION}.bin; do
            if [ -f "$file" ]; then
              echo "Debug: Uploading $file..."
              UPLOAD_RESPONSE=$(curl -s -w "\n%{http_code}" \
                -X POST \
                -H "Authorization: token ${GITEA_TOKEN}" \
                -H "Content-Type: application/octet-stream" \
                --data-binary @"$file" \
                "${GITEA_API_URL}/api/v1/repos/${GITEA_OWNER}/${GITEA_REPO}/releases/${RELEASE_ID}/assets?name=${file}")

              UPLOAD_STATUS=$(echo "$UPLOAD_RESPONSE" | tail -n1)
              if [ "$UPLOAD_STATUS" != "201" ]; then
                echo "Warning: Failed to upload $file"
                echo "Response: $(echo "$UPLOAD_RESPONSE" | head -n -1)"
              else
                echo "Successfully uploaded $file"
              fi
            fi
          done

      - name: Install lftp
        run: sudo apt-get install -y lftp

      - name: Upload Firmware via FTP
        if: success()
        env:
          FTP_PASSWORD: ${{ secrets.FTP_PASSWORD }}
          FTP_USER: ${{ secrets.FTP_USER }}
          VERSION: ${{ steps.get_version.outputs.VERSION }}
        run: |
          cd .pio/build/esp32dev
          lftp -c 'set ssl:verify-certificate no; \
            set ftp:ssl-protect-data true; \
            set ftp:ssl-force true; \
            set ssl:check-hostname false; \
            set ftp:ssl-auth TLS; \
            open -u '$FTP_USER','$FTP_PASSWORD' ftp://filaman.app:21; \
            put -O / filaman_full_'$VERSION'.bin -o filaman_full.bin'
```
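The `merge_bin` invocation above stitches the bootloader, partition table, application, and SPIFFS image into a single `filaman_full_*.bin`. As a usage sketch (not part of this diff), such a merged image can typically be written to a blank ESP32 in one command; the serial port and the version suffix below are placeholder assumptions:

```bash
# Hypothetical example: flash the merged image produced by the workflow above.
# /dev/ttyUSB0 and the version suffix are assumptions — adjust for your setup.
esptool.py --chip esp32 --port /dev/ttyUSB0 --baud 460800 \
  write_flash --flash_mode dio --flash_freq 40m --flash_size 4MB \
  0x0 filaman_full_1.3.83.bin
```

Because `merge_bin` was called with `--fill-flash-size 4MB`, the output is padded to a full 4 MB image starting at offset 0x0, which is why a single `write_flash` at 0x0 is enough.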
.github/workflows/github-release.yml (vendored) — 152 lines removed
@@ -1,152 +0,0 @@
```yaml
name: GitHub Release

on:
  workflow_call:
    secrets:
      RELEASE_TOKEN:
        description: 'GitHub token for release creation'
        required: true

permissions:
  contents: write

jobs:
  create-release:
    runs-on: ubuntu-latest
    permissions:
      contents: write
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.x'

      - name: Install PlatformIO
        run: |
          python -m pip install --upgrade pip
          pip install --upgrade platformio esptool

      - name: Install xxd
        run: |
          sudo apt-get update
          sudo apt-get install xxd

      - name: Build Firmware
        run: |
          VERSION=$(grep '^version = ' platformio.ini | cut -d'"' -f2)

          # Always build firmware and SPIFFS
          echo "Building firmware and SPIFFS..."
          pio run -e esp32dev
          pio run -t buildfs

          # Copy firmware binary
          cp .pio/build/esp32dev/firmware.bin .pio/build/esp32dev/upgrade_filaman_firmware_v${VERSION}.bin

          # Create SPIFFS binary - direct copy without header
          cp .pio/build/esp32dev/spiffs.bin .pio/build/esp32dev/upgrade_filaman_website_v${VERSION}.bin

          # Create full binary (always)
          (cd .pio/build/esp32dev &&
          esptool.py --chip esp32 merge_bin \
            --fill-flash-size 4MB \
            --flash_mode dio \
            --flash_freq 40m \
            --flash_size 4MB \
            -o filaman_full_${VERSION}.bin \
            0x1000 bootloader.bin \
            0x8000 partitions.bin \
            0x10000 firmware.bin \
            0x3D0000 spiffs.bin)

          # Verify file sizes
          echo "File sizes:"
          (cd .pio/build/esp32dev && ls -lh *.bin)

      - name: Get version from platformio.ini
        id: get_version
        run: |
          VERSION=$(grep '^version = ' platformio.ini | cut -d'"' -f2)
          echo "VERSION=$VERSION" >> $GITHUB_OUTPUT

      - name: Generate Release Notes
        id: release_notes
        run: |
          # Get the latest tag
          LATEST_TAG=$(git for-each-ref --sort=-creatordate --format '%(refname:short)' refs/tags | sed -n '2p')

          if [ -n "$LATEST_TAG" ]; then
            echo "CHANGES<<EOF" >> $GITHUB_OUTPUT
            echo "Changes since ${LATEST_TAG}:" >> $GITHUB_OUTPUT
            echo "" >> $GITHUB_OUTPUT

            # Get all commits since last release with commit hash and author
            echo "### Added" >> $GITHUB_OUTPUT
            git log ${LATEST_TAG}..HEAD --pretty=format:"%h - %s (%an)" | grep -iE '^[a-f0-9]+ - (feat|add|new)' | sed 's/^[a-f0-9]* - feat: /- /' >> $GITHUB_OUTPUT || true
            echo "" >> $GITHUB_OUTPUT

            echo "### Fixed" >> $GITHUB_OUTPUT
            git log ${LATEST_TAG}..HEAD --pretty=format:"%h - %s (%an)" | grep -iE '^[a-f0-9]+ - fix' | sed 's/^[a-f0-9]* - fix: /- /' >> $GITHUB_OUTPUT || true
            echo "" >> $GITHUB_OUTPUT

            echo "### Changed" >> $GITHUB_OUTPUT
            git log ${LATEST_TAG}..HEAD --pretty=format:"%h - %s (%an)" | grep -ivE '^[a-f0-9]+ - (feat|fix|add|new)' | sed 's/^[a-f0-9]* - /- /' >> $GITHUB_OUTPUT || true
            echo "EOF" >> $GITHUB_OUTPUT
          else
            # First release
            echo "CHANGES<<EOF" >> $GITHUB_OUTPUT
            echo "Initial Release" >> $GITHUB_OUTPUT
            echo "" >> $GITHUB_OUTPUT

            # Add all commits for initial release
            echo "### Added" >> $GITHUB_OUTPUT
            git log --pretty=format:"%h - %s (%an)" | grep -iE '^[a-f0-9]+ - (feat|add|new)' | sed 's/^[a-f0-9]* - feat: /- /' >> $GITHUB_OUTPUT || true
            echo "" >> $GITHUB_OUTPUT

            echo "### Fixed" >> $GITHUB_OUTPUT
            git log --pretty=format:"%h - %s (%an)" | grep -iE '^[a-f0-9]+ - fix' | sed 's/^[a-f0-9]* - fix: /- /' >> $GITHUB_OUTPUT || true
            echo "" >> $GITHUB_OUTPUT

            echo "### Changed" >> $GITHUB_OUTPUT
            git log --pretty=format:"%h - %s (%an)" | grep -ivE '^[a-f0-9]+ - (feat|fix|add|new)' | sed 's/^[a-f0-9]* - /- /' >> $GITHUB_OUTPUT || true
            echo "EOF" >> $GITHUB_OUTPUT
          fi

      - name: Create GitHub Release
        env:
          GH_TOKEN: ${{ secrets.RELEASE_TOKEN }}
        run: |
          VERSION=${{ steps.get_version.outputs.VERSION }}
          cd .pio/build/esp32dev

          # Create release with available files
          FILES_TO_UPLOAD=""

          # Always add firmware
          if [ -f "upgrade_filaman_firmware_v${VERSION}.bin" ]; then
            FILES_TO_UPLOAD="$FILES_TO_UPLOAD upgrade_filaman_firmware_v${VERSION}.bin"
          fi

          # Add SPIFFS and full binary only if they exist
          if [ -f "upgrade_filaman_website_v${VERSION}.bin" ]; then
            FILES_TO_UPLOAD="$FILES_TO_UPLOAD upgrade_filaman_website_v${VERSION}.bin"
          fi

          if [ -f "filaman_full_${VERSION}.bin" ]; then
            FILES_TO_UPLOAD="$FILES_TO_UPLOAD filaman_full_${VERSION}.bin"
          fi

          # Create release with available files
          if [ -n "$FILES_TO_UPLOAD" ]; then
            gh release create "v${VERSION}" \
              --title "Release ${VERSION}" \
              --notes "${{ steps.release_notes.outputs.CHANGES }}" \
              $FILES_TO_UPLOAD
          else
            echo "Error: No files found to upload"
            exit 1
          fi
```
.github/workflows/providers/gitea-release.yml (vendored, new file) — 103 lines
@@ -0,0 +1,103 @@
```yaml
name: Gitea Release

on:
  push:
    tags:
      - 'v*'

permissions:
  contents: write      # Required for creating releases
  issues: read         # Required for reading changelog
  pull-requests: read  # Required for reading changelog

jobs:
  build:
    runs-on: ubuntu-latest
    permissions:
      contents: write
    steps:
      - uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.x'

      - name: Install PlatformIO
        run: |
          python -m pip install --upgrade pip
          pip install --upgrade platformio

      - name: Build Firmware
        run: |
          pio run -t buildfs  # Build SPIFFS
          pio run             # Build firmware

      - name: Install esptool
        run: |
          pip install esptool

      - name: Merge firmware and SPIFFS
        run: |
          esptool.py --chip esp32 merge_bin \
            --flash_mode dio \
            --flash_freq 40m \
            --flash_size 4MB \
            -o .pio/build/esp32dev/filaman_full.bin \
            0x1000 .pio/build/esp32dev/bootloader.bin \
            0x8000 .pio/build/esp32dev/partitions.bin \
            0x10000 .pio/build/esp32dev/firmware.bin \
            0x290000 .pio/build/esp32dev/spiffs.bin

      - name: Prepare OTA firmware
        run: |
          cp .pio/build/esp32dev/firmware.bin .pio/build/esp32dev/filaman_ota.bin

      - name: Get version from tag
        id: get_version
        run: echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_OUTPUT

      - name: Read CHANGELOG.md
        id: changelog
        run: |
          CHANGELOG=$(awk "/## \\[${{ steps.get_version.outputs.VERSION }}\\]/{p=1;print;next} /## \\[/{p=0} p" CHANGELOG.md)
          echo "CHANGES<<EOF" >> $GITHUB_OUTPUT
          echo "$CHANGELOG" >> $GITHUB_OUTPUT
          echo "EOF" >> $GITHUB_OUTPUT

      - name: Create Release
        env:
          GITEA_TOKEN: ${{ secrets.GITEA_TOKEN }}
          GITEA_API_URL: ${{ secrets.GITEA_API_URL }}
          GITEA_REPOSITORY: ${{ secrets.GITEA_REPOSITORY }}
        run: |
          # Create release using Gitea API
          RESPONSE=$(curl -X POST \
            -H "Authorization: token ${GITEA_TOKEN}" \
            -H "Content-Type: application/json" \
            -H "accept: application/json" \
            "${GITEA_API_URL}/repos/${GITEA_REPOSITORY}/releases" \
            -d '{
              "tag_name": "${{ github.ref_name }}",
              "name": "Release ${{ steps.get_version.outputs.VERSION }}",
              "body": "${{ steps.changelog.outputs.CHANGES }}",
              "draft": false,
              "prerelease": false
            }')

          # Extract release ID from response
          RELEASE_ID=$(echo $RESPONSE | jq -r .id)

          # Upload full firmware
          curl -X POST \
            -H "Authorization: token ${GITEA_TOKEN}" \
            -H "Content-Type: application/octet-stream" \
            "${GITEA_API_URL}/repos/${GITEA_REPOSITORY}/releases/${RELEASE_ID}/assets?name=filaman_full.bin" \
            --data-binary @.pio/build/esp32dev/filaman_full.bin

          # Upload OTA firmware
          curl -X POST \
            -H "Authorization: token ${GITEA_TOKEN}" \
            -H "Content-Type: application/octet-stream" \
            "${GITEA_API_URL}/repos/${GITEA_REPOSITORY}/releases/${RELEASE_ID}/assets?name=filaman_ota.bin" \
            --data-binary @.pio/build/esp32dev/filaman_ota.bin
```
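The `Read CHANGELOG.md` step relies on a single awk filter to pull one version's section out of `CHANGELOG.md`. A quick local check of that filter (not part of the workflow; the sample changelog, dates, and the `1.2.4` value are made up for illustration) might look like this:

```bash
# Sample data only — the real CHANGELOG.md lives in the repository root.
VERSION=1.2.4
cat > /tmp/CHANGELOG.sample.md <<'EOF'
# Changelog

## [1.2.5] - 2025-03-01
### Fixed
- newer entry

## [1.2.4] - 2025-02-20
### Added
- provider-specific release workflows

## [1.2.3] - 2025-02-10
### Changed
- older entry
EOF

# Prints the "## [1.2.4]" heading and its body, stopping at the next "## [" heading.
awk "/## \\[${VERSION}\\]/{p=1;print;next} /## \\[/{p=0} p" /tmp/CHANGELOG.sample.md
```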
.github/workflows/providers/github-release.yml (vendored, new file) — 85 lines
@@ -0,0 +1,85 @@
```yaml
name: GitHub Release

on:
  push:
    tags:
      - 'v*'

permissions:
  contents: write      # Required for creating releases
  issues: read         # Required for reading changelog
  pull-requests: read  # Required for reading changelog

jobs:
  build:
    runs-on: ubuntu-latest
    permissions:
      contents: write  # Required for creating releases at job level
    steps:
      - uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.x'

      - name: Install PlatformIO
        run: |
          python -m pip install --upgrade pip
          pip install --upgrade platformio

      - name: Build Firmware
        run: |
          pio run -t buildfs  # Build SPIFFS
          pio run             # Build firmware

      - name: Install esptool
        run: |
          pip install esptool

      - name: Merge firmware and SPIFFS
        run: |
          esptool.py --chip esp32 merge_bin \
            --flash_mode dio \
            --flash_freq 40m \
            --flash_size 4MB \
            -o .pio/build/esp32dev/filaman_full.bin \
            0x1000 .pio/build/esp32dev/bootloader.bin \
            0x8000 .pio/build/esp32dev/partitions.bin \
            0x10000 .pio/build/esp32dev/firmware.bin \
            0x290000 .pio/build/esp32dev/spiffs.bin

      - name: Prepare OTA firmware
        run: |
          # Use PlatformIO to create a proper OTA image
          cp .pio/build/esp32dev/firmware.bin .pio/build/esp32dev/filaman_ota.bin

      - name: Get version from tag
        id: get_version
        run: echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_OUTPUT

      - name: Read CHANGELOG.md
        id: changelog
        run: |
          CHANGELOG=$(awk "/## \\[${{ steps.get_version.outputs.VERSION }}\\]/{p=1;print;next} /## \\[/{p=0} p" CHANGELOG.md)
          echo "CHANGES<<EOF" >> $GITHUB_OUTPUT
          echo "$CHANGELOG" >> $GITHUB_OUTPUT
          echo "EOF" >> $GITHUB_OUTPUT

      - name: Install and Configure GitHub CLI
        run: |
          curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | sudo dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg \
            && sudo chmod go+r /usr/share/keyrings/githubcli-archive-keyring.gpg \
            && echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null \
            && sudo apt update \
            && sudo apt install gh -y

      - name: Create Release with GitHub CLI
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          gh release create "${{ github.ref_name }}" \
            --title "Release ${{ steps.get_version.outputs.VERSION }}" \
            --notes "${{ steps.changelog.outputs.CHANGES }}" \
            ".pio/build/esp32dev/filaman_full.bin#filaman_full.bin" \
            ".pio/build/esp32dev/filaman_ota.bin#filaman_ota.bin"
```
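The GitHub variant leans on the `gh` CLI, including the `path#label` syntax that gives each uploaded asset a display name. As an optional follow-up check (not part of this diff; the tag value is an example), the uploaded assets of a finished release can be listed with the same CLI:

```bash
# Optional post-release check; "v1.2.4" is an example tag value.
gh release view "v1.2.4" --json assets --jq '.assets[].name'
```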
.github/workflows/release.yml (vendored) — 45 lines changed
```diff
@@ -1,41 +1,32 @@
-name: Release Workflow
+name: Release
 
 on:
   push:
     tags:
       - 'v*'
 
-permissions:
-  contents: write
-
 jobs:
-  detect-provider:
+  detect-and-run:
     runs-on: ubuntu-latest
-    outputs:
-      provider: ${{ steps.provider.outputs.provider }}
     steps:
-      - name: Determine CI Provider
-        id: provider
-        shell: bash
+      - name: Checkout code
+        uses: actions/checkout@v4
+
+      - name: Determine hosting platform
+        id: platform
         run: |
-          if [ -n "${GITEA_ACTIONS}" ] || [ -n "${GITEA_REPOSITORY}" ] || [[ "${RUNNER_NAME}" == *"gitea"* ]]; then
-            echo "provider=gitea" >> "$GITHUB_OUTPUT"
+          if [[ "$GITHUB_SERVER_URL" == "https://github.com" ]]; then
+            echo "platform=github" >> $GITHUB_OUTPUT
+          elif [[ "$CI_SERVER_URL" == *"gitlab"* ]]; then
+            echo "platform=gitlab" >> $GITHUB_OUTPUT
           else
-            echo "provider=github" >> "$GITHUB_OUTPUT"
+            echo "platform=gitea" >> $GITHUB_OUTPUT
           fi
 
-  github-release:
-    needs: detect-provider
-    permissions:
-      contents: write
-    if: needs.detect-provider.outputs.provider == 'github'
-    uses: ./.github/workflows/github-release.yml
-    secrets:
-      RELEASE_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+      - name: Run GitHub Release
+        if: steps.platform.outputs.platform == 'github'
+        uses: ./.github/workflows/providers/github-release.yml
 
-  gitea-release:
-    needs: detect-provider
-    if: needs.detect-provider.outputs.provider == 'gitea'
-    uses: ./.github/workflows/gitea-release.yml
-    secrets:
-      GITEA_TOKEN: ${{ secrets.GITEA_TOKEN }}
+      - name: Run Gitea Release
+        if: steps.platform.outputs.platform == 'gitea'
+        uses: ./.github/workflows/providers/gitea-release.yml
```
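The rewritten `release.yml` now decides the hosting platform from `GITHUB_SERVER_URL` / `CI_SERVER_URL` instead of Gitea-specific runner variables. The branch logic can be exercised locally with a small shell sketch (not from the repository; in CI these variables are set by the runner):

```bash
# Mirrors the workflow's detection logic for local experimentation.
detect_platform() {
  if [[ "$GITHUB_SERVER_URL" == "https://github.com" ]]; then
    echo "github"
  elif [[ "$CI_SERVER_URL" == *"gitlab"* ]]; then
    echo "gitlab"
  else
    echo "gitea"
  fi
}

GITHUB_SERVER_URL="https://github.com" detect_platform       # -> github
GITHUB_SERVER_URL="https://git.example.com" detect_platform   # -> gitea
```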
CHANGELOG.md — 1,183 lines changed (diff suppressed because it is too large)
README.de.md — 16 lines changed
```diff
@@ -53,14 +53,14 @@ Deutsches Erklärvideo: [Youtube](https://youtu.be/uNDe2wh9SS8?si=b-jYx4I1w62zaO
 ### Komponenten
 - **ESP32 Entwicklungsboard:** Jede ESP32-Variante.
   [Amazon Link](https://amzn.eu/d/aXThslf)
-- **HX711 5kg Wägezellen-Verstärker:** Für Gewichtsmessung.
-  [Amazon Link](https://amzn.eu/d/06A0DLb)
-- **OLED 0.96 Zoll I2C weiß/gelb Display:** 128x64 SSD1306.
-  [Amazon Link](https://amzn.eu/d/0AuBp2c)
-- **PN532 NFC NXP RFID-Modul V3:** Für NFC-Tag-Operationen.
-  [Amazon Link](https://amzn.eu/d/jfIuQXb)
-- **NFC Tags Ntag215:** RFID Tag
-  [Amazon Link](https://amzn.eu/d/9Z6mXc1)
+- **HX711 Wägezellen-Verstärker:** Für Gewichtsmessung.
+  [Amazon Link](https://amzn.eu/d/1wZ4v0x)
+- **OLED Display:** 128x64 SSD1306.
+  [Amazon Link](https://amzn.eu/d/dozAYDU)
+- **PN532 NFC Modul:** Für NFC-Tag-Operationen.
+  [Amazon Link](https://amzn.eu/d/8205DDh)
+- **NFC-Tag:** NTAG215
+  [Amazon Link](https://amzn.eu/d/fywy4c4)
 
 ### Pin-Konfiguration
 | Komponente | ESP32 Pin |
```
README.md — 34 lines changed
```diff
@@ -56,14 +56,14 @@ german explanatory video: [Youtube](https://youtu.be/uNDe2wh9SS8?si=b-jYx4I1w62z
 ### Components
 - **ESP32 Development Board:** Any ESP32 variant.
   [Amazon Link](https://amzn.eu/d/aXThslf)
-- **HX711 5kg Load Cell Amplifier:** For weight measurement.
-  [Amazon Link](https://amzn.eu/d/06A0DLb)
-- **OLED 0.96 Zoll I2C white/yellow Display:** 128x64 SSD1306.
-  [Amazon Link](https://amzn.eu/d/0AuBp2c)
-- **PN532 NFC NXP RFID-Modul V3:** For NFC tag operations.
-  [Amazon Link](https://amzn.eu/d/jfIuQXb)
-- **NFC Tags Ntag215:** RFID Tag
-  [Amazon Link](https://amzn.eu/d/9Z6mXc1)
+- **HX711 Load Cell Amplifier:** For weight measurement.
+  [Amazon Link](https://amzn.eu/d/1wZ4v0x)
+- **OLED Display:** 128x64 SSD1306.
+  [Amazon Link](https://amzn.eu/d/dozAYDU)
+- **PN532 NFC Module:** For NFC tag operations.
+  [Amazon Link](https://amzn.eu/d/8205DDh)
+- **NFC-Tag:** NTAG215
+  [Amazon Link](https://amzn.eu/d/fywy4c4)
 
 
 ### Pin Configuration
@@ -124,24 +124,6 @@ german explanatory video: [Youtube](https://youtu.be/uNDe2wh9SS8?si=b-jYx4I1w62z
 - Configure WiFi settings through the captive portal.
 - Access the web interface at `http://filaman.local` or the IP address.
 
-## GitHub Actions Configuration
-
-### Required Secrets for Gitea Releases
-
-When using Gitea as your repository host, you need to configure the following secrets in your repository:
-
-- `GITEA_API_URL`: The base URL of your Gitea instance, including protocol (e.g., `https://git.example.com`)
-- `GITEA_TOKEN`: Your Gitea access token with permissions to create releases
-- `GITEA_REPOSITORY`: The repository name in format `owner/repo` (e.g., `username/filaman`)
-
-Example values:
-```
-GITEA_API_URL=https://git.example.com
-GITEA_TOKEN=abcdef1234567890
-GITEA_REPOSITORY=username/filaman
-```
-
-Make sure to set these secrets in your repository settings under Settings > Secrets and Variables > Actions.
-
 ## Documentation
 
```
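The removed README section documented the `GITEA_API_URL`, `GITEA_TOKEN`, and `GITEA_REPOSITORY` secrets. One way to sanity-check such values before storing them as CI secrets (not from the README; the endpoint shown is the standard Gitea REST path and all values are placeholders):

```bash
# Placeholder values — replace with your own instance, token, and repository.
GITEA_API_URL=https://git.example.com
GITEA_TOKEN=abcdef1234567890
GITEA_REPOSITORY=username/filaman

# A successful response echoes the repository's full name, confirming URL, token,
# and repository name are consistent with each other.
curl -s -H "Authorization: token ${GITEA_TOKEN}" \
  "${GITEA_API_URL}/api/v1/repos/${GITEA_REPOSITORY}" | jq -r .full_name
```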
```diff
@@ -6,24 +6,13 @@
     <title>FilaMan - Filament Management Tool</title>
     <link rel="icon" type="image/png" href="/favicon.ico">
     <link rel="stylesheet" href="style.css">
-    <script>
-        fetch('/api/version')
-            .then(response => response.json())
-            .then(data => {
-                const versionSpan = document.querySelector('.version');
-                if (versionSpan) {
-                    versionSpan.textContent = 'v' + data.version;
-                }
-            })
-            .catch(error => console.error('Error fetching version:', error));
-    </script>
 </head>
 <body>
     <div class="navbar">
         <div style="display: flex; align-items: center; gap: 2rem;">
             <img src="/logo.png" alt="FilaMan Logo" class="logo">
             <div class="logo-text">
-                <h1>FilaMan<span class="version"></span></h1>
+                <h1>FilaMan<span class="version">v1.2.4</span></h1>
                 <h4>Filament Management Tool</h4>
             </div>
         </div>
```
```diff
@@ -1051,10 +1051,9 @@ input[type="submit"]:disabled,
 }
 .update-form {
     background: var(--primary-color);
-    box-shadow: 0 4px 8px rgba(0, 0, 0, 0.05);
-    border: var(--glass-border);
     padding: 20px;
     border-radius: 8px;
+    box-shadow: 0 2px 4px rgba(0,0,0,0.1);
     margin: 0 auto;
     width: 400px;
     text-align: center;
@@ -1065,7 +1064,7 @@ input[type="submit"]:disabled,
     padding: 8px;
     border: 1px solid #ddd;
     border-radius: 4px;
-    background-color: #4CAF50;
+    background: white;
 }
 .update-form input[type="submit"] {
     background-color: #4CAF50;
@@ -1087,66 +1086,10 @@ input[type="submit"]:disabled,
 .warning {
     background-color: var(--primary-color);
     border: 1px solid #ffe0b2;
+    color: white;
+    padding: 15px;
     margin: 20px auto;
     border-radius: 4px;
     max-width: 600px;
     text-align: center;
-    color: #e65100;
-    padding: 15px;
-}
-
-.update-options {
-    display: flex;
-    gap: 2rem;
-    margin: 2rem 0;
-}
-.update-section {
-    flex: 1;
-    background: var(--background-green);
-    padding: 1.5rem;
-    border-radius: 8px;
-}
-.update-section h2 {
-    margin-top: 0;
-    color: #333;
-}
-.update-section p {
-    color: #666;
-    margin-bottom: 1rem;
-}
-.progress-container {
-    margin: 20px 0;
-    background: #f0f0f0;
-    border-radius: 4px;
-    overflow: hidden;
-}
-.progress-bar {
-    width: 0;
-    height: 20px;
-    background: #4CAF50;
-    transition: width 0.3s ease-in-out;
-    text-align: center;
-    line-height: 20px;
-    color: white;
-}
-.status {
-    margin-top: 20px;
-    padding: 10px;
-    border-radius: 4px;
-    display: none;
-}
-.status.success {
-    background: #e8f5e9;
-    color: #2e7d32;
-}
-.status.error {
-    background: #ffebee;
-    color: #c62828;
-}
-.warning {
-    background: #fff3e0;
-    color: #e65100;
-    padding: 15px;
-    border-radius: 4px;
-    margin-bottom: 20px;
-}
 }
```
```diff
@@ -6,24 +6,13 @@
     <title>FilaMan - Filament Management Tool</title>
     <link rel="icon" type="image/png" href="/favicon.ico">
     <link rel="stylesheet" href="style.css">
-    <script>
-        fetch('/api/version')
-            .then(response => response.json())
-            .then(data => {
-                const versionSpan = document.querySelector('.version');
-                if (versionSpan) {
-                    versionSpan.textContent = 'v' + data.version;
-                }
-            })
-            .catch(error => console.error('Error fetching version:', error));
-    </script>
 </head>
 <body>
     <div class="navbar">
         <div style="display: flex; align-items: center; gap: 2rem;">
             <img src="/logo.png" alt="FilaMan Logo" class="logo">
             <div class="logo-text">
-                <h1>FilaMan<span class="version"></span></h1>
+                <h1>FilaMan<span class="version">v1.2.4</span></h1>
                 <h4>Filament Management Tool</h4>
             </div>
         </div>
@@ -51,34 +40,18 @@
         <h1>Firmware Upgrade</h1>
 
         <div class="warning">
-            <strong>Warning:</strong> Do not power off the device during update.
+            <strong>Warning:</strong> Please do not turn off or restart the device during the update.
+            The device will restart automatically after the update.
         </div>
 
-        <div class="update-options">
-            <div class="update-section">
-                <h2>Firmware Update</h2>
-                <p>Upload a new firmware file (filaman_*.bin)</p>
         <div class="update-form">
-            <form id="firmwareForm" enctype='multipart/form-data' data-type="firmware">
+            <form id="updateForm" enctype='multipart/form-data'>
                 <input type='file' name='update' accept='.bin' required>
                 <input type='submit' value='Start Firmware Update'>
             </form>
         </div>
-            </div>
-
-            <div class="update-section">
-                <h2>Webpage Update</h2>
-                <p>Upload a new webpage file (webpage_*.bin)</p>
-                <div class="update-form">
-                    <form id="webpageForm" enctype='multipart/form-data' data-type="webpage">
-                        <input type='file' name='update' accept='.bin' required>
-                        <input type='submit' value='Start Webpage Update'>
-                    </form>
-                </div>
-            </div>
-        </div>
-
-        <div class="progress-container" style="display: none;">
+        <div class="progress-container">
             <div class="progress-bar">0%</div>
         </div>
         <div class="status"></div>
@@ -91,163 +64,91 @@
             statusContainer.style.display = 'none';
         }
 
-        const progress = document.querySelector('.progress-bar');
-        const progressContainer = document.querySelector('.progress-container');
-        const status = document.querySelector('.status');
-        let updateInProgress = false;
-        let lastReceivedProgress = 0;
-
-        // WebSocket handling
-        let ws = null;
-        let wsReconnectTimer = null;
-
-        function connectWebSocket() {
-            ws = new WebSocket('ws://' + window.location.host + '/ws');
-
-            ws.onmessage = function(event) {
-                try {
-                    const data = JSON.parse(event.data);
-                    if (data.type === "updateProgress" && updateInProgress) {
-                        // Show the progress bar
-                        progressContainer.style.display = 'block';
-
-                        // Only update the progress if it increased
-                        const newProgress = parseInt(data.progress);
-                        if (!isNaN(newProgress) && newProgress >= lastReceivedProgress) {
-                            progress.style.width = newProgress + '%';
-                            progress.textContent = newProgress + '%';
-                            lastReceivedProgress = newProgress;
-                        }
-
-                        // Show the status message
-                        if (data.message || data.status) {
-                            status.textContent = data.message || getStatusMessage(data.status);
-                            status.className = 'status success';
-                            status.style.display = 'block';
-
-                            // Trigger the reload once the update succeeded
-                            if (data.status === 'success' || lastReceivedProgress >= 98) {
-                                clearTimeout(wsReconnectTimer);
-                                setTimeout(() => {
-                                    window.location.href = '/';
-                                }, 30000);
-                            }
-                        }
-                    }
-                } catch (e) {
-                    console.error('WebSocket message error:', e);
-                }
-            };
-
-            ws.onclose = function() {
-                if (updateInProgress) {
-                    // If progress got far enough, assume the update succeeded
-                    if (lastReceivedProgress >= 85) {
-                        status.textContent = "Update appears successful! Device is restarting... Page will reload in 30 seconds.";
-                        status.className = 'status success';
-                        status.style.display = 'block';
-                        clearTimeout(wsReconnectTimer);
-                        setTimeout(() => {
-                            window.location.href = '/';
-                        }, 30000);
-                    } else {
-                        // Try to reconnect while progress is still low
-                        wsReconnectTimer = setTimeout(connectWebSocket, 1000);
-                    }
-                }
-            };
-
-            ws.onerror = function(err) {
-                console.error('WebSocket error:', err);
-                if (updateInProgress && lastReceivedProgress >= 85) {
-                    status.textContent = "Update appears successful! Device is restarting... Page will reload in 30 seconds.";
-                    status.className = 'status success';
-                    status.style.display = 'block';
-                    setTimeout(() => {
-                        window.location.href = '/';
-                    }, 30000);
-                }
-            };
-        }
-
-        // Initial WebSocket connection
-        connectWebSocket();
-
-        function getStatusMessage(status) {
-            switch(status) {
-                case 'starting': return 'Starting update...';
-                case 'uploading': return 'Uploading...';
-                case 'finalizing': return 'Finalizing update...';
-                case 'restoring': return 'Restoring configurations...';
-                case 'preparing': return 'Preparing for restart...';
-                case 'success': return 'Update successful! Device is restarting... Page will reload in 30 seconds.';
-                default: return 'Updating...';
-            }
-        }
-
-        function handleUpdate(e) {
+        document.getElementById('updateForm').addEventListener('submit', async (e) => {
             e.preventDefault();
             const form = e.target;
             const file = form.update.files[0];
-            const updateType = form.dataset.type;
-
             if (!file) {
-                alert('Please select a file.');
+                alert('Please select a firmware file.');
                 return;
             }
 
-            // Validate file name pattern
-            if (updateType === 'firmware' && !file.name.startsWith('upgrade_filaman_firmware_')) {
-                alert('Please select a valid firmware file (upgrade_filaman_firmware_*.bin)');
-                return;
-            }
-            if (updateType === 'webpage' && !file.name.startsWith('upgrade_filaman_website_')) {
-                alert('Please select a valid webpage file (upgrade_filaman_website_*.bin)');
-                return;
-            }
-
-            // Reset UI
-            updateInProgress = true;
+            const formData = new FormData();
+            formData.append('update', file);
+
+            const progress = document.querySelector('.progress-bar');
+            const progressContainer = document.querySelector('.progress-container');
+            const status = document.querySelector('.status');
+
             progressContainer.style.display = 'block';
             status.style.display = 'none';
             status.className = 'status';
-            progress.style.width = '0%';
-            progress.textContent = '0%';
-
-            // Disable submit buttons
-            document.querySelectorAll('form input[type=submit]').forEach(btn => btn.disabled = true);
-
-            // Send update
+            form.querySelector('input[type=submit]').disabled = true;
+
             const xhr = new XMLHttpRequest();
             xhr.open('POST', '/update', true);
 
+            xhr.upload.onprogress = (e) => {
+                if (e.lengthComputable) {
+                    const percentComplete = (e.loaded / e.total) * 100;
+                    progress.style.width = percentComplete + '%';
+                    progress.textContent = Math.round(percentComplete) + '%';
+                }
+            };
+
             xhr.onload = function() {
-                if (xhr.status !== 200 && !progress.textContent.startsWith('100')) {
-                    status.textContent = "Update failed: " + (xhr.responseText || "Unknown error");
-                    status.className = 'status error';
+                try {
+                    let response = this.responseText;
+                    try {
+                        const jsonResponse = JSON.parse(response);
+                        response = jsonResponse.message;
+
+                        if (jsonResponse.restart) {
+                            status.textContent = response + " Redirecting in 20 seconds...";
+                            let countdown = 20;
+                            const timer = setInterval(() => {
+                                countdown--;
+                                if (countdown <= 0) {
+                                    clearInterval(timer);
+                                    window.location.href = '/';
+                                } else {
+                                    status.textContent = response + ` Redirecting in ${countdown} seconds...`;
+                                }
+                            }, 1000);
+                        }
+                    } catch (e) {
+                        if (!isNaN(response)) {
+                            const percent = parseInt(response);
+                            progress.style.width = percent + '%';
+                            progress.textContent = percent + '%';
+                            return;
+                        }
+                    }
+
+                    status.textContent = response;
+                    status.classList.add(xhr.status === 200 ? 'success' : 'error');
                     status.style.display = 'block';
-                    updateInProgress = false;
-                    document.querySelectorAll('form input[type=submit]').forEach(btn => btn.disabled = false);
+
+                    if (xhr.status !== 200) {
+                        form.querySelector('input[type=submit]').disabled = false;
+                    }
+                } catch (error) {
+                    status.textContent = 'Error: ' + error.message;
+                    status.classList.add('error');
+                    status.style.display = 'block';
+                    form.querySelector('input[type=submit]').disabled = false;
                 }
             };
 
             xhr.onerror = function() {
-                if (!progress.textContent.startsWith('100')) {
-                    status.textContent = "Network error during update";
-                    status.className = 'status error';
-                    status.style.display = 'block';
-                    updateInProgress = false;
-                    document.querySelectorAll('form input[type=submit]').forEach(btn => btn.disabled = false);
-                }
+                status.textContent = 'Update failed: Network error';
+                status.classList.add('error');
+                status.style.display = 'block';
+                form.querySelector('input[type=submit]').disabled = false;
             };
 
-            const formData = new FormData();
-            formData.append('update', file);
             xhr.send(formData);
-        }
-
-        document.getElementById('firmwareForm').addEventListener('submit', handleUpdate);
-        document.getElementById('webpageForm').addEventListener('submit', handleUpdate);
+        });
     </script>
 </body>
 </html>
```
```diff
@@ -9,8 +9,8 @@
 ; https://docs.platformio.org/page/projectconf.html
 
 [common]
-version = "1.3.83"
+version = "1.2.4"
-##
+
 [env:esp32dev]
 platform = espressif32
 board = esp32dev
@@ -20,10 +20,7 @@ monitor_speed = 115200
 lib_deps =
     tzapu/WiFiManager @ ^2.0.17
     https://github.com/me-no-dev/ESPAsyncWebServer.git#master
-    #me-no-dev/AsyncTCP @ ^1.1.1
-    https://github.com/esphome/AsyncTCP.git
-    #mathieucarbou/ESPAsyncWebServer @ ^3.6.0
-    #esp32async/AsyncTCP @ ^3.3.5
+    me-no-dev/AsyncTCP @ ^1.1.1
     bogde/HX711 @ ^0.7.5
     adafruit/Adafruit SSD1306 @ ^2.5.13
     adafruit/Adafruit GFX Library @ ^1.11.11
@@ -46,26 +43,34 @@ build_flags =
     -fdata-sections
     -DNDEBUG
     -mtext-section-literals
-    -DVERSION=\"${common.version}\"
+    '-D VERSION="${common.version}"'
     -DASYNCWEBSERVER_REGEX
-    -DCORE_DEBUG_LEVEL=3
+    -DCORE_DEBUG_LEVEL=1
     -DCONFIG_ARDUHAL_LOG_COLORS=1
     -DOTA_DEBUG=1
+    -DARDUINO_RUNNING_CORE=1
+    -DARDUINO_EVENT_RUNNING_CORE=1
     -DCONFIG_OPTIMIZATION_LEVEL_DEBUG=1
-    -DBOOT_APP_PARTITION_OTA_0=1
-    -DCONFIG_LWIP_TCP_MSL=60000
-    -DCONFIG_LWIP_TCP_RCV_BUF_DEFAULT=4096
-    -DCONFIG_LWIP_MAX_ACTIVE_TCP=16
+    -DCONFIG_ESP32_PANIC_PRINT_REBOOT
+    -DCONFIG_ARDUINO_OTA_READSIZE=1024
+    -DCONFIG_ASYNC_TCP_RUNNING_CORE=1
+    -DCONFIG_ASYNC_TCP_USE_WDT=0
+    -DCONFIG_LWIP_TCP_MSS=1460
+    -DOTA_PARTITION_SUBTYPE=0x10
+    -DPARTITION_TABLE_OFFSET=0x8000
+    -DPARTITION_TABLE_SIZE=0x1000
 
 extra_scripts =
     scripts/extra_script.py
-    ${env:buildfs.extra_scripts}
+    pre:scripts/pre_build.py    ; runs first
+    pre:scripts/pre_spiffs.py   ; runs second
+    pre:scripts/combine_html.py ; runs third
+    scripts/gzip_files.py
 
-[env:buildfs]
-extra_scripts =
-    pre:scripts/combine_html.py ; Combine header with HTML files
-    scripts/gzip_files.py ; Compress files for SPIFFS
+; Remove or comment out the targets line
+;targets = buildfs, build
 
+; Add a custom target to build both
 [platformio]
 default_envs = esp32dev
 
```
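Both sides of this hunk inject the project version into the build (`-DVERSION=\"...\"` versus `'-D VERSION="..."'`). A rough way to confirm the version string actually ended up in the compiled image (an illustrative check, not part of the project scripts; it assumes the firmware embeds the macro as a plain string literal):

```bash
# Look for a bare semantic-version string inside the built firmware binary.
strings .pio/build/esp32dev/firmware.bin | grep -E '^[0-9]+\.[0-9]+\.[0-9]+$' | head
```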
```diff
@@ -1,39 +1,7 @@
 Import("env")
 
-board_config = env.BoardConfig()
-
-# Calculate SPIFFS size based on partition table
-SPIFFS_START = 0x310000  # From partitions.csv
-SPIFFS_SIZE = 0xE0000    # From partitions.csv
-SPIFFS_PAGE = 256
-SPIFFS_BLOCK = 4096
-
-env.Replace(
-    MKSPIFFSTOOL="mkspiffs",
-    SPIFFSBLOCKSZ=SPIFFS_BLOCK,
-    SPIFFSBLOCKSIZE=SPIFFS_BLOCK,
-    SPIFFSSTART=SPIFFS_START,
-    SPIFFSEND=SPIFFS_START + SPIFFS_SIZE,
-    SPIFFSPAGESZ=SPIFFS_PAGE,
-    SPIFFSSIZE=SPIFFS_SIZE
-)
-
 # Reuse the replace_version function
 exec(open("./scripts/pre_build.py").read())
 
 # Bind to SPIFFS build
 env.AddPreAction("buildfs", replace_version)
-
-import os
-import shutil
-from SCons.Script import DefaultEnvironment
-
-env = DefaultEnvironment()
-
-# Format SPIFFS partition before uploading new files
-spiffs_dir = os.path.join(env.subst("$BUILD_DIR"), "spiffs")
-if os.path.exists(spiffs_dir):
-    shutil.rmtree(spiffs_dir)
-os.makedirs(spiffs_dir)
-
-print("SPIFFS partition formatted.")
```
@@ -64,10 +64,29 @@ def get_changes_from_git():
 
     return changes
 
+def push_changes(version):
+    """Push changes to upstream"""
+    try:
+        # Stage the CHANGELOG.md
+        subprocess.run(['git', 'add', 'CHANGELOG.md'], check=True)
+
+        # Commit the changelog
+        commit_msg = f"docs: update changelog for version {version}"
+        subprocess.run(['git', 'commit', '-m', commit_msg], check=True)
+
+        # Push to origin (local)
+        subprocess.run(['git', 'push', 'origin'], check=True)
+        print("Successfully pushed to origin")
+
+    except subprocess.CalledProcessError as e:
+        print(f"Error during git operations: {e}")
+        return False
+    return True
+
 def update_changelog():
-    print("Starting changelog update...")
+    print("Starting changelog update...") # Add this line
     version = get_version()
-    print(f"Current version: {version}")
+    print(f"Current version: {version}") # Add this line
     today = datetime.now().strftime('%Y-%m-%d')
 
     script_dir = os.path.dirname(os.path.abspath(__file__))
@@ -92,7 +111,7 @@ def update_changelog():
    if not os.path.exists(changelog_path):
        with open(changelog_path, 'w') as f:
            f.write(f"# Changelog\n\n{changelog_entry}")
-       print(f"Created new changelog file with version {version}")
+       push_changes(version)
    else:
        with open(changelog_path, 'r') as f:
            content = f.read()
@@ -101,30 +120,9 @@ def update_changelog():
            updated_content = content.replace("# Changelog\n", f"# Changelog\n\n{changelog_entry}")
            with open(changelog_path, 'w') as f:
                f.write(updated_content)
-           print(f"Added new version {version} to changelog")
+           push_changes(version)
        else:
-           # Version existiert bereits, aktualisiere die bestehenden Einträge
-           version_pattern = f"## \\[{version}\\] - \\d{{4}}-\\d{{2}}-\\d{{2}}"
-           next_version_pattern = "## \\[.*?\\] - \\d{4}-\\d{2}-\\d{2}"
-
-           # Finde den Start der aktuellen Version
-           version_match = re.search(version_pattern, content)
-           if version_match:
-               version_start = version_match.start()
-               # Suche nach der nächsten Version
-               next_version_match = re.search(next_version_pattern, content[version_start + 1:])
-
-               if next_version_match:
-                   # Ersetze den Inhalt zwischen aktueller und nächster Version
-                   next_version_pos = version_start + 1 + next_version_match.start()
-                   updated_content = content[:version_start] + changelog_entry + content[next_version_pos:]
-               else:
-                   # Wenn keine nächste Version existiert, ersetze bis zum Ende
-                   updated_content = content[:version_start] + changelog_entry + "\n"
-
-               with open(changelog_path, 'w') as f:
-                   f.write(updated_content)
-               print(f"Updated entries for version {version}")
+           print(f"Version {version} already exists in changelog")
 
 if __name__ == "__main__":
     update_changelog()
src/api.cpp (22 changed lines)
@@ -60,10 +60,10 @@ JsonDocument fetchSpoolsForWebsite() {
     JsonArray filteredSpools = filteredDoc.to<JsonArray>();
 
     for (JsonObject spool : spools) {
-        JsonObject filteredSpool = filteredSpools.add<JsonObject>();
+        JsonObject filteredSpool = filteredSpools.createNestedObject();
         filteredSpool["extra"]["nfc_id"] = spool["extra"]["nfc_id"];
 
-        JsonObject filament = filteredSpool["filament"].to<JsonObject>();
+        JsonObject filament = filteredSpool.createNestedObject("filament");
         filament["sm_id"] = spool["id"];
         filament["id"] = spool["filament"]["id"];
         filament["name"] = spool["filament"]["name"];
@@ -73,7 +73,7 @@ JsonDocument fetchSpoolsForWebsite() {
         filament["price_meter"] = spool["filament"]["extra"]["price_meter"];
         filament["price_gramm"] = spool["filament"]["extra"]["price_gramm"];
 
-        JsonObject vendor = filament["vendor"].to<JsonObject>();
+        JsonObject vendor = filament.createNestedObject("vendor");
         vendor["id"] = spool["filament"]["vendor"]["id"];
         vendor["name"] = spool["filament"]["vendor"]["name"];
     }
@@ -110,13 +110,13 @@ JsonDocument fetchAllSpoolsInfo() {
     JsonArray filteredSpools = filteredDoc.to<JsonArray>();
 
     for (JsonObject spool : spools) {
-        JsonObject filteredSpool = filteredSpools.add<JsonObject>();
+        JsonObject filteredSpool = filteredSpools.createNestedObject();
         filteredSpool["price"] = spool["price"];
         filteredSpool["remaining_weight"] = spool["remaining_weight"];
         filteredSpool["used_weight"] = spool["used_weight"];
         filteredSpool["extra"]["nfc_id"] = spool["extra"]["nfc_id"];
 
-        JsonObject filament = filteredSpool["filament"].to<JsonObject>();
+        JsonObject filament = filteredSpool.createNestedObject("filament");
         filament["id"] = spool["filament"]["id"];
         filament["name"] = spool["filament"]["name"];
         filament["material"] = spool["filament"]["material"];
@@ -125,11 +125,11 @@ JsonDocument fetchAllSpoolsInfo() {
         filament["spool_weight"] = spool["filament"]["spool_weight"];
         filament["color_hex"] = spool["filament"]["color_hex"];
 
-        JsonObject vendor = filament["vendor"].to<JsonObject>();
+        JsonObject vendor = filament.createNestedObject("vendor");
         vendor["id"] = spool["filament"]["vendor"]["id"];
         vendor["name"] = spool["filament"]["vendor"]["name"];
 
-        JsonObject extra = filament["extra"].to<JsonObject>();
+        JsonObject extra = filament.createNestedObject("extra");
         extra["nozzle_temperature"] = spool["filament"]["extra"]["nozzle_temperature"];
         extra["price_gramm"] = spool["filament"]["extra"]["price_gramm"];
         extra["price_meter"] = spool["filament"]["extra"]["price_meter"];
@@ -186,7 +186,7 @@ bool updateSpoolTagId(String uidString, const char* payload) {
     }
 
     // Überprüfe, ob die erforderlichen Felder vorhanden sind
-    if (!doc["sm_id"].is<String>() || doc["sm_id"].as<String>() == "") {
+    if (!doc.containsKey("sm_id") || doc["sm_id"] == "") {
         Serial.println("Keine Spoolman-ID gefunden.");
         return false;
     }
@@ -368,7 +368,7 @@ bool checkSpoolmanExtraFields() {
     for (uint8_t s = 0; s < extraLength; s++) {
         bool found = false;
         for (JsonObject field : doc.as<JsonArray>()) {
-            if (field["key"].is<String>() && field["key"] == extraFields[s]) {
+            if (field.containsKey("key") && field["key"] == extraFields[s]) {
                 Serial.println("Feld gefunden: " + extraFields[s]);
                 found = true;
                 break;
@@ -430,7 +430,7 @@ bool checkSpoolmanInstance(const String& url) {
         String payload = http.getString();
         JsonDocument doc;
         DeserializationError error = deserializeJson(doc, payload);
-        if (!error && doc["status"].is<String>()) {
+        if (!error && doc.containsKey("status")) {
             const char* status = doc["status"];
             http.end();
 
@@ -469,7 +469,7 @@ bool saveSpoolmanUrl(const String& url) {
 
 String loadSpoolmanUrl() {
     JsonDocument doc;
-    if (loadJsonValue("/spoolman_url.json", doc) && doc["url"].is<String>()) {
+    if (loadJsonValue("/spoolman_url.json", doc) && doc.containsKey("url")) {
         return doc["url"].as<String>();
     }
     Serial.println("Keine gültige Spoolman-URL gefunden.");
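The src/api.cpp hunks are a one-for-one swap between the ArduinoJson 7 calls on the removed lines (`.is<String>()`, `add<JsonObject>()`, `["key"].to<JsonObject>()`) and the older ArduinoJson 6 helpers on the added lines (`containsKey()`, `createNestedObject()`). A small self-contained sketch of the two styles; the keys and values are invented for illustration, and note that containsKey() only tests presence while is<T>() also checks the stored type:

#include <ArduinoJson.h>

// Same data built with both API generations seen in the diff (keys invented).
// With ArduinoJson 7 the deprecated v6 calls still compile, but emit warnings.
void jsonStyles() {
    JsonDocument doc;                       // ArduinoJson 7 document
    doc["url"] = "http://spoolman.local";   // hypothetical value

    bool hasUrl_v6 = doc.containsKey("url");   // v6 helper (deprecated in v7)
    bool hasUrl_v7 = doc["url"].is<String>();  // v7: presence + type check

    JsonArray items = doc["items"].to<JsonArray>();  // v7 way to create a nested array
    JsonObject a = items.createNestedObject();       // v6 helper
    JsonObject b = items.add<JsonObject>();          // v7 replacement
    a["id"] = 1;
    b["id"] = 2;
    (void)hasUrl_v6;
    (void)hasUrl_v7;
}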
@@ -58,7 +58,7 @@ bool saveBambuCredentials(const String& ip, const String& serialnr, const String
 
 bool loadBambuCredentials() {
     JsonDocument doc;
-    if (loadJsonValue("/bambu_credentials.json", doc) && doc["bambu_ip"].is<String>()) {
+    if (loadJsonValue("/bambu_credentials.json", doc) && doc.containsKey("bambu_ip")) {
         // Temporäre Strings für die Werte
         String ip = doc["bambu_ip"].as<String>();
         String code = doc["bambu_accesscode"].as<String>();
@@ -270,9 +270,9 @@ void mqtt_callback(char* topic, byte* payload, unsigned int length) {
     }
 
     // Prüfen, ob "print->upgrade_state" und "print.ams.ams" existieren
-    if (doc["print"]["upgrade_state"].is<String>()) {
+    if (doc["print"].containsKey("upgrade_state")) {
         // Prüfen ob AMS-Daten vorhanden sind
-        if (!doc["print"]["ams"].is<String>() || !doc["print"]["ams"]["ams"].is<String>()) {
+        if (!doc["print"].containsKey("ams") || !doc["print"]["ams"].containsKey("ams")) {
             return;
         }
 
@@ -315,7 +315,7 @@ void mqtt_callback(char* topic, byte* payload, unsigned int length) {
     }
 
     // Prüfe die externe Spule
-    if (!hasChanges && doc["print"]["vt_tray"].is<String>()) {
+    if (!hasChanges && doc["print"].containsKey("vt_tray")) {
         JsonObject vtTray = doc["print"]["vt_tray"];
         bool foundExternal = false;
 
@@ -363,7 +363,7 @@ void mqtt_callback(char* topic, byte* payload, unsigned int length) {
     ams_count = amsArray.size();
 
     // Wenn externe Spule vorhanden, füge sie hinzu
-    if (doc["print"]["vt_tray"].is<String>()) {
+    if (doc["print"].containsKey("vt_tray")) {
         JsonObject vtTray = doc["print"]["vt_tray"];
         int extIdx = ams_count; // Index für externe Spule
         ams_data[extIdx].ams_id = 255; // Spezielle ID für externe Spule
@@ -387,14 +387,14 @@ void mqtt_callback(char* topic, byte* payload, unsigned int length) {
     JsonArray wsArray = wsDoc.to<JsonArray>();
 
     for (int i = 0; i < ams_count; i++) {
-        JsonObject amsObj = wsArray.add<JsonObject>();
+        JsonObject amsObj = wsArray.createNestedObject();
         amsObj["ams_id"] = ams_data[i].ams_id;
 
-        JsonArray trays = amsObj["tray"].to<JsonArray>();
+        JsonArray trays = amsObj.createNestedArray("tray");
         int maxTrays = (ams_data[i].ams_id == 255) ? 1 : 4;
 
         for (int j = 0; j < maxTrays; j++) {
-            JsonObject trayObj = trays.add<JsonObject>();
+            JsonObject trayObj = trays.createNestedObject();
             trayObj["id"] = ams_data[i].trays[j].id;
             trayObj["tray_info_idx"] = ams_data[i].trays[j].tray_info_idx;
             trayObj["tray_type"] = ams_data[i].trays[j].tray_type;
@@ -427,14 +427,14 @@ void mqtt_callback(char* topic, byte* payload, unsigned int length) {
     JsonArray wsArray = wsDoc.to<JsonArray>();
 
     for (int j = 0; j < ams_count; j++) {
-        JsonObject amsObj = wsArray.add<JsonObject>();
+        JsonObject amsObj = wsArray.createNestedObject();
         amsObj["ams_id"] = ams_data[j].ams_id;
 
-        JsonArray trays = amsObj["tray"].to<JsonArray>();
+        JsonArray trays = amsObj.createNestedArray("tray");
         int maxTrays = (ams_data[j].ams_id == 255) ? 1 : 4;
 
         for (int k = 0; k < maxTrays; k++) {
-            JsonObject trayObj = trays.add<JsonObject>();
+            JsonObject trayObj = trays.createNestedObject();
             trayObj["id"] = ams_data[j].trays[k].id;
             trayObj["tray_info_idx"] = ams_data[j].trays[k].tray_info_idx;
             trayObj["tray_type"] = ams_data[j].trays[k].tray_type;
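The same substitution runs through mqtt_callback(): presence checks on nested payload fields. One caveat when reading the removed lines: is<String>() returns false for object- or array-valued keys, so the usual type-aware counterpart of containsKey() on nested structures is is<JsonObject>() or is<JsonArray>(). A minimal sketch, with field names assumed for illustration rather than taken from a payload specification:

#include <ArduinoJson.h>

// Presence checks on nested MQTT fields (key names assumed for illustration).
bool hasAmsArray(JsonDocument& doc) {
    return doc["print"]["ams"]["ams"].is<JsonArray>();   // type-aware check
}

bool hasVtTray(JsonDocument& doc) {
    return !doc["print"]["vt_tray"].isNull();             // plain presence check
}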
@@ -36,5 +36,4 @@ void mqtt_loop(void * parameter);
 bool setBambuSpool(String payload);
 void bambu_restart();
 
-extern TaskHandle_t BambuMqttTask;
 #endif
@@ -1,5 +1,4 @@
 #include "commonFS.h"
-#include <SPIFFS.h>
 
 bool saveJsonValue(const char* filename, const JsonDocument& doc) {
     File file = SPIFFS.open(filename, "w");
@@ -36,12 +35,23 @@ bool loadJsonValue(const char* filename, JsonDocument& doc) {
     return true;
 }
 
-void initializeSPIFFS() {
-    if (!SPIFFS.begin(true, "/spiffs", 10, "spiffs")) {
-        Serial.println("SPIFFS Mount Failed");
-        return;
+bool initializeSPIFFS() {
+    // Erster Versuch
+    if (SPIFFS.begin(true)) {
+        Serial.println("SPIFFS mounted successfully.");
+        return true;
     }
-    Serial.printf("SPIFFS Total: %u bytes\n", SPIFFS.totalBytes());
-    Serial.printf("SPIFFS Used: %u bytes\n", SPIFFS.usedBytes());
-    Serial.printf("SPIFFS Free: %u bytes\n", SPIFFS.totalBytes() - SPIFFS.usedBytes());
+
+    // Formatierung versuchen
+    Serial.println("Failed to mount SPIFFS. Formatting...");
+    SPIFFS.format();
+
+    // Zweiter Versuch nach Formatierung
+    if (SPIFFS.begin(true)) {
+        Serial.println("SPIFFS formatted and mounted successfully.");
+        return true;
+    }
+
+    Serial.println("SPIFFS initialization failed completely.");
+    return false;
 }
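The rewritten initializeSPIFFS() on the added lines follows the common mount, format, remount pattern and reports the outcome as a bool instead of returning void. A compressed sketch of that pattern (the function name is illustrative, not the project's):

#include <Arduino.h>
#include <SPIFFS.h>

// Sketch of the mount/format/retry pattern used by initializeSPIFFS() above.
bool mountSpiffsOrFormat() {
    if (SPIFFS.begin(true)) {      // true = format automatically if the mount fails
        return true;
    }
    SPIFFS.format();               // explicit format as a second attempt
    if (SPIFFS.begin(true)) {
        return true;
    }
    Serial.println("SPIFFS unavailable");
    return false;
}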
@@ -7,6 +7,6 @@
 
 bool saveJsonValue(const char* filename, const JsonDocument& doc);
 bool loadJsonValue(const char* filename, JsonDocument& doc);
-void initializeSPIFFS();
+bool initializeSPIFFS();
 
 #endif
@@ -19,12 +19,6 @@
 void setup() {
     Serial.begin(115200);
 
-    uint64_t chipid;
-
-    chipid = ESP.getEfuseMac(); //The chip ID is essentially its MAC address(length: 6 bytes).
-    Serial.printf("ESP32 Chip ID = %04X", (uint16_t)(chipid >> 32)); //print High 2 bytes
-    Serial.printf("%08X\n", (uint32_t)chipid); //print Low 4bytes.
-
     // Initialize SPIFFS
     initializeSPIFFS();
 
@@ -84,8 +78,6 @@ uint8_t wifiErrorCounter = 0;
 
 // ##### PROGRAM START #####
 void loop() {
 
-    /*
     // Überprüfe den WLAN-Status
     if (WiFi.status() != WL_CONNECTED) {
         wifiErrorCounter++;
@@ -95,7 +87,6 @@ void loop() {
         wifiOn = true;
     }
     if (wifiErrorCounter > 20) ESP.restart();
-    */
 
     unsigned long currentMillis = millis();
 
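For reference, the chip-ID snippet removed from setup() prints the 6-byte eFuse MAC in two steps; the same information can be emitted with a single printf. A short sketch (the function name is illustrative):

#include <Arduino.h>

// The removed lines split the 64-bit eFuse MAC into high/low words; one call is enough.
void printChipId() {
    uint64_t chipid = ESP.getEfuseMac();   // 6-byte factory MAC, used as chip ID
    Serial.printf("ESP32 Chip ID = %04X%08X\n",
                  (uint16_t)(chipid >> 32), (uint32_t)chipid);
}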
src/ota.cpp (new file, 46 lines)
@@ -0,0 +1,46 @@
+#include <Arduino.h>
+#include "ota.h"
+#include <Update.h>
+#include <SPIFFS.h>
+#include "commonFS.h"
+
+void handleOTAUpload(AsyncWebServerRequest *request, String filename, size_t index, uint8_t *data, size_t len, bool final) {
+    static size_t contentLength = 0;
+
+    if (!index) {
+        contentLength = request->contentLength();
+        Serial.printf("Update size: %u bytes\n", contentLength);
+
+        if (contentLength == 0) {
+            request->send(400, "application/json", "{\"status\":\"error\",\"message\":\"Invalid file size\"}");
+            return;
+        }
+
+        if (!Update.begin(contentLength)) {
+            Serial.printf("Not enough space: %u required\n", contentLength);
+            request->send(400, "application/json", "{\"status\":\"error\",\"message\":\"Not enough space available\"}");
+            return;
+        }
+
+        Serial.println("Update started");
+    }
+
+    if (Update.write(data, len) != len) {
+        Update.printError(Serial);
+        request->send(400, "application/json", "{\"status\":\"error\",\"message\":\"Error writing update\"}");
+        return;
+    }
+
+    if (final) {
+        if (Update.end(true)) {
+            Serial.println("Update complete");
+            request->send(200, "application/json", "{\"status\":\"success\",\"message\":\"Update successful! Device will restart...\",\"restart\":true}");
+            delay(1000);
+            ESP.restart();
+        } else {
+            Update.printError(Serial);
+            request->send(400, "application/json", "{\"status\":\"error\",\"message\":\"Update failed\"}");
+        }
+    }
+}
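handleOTAUpload() sizes the update with request->contentLength(), i.e. the Content-Length of the whole multipart POST, which is typically a little larger than the firmware image itself. When the exact image size is not known up front, the ESP32 Update library also accepts UPDATE_SIZE_UNKNOWN; a hedged sketch of that alternative (not part of this commit):

#include <Arduino.h>
#include <Update.h>

// Alternative sizing: let the Update library grow up to the free OTA slot.
// UPDATE_SIZE_UNKNOWN comes from the ESP32 Arduino core's Update.h.
bool beginFirmwareUpdate(size_t knownImageSize) {
    size_t updateSize = (knownImageSize > 0) ? knownImageSize : UPDATE_SIZE_UNKNOWN;
    if (!Update.begin(updateSize)) {
        Update.printError(Serial);
        return false;
    }
    return true;
}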
src/ota.h (new file, 8 lines)
@@ -0,0 +1,8 @@
+#ifndef OTA_H
+#define OTA_H
+
+#include <ESPAsyncWebServer.h>
+
+void handleOTAUpload(AsyncWebServerRequest *request, String filename, size_t index, uint8_t *data, size_t len, bool final);
+
+#endif
@@ -3,9 +3,9 @@
 #include <ArduinoJson.h>
 #include "config.h"
 #include "HX711.h"
+#include <EEPROM.h>
 #include "display.h"
 #include "esp_task_wdt.h"
-#include <Preferences.h>
 
 HX711 scale;
 
@@ -17,10 +17,6 @@ uint8_t weigthCouterToApi = 0;
 uint8_t scale_tare_counter = 0;
 uint8_t pauseMainTask = 0;
 
-Preferences preferences;
-const char* NVS_NAMESPACE = "scale";
-const char* NVS_KEY_CALIBRATION = "cal_value";
-
 // ##### Funktionen für Waage #####
 uint8_t tareScale() {
     Serial.println("Tare scale");
@@ -52,12 +48,13 @@ void scale_loop(void * parameter) {
 
 void start_scale() {
     Serial.println("Prüfe Calibration Value");
-    long calibrationValue;
+    long calibrationValue; // calibration value (see example file "Calibration.ino")
+    //calibrationValue = 696.0; // uncomment this if you want to set the calibration value in the sketch
 
-    // NVS
-    preferences.begin(NVS_NAMESPACE, true); // true = readonly
-    calibrationValue = preferences.getLong(NVS_KEY_CALIBRATION, defaultScaleCalibrationValue);
-    preferences.end();
+    EEPROM.begin(512);
+    EEPROM.get(calVal_eepromAdress, calibrationValue); // uncomment this if you want to fetch the calibration value from eeprom
+    //calibrationValue = EEPROM.read(calVal_eepromAdress);
 
     Serial.print("Read Scale Calibration Value ");
     Serial.println(calibrationValue);
@@ -140,19 +137,18 @@ uint8_t calibrate_scale() {
     {
         Serial.print("New calibration value has been set to: ");
         Serial.println(newCalibrationValue);
+        Serial.print("Save this value to EEPROM adress ");
+        Serial.println(calVal_eepromAdress);
 
-        // Speichern mit NVS
-        preferences.begin(NVS_NAMESPACE, false); // false = readwrite
-        preferences.putLong(NVS_KEY_CALIBRATION, newCalibrationValue);
-        preferences.end();
+        //EEPROM.put(calVal_eepromAdress, newCalibrationValue);
+        EEPROM.put(calVal_eepromAdress, newCalibrationValue);
+        EEPROM.commit();
 
-        // Verifizieren
-        preferences.begin(NVS_NAMESPACE, true);
-        long verifyValue = preferences.getLong(NVS_KEY_CALIBRATION, 0);
-        preferences.end();
+        EEPROM.get(calVal_eepromAdress, newCalibrationValue);
+        //newCalibrationValue = EEPROM.read(calVal_eepromAdress);
 
-        Serial.print("Verified stored value: ");
-        Serial.println(verifyValue);
+        Serial.print("Read Value ");
+        Serial.println(newCalibrationValue);
 
         Serial.println("End calibration, revome weight");
 
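The scale diff toggles the persistent store for the HX711 calibration factor between the EEPROM emulation (EEPROM.begin/get/put/commit) and the ESP32 Preferences/NVS API (begin/getLong/putLong/end). A minimal sketch of the NVS variant; the namespace and key mirror the strings visible in the removed lines, while the fallback value and function names are placeholders:

#include <Preferences.h>

Preferences calPrefs;   // illustrative instance

// Store/read the scale calibration factor in NVS.
void saveCalibration(long value) {
    calPrefs.begin("scale", false);            // false = read/write
    calPrefs.putLong("cal_value", value);
    calPrefs.end();
}

long loadCalibration(long fallback) {
    calPrefs.begin("scale", true);             // true = read-only
    long value = calPrefs.getLong("cal_value", fallback);
    calPrefs.end();
    return value;
}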
@@ -15,6 +15,4 @@ extern uint8_t weigthCouterToApi;
 extern uint8_t scale_tare_counter;
 extern uint8_t pauseMainTask;
 
-extern TaskHandle_t ScaleTask;
-
 #endif
src/website.cpp (245 changed lines)
@@ -7,15 +7,10 @@
 #include "nfc.h"
 #include "scale.h"
 #include "esp_task_wdt.h"
-#include <Update.h>
-#include "display.h"
-
-#ifndef VERSION
-#define VERSION "1.1.0"
-#endif
+#include "ota.h"
 
 // Cache-Control Header definieren
-#define CACHE_CONTROL "max-age=604800" // Cache für 1 Woche
+#define CACHE_CONTROL "max-age=31536000" // Cache für 1 Jahr
 
 AsyncWebServer server(webserverPort);
 AsyncWebSocket ws("/ws");
@@ -23,49 +18,6 @@ AsyncWebSocket ws("/ws");
 uint8_t lastSuccess = 0;
 uint8_t lastHasReadRfidTag = 0;
 
-// Globale Variablen für Config Backups hinzufügen
-String bambuCredentialsBackup;
-String spoolmanUrlBackup;
-
-// Globale Variable für den Update-Typ
-static int currentUpdateCommand = 0;
-
-// Globale Update-Variablen
-static size_t updateTotalSize = 0;
-static size_t updateWritten = 0;
-static bool isSpiffsUpdate = false;
-
-void sendUpdateProgress(int progress, const char* status = nullptr, const char* message = nullptr) {
-    static int lastSentProgress = -1;
-
-    // Verhindere zu häufige Updates
-    if (progress == lastSentProgress && !status && !message) {
-        return;
-    }
-
-    String progressMsg = "{\"type\":\"updateProgress\",\"progress\":" + String(progress);
-    if (status) {
-        progressMsg += ",\"status\":\"" + String(status) + "\"";
-    }
-    if (message) {
-        progressMsg += ",\"message\":\"" + String(message) + "\"";
-    }
-    progressMsg += "}";
-
-    // Sende die Nachricht mehrmals mit Verzögerung für wichtige Updates
-    if (status || abs(progress - lastSentProgress) >= 10 || progress == 100) {
-        for (int i = 0; i < 2; i++) {
-            ws.textAll(progressMsg);
-            delay(100); // Längerer Delay zwischen Nachrichten
-        }
-    } else {
-        ws.textAll(progressMsg);
-        delay(50);
-    }
-
-    lastSentProgress = progress;
-}
-
 void onWsEvent(AsyncWebSocket *server, AsyncWebSocketClient *client, AwsEventType type, void *arg, uint8_t *data, size_t len) {
     if (type == WS_EVT_CONNECT) {
         Serial.println("Neuer Client verbunden!");
@@ -76,10 +28,6 @@ void onWsEvent(AsyncWebSocket *server, AsyncWebSocketClient *client, AwsEventTyp
         sendWriteResult(client, 3);
     } else if (type == WS_EVT_DISCONNECT) {
         Serial.println("Client getrennt.");
-    } else if (type == WS_EVT_ERROR) {
-        Serial.printf("WebSocket Client #%u error(%u): %s\n", client->id(), *((uint16_t*)arg), (char*)data);
-    } else if (type == WS_EVT_PONG) {
-        Serial.printf("WebSocket Client #%u pong\n", client->id());
     } else if (type == WS_EVT_DATA) {
         String message = String((char*)data);
         JsonDocument doc;
@@ -96,7 +44,7 @@ void onWsEvent(AsyncWebSocket *server, AsyncWebSocketClient *client, AwsEventTyp
         }
 
         else if (doc["type"] == "writeNfcTag") {
-            if (doc["payload"].is<String>()) {
+            if (doc.containsKey("payload")) {
                 // Versuche NFC-Daten zu schreiben
                 String payloadString;
                 serializeJson(doc["payload"], payloadString);
@@ -207,117 +155,7 @@ void sendAmsData(AsyncWebSocketClient *client) {
     }
 }
 
-void handleUpdate(AsyncWebServer &server) {
-    AsyncCallbackWebHandler* updateHandler = new AsyncCallbackWebHandler();
-    updateHandler->setUri("/update");
-    updateHandler->setMethod(HTTP_POST);
-
-    updateHandler->onUpload([](AsyncWebServerRequest *request, String filename,
-                               size_t index, uint8_t *data, size_t len, bool final) {
-        if (!index) {
-            updateTotalSize = request->contentLength();
-            updateWritten = 0;
-            isSpiffsUpdate = (filename.indexOf("website") > -1);
-
-            if (isSpiffsUpdate) {
-                // Backup vor dem Update
-                sendUpdateProgress(0, "backup", "Backing up configurations...");
-                delay(200);
-                backupJsonConfigs();
-                delay(200);
-
-                const esp_partition_t *partition = esp_partition_find_first(ESP_PARTITION_TYPE_DATA, ESP_PARTITION_SUBTYPE_DATA_SPIFFS, NULL);
-                if (!partition || !Update.begin(partition->size, U_SPIFFS)) {
-                    request->send(400, "application/json", "{\"success\":false,\"message\":\"Update initialization failed\"}");
-                    return;
-                }
-                sendUpdateProgress(5, "starting", "Starting SPIFFS update...");
-                delay(200);
-            } else {
-                if (!Update.begin(updateTotalSize)) {
-                    request->send(400, "application/json", "{\"success\":false,\"message\":\"Update initialization failed\"}");
-                    return;
-                }
-                sendUpdateProgress(0, "starting", "Starting firmware update...");
-                delay(200);
-            }
-        }
-
-        if (len) {
-            if (Update.write(data, len) != len) {
-                request->send(400, "application/json", "{\"success\":false,\"message\":\"Write failed\"}");
-                return;
-            }
-
-            updateWritten += len;
-            int currentProgress;
-
-            // Berechne den Fortschritt basierend auf dem Update-Typ
-            if (isSpiffsUpdate) {
-                // SPIFFS: 5-75% für Upload
-                currentProgress = 5 + (updateWritten * 100) / updateTotalSize;
-            } else {
-                // Firmware: 0-100% für Upload
-                currentProgress = 1 + (updateWritten * 100) / updateTotalSize;
-            }
-
-            static int lastProgress = -1;
-            if (currentProgress != lastProgress && (currentProgress % 10 == 0 || final)) {
-                sendUpdateProgress(currentProgress, "uploading");
-                oledShowMessage("Update: " + String(currentProgress) + "%");
-                delay(50);
-                lastProgress = currentProgress;
-            }
-        }
-
-        if (final) {
-            if (Update.end(true)) {
-                if (isSpiffsUpdate) {
-                    restoreJsonConfigs();
-                }
-            } else {
-                request->send(400, "application/json", "{\"success\":false,\"message\":\"Update finalization failed\"}");
-            }
-        }
-    });
-
-    updateHandler->onRequest([](AsyncWebServerRequest *request) {
-        if (Update.hasError()) {
-            request->send(400, "application/json", "{\"success\":false,\"message\":\"Update failed\"}");
-            return;
-        }
-
-        // Erste 100% Nachricht
-        ws.textAll("{\"type\":\"updateProgress\",\"progress\":100,\"status\":\"success\",\"message\":\"Update successful! Restarting device...\"}");
-        delay(2000); // Längerer Delay für die erste Nachricht
-
-        AsyncWebServerResponse *response = request->beginResponse(200, "application/json",
-            "{\"success\":true,\"message\":\"Update successful! Restarting device...\"}");
-        response->addHeader("Connection", "close");
-        request->send(response);
-
-        // Zweite 100% Nachricht zur Sicherheit
-        ws.textAll("{\"type\":\"updateProgress\",\"progress\":100,\"status\":\"success\",\"message\":\"Update successful! Restarting device...\"}");
-        delay(3000); // Noch längerer Delay vor dem Neustart
-
-        ESP.restart();
-    });
-
-    server.addHandler(updateHandler);
-}
-
 void setupWebserver(AsyncWebServer &server) {
-    // Deaktiviere alle Debug-Ausgaben
-    Serial.setDebugOutput(false);
-
-    // WebSocket-Optimierungen
-    ws.onEvent(onWsEvent);
-    ws.enable(true);
-
-    // Konfiguriere Server für große Uploads
-    server.onRequestBody([](AsyncWebServerRequest *request, uint8_t *data, size_t len, size_t index, size_t total){});
-    server.onFileUpload([](AsyncWebServerRequest *request, const String& filename, size_t index, uint8_t *data, size_t len, bool final){});
-
     // Lade die Spoolman-URL beim Booten
     spoolmanUrl = loadSpoolmanUrl();
     Serial.print("Geladene Spoolman-URL: ");
@@ -384,7 +222,7 @@ void setupWebserver(AsyncWebServer &server) {
         html.replace("{{spoolmanUrl}}", spoolmanUrl);
 
         JsonDocument doc;
-        if (loadJsonValue("/bambu_credentials.json", doc) && doc["bambu_ip"].is<String>()) {
+        if (loadJsonValue("/bambu_credentials.json", doc) && doc.containsKey("bambu_ip")) {
             String bambuIp = doc["bambu_ip"].as<String>();
             String bambuSerial = doc["bambu_serialnr"].as<String>();
             String bambuCode = doc["bambu_accesscode"].as<String>();
@@ -500,22 +338,30 @@ void setupWebserver(AsyncWebServer &server) {
         Serial.println("RFID.js gesendet");
     });
 
-    // Vereinfachter Update-Handler
+    // Route for Firmware Update
     server.on("/upgrade", HTTP_GET, [](AsyncWebServerRequest *request) {
+        // During OTA, reduce memory usage
+        ws.enable(false); // Temporarily disable WebSocket
+        ws.cleanupClients();
+
+        Serial.println("Request for /upgrade received");
         AsyncWebServerResponse *response = request->beginResponse(SPIFFS, "/upgrade.html.gz", "text/html");
         response->addHeader("Content-Encoding", "gzip");
-        response->addHeader("Cache-Control", "no-store");
+        response->addHeader("Cache-Control", CACHE_CONTROL);
         request->send(response);
     });
 
-    // Update-Handler registrieren
-    handleUpdate(server);
-    server.on("/api/version", HTTP_GET, [](AsyncWebServerRequest *request){
-        String fm_version = VERSION;
-        String jsonResponse = "{\"version\": \""+ fm_version +"\"}";
-        request->send(200, "application/json", jsonResponse);
-    });
+    server.on("/update", HTTP_POST,
+        [](AsyncWebServerRequest *request) {
+            // The response will be sent from handleOTAUpload when the upload is complete
+        },
+        [](AsyncWebServerRequest *request, const String& filename, size_t index, uint8_t *data, size_t len, bool final) {
+            // Free memory before handling update
+            ws.enable(false);
+            ws.cleanupClients();
+            handleOTAUpload(request, filename, index, data, len, final);
+        }
+    );
 
     // Fehlerbehandlung für nicht gefundene Seiten
     server.onNotFound([](AsyncWebServerRequest *request){
@@ -533,50 +379,3 @@ void setupWebserver(AsyncWebServer &server) {
     server.begin();
     Serial.println("Webserver gestartet");
 }
-
-void backupJsonConfigs() {
-    // Bambu Credentials backup
-    if (SPIFFS.exists("/bambu_credentials.json")) {
-        File file = SPIFFS.open("/bambu_credentials.json", "r");
-        if (file) {
-            bambuCredentialsBackup = file.readString();
-            file.close();
-            Serial.println("Bambu credentials backed up");
-        }
-    }
-
-    // Spoolman URL backup
-    if (SPIFFS.exists("/spoolman_url.json")) {
-        File file = SPIFFS.open("/spoolman_url.json", "r");
-        if (file) {
-            spoolmanUrlBackup = file.readString();
-            file.close();
-            Serial.println("Spoolman URL backed up");
-        }
-    }
-}
-
-void restoreJsonConfigs() {
-    // Restore Bambu credentials
-    if (bambuCredentialsBackup.length() > 0) {
-        File file = SPIFFS.open("/bambu_credentials.json", "w");
-        if (file) {
-            file.print(bambuCredentialsBackup);
-            file.close();
-            Serial.println("Bambu credentials restored");
-        }
-        bambuCredentialsBackup = ""; // Clear backup
-    }
-
-    // Restore Spoolman URL
-    if (spoolmanUrlBackup.length() > 0) {
-        File file = SPIFFS.open("/spoolman_url.json", "w");
-        if (file) {
-            file.print(spoolmanUrlBackup);
-            file.close();
-            Serial.println("Spoolman URL restored");
-        }
-        spoolmanUrlBackup = ""; // Clear backup
-    }
-}
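The replacement /update route relies on ESPAsyncWebServer's two-callback server.on() overload: the first lambda answers the request once it completes, the second receives the uploaded file chunk by chunk and forwards it to handleOTAUpload() from src/ota.cpp. A stripped-down sketch of that wiring; the server instance and port are assumptions, not the project's actual setup:

#include <ESPAsyncWebServer.h>
#include "ota.h"   // handleOTAUpload(...) added in this commit

AsyncWebServer otaServer(80);   // illustrative instance; FilaMan uses its own server and port

void registerUpdateRoute() {
    otaServer.on("/update", HTTP_POST,
        [](AsyncWebServerRequest *request) {
            // final response is sent from handleOTAUpload() once the upload ends
        },
        [](AsyncWebServerRequest *request, String filename, size_t index,
           uint8_t *data, size_t len, bool final) {
            handleOTAUpload(request, filename, index, data, len, final);
        });
}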
@@ -6,8 +6,8 @@
 #include "commonFS.h"
 #include "api.h"
 #include <ArduinoJson.h>
-#include <Update.h>
-#include <AsyncTCP.h>
+#include <ESPAsyncWebServer.h>
+#include <AsyncWebSocket.h>
 #include "bambu.h"
 #include "nfc.h"
 #include "scale.h"
@@ -17,20 +17,10 @@ extern String spoolmanUrl;
 extern AsyncWebServer server;
 extern AsyncWebSocket ws;
 
-// Server-Initialisierung und Handler
-void initWebServer();
-void handleUpload(AsyncWebServerRequest *request, String filename, size_t index, uint8_t *data, size_t len, bool final);
-void handleBody(AsyncWebServerRequest *request, uint8_t *data, size_t len, size_t index, size_t total);
 void setupWebserver(AsyncWebServer &server);
 
-// WebSocket-Funktionen
 void sendAmsData(AsyncWebSocketClient *client);
 void sendNfcData(AsyncWebSocketClient *client);
 void foundNfcTag(AsyncWebSocketClient *client, uint8_t success);
 void sendWriteResult(AsyncWebSocketClient *client, uint8_t success);
 
-// Upgrade-Funktionen
-void backupJsonConfigs();
-void restoreJsonConfigs();
-
 #endif
src/wlan.cpp (12 changed lines)
@@ -10,19 +10,9 @@ WiFiManager wm;
 bool wm_nonblocking = false;
 
 void initWiFi() {
-    // Optimierte WiFi-Einstellungen
     WiFi.mode(WIFI_STA); // explicitly set mode, esp defaults to STA+AP
-    WiFi.setSleep(false); // disable sleep mode
-    esp_wifi_set_ps(WIFI_PS_NONE);
 
-    // Maximale Sendeleistung
-    WiFi.setTxPower(WIFI_POWER_19_5dBm); // Set maximum transmit power
-
-    // Optimiere TCP/IP Stack
-    esp_wifi_set_protocol(WIFI_IF_STA, WIFI_PROTOCOL_11B | WIFI_PROTOCOL_11G | WIFI_PROTOCOL_11N);
-
-    // Aktiviere WiFi-Roaming für bessere Stabilität
-    esp_wifi_set_rssi_threshold(-80);
+    esp_wifi_set_max_tx_power(72); // Setze maximale Sendeleistung auf 20dBm
 
     if(wm_nonblocking) wm.setConfigPortalBlocking(false);
     wm.setConfigPortalTimeout(320); // Portal nach 5min schließen
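One note on the Wi-Fi tuning changes above: esp_wifi_set_max_tx_power() counts in 0.25 dBm steps, so the value 72 on the added line corresponds to 18 dBm (slightly less than the 20 dBm the comment claims), while the Arduino constant WIFI_POWER_19_5dBm on the removed line equals 78. An illustrative sketch showing the two equivalent ways to pin the transmit power:

#include <WiFi.h>
#include <esp_wifi.h>

// Both calls below pin the TX power; the ESP-IDF call counts in 0.25 dBm steps.
void pinWifiTxPower() {
    WiFi.setTxPower(WIFI_POWER_19_5dBm);   // Arduino wrapper, 19.5 dBm
    esp_wifi_set_max_tx_power(78);         // 78 * 0.25 dBm = 19.5 dBm
}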
|
Reference in New Issue
Block a user