Compare commits
2 commits: v1.3.90...920e4cfb69 (920e4cfb69, e646edd6f4)
.github/workflows/gitea-release.yml (vendored): 208 lines, file deleted
@@ -1,208 +0,0 @@
|
||||
name: Gitea Release
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
secrets:
|
||||
GITEA_TOKEN:
|
||||
description: 'Token for Gitea API access'
|
||||
required: true
|
||||
|
||||
outputs:
|
||||
version:
|
||||
description: 'The version that was released'
|
||||
value: ${{ jobs.create-release.outputs.version }}
|
||||
|
||||
jobs:
|
||||
create-release:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
version: ${{ steps.get_version.outputs.VERSION }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: '3.x'
|
||||
|
||||
- name: Install PlatformIO
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install --upgrade platformio esptool
|
||||
|
||||
- name: Install xxd
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install xxd
|
||||
|
||||
- name: Build Firmware
|
||||
run: |
|
||||
VERSION=$(grep '^version = ' platformio.ini | cut -d'"' -f2)
|
||||
|
||||
# Build firmware and SPIFFS
|
||||
echo "Building firmware and SPIFFS..."
|
||||
pio run -e esp32dev
|
||||
pio run -t buildfs
|
||||
|
||||
# Copy firmware binary
|
||||
cp .pio/build/esp32dev/firmware.bin .pio/build/esp32dev/upgrade_filaman_firmware_v${VERSION}.bin
|
||||
|
||||
# Create SPIFFS binary - direct copy without header
|
||||
cp .pio/build/esp32dev/spiffs.bin .pio/build/esp32dev/upgrade_filaman_website_v${VERSION}.bin
|
||||
|
||||
# Create full binary
|
||||
(cd .pio/build/esp32dev &&
|
||||
esptool.py --chip esp32 merge_bin \
|
||||
--fill-flash-size 4MB \
|
||||
--flash_mode dio \
|
||||
--flash_freq 40m \
|
||||
--flash_size 4MB \
|
||||
-o filaman_full_${VERSION}.bin \
|
||||
0x1000 bootloader.bin \
|
||||
0x8000 partitions.bin \
|
||||
0x10000 firmware.bin \
|
||||
0x3D0000 spiffs.bin)
|
||||
|
||||
# Verify file sizes
|
||||
echo "File sizes:"
|
||||
(cd .pio/build/esp32dev && ls -lh *.bin)
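For orientation, the offsets passed to `merge_bin` above follow the usual ESP32 4 MB layout: bootloader at 0x1000, partition table at 0x8000, application at 0x10000, SPIFFS at 0x3D0000. A minimal sketch of flashing the merged image locally; the serial port and version are placeholders, not values taken from this workflow:

```bash
#!/usr/bin/env bash
# Flash the merged full image produced by merge_bin; it already contains
# bootloader, partition table, app and SPIFFS, so it is written at offset 0x0.
VERSION=1.3.90          # placeholder, matches the version in platformio.ini
PORT=/dev/ttyUSB0       # placeholder serial port
esptool.py --chip esp32 --port "$PORT" --baud 460800 \
  write_flash --flash_mode dio --flash_freq 40m --flash_size 4MB \
  0x0 .pio/build/esp32dev/filaman_full_${VERSION}.bin
```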
|
||||
|
||||
- name: Get version from platformio.ini
|
||||
id: get_version
|
||||
run: |
|
||||
VERSION=$(grep '^version = ' platformio.ini | cut -d'"' -f2)
|
||||
echo "VERSION=$VERSION" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Generate Release Notes
|
||||
id: release_notes
|
||||
run: |
|
||||
# Get the latest tag
|
||||
LATEST_TAG=$(git for-each-ref --sort=-creatordate --format '%(refname:short)' refs/tags | sed -n '2p')
|
||||
|
||||
if [ -n "$LATEST_TAG" ]; then
|
||||
echo "CHANGES<<EOF" >> $GITHUB_OUTPUT
|
||||
echo "Changes since ${LATEST_TAG}:" >> $GITHUB_OUTPUT
|
||||
echo "" >> $GITHUB_OUTPUT
|
||||
|
||||
# Get all commits since last release with commit hash and author
|
||||
echo "### Added" >> $GITHUB_OUTPUT
|
||||
git log ${LATEST_TAG}..HEAD --pretty=format:"%h - %s (%an)" | grep -iE '^[a-f0-9]+ - (feat|add|new)' | sed 's/^[a-f0-9]* - feat: /- /' >> $GITHUB_OUTPUT || true
|
||||
echo "" >> $GITHUB_OUTPUT
|
||||
|
||||
echo "### Fixed" >> $GITHUB_OUTPUT
|
||||
git log ${LATEST_TAG}..HEAD --pretty=format:"%h - %s (%an)" | grep -iE '^[a-f0-9]+ - fix' | sed 's/^[a-f0-9]* - fix: /- /' >> $GITHUB_OUTPUT || true
|
||||
echo "" >> $GITHUB_OUTPUT
|
||||
|
||||
echo "### Changed" >> $GITHUB_OUTPUT
|
||||
git log ${LATEST_TAG}..HEAD --pretty=format:"%h - %s (%an)" | grep -ivE '^[a-f0-9]+ - (feat|fix|add|new)' | sed 's/^[a-f0-9]* - /- /' >> $GITHUB_OUTPUT || true
|
||||
echo "EOF" >> $GITHUB_OUTPUT
|
||||
else
|
||||
# First release
|
||||
echo "CHANGES<<EOF" >> $GITHUB_OUTPUT
|
||||
echo "Initial Release" >> $GITHUB_OUTPUT
|
||||
echo "" >> $GITHUB_OUTPUT
|
||||
|
||||
# Add all commits for initial release
|
||||
echo "### Added" >> $GITHUB_OUTPUT
|
||||
git log --pretty=format:"%h - %s (%an)" | grep -iE '^[a-f0-9]+ - (feat|add|new)' | sed 's/^[a-f0-9]* - feat: /- /' >> $GITHUB_OUTPUT || true
|
||||
echo "" >> $GITHUB_OUTPUT
|
||||
|
||||
echo "### Fixed" >> $GITHUB_OUTPUT
|
||||
git log --pretty=format:"%h - %s (%an)" | grep -iE '^[a-f0-9]+ - fix' | sed 's/^[a-f0-9]* - fix: /- /' >> $GITHUB_OUTPUT || true
|
||||
echo "" >> $GITHUB_OUTPUT
|
||||
|
||||
echo "### Changed" >> $GITHUB_OUTPUT
|
||||
git log --pretty=format:"%h - %s (%an)" | grep -ivE '^[a-f0-9]+ - (feat|fix|add|new)' | sed 's/^[a-f0-9]* - /- /' >> $GITHUB_OUTPUT || true
|
||||
echo "EOF" >> $GITHUB_OUTPUT
|
||||
fi
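The grep/sed pipeline above buckets commits by their conventional-commit prefix (`feat`/`add`/`new`, `fix`, everything else). A standalone sketch for testing the patterns locally; the tag name is a placeholder:

```bash
#!/usr/bin/env bash
# Categorize commits since a tag the same way the release-notes step does.
TAG=v1.3.0   # placeholder tag
LOG=$(git log "${TAG}..HEAD" --pretty=format:"%h - %s (%an)")

echo "### Added"
echo "$LOG" | grep -iE '^[a-f0-9]+ - (feat|add|new)'     | sed 's/^[a-f0-9]* - feat: /- /'
echo "### Fixed"
echo "$LOG" | grep -iE '^[a-f0-9]+ - fix'                | sed 's/^[a-f0-9]* - fix: /- /'
echo "### Changed"
echo "$LOG" | grep -ivE '^[a-f0-9]+ - (feat|fix|add|new)' | sed 's/^[a-f0-9]* - /- /'
```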
|
||||
|
||||
- name: Determine Gitea URL
|
||||
id: gitea_url
|
||||
run: |
|
||||
echo "Debug Environment:"
|
||||
echo "GITHUB_SERVER_URL=${GITHUB_SERVER_URL:-not set}"
|
||||
echo "GITEA_SERVER_URL=${GITEA_SERVER_URL:-not set}"
|
||||
echo "GITHUB_REPOSITORY=${GITHUB_REPOSITORY:-not set}"
|
||||
echo "GITEA_REPOSITORY=${GITEA_REPOSITORY:-not set}"
|
||||
echo "RUNNER_NAME=${RUNNER_NAME:-not set}"
|
||||
|
||||
# Set API URL based on environment
|
||||
if [ -n "${GITEA_ACTIONS}" ] || [ -n "${GITEA_REPOSITORY}" ] || [[ "${RUNNER_NAME}" == *"gitea"* ]]; then
|
||||
GITEA_API_URL="${GITHUB_SERVER_URL}"
|
||||
GITEA_REPO=$(echo "${GITHUB_REPOSITORY}" | cut -d'/' -f2)
|
||||
GITEA_OWNER=$(echo "${GITHUB_REPOSITORY}" | cut -d'/' -f1)
|
||||
else
|
||||
echo "Error: This workflow is only for Gitea"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "GITEA_API_URL=${GITEA_API_URL}" >> $GITHUB_OUTPUT
|
||||
echo "GITEA_REPO=${GITEA_REPO}" >> $GITHUB_OUTPUT
|
||||
echo "GITEA_OWNER=${GITEA_OWNER}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Create Gitea Release
|
||||
env:
|
||||
GITEA_TOKEN: ${{ secrets.GITEA_TOKEN }}
|
||||
GITEA_API_URL: ${{ steps.gitea_url.outputs.GITEA_API_URL }}
|
||||
GITEA_REPO: ${{ steps.gitea_url.outputs.GITEA_REPO }}
|
||||
GITEA_OWNER: ${{ steps.gitea_url.outputs.GITEA_OWNER }}
|
||||
run: |
|
||||
# Debug token (print only its length, for security)
|
||||
echo "Debug: Token length: ${#GITEA_TOKEN}"
|
||||
if [ -z "$GITEA_TOKEN" ]; then
|
||||
echo "Error: GITEA_TOKEN is empty"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
VERSION=${{ steps.get_version.outputs.VERSION }}
|
||||
cd .pio/build/esp32dev
|
||||
|
||||
# Debug output
|
||||
echo "Debug: API URL: ${GITEA_API_URL}"
|
||||
echo "Debug: Repository: ${GITEA_OWNER}/${GITEA_REPO}"
|
||||
|
||||
# First create the release without any files
|
||||
echo "Debug: Creating release..."
|
||||
RELEASE_DATA="{\"tag_name\":\"v${VERSION}\",\"name\":\"v${VERSION}\",\"body\":\"${{ steps.release_notes.outputs.CHANGES }}\"}"
|
||||
|
||||
RELEASE_RESPONSE=$(curl -s -w "\n%{http_code}" \
|
||||
-X POST \
|
||||
-H "Authorization: token ${GITEA_TOKEN}" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d "${RELEASE_DATA}" \
|
||||
"${GITEA_API_URL}/api/v1/repos/${GITEA_OWNER}/${GITEA_REPO}/releases")
|
||||
|
||||
RELEASE_STATUS=$(echo "$RELEASE_RESPONSE" | tail -n1)
|
||||
RELEASE_BODY=$(echo "$RELEASE_RESPONSE" | head -n -1)
|
||||
|
||||
if [ "$RELEASE_STATUS" != "201" ]; then
|
||||
echo "Error: Failed to create release"
|
||||
echo "Response: $RELEASE_BODY"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Extract the release ID from the response
|
||||
RELEASE_ID=$(echo "$RELEASE_BODY" | grep -o '"id":[0-9]*' | cut -d':' -f2)
|
||||
|
||||
# Upload the files one by one
|
||||
for file in upgrade_filaman_firmware_v${VERSION}.bin upgrade_filaman_website_v${VERSION}.bin filaman_full_${VERSION}.bin; do
|
||||
if [ -f "$file" ]; then
|
||||
echo "Debug: Uploading $file..."
|
||||
UPLOAD_RESPONSE=$(curl -s -w "\n%{http_code}" \
|
||||
-X POST \
|
||||
-H "Authorization: token ${GITEA_TOKEN}" \
|
||||
-H "Content-Type: application/octet-stream" \
|
||||
--data-binary @"$file" \
|
||||
"${GITEA_API_URL}/api/v1/repos/${GITEA_OWNER}/${GITEA_REPO}/releases/${RELEASE_ID}/assets?name=${file}")
|
||||
|
||||
UPLOAD_STATUS=$(echo "$UPLOAD_RESPONSE" | tail -n1)
|
||||
if [ "$UPLOAD_STATUS" != "201" ]; then
|
||||
echo "Warning: Failed to upload $file"
|
||||
echo "Response: $(echo "$UPLOAD_RESPONSE" | head -n -1)"
|
||||
else
|
||||
echo "Successfully uploaded $file"
|
||||
fi
|
||||
fi
|
||||
done
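If the uploads need to be verified, the Gitea API can also list a release's attachments. A small sketch that assumes the same environment variables as the step above and that `jq` is available on the runner:

```bash
#!/usr/bin/env bash
# List the names of the assets attached to the release created above.
curl -s -H "Authorization: token ${GITEA_TOKEN}" \
  "${GITEA_API_URL}/api/v1/repos/${GITEA_OWNER}/${GITEA_REPO}/releases/${RELEASE_ID}/assets" \
  | jq -r '.[].name'
```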
|
.github/workflows/github-release.yml (vendored): 185 lines, file deleted
@@ -1,185 +0,0 @@
|
||||
name: GitHub Release
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
secrets:
|
||||
RELEASE_TOKEN:
|
||||
description: 'GitHub token for release creation'
|
||||
required: true
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
jobs:
|
||||
create-release:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: '3.x'
|
||||
|
||||
- name: Install PlatformIO
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install --upgrade platformio esptool
|
||||
|
||||
- name: Install xxd
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install xxd
|
||||
|
||||
- name: Build Firmware
|
||||
run: |
|
||||
VERSION=$(grep '^version = ' platformio.ini | cut -d'"' -f2)
|
||||
|
||||
# Always build firmware and SPIFFS
|
||||
echo "Building firmware and SPIFFS..."
|
||||
pio run -e esp32dev
|
||||
pio run -t buildfs
|
||||
|
||||
# Copy firmware binary
|
||||
cp .pio/build/esp32dev/firmware.bin .pio/build/esp32dev/upgrade_filaman_firmware_v${VERSION}.bin
|
||||
|
||||
# Create SPIFFS binary - direct copy without header
|
||||
cp .pio/build/esp32dev/spiffs.bin .pio/build/esp32dev/upgrade_filaman_website_v${VERSION}.bin
|
||||
|
||||
# Create full binary (always)
|
||||
(cd .pio/build/esp32dev &&
|
||||
esptool.py --chip esp32 merge_bin \
|
||||
--fill-flash-size 4MB \
|
||||
--flash_mode dio \
|
||||
--flash_freq 40m \
|
||||
--flash_size 4MB \
|
||||
-o filaman_full_${VERSION}.bin \
|
||||
0x1000 bootloader.bin \
|
||||
0x8000 partitions.bin \
|
||||
0x10000 firmware.bin \
|
||||
0x3D0000 spiffs.bin)
|
||||
|
||||
# Verify file sizes
|
||||
echo "File sizes:"
|
||||
(cd .pio/build/esp32dev && ls -lh *.bin)
|
||||
|
||||
- name: Get version from platformio.ini
|
||||
id: get_version
|
||||
run: |
|
||||
VERSION=$(grep '^version = ' platformio.ini | cut -d'"' -f2)
|
||||
echo "VERSION=$VERSION" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Generate Release Notes
|
||||
id: release_notes
|
||||
run: |
|
||||
# Get the latest tag
|
||||
LATEST_TAG=$(git for-each-ref --sort=-creatordate --format '%(refname:short)' refs/tags | sed -n '2p')
|
||||
|
||||
if [ -n "$LATEST_TAG" ]; then
|
||||
echo "CHANGES<<EOF" >> $GITHUB_OUTPUT
|
||||
echo "Changes since ${LATEST_TAG}:" >> $GITHUB_OUTPUT
|
||||
echo "" >> $GITHUB_OUTPUT
|
||||
|
||||
# Get all commits since last release with commit hash and author
|
||||
echo "### Added" >> $GITHUB_OUTPUT
|
||||
git log ${LATEST_TAG}..HEAD --pretty=format:"%h - %s (%an)" | grep -iE '^[a-f0-9]+ - (feat|add|new)' | sed 's/^[a-f0-9]* - feat: /- /' >> $GITHUB_OUTPUT || true
|
||||
echo "" >> $GITHUB_OUTPUT
|
||||
|
||||
echo "### Fixed" >> $GITHUB_OUTPUT
|
||||
git log ${LATEST_TAG}..HEAD --pretty=format:"%h - %s (%an)" | grep -iE '^[a-f0-9]+ - fix' | sed 's/^[a-f0-9]* - fix: /- /' >> $GITHUB_OUTPUT || true
|
||||
echo "" >> $GITHUB_OUTPUT
|
||||
|
||||
echo "### Changed" >> $GITHUB_OUTPUT
|
||||
git log ${LATEST_TAG}..HEAD --pretty=format:"%h - %s (%an)" | grep -ivE '^[a-f0-9]+ - (feat|fix|add|new)' | sed 's/^[a-f0-9]* - /- /' >> $GITHUB_OUTPUT || true
|
||||
echo "EOF" >> $GITHUB_OUTPUT
|
||||
else
|
||||
# First release
|
||||
echo "CHANGES<<EOF" >> $GITHUB_OUTPUT
|
||||
echo "Initial Release" >> $GITHUB_OUTPUT
|
||||
echo "" >> $GITHUB_OUTPUT
|
||||
|
||||
# Add all commits for initial release
|
||||
echo "### Added" >> $GITHUB_OUTPUT
|
||||
git log --pretty=format:"%h - %s (%an)" | grep -iE '^[a-f0-9]+ - (feat|add|new)' | sed 's/^[a-f0-9]* - feat: /- /' >> $GITHUB_OUTPUT || true
|
||||
echo "" >> $GITHUB_OUTPUT
|
||||
|
||||
echo "### Fixed" >> $GITHUB_OUTPUT
|
||||
git log --pretty=format:"%h - %s (%an)" | grep -iE '^[a-f0-9]+ - fix' | sed 's/^[a-f0-9]* - fix: /- /' >> $GITHUB_OUTPUT || true
|
||||
echo "" >> $GITHUB_OUTPUT
|
||||
|
||||
echo "### Changed" >> $GITHUB_OUTPUT
|
||||
git log --pretty=format:"%h - %s (%an)" | grep -ivE '^[a-f0-9]+ - (feat|fix|add|new)' | sed 's/^[a-f0-9]* - /- /' >> $GITHUB_OUTPUT || true
|
||||
echo "EOF" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Create GitHub Release
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.RELEASE_TOKEN }}
|
||||
run: |
|
||||
VERSION=${{ steps.get_version.outputs.VERSION }}
|
||||
cd .pio/build/esp32dev
|
||||
|
||||
# Create release with available files
|
||||
FILES_TO_UPLOAD=""
|
||||
|
||||
# Always add firmware
|
||||
if [ -f "upgrade_filaman_firmware_v${VERSION}.bin" ]; then
|
||||
FILES_TO_UPLOAD="$FILES_TO_UPLOAD upgrade_filaman_firmware_v${VERSION}.bin"
|
||||
fi
|
||||
|
||||
# Add SPIFFS and full binary only if they exist
|
||||
if [ -f "upgrade_filaman_website_v${VERSION}.bin" ]; then
|
||||
FILES_TO_UPLOAD="$FILES_TO_UPLOAD upgrade_filaman_website_v${VERSION}.bin"
|
||||
fi
|
||||
|
||||
if [ -f "filaman_full_${VERSION}.bin" ]; then
|
||||
FILES_TO_UPLOAD="$FILES_TO_UPLOAD filaman_full_${VERSION}.bin"
|
||||
fi
|
||||
|
||||
# Create release with available files
|
||||
if [ -n "$FILES_TO_UPLOAD" ]; then
|
||||
gh release create "v${VERSION}" \
|
||||
--title "Release ${VERSION}" \
|
||||
--notes "${{ steps.release_notes.outputs.CHANGES }}" \
|
||||
$FILES_TO_UPLOAD
|
||||
else
|
||||
echo "Error: No files found to upload"
|
||||
exit 1
|
||||
fi
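As a quick follow-up check, the GitHub CLI can show the release that was just created and pull one of its assets back down. A sketch under the same `GH_TOKEN` assumption; the version is a placeholder:

```bash
#!/usr/bin/env bash
# Inspect the freshly created release and re-download the full image.
VERSION=1.3.90   # placeholder for steps.get_version.outputs.VERSION
gh release view "v${VERSION}"
gh release download "v${VERSION}" --pattern "filaman_full_*.bin" --dir /tmp
```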
|
||||
|
||||
- name: Install lftp
|
||||
run: sudo apt-get install -y lftp
|
||||
|
||||
- name: Upload Firmware via FTP
|
||||
if: success()
|
||||
env:
|
||||
FTP_PASSWORD: ${{ vars.FTP_PASSWORD }}
|
||||
FTP_USER: ${{ vars.FTP_USER }}
|
||||
FTP_HOST: ${{ vars.FTP_HOST }}
|
||||
VERSION: ${{ steps.get_version.outputs.VERSION }}
|
||||
run: |
|
||||
echo "Environment variables:"
|
||||
env | grep -E '^FTP_' | while read -r line; do
|
||||
var_name=$(echo "$line" | cut -d= -f1)
|
||||
var_value=$(echo "$line" | cut -d= -f2-)
|
||||
echo "$var_name is $(if [ -n "$var_value" ]; then echo "set"; else echo "empty"; fi)"
|
||||
done
|
||||
|
||||
cd .pio/build/esp32dev
|
||||
if [ -n "$FTP_USER" ] && [ -n "$FTP_PASSWORD" ] && [ -n "$FTP_HOST" ]; then
|
||||
echo "All FTP credentials are present, attempting upload..."
|
||||
lftp -c "set ssl:verify-certificate no; \
|
||||
set ftp:ssl-protect-data true; \
|
||||
set ftp:ssl-force true; \
|
||||
set ssl:check-hostname false; \
|
||||
set ftp:ssl-auth TLS; \
|
||||
open -u $FTP_USER,$FTP_PASSWORD $FTP_HOST; \
|
||||
put -O / filaman_full_${VERSION}.bin -o filaman_full.bin"
|
||||
else
|
||||
echo "Error: Some FTP credentials are missing"
|
||||
exit 1
|
||||
fi
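To confirm the transfer, the same lftp options can be reused for a read-only listing of the uploaded file. A sketch under the same credential assumptions as the step above:

```bash
#!/usr/bin/env bash
# List the uploaded firmware on the FTP server to verify the transfer.
lftp -c "set ssl:verify-certificate no; \
         set ftp:ssl-force true; \
         open -u $FTP_USER,$FTP_PASSWORD $FTP_HOST; \
         cls -l filaman_full.bin"
```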
|
.github/workflows/providers/gitea-release.yml (vendored, new file): 103 lines added
@@ -0,0 +1,103 @@
|
||||
name: Gitea Release
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- 'v*'
|
||||
|
||||
permissions:
|
||||
contents: write # Required for creating releases
|
||||
issues: read # Required for reading changelog
|
||||
pull-requests: read # Required for reading changelog
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: '3.x'
|
||||
|
||||
- name: Install PlatformIO
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install --upgrade platformio
|
||||
|
||||
- name: Build Firmware
|
||||
run: |
|
||||
pio run -t buildfs # Build SPIFFS
|
||||
pio run # Build firmware
|
||||
|
||||
- name: Install esptool
|
||||
run: |
|
||||
pip install esptool
|
||||
|
||||
- name: Merge firmware and SPIFFS
|
||||
run: |
|
||||
esptool.py --chip esp32 merge_bin \
|
||||
--flash_mode dio \
|
||||
--flash_freq 40m \
|
||||
--flash_size 4MB \
|
||||
-o .pio/build/esp32dev/filaman_full.bin \
|
||||
0x1000 .pio/build/esp32dev/bootloader.bin \
|
||||
0x8000 .pio/build/esp32dev/partitions.bin \
|
||||
0x10000 .pio/build/esp32dev/firmware.bin \
|
||||
0x290000 .pio/build/esp32dev/spiffs.bin
|
||||
|
||||
- name: Prepare OTA firmware
|
||||
run: |
|
||||
cp .pio/build/esp32dev/firmware.bin .pio/build/esp32dev/filaman_ota.bin
|
||||
|
||||
- name: Get version from tag
|
||||
id: get_version
|
||||
run: echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Read CHANGELOG.md
|
||||
id: changelog
|
||||
run: |
|
||||
CHANGELOG=$(awk "/## \\[${{ steps.get_version.outputs.VERSION }}\\]/{p=1;print;next} /## \\[/{p=0} p" CHANGELOG.md)
|
||||
echo "CHANGES<<EOF" >> $GITHUB_OUTPUT
|
||||
echo "$CHANGELOG" >> $GITHUB_OUTPUT
|
||||
echo "EOF" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Create Release
|
||||
env:
|
||||
GITEA_TOKEN: ${{ secrets.GITEA_TOKEN }}
|
||||
GITEA_API_URL: ${{ secrets.GITEA_API_URL }}
|
||||
GITEA_REPOSITORY: ${{ secrets.GITEA_REPOSITORY }}
|
||||
run: |
|
||||
# Create release using Gitea API
|
||||
RESPONSE=$(curl -X POST \
|
||||
-H "Authorization: token ${GITEA_TOKEN}" \
|
||||
-H "Content-Type: application/json" \
|
||||
-H "accept: application/json" \
|
||||
"${GITEA_API_URL}/repos/${GITEA_REPOSITORY}/releases" \
|
||||
-d '{
|
||||
"tag_name": "${{ github.ref_name }}",
|
||||
"name": "Release ${{ steps.get_version.outputs.VERSION }}",
|
||||
"body": "${{ steps.changelog.outputs.CHANGES }}",
|
||||
"draft": false,
|
||||
"prerelease": false
|
||||
}')
|
||||
|
||||
# Extract release ID from response
|
||||
RELEASE_ID=$(echo $RESPONSE | jq -r .id)
|
||||
|
||||
# Upload full firmware
|
||||
curl -X POST \
|
||||
-H "Authorization: token ${GITEA_TOKEN}" \
|
||||
-H "Content-Type: application/octet-stream" \
|
||||
"${GITEA_API_URL}/repos/${GITEA_REPOSITORY}/releases/${RELEASE_ID}/assets?name=filaman_full.bin" \
|
||||
--data-binary @.pio/build/esp32dev/filaman_full.bin
|
||||
|
||||
# Upload OTA firmware
|
||||
curl -X POST \
|
||||
-H "Authorization: token ${GITEA_TOKEN}" \
|
||||
-H "Content-Type: application/octet-stream" \
|
||||
"${GITEA_API_URL}/repos/${GITEA_REPOSITORY}/releases/${RELEASE_ID}/assets?name=filaman_ota.bin" \
|
||||
--data-binary @.pio/build/esp32dev/filaman_ota.bin
|
.github/workflows/providers/github-release.yml (vendored, new file): 85 lines added
@@ -0,0 +1,85 @@
|
||||
name: GitHub Release
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- 'v*'
|
||||
|
||||
permissions:
|
||||
contents: write # Required for creating releases
|
||||
issues: read # Required for reading changelog
|
||||
pull-requests: read # Required for reading changelog
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write # Required for creating releases at job level
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: '3.x'
|
||||
|
||||
- name: Install PlatformIO
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install --upgrade platformio
|
||||
|
||||
- name: Build Firmware
|
||||
run: |
|
||||
pio run -t buildfs # Build SPIFFS
|
||||
pio run # Build firmware
|
||||
|
||||
- name: Install esptool
|
||||
run: |
|
||||
pip install esptool
|
||||
|
||||
- name: Merge firmware and SPIFFS
|
||||
run: |
|
||||
esptool.py --chip esp32 merge_bin \
|
||||
--flash_mode dio \
|
||||
--flash_freq 40m \
|
||||
--flash_size 4MB \
|
||||
-o .pio/build/esp32dev/filaman_full.bin \
|
||||
0x1000 .pio/build/esp32dev/bootloader.bin \
|
||||
0x8000 .pio/build/esp32dev/partitions.bin \
|
||||
0x10000 .pio/build/esp32dev/firmware.bin \
|
||||
0x290000 .pio/build/esp32dev/spiffs.bin
|
||||
|
||||
- name: Prepare OTA firmware
|
||||
run: |
|
||||
# Use PlatformIO to create a proper OTA image
|
||||
cp .pio/build/esp32dev/firmware.bin .pio/build/esp32dev/filaman_ota.bin
|
||||
|
||||
- name: Get version from tag
|
||||
id: get_version
|
||||
run: echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Read CHANGELOG.md
|
||||
id: changelog
|
||||
run: |
|
||||
CHANGELOG=$(awk "/## \\[${{ steps.get_version.outputs.VERSION }}\\]/{p=1;print;next} /## \\[/{p=0} p" CHANGELOG.md)
|
||||
echo "CHANGES<<EOF" >> $GITHUB_OUTPUT
|
||||
echo "$CHANGELOG" >> $GITHUB_OUTPUT
|
||||
echo "EOF" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Install and Configure GitHub CLI
|
||||
run: |
|
||||
curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | sudo dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg \
|
||||
&& sudo chmod go+r /usr/share/keyrings/githubcli-archive-keyring.gpg \
|
||||
&& echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null \
|
||||
&& sudo apt update \
|
||||
&& sudo apt install gh -y
|
||||
|
||||
- name: Create Release with GitHub CLI
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
gh release create "${{ github.ref_name }}" \
|
||||
--title "Release ${{ steps.get_version.outputs.VERSION }}" \
|
||||
--notes "${{ steps.changelog.outputs.CHANGES }}" \
|
||||
".pio/build/esp32dev/filaman_full.bin#filaman_full.bin" \
|
||||
".pio/build/esp32dev/filaman_ota.bin#filaman_ota.bin"
|
.github/workflows/release.yml (vendored): 45 lines changed
@@ -1,41 +1,32 @@
|
||||
name: Release Workflow
|
||||
name: Release
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- 'v*'
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
jobs:
|
||||
detect-provider:
|
||||
detect-and-run:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
provider: ${{ steps.provider.outputs.provider }}
|
||||
steps:
|
||||
- name: Determine CI Provider
|
||||
id: provider
|
||||
shell: bash
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Determine hosting platform
|
||||
id: platform
|
||||
run: |
|
||||
if [ -n "${GITEA_ACTIONS}" ] || [ -n "${GITEA_REPOSITORY}" ] || [[ "${RUNNER_NAME}" == *"gitea"* ]]; then
|
||||
echo "provider=gitea" >> "$GITHUB_OUTPUT"
|
||||
if [[ "$GITHUB_SERVER_URL" == "https://github.com" ]]; then
|
||||
echo "platform=github" >> $GITHUB_OUTPUT
|
||||
elif [[ "$CI_SERVER_URL" == *"gitlab"* ]]; then
|
||||
echo "platform=gitlab" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "provider=github" >> "$GITHUB_OUTPUT"
|
||||
echo "platform=gitea" >> $GITHUB_OUTPUT
|
||||
fi
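The detection keys off variables that the hosting platform injects into the job environment. A quick way to dry-run the logic outside CI, with placeholder values instead of real CI data:

```bash
#!/usr/bin/env bash
# Dry-run the platform detection with simulated environment variables.
export GITHUB_SERVER_URL="https://github.com"   # placeholder
export CI_SERVER_URL=""                         # placeholder
if [[ "$GITHUB_SERVER_URL" == "https://github.com" ]]; then
  echo "platform=github"
elif [[ "$CI_SERVER_URL" == *"gitlab"* ]]; then
  echo "platform=gitlab"
else
  echo "platform=gitea"
fi
```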
|
||||
|
||||
github-release:
|
||||
needs: detect-provider
|
||||
permissions:
|
||||
contents: write
|
||||
if: needs.detect-provider.outputs.provider == 'github'
|
||||
uses: ./.github/workflows/github-release.yml
|
||||
secrets:
|
||||
RELEASE_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Run GitHub Release
|
||||
if: steps.platform.outputs.platform == 'github'
|
||||
uses: ./.github/workflows/providers/github-release.yml
|
||||
|
||||
gitea-release:
|
||||
needs: detect-provider
|
||||
if: needs.detect-provider.outputs.provider == 'gitea'
|
||||
uses: ./.github/workflows/gitea-release.yml
|
||||
secrets:
|
||||
GITEA_TOKEN: ${{ secrets.GITEA_TOKEN }}
|
||||
- name: Run Gitea Release
|
||||
if: steps.platform.outputs.platform == 'gitea'
|
||||
uses: ./.github/workflows/providers/gitea-release.yml
|
CHANGELOG.md: 1246 lines changed (diff suppressed because it is too large)
README.de.md: 34 lines changed
@@ -53,14 +53,14 @@ Deutsches Erklärvideo: [Youtube](https://youtu.be/uNDe2wh9SS8?si=b-jYx4I1w62zaO
|
||||
### Komponenten
|
||||
- **ESP32 Entwicklungsboard:** Jede ESP32-Variante.
|
||||
[Amazon Link](https://amzn.eu/d/aXThslf)
|
||||
- **HX711 5kg Wägezellen-Verstärker:** Für Gewichtsmessung.
|
||||
[Amazon Link](https://amzn.eu/d/06A0DLb)
|
||||
- **OLED 0.96 Zoll I2C weiß/gelb Display:** 128x64 SSD1306.
|
||||
[Amazon Link](https://amzn.eu/d/0AuBp2c)
|
||||
- **PN532 NFC NXP RFID-Modul V3:** Für NFC-Tag-Operationen.
|
||||
[Amazon Link](https://amzn.eu/d/jfIuQXb)
|
||||
- **NFC Tags Ntag215:** RFID Tag
|
||||
[Amazon Link](https://amzn.eu/d/9Z6mXc1)
|
||||
- **HX711 Wägezellen-Verstärker:** Für Gewichtsmessung.
|
||||
[Amazon Link](https://amzn.eu/d/1wZ4v0x)
|
||||
- **OLED Display:** 128x64 SSD1306.
|
||||
[Amazon Link](https://amzn.eu/d/dozAYDU)
|
||||
- **PN532 NFC Modul:** Für NFC-Tag-Operationen.
|
||||
[Amazon Link](https://amzn.eu/d/8205DDh)
|
||||
- **NFC-Tag:** NTAG215
|
||||
[Amazon Link](https://amzn.eu/d/fywy4c4)
|
||||
|
||||
### Pin-Konfiguration
|
||||
| Komponente | ESP32 Pin |
|
||||
@@ -76,10 +76,6 @@ Deutsches Erklärvideo: [Youtube](https://youtu.be/uNDe2wh9SS8?si=b-jYx4I1w62zaO
|
||||
| PN532 MISO | 12 |
|
||||
| PN532 CS/SS | 15 |
|
||||
|
||||
Ich nutze die HSPI default PINs + IRQ und RESET am PN532
|
||||
|
||||

|
||||
|
||||
## Software-Abhängigkeiten
|
||||
|
||||
### ESP32-Bibliotheken
|
||||
@@ -106,20 +102,6 @@ Ich nutze die HSPI default PINs + IRQ und RESET am PN532
|
||||
- Verbindungskabel
|
||||
|
||||
### Schritt-für-Schritt Installation
|
||||
## Einfache Installation
|
||||
1. **Gehe auf** https://www.filaman.app/installer.html
|
||||
|
||||
2. **Stecke dein ESP an den Rechner und klicke Connect**
|
||||
|
||||
3. **Wähle deinen Device Port und klicke Install**
|
||||
|
||||
4. **Ersteinrichtung:**
|
||||
- Mit dem "FilaMan" WLAN-Zugangspunkt verbinden.
|
||||
- WLAN-Einstellungen über das Konfigurationsportal vornehmen.
|
||||
- Weboberfläche unter `http://filaman.local` oder der IP-Adresse aufrufen.
|
||||
|
||||
## Compile by yourself
|
||||
|
||||
1. **Repository klonen:**
|
||||
```bash
|
||||
git clone https://github.com/ManuelW77/Filaman.git
|
||||
|
README.md: 34 lines changed
@@ -56,14 +56,14 @@ german explanatory video: [Youtube](https://youtu.be/uNDe2wh9SS8?si=b-jYx4I1w62z
|
||||
### Components
|
||||
- **ESP32 Development Board:** Any ESP32 variant.
|
||||
[Amazon Link](https://amzn.eu/d/aXThslf)
|
||||
- **HX711 5kg Load Cell Amplifier:** For weight measurement.
|
||||
[Amazon Link](https://amzn.eu/d/06A0DLb)
|
||||
- **OLED 0.96 Zoll I2C white/yellow Display:** 128x64 SSD1306.
|
||||
[Amazon Link](https://amzn.eu/d/0AuBp2c)
|
||||
- **PN532 NFC NXP RFID-Modul V3:** For NFC tag operations.
|
||||
[Amazon Link](https://amzn.eu/d/jfIuQXb)
|
||||
- **NFC Tags Ntag215:** RFID Tag
|
||||
[Amazon Link](https://amzn.eu/d/9Z6mXc1)
|
||||
- **HX711 Load Cell Amplifier:** For weight measurement.
|
||||
[Amazon Link](https://amzn.eu/d/1wZ4v0x)
|
||||
- **OLED Display:** 128x64 SSD1306.
|
||||
[Amazon Link](https://amzn.eu/d/dozAYDU)
|
||||
- **PN532 NFC Module:** For NFC tag operations.
|
||||
[Amazon Link](https://amzn.eu/d/8205DDh)
|
||||
- **NFC-Tag:** NTAG215
|
||||
[Amazon Link](https://amzn.eu/d/fywy4c4)
|
||||
|
||||
|
||||
### Pin Configuration
|
||||
@@ -80,10 +80,6 @@ german explanatory video: [Youtube](https://youtu.be/uNDe2wh9SS8?si=b-jYx4I1w62z
|
||||
| PN532 MISO | 12 |
|
||||
| PN532 CS/SS | 15 |
|
||||
|
||||
I use the default HSPI pins, plus IRQ and RESET on the PN532
|
||||
|
||||

|
||||
|
||||
## Software Dependencies
|
||||
|
||||
### ESP32 Libraries
|
||||
@@ -110,19 +106,6 @@ I use the HSPI default PINs + IRQ and RESET at the PN532
|
||||
- Connecting wires
|
||||
|
||||
### Step-by-Step Installation
|
||||
## Easy Installation
|
||||
1. **Go to** https://www.filaman.app/installer.html
|
||||
|
||||
2. **Plug your device in and push the Connect button**
|
||||
|
||||
3. **Select your Device Port and push Install**
|
||||
|
||||
4. **Initial Setup:**
|
||||
- Connect to the "FilaMan" WiFi access point.
|
||||
- Configure WiFi settings through the captive portal.
|
||||
- Access the web interface at `http://filaman.local` or the IP address (a quick reachability check is sketched below).
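Once the device is on the network, the version endpoint that the web UI itself queries can double as a connectivity check from a shell. A minimal sketch; substitute the device IP if mDNS does not resolve `filaman.local` on your network:

```bash
#!/usr/bin/env bash
# Query the FilaMan version endpoint used by the web UI (returns JSON).
curl -s http://filaman.local/api/version
```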
|
||||
|
||||
## Compile by yourself
|
||||
1. **Clone the Repository:**
|
||||
```bash
|
||||
git clone https://github.com/ManuelW77/Filaman.git
|
||||
@@ -141,6 +124,7 @@ I use the HSPI default PINs + IRQ and RESET at the PN532
|
||||
- Configure WiFi settings through the captive portal.
|
||||
- Access the web interface at `http://filaman.local` or the IP address.
|
||||
|
||||
|
||||
## Documentation
|
||||
|
||||
### Relevant Links
|
||||
|
@@ -6,24 +6,13 @@
|
||||
<title>FilaMan - Filament Management Tool</title>
|
||||
<link rel="icon" type="image/png" href="/favicon.ico">
|
||||
<link rel="stylesheet" href="style.css">
|
||||
<script>
|
||||
fetch('/api/version')
|
||||
.then(response => response.json())
|
||||
.then(data => {
|
||||
const versionSpan = document.querySelector('.version');
|
||||
if (versionSpan) {
|
||||
versionSpan.textContent = 'v' + data.version;
|
||||
}
|
||||
})
|
||||
.catch(error => console.error('Error fetching version:', error));
|
||||
</script>
|
||||
</head>
|
||||
<body>
|
||||
<div class="navbar">
|
||||
<div style="display: flex; align-items: center; gap: 2rem;">
|
||||
<img src="/logo.png" alt="FilaMan Logo" class="logo">
|
||||
<div class="logo-text">
|
||||
<h1>FilaMan<span class="version"></span></h1>
|
||||
<h1>FilaMan<span class="version">v1.2.4</span></h1>
|
||||
<h4>Filament Management Tool</h4>
|
||||
</div>
|
||||
</div>
|
||||
|
@@ -6,24 +6,13 @@
|
||||
<title>FilaMan - Filament Management Tool</title>
|
||||
<link rel="icon" type="image/png" href="/favicon.ico">
|
||||
<link rel="stylesheet" href="style.css">
|
||||
<script>
|
||||
fetch('/api/version')
|
||||
.then(response => response.json())
|
||||
.then(data => {
|
||||
const versionSpan = document.querySelector('.version');
|
||||
if (versionSpan) {
|
||||
versionSpan.textContent = 'v' + data.version;
|
||||
}
|
||||
})
|
||||
.catch(error => console.error('Error fetching version:', error));
|
||||
</script>
|
||||
</head>
|
||||
<body>
|
||||
<div class="navbar">
|
||||
<div style="display: flex; align-items: center; gap: 2rem;">
|
||||
<img src="/logo.png" alt="FilaMan Logo" class="logo">
|
||||
<div class="logo-text">
|
||||
<h1>FilaMan<span class="version"></span></h1>
|
||||
<h1>FilaMan<span class="version">v1.2.4</span></h1>
|
||||
<h4>Filament Management Tool</h4>
|
||||
</div>
|
||||
</div>
|
||||
@@ -47,7 +36,7 @@
|
||||
|
||||
<!-- head -->
|
||||
|
||||
<div class="content">
|
||||
<div class="container">
|
||||
<h1>FilaMan</h1>
|
||||
<p>Filament Management Tool</p>
|
||||
<p>Your smart solution for <strong>Filament Management</strong> in 3D printing.</p>
|
||||
@@ -55,11 +44,10 @@
|
||||
<h2>About FilaMan</h2>
|
||||
<p>
|
||||
FilaMan is a tool designed to simplify filament spool management. It allows you to identify and weigh filament spools,
|
||||
automatically sync data with the self-hosted <a href="https://github.com/Donkie/Spoolman" target="_blank">Spoolman</a> platform.
|
||||
automatically sync data with the self-hosted <a href="https://github.com/Donkie/Spoolman" target="_blank">Spoolman</a> platform,
|
||||
and ensure compatibility with <a href="https://github.com/spuder/OpenSpool" target="_blank">OpenSpool</a> for Bambu printers.
|
||||
</p>
|
||||
|
||||
<p>Get more information at <a href="https://www.filaman.app" target="_blank">https://www.filaman.app</a> and <a href="https://github.com/ManuelW77/Filaman" target="_blank">https://github.com/ManuelW77/Filaman</a>.</p>
|
||||
|
||||
<div class="features">
|
||||
<div class="feature">
|
||||
<h3>Spool Identification</h3>
|
||||
@@ -74,6 +62,12 @@
|
||||
<p>Works with OpenSpool to recognize and activate spools on Bambu printers.</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<h2>Future Plans</h2>
|
||||
<p>
|
||||
We are working on expanding compatibility to support smaller NFC tags like NTag213
|
||||
and developing custom software to enhance the OpenSpool experience.
|
||||
</p>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
|
@@ -6,24 +6,13 @@
|
||||
<title>FilaMan - Filament Management Tool</title>
|
||||
<link rel="icon" type="image/png" href="/favicon.ico">
|
||||
<link rel="stylesheet" href="style.css">
|
||||
<script>
|
||||
fetch('/api/version')
|
||||
.then(response => response.json())
|
||||
.then(data => {
|
||||
const versionSpan = document.querySelector('.version');
|
||||
if (versionSpan) {
|
||||
versionSpan.textContent = 'v' + data.version;
|
||||
}
|
||||
})
|
||||
.catch(error => console.error('Error fetching version:', error));
|
||||
</script>
|
||||
</head>
|
||||
<body>
|
||||
<div class="navbar">
|
||||
<div style="display: flex; align-items: center; gap: 2rem;">
|
||||
<img src="/logo.png" alt="FilaMan Logo" class="logo">
|
||||
<div class="logo-text">
|
||||
<h1>FilaMan<span class="version"></span></h1>
|
||||
<h1>FilaMan<span class="version">v1.2.4</span></h1>
|
||||
<h4>Filament Management Tool</h4>
|
||||
</div>
|
||||
</div>
|
||||
|
@@ -6,24 +6,13 @@
|
||||
<title>FilaMan - Filament Management Tool</title>
|
||||
<link rel="icon" type="image/png" href="/favicon.ico">
|
||||
<link rel="stylesheet" href="style.css">
|
||||
<script>
|
||||
fetch('/api/version')
|
||||
.then(response => response.json())
|
||||
.then(data => {
|
||||
const versionSpan = document.querySelector('.version');
|
||||
if (versionSpan) {
|
||||
versionSpan.textContent = 'v' + data.version;
|
||||
}
|
||||
})
|
||||
.catch(error => console.error('Error fetching version:', error));
|
||||
</script>
|
||||
</head>
|
||||
<body>
|
||||
<div class="navbar">
|
||||
<div style="display: flex; align-items: center; gap: 2rem;">
|
||||
<img src="/logo.png" alt="FilaMan Logo" class="logo">
|
||||
<div class="logo-text">
|
||||
<h1>FilaMan<span class="version"></span></h1>
|
||||
<h1>FilaMan<span class="version">v1.2.4</span></h1>
|
||||
<h4>Filament Management Tool</h4>
|
||||
</div>
|
||||
</div>
|
||||
@@ -95,19 +84,12 @@
|
||||
|
||||
<div class="content">
|
||||
<h1>Spoolman API URL / Bambu Credentials</h1>
|
||||
|
||||
<div class="card">
|
||||
<div class="card-body">
|
||||
<h5 class="card-title">Set URL/IP to your Spoolman-Instanz</h5>
|
||||
<label for="spoolmanUrl">Set URL/IP to your Spoolman-Instanz:</label>
|
||||
<input type="text" id="spoolmanUrl" placeholder="http://ip-or-url-of-your-spoolman-instanz:port">
|
||||
<button onclick="checkSpoolmanInstance()">Save Spoolman URL</button>
|
||||
<p id="statusMessage"></p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="card">
|
||||
<div class="card-body">
|
||||
<h5 class="card-title">Bambu Lab Printer Credentials</h5>
|
||||
<h2>Bambu Lab Printer Credentials</h2>
|
||||
<div class="bambu-settings">
|
||||
<div class="input-group">
|
||||
<label for="bambuIp">Bambu Drucker IP-Adresse:</label>
|
||||
@@ -125,7 +107,5 @@
|
||||
<p id="bambuStatusMessage"></p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
|
@@ -279,10 +279,9 @@ a:hover {
|
||||
|
||||
/* Card style for visual separation */
|
||||
.card {
|
||||
background: var(--primary-color);
|
||||
width: 500px;
|
||||
background: #f9f9f9;
|
||||
padding: 15px;
|
||||
margin: 20px auto;
|
||||
margin: 20px 0;
|
||||
border-radius: 8px;
|
||||
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
|
||||
}
|
||||
@@ -960,6 +959,7 @@ input[type="submit"]:disabled,
|
||||
|
||||
/* Bambu settings extension */
|
||||
.bambu-settings {
|
||||
background: white;
|
||||
padding: 20px;
|
||||
border-radius: 8px;
|
||||
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
|
||||
@@ -1051,10 +1051,9 @@ input[type="submit"]:disabled,
|
||||
}
|
||||
.update-form {
|
||||
background: var(--primary-color);
|
||||
box-shadow: 0 4px 8px rgba(0, 0, 0, 0.05);
|
||||
border: var(--glass-border);
|
||||
padding: 20px;
|
||||
border-radius: 8px;
|
||||
box-shadow: 0 2px 4px rgba(0,0,0,0.1);
|
||||
margin: 0 auto;
|
||||
width: 400px;
|
||||
text-align: center;
|
||||
@@ -1065,7 +1064,7 @@ input[type="submit"]:disabled,
|
||||
padding: 8px;
|
||||
border: 1px solid #ddd;
|
||||
border-radius: 4px;
|
||||
background-color: #4CAF50;
|
||||
background: white;
|
||||
}
|
||||
.update-form input[type="submit"] {
|
||||
background-color: #4CAF50;
|
||||
@@ -1087,66 +1086,10 @@ input[type="submit"]:disabled,
|
||||
.warning {
|
||||
background-color: var(--primary-color);
|
||||
border: 1px solid #ffe0b2;
|
||||
color: white;
|
||||
padding: 15px;
|
||||
margin: 20px auto;
|
||||
border-radius: 4px;
|
||||
max-width: 600px;
|
||||
text-align: center;
|
||||
color: #e65100;
|
||||
padding: 15px;
|
||||
}
|
||||
|
||||
.update-options {
|
||||
display: flex;
|
||||
gap: 2rem;
|
||||
margin: 2rem 0;
|
||||
}
|
||||
.update-section {
|
||||
flex: 1;
|
||||
background: var(--background-green);
|
||||
padding: 1.5rem;
|
||||
border-radius: 8px;
|
||||
}
|
||||
.update-section h2 {
|
||||
margin-top: 0;
|
||||
color: #333;
|
||||
}
|
||||
.update-section p {
|
||||
color: #666;
|
||||
margin-bottom: 1rem;
|
||||
}
|
||||
.progress-container {
|
||||
margin: 20px 0;
|
||||
background: #f0f0f0;
|
||||
border-radius: 4px;
|
||||
overflow: hidden;
|
||||
}
|
||||
.progress-bar {
|
||||
width: 0;
|
||||
height: 20px;
|
||||
background: #4CAF50;
|
||||
transition: width 0.3s ease-in-out;
|
||||
text-align: center;
|
||||
line-height: 20px;
|
||||
color: white;
|
||||
}
|
||||
.status {
|
||||
margin-top: 20px;
|
||||
padding: 10px;
|
||||
border-radius: 4px;
|
||||
display: none;
|
||||
}
|
||||
.status.success {
|
||||
background: #e8f5e9;
|
||||
color: #2e7d32;
|
||||
}
|
||||
.status.error {
|
||||
background: #ffebee;
|
||||
color: #c62828;
|
||||
}
|
||||
.warning {
|
||||
background: #fff3e0;
|
||||
color: #e65100;
|
||||
padding: 15px;
|
||||
border-radius: 4px;
|
||||
margin-bottom: 20px;
|
||||
}
|
@@ -6,24 +6,13 @@
|
||||
<title>FilaMan - Filament Management Tool</title>
|
||||
<link rel="icon" type="image/png" href="/favicon.ico">
|
||||
<link rel="stylesheet" href="style.css">
|
||||
<script>
|
||||
fetch('/api/version')
|
||||
.then(response => response.json())
|
||||
.then(data => {
|
||||
const versionSpan = document.querySelector('.version');
|
||||
if (versionSpan) {
|
||||
versionSpan.textContent = 'v' + data.version;
|
||||
}
|
||||
})
|
||||
.catch(error => console.error('Error fetching version:', error));
|
||||
</script>
|
||||
</head>
|
||||
<body>
|
||||
<div class="navbar">
|
||||
<div style="display: flex; align-items: center; gap: 2rem;">
|
||||
<img src="/logo.png" alt="FilaMan Logo" class="logo">
|
||||
<div class="logo-text">
|
||||
<h1>FilaMan<span class="version"></span></h1>
|
||||
<h1>FilaMan<span class="version">v1.2.4</span></h1>
|
||||
<h4>Filament Management Tool</h4>
|
||||
</div>
|
||||
</div>
|
||||
@@ -51,34 +40,18 @@
|
||||
<h1>Firmware Upgrade</h1>
|
||||
|
||||
<div class="warning">
|
||||
<strong>Warning:</strong> Do not power off the device during update.
|
||||
<strong>Warning:</strong> Please do not turn off or restart the device during the update.
|
||||
The device will restart automatically after the update.
|
||||
</div>
|
||||
|
||||
<div class="update-options">
|
||||
<div class="update-section">
|
||||
<h2>Firmware Update</h2>
|
||||
<p>Upload a new firmware file (filaman_*.bin)</p>
|
||||
<div class="update-form">
|
||||
<form id="firmwareForm" enctype='multipart/form-data' data-type="firmware">
|
||||
<form id="updateForm" enctype='multipart/form-data'>
|
||||
<input type='file' name='update' accept='.bin' required>
|
||||
<input type='submit' value='Start Firmware Update'>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="update-section">
|
||||
<h2>Webpage Update</h2>
|
||||
<p>Upload a new webpage file (webpage_*.bin)</p>
|
||||
<div class="update-form">
|
||||
<form id="webpageForm" enctype='multipart/form-data' data-type="webpage">
|
||||
<input type='file' name='update' accept='.bin' required>
|
||||
<input type='submit' value='Start Webpage Update'>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="progress-container" style="display: none;">
|
||||
<div class="progress-container">
|
||||
<div class="progress-bar">0%</div>
|
||||
</div>
|
||||
<div class="status"></div>
|
||||
@@ -91,163 +64,91 @@
|
||||
statusContainer.style.display = 'none';
|
||||
}
|
||||
|
||||
const progress = document.querySelector('.progress-bar');
|
||||
const progressContainer = document.querySelector('.progress-container');
|
||||
const status = document.querySelector('.status');
|
||||
let updateInProgress = false;
|
||||
let lastReceivedProgress = 0;
|
||||
|
||||
// WebSocket Handling
|
||||
let ws = null;
|
||||
let wsReconnectTimer = null;
|
||||
|
||||
function connectWebSocket() {
|
||||
ws = new WebSocket('ws://' + window.location.host + '/ws');
|
||||
|
||||
ws.onmessage = function(event) {
|
||||
try {
|
||||
const data = JSON.parse(event.data);
|
||||
if (data.type === "updateProgress" && updateInProgress) {
|
||||
// Show the progress bar
|
||||
progressContainer.style.display = 'block';
|
||||
|
||||
// Only update the progress if it increased
|
||||
const newProgress = parseInt(data.progress);
|
||||
if (!isNaN(newProgress) && newProgress >= lastReceivedProgress) {
|
||||
progress.style.width = newProgress + '%';
|
||||
progress.textContent = newProgress + '%';
|
||||
lastReceivedProgress = newProgress;
|
||||
}
|
||||
|
||||
// Show the status message
|
||||
if (data.message || data.status) {
|
||||
status.textContent = data.message || getStatusMessage(data.status);
|
||||
status.className = 'status success';
|
||||
status.style.display = 'block';
|
||||
|
||||
// Trigger the reload once the update succeeded
|
||||
if (data.status === 'success' || lastReceivedProgress >= 98) {
|
||||
clearTimeout(wsReconnectTimer);
|
||||
setTimeout(() => {
|
||||
window.location.href = '/';
|
||||
}, 30000);
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
console.error('WebSocket message error:', e);
|
||||
}
|
||||
};
|
||||
|
||||
ws.onclose = function() {
|
||||
if (updateInProgress) {
|
||||
// If progress got far enough, assume the update succeeded
|
||||
if (lastReceivedProgress >= 85) {
|
||||
status.textContent = "Update appears successful! Device is restarting... Page will reload in 30 seconds.";
|
||||
status.className = 'status success';
|
||||
status.style.display = 'block';
|
||||
clearTimeout(wsReconnectTimer);
|
||||
setTimeout(() => {
|
||||
window.location.href = '/';
|
||||
}, 30000);
|
||||
} else {
|
||||
// Try to reconnect while progress is still low
|
||||
wsReconnectTimer = setTimeout(connectWebSocket, 1000);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
ws.onerror = function(err) {
|
||||
console.error('WebSocket error:', err);
|
||||
if (updateInProgress && lastReceivedProgress >= 85) {
|
||||
status.textContent = "Update appears successful! Device is restarting... Page will reload in 30 seconds.";
|
||||
status.className = 'status success';
|
||||
status.style.display = 'block';
|
||||
setTimeout(() => {
|
||||
window.location.href = '/';
|
||||
}, 30000);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
// Initial WebSocket connection
|
||||
connectWebSocket();
|
||||
|
||||
function getStatusMessage(status) {
|
||||
switch(status) {
|
||||
case 'starting': return 'Starting update...';
|
||||
case 'uploading': return 'Uploading...';
|
||||
case 'finalizing': return 'Finalizing update...';
|
||||
case 'restoring': return 'Restoring configurations...';
|
||||
case 'preparing': return 'Preparing for restart...';
|
||||
case 'success': return 'Update successful! Device is restarting... Page will reload in 30 seconds.';
|
||||
default: return 'Updating...';
|
||||
}
|
||||
}
|
||||
|
||||
function handleUpdate(e) {
|
||||
document.getElementById('updateForm').addEventListener('submit', async (e) => {
|
||||
e.preventDefault();
|
||||
const form = e.target;
|
||||
const file = form.update.files[0];
|
||||
const updateType = form.dataset.type;
|
||||
|
||||
if (!file) {
|
||||
alert('Please select a file.');
|
||||
alert('Please select a firmware file.');
|
||||
return;
|
||||
}
|
||||
|
||||
// Validate file name pattern
|
||||
if (updateType === 'firmware' && !file.name.startsWith('upgrade_filaman_firmware_')) {
|
||||
alert('Please select a valid firmware file (upgrade_filaman_firmware_*.bin)');
|
||||
return;
|
||||
}
|
||||
if (updateType === 'webpage' && !file.name.startsWith('upgrade_filaman_website_')) {
|
||||
alert('Please select a valid webpage file (upgrade_filaman_website_*.bin)');
|
||||
return;
|
||||
}
|
||||
const formData = new FormData();
|
||||
formData.append('update', file);
|
||||
|
||||
const progress = document.querySelector('.progress-bar');
|
||||
const progressContainer = document.querySelector('.progress-container');
|
||||
const status = document.querySelector('.status');
|
||||
|
||||
// Reset UI
|
||||
updateInProgress = true;
|
||||
progressContainer.style.display = 'block';
|
||||
status.style.display = 'none';
|
||||
status.className = 'status';
|
||||
progress.style.width = '0%';
|
||||
progress.textContent = '0%';
|
||||
form.querySelector('input[type=submit]').disabled = true;
|
||||
|
||||
// Disable submit buttons
|
||||
document.querySelectorAll('form input[type=submit]').forEach(btn => btn.disabled = true);
|
||||
|
||||
// Send update
|
||||
const xhr = new XMLHttpRequest();
|
||||
xhr.open('POST', '/update', true);
|
||||
|
||||
xhr.upload.onprogress = (e) => {
|
||||
if (e.lengthComputable) {
|
||||
const percentComplete = (e.loaded / e.total) * 100;
|
||||
progress.style.width = percentComplete + '%';
|
||||
progress.textContent = Math.round(percentComplete) + '%';
|
||||
}
|
||||
};
|
||||
|
||||
xhr.onload = function() {
|
||||
if (xhr.status !== 200 && !progress.textContent.startsWith('100')) {
|
||||
status.textContent = "Update failed: " + (xhr.responseText || "Unknown error");
|
||||
status.className = 'status error';
|
||||
try {
|
||||
let response = this.responseText;
|
||||
try {
|
||||
const jsonResponse = JSON.parse(response);
|
||||
response = jsonResponse.message;
|
||||
|
||||
if (jsonResponse.restart) {
|
||||
status.textContent = response + " Redirecting in 20 seconds...";
|
||||
let countdown = 20;
|
||||
const timer = setInterval(() => {
|
||||
countdown--;
|
||||
if (countdown <= 0) {
|
||||
clearInterval(timer);
|
||||
window.location.href = '/';
|
||||
} else {
|
||||
status.textContent = response + ` Redirecting in ${countdown} seconds...`;
|
||||
}
|
||||
}, 1000);
|
||||
}
|
||||
} catch (e) {
|
||||
if (!isNaN(response)) {
|
||||
const percent = parseInt(response);
|
||||
progress.style.width = percent + '%';
|
||||
progress.textContent = percent + '%';
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
status.textContent = response;
|
||||
status.classList.add(xhr.status === 200 ? 'success' : 'error');
|
||||
status.style.display = 'block';
|
||||
updateInProgress = false;
|
||||
document.querySelectorAll('form input[type=submit]').forEach(btn => btn.disabled = false);
|
||||
|
||||
if (xhr.status !== 200) {
|
||||
form.querySelector('input[type=submit]').disabled = false;
|
||||
}
|
||||
} catch (error) {
|
||||
status.textContent = 'Error: ' + error.message;
|
||||
status.classList.add('error');
|
||||
status.style.display = 'block';
|
||||
form.querySelector('input[type=submit]').disabled = false;
|
||||
}
|
||||
};
|
||||
|
||||
xhr.onerror = function() {
|
||||
if (!progress.textContent.startsWith('100')) {
|
||||
status.textContent = "Network error during update";
|
||||
status.className = 'status error';
|
||||
status.textContent = 'Update failed: Network error';
|
||||
status.classList.add('error');
|
||||
status.style.display = 'block';
|
||||
updateInProgress = false;
|
||||
document.querySelectorAll('form input[type=submit]').forEach(btn => btn.disabled = false);
|
||||
}
|
||||
form.querySelector('input[type=submit]').disabled = false;
|
||||
};
|
||||
|
||||
const formData = new FormData();
|
||||
formData.append('update', file);
|
||||
xhr.send(formData);
|
||||
}
|
||||
|
||||
document.getElementById('firmwareForm').addEventListener('submit', handleUpdate);
|
||||
document.getElementById('webpageForm').addEventListener('submit', handleUpdate);
|
||||
});
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
@@ -6,24 +6,13 @@
|
||||
<title>FilaMan - Filament Management Tool</title>
|
||||
<link rel="icon" type="image/png" href="/favicon.ico">
|
||||
<link rel="stylesheet" href="style.css">
|
||||
<script>
|
||||
fetch('/api/version')
|
||||
.then(response => response.json())
|
||||
.then(data => {
|
||||
const versionSpan = document.querySelector('.version');
|
||||
if (versionSpan) {
|
||||
versionSpan.textContent = 'v' + data.version;
|
||||
}
|
||||
})
|
||||
.catch(error => console.error('Error fetching version:', error));
|
||||
</script>
|
||||
</head>
|
||||
<body>
|
||||
<div class="navbar">
|
||||
<div style="display: flex; align-items: center; gap: 2rem;">
|
||||
<img src="/logo.png" alt="FilaMan Logo" class="logo">
|
||||
<div class="logo-text">
|
||||
<h1>FilaMan<span class="version"></span></h1>
|
||||
<h1>FilaMan<span class="version">v1.2.4</span></h1>
|
||||
<h4>Filament Management Tool</h4>
|
||||
</div>
|
||||
</div>
|
||||
|
@@ -6,24 +6,13 @@
|
||||
<title>FilaMan - Filament Management Tool</title>
|
||||
<link rel="icon" type="image/png" href="/favicon.ico">
|
||||
<link rel="stylesheet" href="style.css">
|
||||
<script>
|
||||
fetch('/api/version')
|
||||
.then(response => response.json())
|
||||
.then(data => {
|
||||
const versionSpan = document.querySelector('.version');
|
||||
if (versionSpan) {
|
||||
versionSpan.textContent = 'v' + data.version;
|
||||
}
|
||||
})
|
||||
.catch(error => console.error('Error fetching version:', error));
|
||||
</script>
|
||||
</head>
|
||||
<body>
|
||||
<div class="navbar">
|
||||
<div style="display: flex; align-items: center; gap: 2rem;">
|
||||
<img src="/logo.png" alt="FilaMan Logo" class="logo">
|
||||
<div class="logo-text">
|
||||
<h1>FilaMan<span class="version"></span></h1>
|
||||
<h1>FilaMan<span class="version">v1.2.4</span></h1>
|
||||
<h4>Filament Management Tool</h4>
|
||||
</div>
|
||||
</div>
|
||||
|
Binary image file not shown (removed; previous size: 143 KiB)
@@ -9,8 +9,8 @@
|
||||
; https://docs.platformio.org/page/projectconf.html
|
||||
|
||||
[common]
|
||||
version = "1.3.90"
|
||||
##
|
||||
version = "1.2.4"
|
||||
|
||||
[env:esp32dev]
|
||||
platform = espressif32
|
||||
board = esp32dev
|
||||
@@ -20,10 +20,7 @@ monitor_speed = 115200
|
||||
lib_deps =
|
||||
tzapu/WiFiManager @ ^2.0.17
|
||||
https://github.com/me-no-dev/ESPAsyncWebServer.git#master
|
||||
#me-no-dev/AsyncTCP @ ^1.1.1
|
||||
https://github.com/esphome/AsyncTCP.git
|
||||
#mathieucarbou/ESPAsyncWebServer @ ^3.6.0
|
||||
#esp32async/AsyncTCP @ ^3.3.5
|
||||
me-no-dev/AsyncTCP @ ^1.1.1
|
||||
bogde/HX711 @ ^0.7.5
|
||||
adafruit/Adafruit SSD1306 @ ^2.5.13
|
||||
adafruit/Adafruit GFX Library @ ^1.11.11
|
||||
@@ -46,26 +43,34 @@ build_flags =
|
||||
-fdata-sections
|
||||
-DNDEBUG
|
||||
-mtext-section-literals
|
||||
-DVERSION=\"${common.version}\"
|
||||
'-D VERSION="${common.version}"'
|
||||
-DASYNCWEBSERVER_REGEX
|
||||
-DCORE_DEBUG_LEVEL=3
|
||||
-DCORE_DEBUG_LEVEL=1
|
||||
-DCONFIG_ARDUHAL_LOG_COLORS=1
|
||||
-DOTA_DEBUG=1
|
||||
-DARDUINO_RUNNING_CORE=1
|
||||
-DARDUINO_EVENT_RUNNING_CORE=1
|
||||
-DCONFIG_OPTIMIZATION_LEVEL_DEBUG=1
|
||||
-DBOOT_APP_PARTITION_OTA_0=1
|
||||
-DCONFIG_LWIP_TCP_MSL=60000
|
||||
-DCONFIG_LWIP_TCP_RCV_BUF_DEFAULT=4096
|
||||
-DCONFIG_LWIP_MAX_ACTIVE_TCP=16
|
||||
-DCONFIG_ESP32_PANIC_PRINT_REBOOT
|
||||
-DCONFIG_ARDUINO_OTA_READSIZE=1024
|
||||
-DCONFIG_ASYNC_TCP_RUNNING_CORE=1
|
||||
-DCONFIG_ASYNC_TCP_USE_WDT=0
|
||||
-DCONFIG_LWIP_TCP_MSS=1460
|
||||
-DOTA_PARTITION_SUBTYPE=0x10
|
||||
-DPARTITION_TABLE_OFFSET=0x8000
|
||||
-DPARTITION_TABLE_SIZE=0x1000
|
||||
|
||||
extra_scripts =
|
||||
scripts/extra_script.py
|
||||
${env:buildfs.extra_scripts}
|
||||
pre:scripts/pre_build.py ; runs first
|
||||
pre:scripts/pre_spiffs.py ; runs second
|
||||
pre:scripts/combine_html.py ; runs third
|
||||
scripts/gzip_files.py
|
||||
|
||||
[env:buildfs]
|
||||
extra_scripts =
|
||||
pre:scripts/combine_html.py ; Combine header with HTML files
|
||||
scripts/gzip_files.py ; Compress files for SPIFFS
|
||||
; Remove or comment out the targets line
|
||||
;targets = buildfs, build
|
||||
|
||||
; Add a custom target to build both
|
||||
[platformio]
|
||||
default_envs = esp32dev
|
||||
|
||||
|
@@ -1,39 +1,7 @@
|
||||
Import("env")
|
||||
|
||||
board_config = env.BoardConfig()
|
||||
|
||||
# Calculate SPIFFS size based on partition table
|
||||
SPIFFS_START = 0x310000 # From partitions.csv
|
||||
SPIFFS_SIZE = 0xE0000 # From partitions.csv
|
||||
SPIFFS_PAGE = 256
|
||||
SPIFFS_BLOCK = 4096
|
||||
|
||||
env.Replace(
|
||||
MKSPIFFSTOOL="mkspiffs",
|
||||
SPIFFSBLOCKSZ=SPIFFS_BLOCK,
|
||||
SPIFFSBLOCKSIZE=SPIFFS_BLOCK,
|
||||
SPIFFSSTART=SPIFFS_START,
|
||||
SPIFFSEND=SPIFFS_START + SPIFFS_SIZE,
|
||||
SPIFFSPAGESZ=SPIFFS_PAGE,
|
||||
SPIFFSSIZE=SPIFFS_SIZE
|
||||
)
|
||||
|
||||
# Reuse the replace_version function
|
||||
exec(open("./scripts/pre_build.py").read())
|
||||
|
||||
# Bind to SPIFFS build
|
||||
env.AddPreAction("buildfs", replace_version)
|
||||
|
||||
import os
|
||||
import shutil
|
||||
from SCons.Script import DefaultEnvironment
|
||||
|
||||
env = DefaultEnvironment()
|
||||
|
||||
# Format SPIFFS partition before uploading new files
|
||||
spiffs_dir = os.path.join(env.subst("$BUILD_DIR"), "spiffs")
|
||||
if os.path.exists(spiffs_dir):
|
||||
shutil.rmtree(spiffs_dir)
|
||||
os.makedirs(spiffs_dir)
|
||||
|
||||
print("SPIFFS partition formatted.")
|
@@ -64,10 +64,29 @@ def get_changes_from_git():
|
||||
|
||||
return changes
|
||||
|
||||
def push_changes(version):
|
||||
"""Push changes to upstream"""
|
||||
try:
|
||||
# Stage the CHANGELOG.md
|
||||
subprocess.run(['git', 'add', 'CHANGELOG.md'], check=True)
|
||||
|
||||
# Commit the changelog
|
||||
commit_msg = f"docs: update changelog for version {version}"
|
||||
subprocess.run(['git', 'commit', '-m', commit_msg], check=True)
|
||||
|
||||
# Push to origin (local)
|
||||
subprocess.run(['git', 'push', 'origin'], check=True)
|
||||
print("Successfully pushed to origin")
|
||||
|
||||
except subprocess.CalledProcessError as e:
|
||||
print(f"Error during git operations: {e}")
|
||||
return False
|
||||
return True
|
||||
|
||||
def update_changelog():
|
||||
print("Starting changelog update...")
|
||||
print("Starting changelog update...") # Add this line
|
||||
version = get_version()
|
||||
print(f"Current version: {version}")
|
||||
print(f"Current version: {version}") # Add this line
|
||||
today = datetime.now().strftime('%Y-%m-%d')
|
||||
|
||||
script_dir = os.path.dirname(os.path.abspath(__file__))
|
||||
@@ -92,7 +111,7 @@ def update_changelog():
|
||||
if not os.path.exists(changelog_path):
|
||||
with open(changelog_path, 'w') as f:
|
||||
f.write(f"# Changelog\n\n{changelog_entry}")
|
||||
print(f"Created new changelog file with version {version}")
|
||||
push_changes(version)
|
||||
else:
|
||||
with open(changelog_path, 'r') as f:
|
||||
content = f.read()
|
||||
@@ -101,30 +120,9 @@ def update_changelog():
|
||||
updated_content = content.replace("# Changelog\n", f"# Changelog\n\n{changelog_entry}")
|
||||
with open(changelog_path, 'w') as f:
|
||||
f.write(updated_content)
|
||||
print(f"Added new version {version} to changelog")
|
||||
push_changes(version)
|
||||
else:
|
||||
# Version already exists, update the existing entries
|
||||
version_pattern = f"## \\[{version}\\] - \\d{{4}}-\\d{{2}}-\\d{{2}}"
|
||||
next_version_pattern = "## \\[.*?\\] - \\d{4}-\\d{2}-\\d{2}"
|
||||
|
||||
# Find the start of the current version
|
||||
version_match = re.search(version_pattern, content)
|
||||
if version_match:
|
||||
version_start = version_match.start()
|
||||
# Look for the next version
|
||||
next_version_match = re.search(next_version_pattern, content[version_start + 1:])
|
||||
|
||||
if next_version_match:
|
||||
# Replace the content between the current and the next version
|
||||
next_version_pos = version_start + 1 + next_version_match.start()
|
||||
updated_content = content[:version_start] + changelog_entry + content[next_version_pos:]
|
||||
else:
|
||||
# If no next version exists, replace through to the end
|
||||
updated_content = content[:version_start] + changelog_entry + "\n"
|
||||
|
||||
with open(changelog_path, 'w') as f:
|
||||
f.write(updated_content)
|
||||
print(f"Updated entries for version {version}")
|
||||
print(f"Version {version} already exists in changelog")
|
||||
|
||||
if __name__ == "__main__":
|
||||
update_changelog()
|
src/api.cpp: 22 lines changed
@@ -60,10 +60,10 @@ JsonDocument fetchSpoolsForWebsite() {
    JsonArray filteredSpools = filteredDoc.to<JsonArray>();

    for (JsonObject spool : spools) {
        JsonObject filteredSpool = filteredSpools.add<JsonObject>();
        JsonObject filteredSpool = filteredSpools.createNestedObject();
        filteredSpool["extra"]["nfc_id"] = spool["extra"]["nfc_id"];

        JsonObject filament = filteredSpool["filament"].to<JsonObject>();
        JsonObject filament = filteredSpool.createNestedObject("filament");
        filament["sm_id"] = spool["id"];
        filament["id"] = spool["filament"]["id"];
        filament["name"] = spool["filament"]["name"];

@@ -73,7 +73,7 @@ JsonDocument fetchSpoolsForWebsite() {
        filament["price_meter"] = spool["filament"]["extra"]["price_meter"];
        filament["price_gramm"] = spool["filament"]["extra"]["price_gramm"];

        JsonObject vendor = filament["vendor"].to<JsonObject>();
        JsonObject vendor = filament.createNestedObject("vendor");
        vendor["id"] = spool["filament"]["vendor"]["id"];
        vendor["name"] = spool["filament"]["vendor"]["name"];
    }

@@ -110,13 +110,13 @@ JsonDocument fetchAllSpoolsInfo() {
    JsonArray filteredSpools = filteredDoc.to<JsonArray>();

    for (JsonObject spool : spools) {
        JsonObject filteredSpool = filteredSpools.add<JsonObject>();
        JsonObject filteredSpool = filteredSpools.createNestedObject();
        filteredSpool["price"] = spool["price"];
        filteredSpool["remaining_weight"] = spool["remaining_weight"];
        filteredSpool["used_weight"] = spool["used_weight"];
        filteredSpool["extra"]["nfc_id"] = spool["extra"]["nfc_id"];

        JsonObject filament = filteredSpool["filament"].to<JsonObject>();
        JsonObject filament = filteredSpool.createNestedObject("filament");
        filament["id"] = spool["filament"]["id"];
        filament["name"] = spool["filament"]["name"];
        filament["material"] = spool["filament"]["material"];

@@ -125,11 +125,11 @@ JsonDocument fetchAllSpoolsInfo() {
        filament["spool_weight"] = spool["filament"]["spool_weight"];
        filament["color_hex"] = spool["filament"]["color_hex"];

        JsonObject vendor = filament["vendor"].to<JsonObject>();
        JsonObject vendor = filament.createNestedObject("vendor");
        vendor["id"] = spool["filament"]["vendor"]["id"];
        vendor["name"] = spool["filament"]["vendor"]["name"];

        JsonObject extra = filament["extra"].to<JsonObject>();
        JsonObject extra = filament.createNestedObject("extra");
        extra["nozzle_temperature"] = spool["filament"]["extra"]["nozzle_temperature"];
        extra["price_gramm"] = spool["filament"]["extra"]["price_gramm"];
        extra["price_meter"] = spool["filament"]["extra"]["price_meter"];

@@ -186,7 +186,7 @@ bool updateSpoolTagId(String uidString, const char* payload) {
    }

    // Check whether the required fields are present
    if (!doc["sm_id"].is<String>() || doc["sm_id"].as<String>() == "") {
    if (!doc.containsKey("sm_id") || doc["sm_id"] == "") {
        Serial.println("Keine Spoolman-ID gefunden.");
        return false;
    }

@@ -368,7 +368,7 @@ bool checkSpoolmanExtraFields() {
    for (uint8_t s = 0; s < extraLength; s++) {
        bool found = false;
        for (JsonObject field : doc.as<JsonArray>()) {
            if (field["key"].is<String>() && field["key"] == extraFields[s]) {
            if (field.containsKey("key") && field["key"] == extraFields[s]) {
                Serial.println("Feld gefunden: " + extraFields[s]);
                found = true;
                break;

@@ -430,7 +430,7 @@ bool checkSpoolmanInstance(const String& url) {
            String payload = http.getString();
            JsonDocument doc;
            DeserializationError error = deserializeJson(doc, payload);
            if (!error && doc["status"].is<String>()) {
            if (!error && doc.containsKey("status")) {
                const char* status = doc["status"];
                http.end();

@@ -469,7 +469,7 @@ bool saveSpoolmanUrl(const String& url) {

String loadSpoolmanUrl() {
    JsonDocument doc;
    if (loadJsonValue("/spoolman_url.json", doc) && doc["url"].is<String>()) {
    if (loadJsonValue("/spoolman_url.json", doc) && doc.containsKey("url")) {
        return doc["url"].as<String>();
    }
    Serial.println("Keine gültige Spoolman-URL gefunden.");
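The api.cpp hunks above pair each older createNestedObject()/containsKey() call with its newer ArduinoJson counterpart. As a reference, here is a minimal sketch of the two styles side by side; the function and variable names are illustrative only, the API calls are standard ArduinoJson.

#include <ArduinoJson.h>

// Sketch of the ArduinoJson style change visible in the diff above.
// Older style: createNestedObject()/createNestedArray() and containsKey().
// Newer style: add<JsonObject>()/to<JsonObject>() and typed is<T>() checks.
void buildFilteredSpool(JsonArray filteredSpools, JsonObject spool) {
    // append a new object to an array (replaces createNestedObject())
    JsonObject filteredSpool = filteredSpools.add<JsonObject>();

    // materialize a nested object under a key (replaces createNestedObject("filament"))
    JsonObject filament = filteredSpool["filament"].to<JsonObject>();
    filament["id"] = spool["filament"]["id"];

    // presence check: containsKey() is replaced by a typed is<T>() test
    if (spool["extra"]["nfc_id"].is<const char*>()) {
        filteredSpool["extra"]["nfc_id"] = spool["extra"]["nfc_id"];
    }
}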
@@ -58,7 +58,7 @@ bool saveBambuCredentials(const String& ip, const String& serialnr, const String

bool loadBambuCredentials() {
    JsonDocument doc;
    if (loadJsonValue("/bambu_credentials.json", doc) && doc["bambu_ip"].is<String>()) {
    if (loadJsonValue("/bambu_credentials.json", doc) && doc.containsKey("bambu_ip")) {
        // Temporary strings for the values
        String ip = doc["bambu_ip"].as<String>();
        String code = doc["bambu_accesscode"].as<String>();

@@ -270,9 +270,9 @@ void mqtt_callback(char* topic, byte* payload, unsigned int length) {
    }

    // Check whether "print->upgrade_state" and "print.ams.ams" exist
    if (doc["print"]["upgrade_state"].is<String>()) {
    if (doc["print"].containsKey("upgrade_state")) {
        // Check whether AMS data is present
        if (!doc["print"]["ams"].is<String>() || !doc["print"]["ams"]["ams"].is<String>()) {
        if (!doc["print"].containsKey("ams") || !doc["print"]["ams"].containsKey("ams")) {
            return;
        }

@@ -315,7 +315,7 @@ void mqtt_callback(char* topic, byte* payload, unsigned int length) {
    }

    // Check the external spool
    if (!hasChanges && doc["print"]["vt_tray"].is<String>()) {
    if (!hasChanges && doc["print"].containsKey("vt_tray")) {
        JsonObject vtTray = doc["print"]["vt_tray"];
        bool foundExternal = false;

@@ -363,7 +363,7 @@ void mqtt_callback(char* topic, byte* payload, unsigned int length) {
    ams_count = amsArray.size();

    // If an external spool is present, add it
    if (doc["print"]["vt_tray"].is<String>()) {
    if (doc["print"].containsKey("vt_tray")) {
        JsonObject vtTray = doc["print"]["vt_tray"];
        int extIdx = ams_count; // index for the external spool
        ams_data[extIdx].ams_id = 255; // special ID for the external spool

@@ -387,14 +387,14 @@ void mqtt_callback(char* topic, byte* payload, unsigned int length) {
    JsonArray wsArray = wsDoc.to<JsonArray>();

    for (int i = 0; i < ams_count; i++) {
        JsonObject amsObj = wsArray.add<JsonObject>();
        JsonObject amsObj = wsArray.createNestedObject();
        amsObj["ams_id"] = ams_data[i].ams_id;

        JsonArray trays = amsObj["tray"].to<JsonArray>();
        JsonArray trays = amsObj.createNestedArray("tray");
        int maxTrays = (ams_data[i].ams_id == 255) ? 1 : 4;

        for (int j = 0; j < maxTrays; j++) {
            JsonObject trayObj = trays.add<JsonObject>();
            JsonObject trayObj = trays.createNestedObject();
            trayObj["id"] = ams_data[i].trays[j].id;
            trayObj["tray_info_idx"] = ams_data[i].trays[j].tray_info_idx;
            trayObj["tray_type"] = ams_data[i].trays[j].tray_type;

@@ -427,14 +427,14 @@ void mqtt_callback(char* topic, byte* payload, unsigned int length) {
    JsonArray wsArray = wsDoc.to<JsonArray>();

    for (int j = 0; j < ams_count; j++) {
        JsonObject amsObj = wsArray.add<JsonObject>();
        JsonObject amsObj = wsArray.createNestedObject();
        amsObj["ams_id"] = ams_data[j].ams_id;

        JsonArray trays = amsObj["tray"].to<JsonArray>();
        JsonArray trays = amsObj.createNestedArray("tray");
        int maxTrays = (ams_data[j].ams_id == 255) ? 1 : 4;

        for (int k = 0; k < maxTrays; k++) {
            JsonObject trayObj = trays.add<JsonObject>();
            JsonObject trayObj = trays.createNestedObject();
            trayObj["id"] = ams_data[j].trays[k].id;
            trayObj["tray_info_idx"] = ams_data[j].trays[k].tray_info_idx;
            trayObj["tray_type"] = ams_data[j].trays[k].tray_type;

@@ -36,5 +36,4 @@ void mqtt_loop(void * parameter);
bool setBambuSpool(String payload);
void bambu_restart();

extern TaskHandle_t BambuMqttTask;
#endif
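The bambu.cpp hunks guard the nested "print.ams.ams" and "print.vt_tray" keys of the MQTT payload. A hedged side note: with the typed-check style, a presence test on a nested object or array is usually expressed with is<JsonObject>()/is<JsonArray>() rather than is<String>(). A minimal sketch, where the field names come from the diff and the helper name is illustrative:

#include <ArduinoJson.h>

// Sketch of a defensive presence check on the nested Bambu MQTT payload.
// is<JsonObject>()/is<JsonArray>() return false when a key is missing, so a
// chained check covers both "key absent" and "wrong type".
bool hasAmsData(JsonDocument& doc) {
    if (!doc["print"].is<JsonObject>()) return false;
    if (!doc["print"]["ams"].is<JsonObject>()) return false;
    return doc["print"]["ams"]["ams"].is<JsonArray>();
}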
@@ -1,5 +1,4 @@
#include "commonFS.h"
#include <SPIFFS.h>

bool saveJsonValue(const char* filename, const JsonDocument& doc) {
    File file = SPIFFS.open(filename, "w");

@@ -36,12 +35,23 @@ bool loadJsonValue(const char* filename, JsonDocument& doc) {
    return true;
}

void initializeSPIFFS() {
    if (!SPIFFS.begin(true, "/spiffs", 10, "spiffs")) {
        Serial.println("SPIFFS Mount Failed");
        return;
bool initializeSPIFFS() {
    // First attempt
    if (SPIFFS.begin(true)) {
        Serial.println("SPIFFS mounted successfully.");
        return true;
    }
    Serial.printf("SPIFFS Total: %u bytes\n", SPIFFS.totalBytes());
    Serial.printf("SPIFFS Used: %u bytes\n", SPIFFS.usedBytes());
    Serial.printf("SPIFFS Free: %u bytes\n", SPIFFS.totalBytes() - SPIFFS.usedBytes());

    // Try formatting
    Serial.println("Failed to mount SPIFFS. Formatting...");
    SPIFFS.format();

    // Second attempt after formatting
    if (SPIFFS.begin(true)) {
        Serial.println("SPIFFS formatted and mounted successfully.");
        return true;
    }

    Serial.println("SPIFFS initialization failed completely.");
    return false;
}

@@ -7,6 +7,6 @@

bool saveJsonValue(const char* filename, const JsonDocument& doc);
bool loadJsonValue(const char* filename, JsonDocument& doc);
void initializeSPIFFS();
bool initializeSPIFFS();

#endif
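With initializeSPIFFS() returning bool, the caller can react to a mount failure instead of continuing blindly. A minimal sketch of how setup code might consume that return value; the halt loop and the error text are illustrative, the headers are the ones used in the diff:

#include "commonFS.h"
#include "display.h"

// Sketch: stop early if neither mounting nor formatting SPIFFS succeeded.
void setupFilesystem() {
    if (!initializeSPIFFS()) {
        Serial.println("Stopping: no usable SPIFFS partition");
        oledShowMessage("SPIFFS error");
        while (true) {
            delay(1000); // halt here instead of running without a filesystem
        }
    }
}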
48
src/main.cpp
@@ -19,12 +19,6 @@
void setup() {
    Serial.begin(115200);

    uint64_t chipid;

    chipid = ESP.getEfuseMac(); // The chip ID is essentially its MAC address (length: 6 bytes).
    Serial.printf("ESP32 Chip ID = %04X", (uint16_t)(chipid >> 32)); // print high 2 bytes
    Serial.printf("%08X\n", (uint32_t)chipid); // print low 4 bytes

    // Initialize SPIFFS
    initializeSPIFFS();

@@ -58,22 +52,7 @@ void setup() {

    startNfc();

    uint8_t scaleCalibrated = start_scale();
    if (scaleCalibrated == 3) {
        oledShowMessage("Scale not calibrated!");
        for (uint16_t i = 0; i < 50000; i++) {
            yield();
            vTaskDelay(pdMS_TO_TICKS(1));
            esp_task_wdt_reset();
        }
    } else if (scaleCalibrated == 0) {
        oledShowMessage("HX711 not found");
        for (uint16_t i = 0; i < 50000; i++) {
            yield();
            vTaskDelay(pdMS_TO_TICKS(1));
            esp_task_wdt_reset();
        }
    }
    start_scale();

    // Initialize the WDT with a 10 second timeout
    bool panic = true; // if true, a WDT timeout triggers a system panic

@@ -99,33 +78,30 @@ uint8_t wifiErrorCounter = 0;

// ##### PROGRAM START #####
void loop() {
    // Check the WLAN status
    if (WiFi.status() != WL_CONNECTED) {
        wifiErrorCounter++;
        wifiOn = false;
    } else {
        wifiErrorCounter = 0;
        wifiOn = true;
    }
    if (wifiErrorCounter > 20) ESP.restart();

    unsigned long currentMillis = millis();

    // Send AMS data at least every minute
    if (currentMillis - lastAmsSendTime >= amsSendInterval)
    {
    if (currentMillis - lastAmsSendTime >= amsSendInterval) {
        lastAmsSendTime = currentMillis;
        sendAmsData(nullptr);
    }

    // If the scale is not calibrated
    if (scaleCalibrated == 3)
    {
        oledShowMessage("Scale not calibrated!");
        vTaskDelay(5000 / portTICK_PERIOD_MS);
        yield();
        esp_task_wdt_reset();

        return;
    }

    // Show the scale reading on the display
    if (pauseMainTask == 0 && weight != lastWeight && hasReadRfidTag == 0)
    {
        (weight < 0) ? oledShowMessage("!! -1") : oledShowWeight(weight);
    }

    // If the timer has elapsed and no RFID tag is currently being written
    if (currentMillis - lastWeightReadTime >= weightReadInterval && hasReadRfidTag < 3)
    {
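setup() above mentions initializing the task watchdog with a 10 second timeout, and loop() feeds it with esp_task_wdt_reset(). A hedged sketch of that setup; the function name is illustrative, and the esp_task_wdt_init() signature shown matches ESP-IDF 4.x / Arduino-ESP32 2.x (newer cores use an esp_task_wdt_config_t struct instead, so adjust if needed):

#include <esp_task_wdt.h>

// Sketch: configure the task watchdog and register the current task.
void setupWatchdog() {
    const uint32_t wdtTimeoutSeconds = 10;
    bool panic = true;                       // a timeout triggers a system panic/reset
    esp_task_wdt_init(wdtTimeoutSeconds, panic);
    esp_task_wdt_add(NULL);                  // register the calling (loop) task
}

// Inside long-running work, feed the watchdog periodically:
// esp_task_wdt_reset();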
46
src/ota.cpp
Normal file
@@ -0,0 +1,46 @@
#include <Arduino.h>
#include "ota.h"
#include <Update.h>
#include <SPIFFS.h>
#include "commonFS.h"

void handleOTAUpload(AsyncWebServerRequest *request, String filename, size_t index, uint8_t *data, size_t len, bool final) {
    static size_t contentLength = 0;

    if (!index) {
        contentLength = request->contentLength();
        Serial.printf("Update size: %u bytes\n", contentLength);

        if (contentLength == 0) {
            request->send(400, "application/json", "{\"status\":\"error\",\"message\":\"Invalid file size\"}");
            return;
        }

        if (!Update.begin(contentLength)) {
            Serial.printf("Not enough space: %u required\n", contentLength);
            request->send(400, "application/json", "{\"status\":\"error\",\"message\":\"Not enough space available\"}");
            return;
        }

        Serial.println("Update started");
    }

    if (Update.write(data, len) != len) {
        Update.printError(Serial);
        request->send(400, "application/json", "{\"status\":\"error\",\"message\":\"Error writing update\"}");
        return;
    }

    if (final) {
        if (Update.end(true)) {
            Serial.println("Update complete");
            request->send(200, "application/json", "{\"status\":\"success\",\"message\":\"Update successful! Device will restart...\",\"restart\":true}");
            delay(1000);
            ESP.restart();
        } else {
            Update.printError(Serial);
            request->send(400, "application/json", "{\"status\":\"error\",\"message\":\"Update failed\"}");
        }
    }
}
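The new handler is wired to the async web server further down in website.cpp. Taken in isolation, the registration looks roughly like the sketch below; the route and callbacks mirror the /update registration shown later in this diff, only the wrapper function name is illustrative.

#include <ESPAsyncWebServer.h>
#include "ota.h"

// Sketch: attach handleOTAUpload() as the upload callback of a POST route.
void registerOtaRoute(AsyncWebServer &server) {
    server.on("/update", HTTP_POST,
        [](AsyncWebServerRequest *request) {
            // final response is sent from handleOTAUpload() once the upload finishes
        },
        [](AsyncWebServerRequest *request, const String& filename, size_t index,
           uint8_t *data, size_t len, bool final) {
            handleOTAUpload(request, filename, index, data, len, final);
        });
}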
8
src/ota.h
Normal file
@@ -0,0 +1,8 @@
#ifndef OTA_H
#define OTA_H

#include <ESPAsyncWebServer.h>

void handleOTAUpload(AsyncWebServerRequest *request, String filename, size_t index, uint8_t *data, size_t len, bool final);

#endif
@@ -3,9 +3,9 @@
#include <ArduinoJson.h>
#include "config.h"
#include "HX711.h"
#include <EEPROM.h>
#include "display.h"
#include "esp_task_wdt.h"
#include <Preferences.h>

HX711 scale;

@@ -16,11 +16,6 @@ int16_t weight = 0;
uint8_t weigthCouterToApi = 0;
uint8_t scale_tare_counter = 0;
uint8_t pauseMainTask = 0;
uint8_t scaleCalibrated = 1;

Preferences preferences;
const char* NVS_NAMESPACE = "scale";
const char* NVS_KEY_CALIBRATION = "cal_value";

// ##### Scale functions #####
uint8_t tareScale() {

@@ -51,24 +46,22 @@ void scale_loop(void * parameter) {
    }
}

uint8_t start_scale() {
void start_scale() {
    Serial.println("Prüfe Calibration Value");
    long calibrationValue;
    long calibrationValue; // calibration value (see example file "Calibration.ino")
    //calibrationValue = 696.0; // uncomment this if you want to set the calibration value in the sketch

    // NVS
    preferences.begin(NVS_NAMESPACE, true); // true = readonly
    calibrationValue = preferences.getLong(NVS_KEY_CALIBRATION, defaultScaleCalibrationValue);
    preferences.end();
    EEPROM.begin(512);
    EEPROM.get(calVal_eepromAdress, calibrationValue); // uncomment this if you want to fetch the calibration value from eeprom

    //calibrationValue = EEPROM.read(calVal_eepromAdress);

    Serial.print("Read Scale Calibration Value ");
    Serial.println(calibrationValue);

    scale.begin(LOADCELL_DOUT_PIN, LOADCELL_SCK_PIN);

    if (isnan(calibrationValue) || calibrationValue < 1) {
        calibrationValue = defaultScaleCalibrationValue;
        scaleCalibrated = 0;
    }
    if (isnan(calibrationValue) || calibrationValue < 1) calibrationValue = defaultScaleCalibrationValue;

    oledShowMessage("Scale Tare Please remove all");
    for (uint16_t i = 0; i < 2000; i++) {

@@ -101,8 +94,6 @@ uint8_t start_scale() {
    } else {
        Serial.println("ScaleLoop-Task erfolgreich erstellt");
    }

    return (scaleCalibrated == 1) ? 1 : 3;
}

uint8_t calibrate_scale() {

@@ -146,19 +137,18 @@ uint8_t calibrate_scale() {
    {
        Serial.print("New calibration value has been set to: ");
        Serial.println(newCalibrationValue);
        Serial.print("Save this value to EEPROM adress ");
        Serial.println(calVal_eepromAdress);

        // Save with NVS
        preferences.begin(NVS_NAMESPACE, false); // false = readwrite
        preferences.putLong(NVS_KEY_CALIBRATION, newCalibrationValue);
        preferences.end();
        //EEPROM.put(calVal_eepromAdress, newCalibrationValue);
        EEPROM.put(calVal_eepromAdress, newCalibrationValue);
        EEPROM.commit();

        // Verify
        preferences.begin(NVS_NAMESPACE, true);
        long verifyValue = preferences.getLong(NVS_KEY_CALIBRATION, 0);
        preferences.end();
        EEPROM.get(calVal_eepromAdress, newCalibrationValue);
        //newCalibrationValue = EEPROM.read(calVal_eepromAdress);

        Serial.print("Verified stored value: ");
        Serial.println(verifyValue);
        Serial.print("Read Value ");
        Serial.println(newCalibrationValue);

        Serial.println("End calibration, revome weight");

@@ -5,7 +5,7 @@
#include "HX711.h"

uint8_t start_scale();
void start_scale();
uint8_t calibrate_scale();
uint8_t tareScale();

@@ -14,8 +14,5 @@ extern int16_t weight;
extern uint8_t weigthCouterToApi;
extern uint8_t scale_tare_counter;
extern uint8_t pauseMainTask;
extern uint8_t scaleCalibrated;

extern TaskHandle_t ScaleTask;

#endif
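The scale.cpp hunks above contrast two persistence strategies for the calibration value: NVS via the Preferences library on one side, raw EEPROM emulation on the other. For reference, a minimal sketch of the Preferences side; the namespace and key names are taken from the diff, the helper functions are illustrative.

#include <Preferences.h>

// Sketch: store and load the scale calibration value in NVS.
static const char* kNamespace = "scale";
static const char* kCalibrationKey = "cal_value";

void saveCalibration(long value) {
    Preferences prefs;
    prefs.begin(kNamespace, false);          // false = read/write
    prefs.putLong(kCalibrationKey, value);
    prefs.end();
}

long loadCalibration(long fallback) {
    Preferences prefs;
    prefs.begin(kNamespace, true);           // true = read-only
    long value = prefs.getLong(kCalibrationKey, fallback);
    prefs.end();
    return value;
}

Unlike the EEPROM emulation, NVS handles wear leveling and typed keys itself, which is why no explicit commit() call appears in the Preferences variant.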
254
src/website.cpp
@@ -7,15 +7,10 @@
#include "nfc.h"
#include "scale.h"
#include "esp_task_wdt.h"
#include <Update.h>
#include "display.h"

#ifndef VERSION
#define VERSION "1.1.0"
#endif
#include "ota.h"

// Define the Cache-Control header
#define CACHE_CONTROL "max-age=604800" // cache for 1 week
#define CACHE_CONTROL "max-age=31536000" // cache for 1 year

AsyncWebServer server(webserverPort);
AsyncWebSocket ws("/ws");

@@ -23,49 +18,6 @@ AsyncWebSocket ws("/ws");
uint8_t lastSuccess = 0;
uint8_t lastHasReadRfidTag = 0;

// Global variables for config backups
String bambuCredentialsBackup;
String spoolmanUrlBackup;

// Global variable for the update type
static int currentUpdateCommand = 0;

// Global update variables
static size_t updateTotalSize = 0;
static size_t updateWritten = 0;
static bool isSpiffsUpdate = false;

void sendUpdateProgress(int progress, const char* status = nullptr, const char* message = nullptr) {
    static int lastSentProgress = -1;

    // Avoid sending updates too frequently
    if (progress == lastSentProgress && !status && !message) {
        return;
    }

    String progressMsg = "{\"type\":\"updateProgress\",\"progress\":" + String(progress);
    if (status) {
        progressMsg += ",\"status\":\"" + String(status) + "\"";
    }
    if (message) {
        progressMsg += ",\"message\":\"" + String(message) + "\"";
    }
    progressMsg += "}";

    // Send the message several times with a delay for important updates
    if (status || abs(progress - lastSentProgress) >= 10 || progress == 100) {
        for (int i = 0; i < 2; i++) {
            ws.textAll(progressMsg);
            delay(100); // longer delay between messages
        }
    } else {
        ws.textAll(progressMsg);
        delay(50);
    }

    lastSentProgress = progress;
}
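sendUpdateProgress() above assembles its WebSocket message by string concatenation. As a hedged alternative, the same message shape can be built with ArduinoJson, which takes care of quoting and escaping; the field names mirror the function above, the helper name is illustrative.

#include <ArduinoJson.h>

// Sketch: build the updateProgress message with ArduinoJson instead of
// manual string concatenation.
String buildProgressMessage(int progress, const char* status, const char* message) {
    JsonDocument doc;
    doc["type"] = "updateProgress";
    doc["progress"] = progress;
    if (status)  doc["status"]  = status;
    if (message) doc["message"] = message;

    String out;
    serializeJson(doc, out);   // e.g. {"type":"updateProgress","progress":42,"status":"uploading"}
    return out;
}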
void onWsEvent(AsyncWebSocket *server, AsyncWebSocketClient *client, AwsEventType type, void *arg, uint8_t *data, size_t len) {
    if (type == WS_EVT_CONNECT) {
        Serial.println("Neuer Client verbunden!");

@@ -76,10 +28,6 @@ void onWsEvent(AsyncWebSocket *server, AsyncWebSocketClient *client, AwsEventTyp
        sendWriteResult(client, 3);
    } else if (type == WS_EVT_DISCONNECT) {
        Serial.println("Client getrennt.");
    } else if (type == WS_EVT_ERROR) {
        Serial.printf("WebSocket Client #%u error(%u): %s\n", client->id(), *((uint16_t*)arg), (char*)data);
    } else if (type == WS_EVT_PONG) {
        Serial.printf("WebSocket Client #%u pong\n", client->id());
    } else if (type == WS_EVT_DATA) {
        String message = String((char*)data);
        JsonDocument doc;

@@ -96,7 +44,7 @@ void onWsEvent(AsyncWebSocket *server, AsyncWebSocketClient *client, AwsEventTyp
    }

    else if (doc["type"] == "writeNfcTag") {
        if (doc["payload"].is<String>()) {
        if (doc.containsKey("payload")) {
            // Try to write the NFC data
            String payloadString;
            serializeJson(doc["payload"], payloadString);
@@ -203,121 +151,11 @@ void sendNfcData(AsyncWebSocketClient *client) {

void sendAmsData(AsyncWebSocketClient *client) {
    if (ams_count > 0) {
        ws.textAll("{\"type\":\"amsData\",\"payload\":" + amsJsonData + "}");
        ws.textAll("{\"type\":\"amsData\", \"payload\":" + amsJsonData + "}");
    }
}

void handleUpdate(AsyncWebServer &server) {
    AsyncCallbackWebHandler* updateHandler = new AsyncCallbackWebHandler();
    updateHandler->setUri("/update");
    updateHandler->setMethod(HTTP_POST);

    updateHandler->onUpload([](AsyncWebServerRequest *request, String filename,
                               size_t index, uint8_t *data, size_t len, bool final) {
        if (!index) {
            updateTotalSize = request->contentLength();
            updateWritten = 0;
            isSpiffsUpdate = (filename.indexOf("website") > -1);

            if (isSpiffsUpdate) {
                // Back up before the update
                sendUpdateProgress(0, "backup", "Backing up configurations...");
                delay(200);
                backupJsonConfigs();
                delay(200);

                const esp_partition_t *partition = esp_partition_find_first(ESP_PARTITION_TYPE_DATA, ESP_PARTITION_SUBTYPE_DATA_SPIFFS, NULL);
                if (!partition || !Update.begin(partition->size, U_SPIFFS)) {
                    request->send(400, "application/json", "{\"success\":false,\"message\":\"Update initialization failed\"}");
                    return;
                }
                sendUpdateProgress(5, "starting", "Starting SPIFFS update...");
                delay(200);
            } else {
                if (!Update.begin(updateTotalSize)) {
                    request->send(400, "application/json", "{\"success\":false,\"message\":\"Update initialization failed\"}");
                    return;
                }
                sendUpdateProgress(0, "starting", "Starting firmware update...");
                delay(200);
            }
        }

        if (len) {
            if (Update.write(data, len) != len) {
                request->send(400, "application/json", "{\"success\":false,\"message\":\"Write failed\"}");
                return;
            }

            updateWritten += len;
            int currentProgress;

            // Calculate the progress based on the update type
            if (isSpiffsUpdate) {
                // SPIFFS: 5-75% for the upload
                currentProgress = 5 + (updateWritten * 100) / updateTotalSize;
            } else {
                // Firmware: 0-100% for the upload
                currentProgress = 1 + (updateWritten * 100) / updateTotalSize;
            }

            static int lastProgress = -1;
            if (currentProgress != lastProgress && (currentProgress % 10 == 0 || final)) {
                sendUpdateProgress(currentProgress, "uploading");
                oledShowMessage("Update: " + String(currentProgress) + "%");
                delay(50);
                lastProgress = currentProgress;
            }
        }

        if (final) {
            if (Update.end(true)) {
                if (isSpiffsUpdate) {
                    restoreJsonConfigs();
                }
            } else {
                request->send(400, "application/json", "{\"success\":false,\"message\":\"Update finalization failed\"}");
            }
        }
    });

    updateHandler->onRequest([](AsyncWebServerRequest *request) {
        if (Update.hasError()) {
            request->send(400, "application/json", "{\"success\":false,\"message\":\"Update failed\"}");
            return;
        }

        // First 100% message
        ws.textAll("{\"type\":\"updateProgress\",\"progress\":100,\"status\":\"success\",\"message\":\"Update successful! Restarting device...\"}");
        delay(2000); // longer delay for the first message

        AsyncWebServerResponse *response = request->beginResponse(200, "application/json",
            "{\"success\":true,\"message\":\"Update successful! Restarting device...\"}");
        response->addHeader("Connection", "close");
        request->send(response);

        // Second 100% message to be safe
        ws.textAll("{\"type\":\"updateProgress\",\"progress\":100,\"status\":\"success\",\"message\":\"Update successful! Restarting device...\"}");
        delay(3000); // even longer delay before the restart

        ESP.restart();
    });

    server.addHandler(updateHandler);
}

void setupWebserver(AsyncWebServer &server) {
    // Disable all debug output
    Serial.setDebugOutput(false);

    // WebSocket optimizations
    ws.onEvent(onWsEvent);
    ws.enable(true);

    // Configure the server for large uploads
    server.onRequestBody([](AsyncWebServerRequest *request, uint8_t *data, size_t len, size_t index, size_t total){});
    server.onFileUpload([](AsyncWebServerRequest *request, const String& filename, size_t index, uint8_t *data, size_t len, bool final){});

    // Load the Spoolman URL at boot
    spoolmanUrl = loadSpoolmanUrl();
    Serial.print("Geladene Spoolman-URL: ");
@@ -384,8 +222,7 @@ void setupWebserver(AsyncWebServer &server) {
        html.replace("{{spoolmanUrl}}", spoolmanUrl);

        JsonDocument doc;
        if (loadJsonValue("/bambu_credentials.json", doc) && doc["bambu_ip"].is<String>())
        {
        if (loadJsonValue("/bambu_credentials.json", doc) && doc.containsKey("bambu_ip")) {
            String bambuIp = doc["bambu_ip"].as<String>();
            String bambuSerial = doc["bambu_serialnr"].as<String>();
            String bambuCode = doc["bambu_accesscode"].as<String>();

@@ -397,12 +234,6 @@ void setupWebserver(AsyncWebServer &server) {
            html.replace("{{bambuSerial}}", bambuSerial ? bambuSerial : "");
            html.replace("{{bambuCode}}", bambuCode ? bambuCode : "");
        }
        else
        {
            html.replace("{{bambuIp}}", "");
            html.replace("{{bambuSerial}}", "");
            html.replace("{{bambuCode}}", "");
        }

        request->send(200, "text/html", html);
    });

@@ -507,22 +338,30 @@ void setupWebserver(AsyncWebServer &server) {
        Serial.println("RFID.js gesendet");
    });

    // Simplified update handler
    // Route for Firmware Update
    server.on("/upgrade", HTTP_GET, [](AsyncWebServerRequest *request) {
        // During OTA, reduce memory usage
        ws.enable(false); // Temporarily disable WebSocket
        ws.cleanupClients();

        Serial.println("Request for /upgrade received");
        AsyncWebServerResponse *response = request->beginResponse(SPIFFS, "/upgrade.html.gz", "text/html");
        response->addHeader("Content-Encoding", "gzip");
        response->addHeader("Cache-Control", "no-store");
        response->addHeader("Cache-Control", CACHE_CONTROL);
        request->send(response);
    });

    // Register the update handler
    handleUpdate(server);

    server.on("/api/version", HTTP_GET, [](AsyncWebServerRequest *request){
        String fm_version = VERSION;
        String jsonResponse = "{\"version\": \""+ fm_version +"\"}";
        request->send(200, "application/json", jsonResponse);
    });
    server.on("/update", HTTP_POST,
        [](AsyncWebServerRequest *request) {
            // The response will be sent from handleOTAUpload when the upload is complete
        },
        [](AsyncWebServerRequest *request, const String& filename, size_t index, uint8_t *data, size_t len, bool final) {
            // Free memory before handling update
            ws.enable(false);
            ws.cleanupClients();
            handleOTAUpload(request, filename, index, data, len, final);
        }
    );

    // Error handling for pages that are not found
    server.onNotFound([](AsyncWebServerRequest *request){

@@ -540,50 +379,3 @@ void setupWebserver(AsyncWebServer &server) {
    server.begin();
    Serial.println("Webserver gestartet");
}

void backupJsonConfigs() {
    // Bambu credentials backup
    if (SPIFFS.exists("/bambu_credentials.json")) {
        File file = SPIFFS.open("/bambu_credentials.json", "r");
        if (file) {
            bambuCredentialsBackup = file.readString();
            file.close();
            Serial.println("Bambu credentials backed up");
        }
    }

    // Spoolman URL backup
    if (SPIFFS.exists("/spoolman_url.json")) {
        File file = SPIFFS.open("/spoolman_url.json", "r");
        if (file) {
            spoolmanUrlBackup = file.readString();
            file.close();
            Serial.println("Spoolman URL backed up");
        }
    }
}

void restoreJsonConfigs() {
    // Restore Bambu credentials
    if (bambuCredentialsBackup.length() > 0) {
        File file = SPIFFS.open("/bambu_credentials.json", "w");
        if (file) {
            file.print(bambuCredentialsBackup);
            file.close();
            Serial.println("Bambu credentials restored");
        }
        bambuCredentialsBackup = ""; // Clear backup
    }

    // Restore Spoolman URL
    if (spoolmanUrlBackup.length() > 0) {
        File file = SPIFFS.open("/spoolman_url.json", "w");
        if (file) {
            file.print(spoolmanUrlBackup);
            file.close();
            Serial.println("Spoolman URL restored");
        }
        spoolmanUrlBackup = ""; // Clear backup
    }
}
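The /upgrade route above serves a pre-compressed page from SPIFFS with an explicit Content-Encoding header and a Cache-Control policy. The same pattern generalizes to any pre-gzipped static asset; a hedged sketch, where the route and file name are illustrative and only the header handling mirrors the code above:

#include <ESPAsyncWebServer.h>
#include <SPIFFS.h>

// Sketch: serve a pre-compressed SPIFFS asset with gzip and cache headers.
void serveGzippedAsset(AsyncWebServer &server) {
    server.on("/example.css", HTTP_GET, [](AsyncWebServerRequest *request) {
        AsyncWebServerResponse *response =
            request->beginResponse(SPIFFS, "/example.css.gz", "text/css");
        response->addHeader("Content-Encoding", "gzip");   // file is stored pre-compressed
        response->addHeader("Cache-Control", "no-store");  // or a long max-age for immutable assets
        request->send(response);
    });
}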
@@ -6,8 +6,8 @@
#include "commonFS.h"
#include "api.h"
#include <ArduinoJson.h>
#include <Update.h>
#include <AsyncTCP.h>
#include <ESPAsyncWebServer.h>
#include <AsyncWebSocket.h>
#include "bambu.h"
#include "nfc.h"
#include "scale.h"

@@ -17,20 +17,10 @@ extern String spoolmanUrl;
extern AsyncWebServer server;
extern AsyncWebSocket ws;

// Server initialization and handlers
void initWebServer();
void handleUpload(AsyncWebServerRequest *request, String filename, size_t index, uint8_t *data, size_t len, bool final);
void handleBody(AsyncWebServerRequest *request, uint8_t *data, size_t len, size_t index, size_t total);
void setupWebserver(AsyncWebServer &server);

// WebSocket functions
void sendAmsData(AsyncWebSocketClient *client);
void sendNfcData(AsyncWebSocketClient *client);
void foundNfcTag(AsyncWebSocketClient *client, uint8_t success);
void sendWriteResult(AsyncWebSocketClient *client, uint8_t success);

// Upgrade functions
void backupJsonConfigs();
void restoreJsonConfigs();

#endif
12
src/wlan.cpp
@@ -10,19 +10,9 @@ WiFiManager wm;
bool wm_nonblocking = false;

void initWiFi() {
    // Optimized WiFi settings
    WiFi.mode(WIFI_STA); // explicitly set mode, esp defaults to STA+AP
    WiFi.setSleep(false); // disable sleep mode
    esp_wifi_set_ps(WIFI_PS_NONE);

    // Maximum transmit power
    WiFi.setTxPower(WIFI_POWER_19_5dBm); // Set maximum transmit power

    // Optimize the TCP/IP stack
    esp_wifi_set_protocol(WIFI_IF_STA, WIFI_PROTOCOL_11B | WIFI_PROTOCOL_11G | WIFI_PROTOCOL_11N);

    // Enable WiFi roaming for better stability
    esp_wifi_set_rssi_threshold(-80);
    esp_wifi_set_max_tx_power(72); // set maximum transmit power to 20 dBm

    if(wm_nonblocking) wm.setConfigPortalBlocking(false);
    wm.setConfigPortalTimeout(320); // close the portal after about 5 minutes
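For context, the WiFiManager settings shown above are usually paired with an autoConnect() call that only opens the configuration portal when no stored credentials work. A hedged sketch; the access point name is illustrative, the timeout value matches the diff:

#include <WiFiManager.h>

// Sketch: blocking WiFiManager setup with a bounded config portal.
void connectWiFi() {
    WiFiManager wm;
    wm.setConfigPortalBlocking(true);   // block until configured (wm_nonblocking == false)
    wm.setConfigPortalTimeout(320);     // give up on the portal after ~5 minutes

    if (!wm.autoConnect("FilaMan")) {
        Serial.println("WiFi setup failed, restarting");
        ESP.restart();
    }
}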