Compare commits


2 Commits

42 changed files with 730 additions and 2785 deletions


@@ -1,208 +0,0 @@
name: Gitea Release
on:
workflow_call:
secrets:
GITEA_TOKEN:
description: 'Token for Gitea API access'
required: true
outputs:
version:
description: 'The version that was released'
value: ${{ jobs.create-release.outputs.version }}
jobs:
create-release:
runs-on: ubuntu-latest
outputs:
version: ${{ steps.get_version.outputs.VERSION }}
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: '3.x'
- name: Install PlatformIO
run: |
python -m pip install --upgrade pip
pip install --upgrade platformio esptool
- name: Install xxd
run: |
sudo apt-get update
sudo apt-get install xxd
- name: Build Firmware
run: |
VERSION=$(grep '^version = ' platformio.ini | cut -d'"' -f2)
# Build firmware and SPIFFS
echo "Building firmware and SPIFFS..."
pio run -e esp32dev
pio run -t buildfs
# Copy firmware binary
cp .pio/build/esp32dev/firmware.bin .pio/build/esp32dev/upgrade_filaman_firmware_v${VERSION}.bin
# Create SPIFFS binary - direct copy without header
cp .pio/build/esp32dev/spiffs.bin .pio/build/esp32dev/upgrade_filaman_website_v${VERSION}.bin
# Create full binary
(cd .pio/build/esp32dev &&
esptool.py --chip esp32 merge_bin \
--fill-flash-size 4MB \
--flash_mode dio \
--flash_freq 40m \
--flash_size 4MB \
-o filaman_full_${VERSION}.bin \
0x1000 bootloader.bin \
0x8000 partitions.bin \
0x10000 firmware.bin \
0x3D0000 spiffs.bin)
# Verify file sizes
echo "File sizes:"
(cd .pio/build/esp32dev && ls -lh *.bin)
- name: Get version from platformio.ini
id: get_version
run: |
VERSION=$(grep '^version = ' platformio.ini | cut -d'"' -f2)
echo "VERSION=$VERSION" >> $GITHUB_OUTPUT
- name: Generate Release Notes
id: release_notes
run: |
# Get the latest tag
LATEST_TAG=$(git for-each-ref --sort=-creatordate --format '%(refname:short)' refs/tags | sed -n '2p')
if [ -n "$LATEST_TAG" ]; then
echo "CHANGES<<EOF" >> $GITHUB_OUTPUT
echo "Changes since ${LATEST_TAG}:" >> $GITHUB_OUTPUT
echo "" >> $GITHUB_OUTPUT
# Get all commits since last release with commit hash and author
echo "### Added" >> $GITHUB_OUTPUT
git log ${LATEST_TAG}..HEAD --pretty=format:"%h - %s (%an)" | grep -iE '^[a-f0-9]+ - (feat|add|new)' | sed 's/^[a-f0-9]* - feat: /- /' >> $GITHUB_OUTPUT || true
echo "" >> $GITHUB_OUTPUT
echo "### Fixed" >> $GITHUB_OUTPUT
git log ${LATEST_TAG}..HEAD --pretty=format:"%h - %s (%an)" | grep -iE '^[a-f0-9]+ - fix' | sed 's/^[a-f0-9]* - fix: /- /' >> $GITHUB_OUTPUT || true
echo "" >> $GITHUB_OUTPUT
echo "### Changed" >> $GITHUB_OUTPUT
git log ${LATEST_TAG}..HEAD --pretty=format:"%h - %s (%an)" | grep -ivE '^[a-f0-9]+ - (feat|fix|add|new)' | sed 's/^[a-f0-9]* - /- /' >> $GITHUB_OUTPUT || true
echo "EOF" >> $GITHUB_OUTPUT
else
# First release
echo "CHANGES<<EOF" >> $GITHUB_OUTPUT
echo "Initial Release" >> $GITHUB_OUTPUT
echo "" >> $GITHUB_OUTPUT
# Add all commits for initial release
echo "### Added" >> $GITHUB_OUTPUT
git log --pretty=format:"%h - %s (%an)" | grep -iE '^[a-f0-9]+ - (feat|add|new)' | sed 's/^[a-f0-9]* - feat: /- /' >> $GITHUB_OUTPUT || true
echo "" >> $GITHUB_OUTPUT
echo "### Fixed" >> $GITHUB_OUTPUT
git log --pretty=format:"%h - %s (%an)" | grep -iE '^[a-f0-9]+ - fix' | sed 's/^[a-f0-9]* - fix: /- /' >> $GITHUB_OUTPUT || true
echo "" >> $GITHUB_OUTPUT
echo "### Changed" >> $GITHUB_OUTPUT
git log --pretty=format:"%h - %s (%an)" | grep -ivE '^[a-f0-9]+ - (feat|fix|add|new)' | sed 's/^[a-f0-9]* - /- /' >> $GITHUB_OUTPUT || true
echo "EOF" >> $GITHUB_OUTPUT
fi
- name: Determine Gitea URL
id: gitea_url
run: |
echo "Debug Environment:"
echo "GITHUB_SERVER_URL=${GITHUB_SERVER_URL:-not set}"
echo "GITEA_SERVER_URL=${GITEA_SERVER_URL:-not set}"
echo "GITHUB_REPOSITORY=${GITHUB_REPOSITORY:-not set}"
echo "GITEA_REPOSITORY=${GITEA_REPOSITORY:-not set}"
echo "RUNNER_NAME=${RUNNER_NAME:-not set}"
# Set API URL based on environment
if [ -n "${GITEA_ACTIONS}" ] || [ -n "${GITEA_REPOSITORY}" ] || [[ "${RUNNER_NAME}" == *"gitea"* ]]; then
GITEA_API_URL="${GITHUB_SERVER_URL}"
GITEA_REPO=$(echo "${GITHUB_REPOSITORY}" | cut -d'/' -f2)
GITEA_OWNER=$(echo "${GITHUB_REPOSITORY}" | cut -d'/' -f1)
else
echo "Error: This workflow is only for Gitea"
exit 1
fi
echo "GITEA_API_URL=${GITEA_API_URL}" >> $GITHUB_OUTPUT
echo "GITEA_REPO=${GITEA_REPO}" >> $GITHUB_OUTPUT
echo "GITEA_OWNER=${GITEA_OWNER}" >> $GITHUB_OUTPUT
- name: Create Gitea Release
env:
GITEA_TOKEN: ${{ secrets.GITEA_TOKEN }}
GITEA_API_URL: ${{ steps.gitea_url.outputs.GITEA_API_URL }}
GITEA_REPO: ${{ steps.gitea_url.outputs.GITEA_REPO }}
GITEA_OWNER: ${{ steps.gitea_url.outputs.GITEA_OWNER }}
run: |
# Debug token (only print its length, for security)
echo "Debug: Token length: ${#GITEA_TOKEN}"
if [ -z "$GITEA_TOKEN" ]; then
echo "Error: GITEA_TOKEN is empty"
exit 1
fi
VERSION=${{ steps.get_version.outputs.VERSION }}
cd .pio/build/esp32dev
# Debug output
echo "Debug: API URL: ${GITEA_API_URL}"
echo "Debug: Repository: ${GITEA_OWNER}/${GITEA_REPO}"
# First create the release without any files
echo "Debug: Creating release..."
RELEASE_DATA="{\"tag_name\":\"v${VERSION}\",\"name\":\"v${VERSION}\",\"body\":\"${{ steps.release_notes.outputs.CHANGES }}\"}"
RELEASE_RESPONSE=$(curl -s -w "\n%{http_code}" \
-X POST \
-H "Authorization: token ${GITEA_TOKEN}" \
-H "Content-Type: application/json" \
-d "${RELEASE_DATA}" \
"${GITEA_API_URL}/api/v1/repos/${GITEA_OWNER}/${GITEA_REPO}/releases")
RELEASE_STATUS=$(echo "$RELEASE_RESPONSE" | tail -n1)
RELEASE_BODY=$(echo "$RELEASE_RESPONSE" | head -n -1)
if [ "$RELEASE_STATUS" != "201" ]; then
echo "Error: Failed to create release"
echo "Response: $RELEASE_BODY"
exit 1
fi
# Extract the release ID from the response
RELEASE_ID=$(echo "$RELEASE_BODY" | grep -o '"id":[0-9]*' | cut -d':' -f2)
# Upload the files one at a time
for file in upgrade_filaman_firmware_v${VERSION}.bin upgrade_filaman_website_v${VERSION}.bin filaman_full_${VERSION}.bin; do
if [ -f "$file" ]; then
echo "Debug: Uploading $file..."
UPLOAD_RESPONSE=$(curl -s -w "\n%{http_code}" \
-X POST \
-H "Authorization: token ${GITEA_TOKEN}" \
-H "Content-Type: application/octet-stream" \
--data-binary @"$file" \
"${GITEA_API_URL}/api/v1/repos/${GITEA_OWNER}/${GITEA_REPO}/releases/${RELEASE_ID}/assets?name=${file}")
UPLOAD_STATUS=$(echo "$UPLOAD_RESPONSE" | tail -n1)
if [ "$UPLOAD_STATUS" != "201" ]; then
echo "Warning: Failed to upload $file"
echo "Response: $(echo "$UPLOAD_RESPONSE" | head -n -1)"
else
echo "Successfully uploaded $file"
fi
fi
done


@@ -1,185 +0,0 @@
name: GitHub Release
on:
workflow_call:
secrets:
RELEASE_TOKEN:
description: 'GitHub token for release creation'
required: true
permissions:
contents: write
jobs:
create-release:
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: '3.x'
- name: Install PlatformIO
run: |
python -m pip install --upgrade pip
pip install --upgrade platformio esptool
- name: Install xxd
run: |
sudo apt-get update
sudo apt-get install xxd
- name: Build Firmware
run: |
VERSION=$(grep '^version = ' platformio.ini | cut -d'"' -f2)
# Always build firmware and SPIFFS
echo "Building firmware and SPIFFS..."
pio run -e esp32dev
pio run -t buildfs
# Copy firmware binary
cp .pio/build/esp32dev/firmware.bin .pio/build/esp32dev/upgrade_filaman_firmware_v${VERSION}.bin
# Create SPIFFS binary - direct copy without header
cp .pio/build/esp32dev/spiffs.bin .pio/build/esp32dev/upgrade_filaman_website_v${VERSION}.bin
# Create full binary (always)
(cd .pio/build/esp32dev &&
esptool.py --chip esp32 merge_bin \
--fill-flash-size 4MB \
--flash_mode dio \
--flash_freq 40m \
--flash_size 4MB \
-o filaman_full_${VERSION}.bin \
0x1000 bootloader.bin \
0x8000 partitions.bin \
0x10000 firmware.bin \
0x3D0000 spiffs.bin)
# Verify file sizes
echo "File sizes:"
(cd .pio/build/esp32dev && ls -lh *.bin)
- name: Get version from platformio.ini
id: get_version
run: |
VERSION=$(grep '^version = ' platformio.ini | cut -d'"' -f2)
echo "VERSION=$VERSION" >> $GITHUB_OUTPUT
- name: Generate Release Notes
id: release_notes
run: |
# Get the latest tag
LATEST_TAG=$(git for-each-ref --sort=-creatordate --format '%(refname:short)' refs/tags | sed -n '2p')
if [ -n "$LATEST_TAG" ]; then
echo "CHANGES<<EOF" >> $GITHUB_OUTPUT
echo "Changes since ${LATEST_TAG}:" >> $GITHUB_OUTPUT
echo "" >> $GITHUB_OUTPUT
# Get all commits since last release with commit hash and author
echo "### Added" >> $GITHUB_OUTPUT
git log ${LATEST_TAG}..HEAD --pretty=format:"%h - %s (%an)" | grep -iE '^[a-f0-9]+ - (feat|add|new)' | sed 's/^[a-f0-9]* - feat: /- /' >> $GITHUB_OUTPUT || true
echo "" >> $GITHUB_OUTPUT
echo "### Fixed" >> $GITHUB_OUTPUT
git log ${LATEST_TAG}..HEAD --pretty=format:"%h - %s (%an)" | grep -iE '^[a-f0-9]+ - fix' | sed 's/^[a-f0-9]* - fix: /- /' >> $GITHUB_OUTPUT || true
echo "" >> $GITHUB_OUTPUT
echo "### Changed" >> $GITHUB_OUTPUT
git log ${LATEST_TAG}..HEAD --pretty=format:"%h - %s (%an)" | grep -ivE '^[a-f0-9]+ - (feat|fix|add|new)' | sed 's/^[a-f0-9]* - /- /' >> $GITHUB_OUTPUT || true
echo "EOF" >> $GITHUB_OUTPUT
else
# First release
echo "CHANGES<<EOF" >> $GITHUB_OUTPUT
echo "Initial Release" >> $GITHUB_OUTPUT
echo "" >> $GITHUB_OUTPUT
# Add all commits for initial release
echo "### Added" >> $GITHUB_OUTPUT
git log --pretty=format:"%h - %s (%an)" | grep -iE '^[a-f0-9]+ - (feat|add|new)' | sed 's/^[a-f0-9]* - feat: /- /' >> $GITHUB_OUTPUT || true
echo "" >> $GITHUB_OUTPUT
echo "### Fixed" >> $GITHUB_OUTPUT
git log --pretty=format:"%h - %s (%an)" | grep -iE '^[a-f0-9]+ - fix' | sed 's/^[a-f0-9]* - fix: /- /' >> $GITHUB_OUTPUT || true
echo "" >> $GITHUB_OUTPUT
echo "### Changed" >> $GITHUB_OUTPUT
git log --pretty=format:"%h - %s (%an)" | grep -ivE '^[a-f0-9]+ - (feat|fix|add|new)' | sed 's/^[a-f0-9]* - /- /' >> $GITHUB_OUTPUT || true
echo "EOF" >> $GITHUB_OUTPUT
fi
- name: Create GitHub Release
env:
GH_TOKEN: ${{ secrets.RELEASE_TOKEN }}
run: |
VERSION=${{ steps.get_version.outputs.VERSION }}
cd .pio/build/esp32dev
# Create release with available files
FILES_TO_UPLOAD=""
# Always add firmware
if [ -f "upgrade_filaman_firmware_v${VERSION}.bin" ]; then
FILES_TO_UPLOAD="$FILES_TO_UPLOAD upgrade_filaman_firmware_v${VERSION}.bin"
fi
# Add SPIFFS and full binary only if they exist
if [ -f "upgrade_filaman_website_v${VERSION}.bin" ]; then
FILES_TO_UPLOAD="$FILES_TO_UPLOAD upgrade_filaman_website_v${VERSION}.bin"
fi
if [ -f "filaman_full_${VERSION}.bin" ]; then
FILES_TO_UPLOAD="$FILES_TO_UPLOAD filaman_full_${VERSION}.bin"
fi
# Create release with available files
if [ -n "$FILES_TO_UPLOAD" ]; then
gh release create "v${VERSION}" \
--title "Release ${VERSION}" \
--notes "${{ steps.release_notes.outputs.CHANGES }}" \
$FILES_TO_UPLOAD
else
echo "Error: No files found to upload"
exit 1
fi
- name: Install lftp
run: sudo apt-get install -y lftp
- name: Upload Firmware via FTP
if: success()
env:
FTP_PASSWORD: ${{ vars.FTP_PASSWORD }}
FTP_USER: ${{ vars.FTP_USER }}
FTP_HOST: ${{ vars.FTP_HOST }}
VERSION: ${{ steps.get_version.outputs.VERSION }}
run: |
echo "Environment variables:"
env | grep -E '^FTP_' | while read -r line; do
var_name=$(echo "$line" | cut -d= -f1)
var_value=$(echo "$line" | cut -d= -f2-)
echo "$var_name is $(if [ -n "$var_value" ]; then echo "set"; else echo "empty"; fi)"
done
cd .pio/build/esp32dev
if [ -n "$FTP_USER" ] && [ -n "$FTP_PASSWORD" ] && [ -n "$FTP_HOST" ]; then
echo "All FTP credentials are present, attempting upload..."
lftp -c "set ssl:verify-certificate no; \
set ftp:ssl-protect-data true; \
set ftp:ssl-force true; \
set ssl:check-hostname false; \
set ftp:ssl-auth TLS; \
open -u $FTP_USER,$FTP_PASSWORD $FTP_HOST; \
put -O / filaman_full_${VERSION}.bin -o filaman_full.bin"
else
echo "Error: Some FTP credentials are missing"
exit 1
fi


@@ -0,0 +1,103 @@
name: Gitea Release
on:
push:
tags:
- 'v*'
permissions:
contents: write # Required for creating releases
issues: read # Required for reading changelog
pull-requests: read # Required for reading changelog
jobs:
build:
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: '3.x'
- name: Install PlatformIO
run: |
python -m pip install --upgrade pip
pip install --upgrade platformio
- name: Build Firmware
run: |
pio run -t buildfs # Build SPIFFS
pio run # Build firmware
- name: Install esptool
run: |
pip install esptool
- name: Merge firmware and SPIFFS
run: |
esptool.py --chip esp32 merge_bin \
--flash_mode dio \
--flash_freq 40m \
--flash_size 4MB \
-o .pio/build/esp32dev/filaman_full.bin \
0x1000 .pio/build/esp32dev/bootloader.bin \
0x8000 .pio/build/esp32dev/partitions.bin \
0x10000 .pio/build/esp32dev/firmware.bin \
0x290000 .pio/build/esp32dev/spiffs.bin
- name: Prepare OTA firmware
run: |
cp .pio/build/esp32dev/firmware.bin .pio/build/esp32dev/filaman_ota.bin
- name: Get version from tag
id: get_version
run: echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_OUTPUT
- name: Read CHANGELOG.md
id: changelog
run: |
CHANGELOG=$(awk "/## \\[${{ steps.get_version.outputs.VERSION }}\\]/{p=1;print;next} /## \\[/{p=0} p" CHANGELOG.md)
echo "CHANGES<<EOF" >> $GITHUB_OUTPUT
echo "$CHANGELOG" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
- name: Create Release
env:
GITEA_TOKEN: ${{ secrets.GITEA_TOKEN }}
GITEA_API_URL: ${{ secrets.GITEA_API_URL }}
GITEA_REPOSITORY: ${{ secrets.GITEA_REPOSITORY }}
run: |
# Create release using Gitea API
RESPONSE=$(curl -X POST \
-H "Authorization: token ${GITEA_TOKEN}" \
-H "Content-Type: application/json" \
-H "accept: application/json" \
"${GITEA_API_URL}/repos/${GITEA_REPOSITORY}/releases" \
-d '{
"tag_name": "${{ github.ref_name }}",
"name": "Release ${{ steps.get_version.outputs.VERSION }}",
"body": "${{ steps.changelog.outputs.CHANGES }}",
"draft": false,
"prerelease": false
}')
# Extract release ID from response
RELEASE_ID=$(echo $RESPONSE | jq -r .id)
# Upload full firmware
curl -X POST \
-H "Authorization: token ${GITEA_TOKEN}" \
-H "Content-Type: application/octet-stream" \
"${GITEA_API_URL}/repos/${GITEA_REPOSITORY}/releases/${RELEASE_ID}/assets?name=filaman_full.bin" \
--data-binary @.pio/build/esp32dev/filaman_full.bin
# Upload OTA firmware
curl -X POST \
-H "Authorization: token ${GITEA_TOKEN}" \
-H "Content-Type: application/octet-stream" \
"${GITEA_API_URL}/repos/${GITEA_REPOSITORY}/releases/${RELEASE_ID}/assets?name=filaman_ota.bin" \
--data-binary @.pio/build/esp32dev/filaman_ota.bin
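The new workflow only fires when a `v*` tag is pushed. As a minimal sketch (assuming the version has already been bumped to 1.2.4 in platformio.ini and CHANGELOG.md), a release would be cut like this:

```bash
# Hypothetical release trigger: the tag name must match the 'v*' pattern above
git tag v1.2.4
git push origin v1.2.4
```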


@@ -0,0 +1,85 @@
name: GitHub Release
on:
push:
tags:
- 'v*'
permissions:
contents: write # Required for creating releases
issues: read # Required for reading changelog
pull-requests: read # Required for reading changelog
jobs:
build:
runs-on: ubuntu-latest
permissions:
contents: write # Required for creating releases at job level
steps:
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: '3.x'
- name: Install PlatformIO
run: |
python -m pip install --upgrade pip
pip install --upgrade platformio
- name: Build Firmware
run: |
pio run -t buildfs # Build SPIFFS
pio run # Build firmware
- name: Install esptool
run: |
pip install esptool
- name: Merge firmware and SPIFFS
run: |
esptool.py --chip esp32 merge_bin \
--flash_mode dio \
--flash_freq 40m \
--flash_size 4MB \
-o .pio/build/esp32dev/filaman_full.bin \
0x1000 .pio/build/esp32dev/bootloader.bin \
0x8000 .pio/build/esp32dev/partitions.bin \
0x10000 .pio/build/esp32dev/firmware.bin \
0x290000 .pio/build/esp32dev/spiffs.bin
- name: Prepare OTA firmware
run: |
# Use PlatformIO to create a proper OTA image
cp .pio/build/esp32dev/firmware.bin .pio/build/esp32dev/filaman_ota.bin
- name: Get version from tag
id: get_version
run: echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_OUTPUT
- name: Read CHANGELOG.md
id: changelog
run: |
CHANGELOG=$(awk "/## \\[${{ steps.get_version.outputs.VERSION }}\\]/{p=1;print;next} /## \\[/{p=0} p" CHANGELOG.md)
echo "CHANGES<<EOF" >> $GITHUB_OUTPUT
echo "$CHANGELOG" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
- name: Install and Configure GitHub CLI
run: |
curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | sudo dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg \
&& sudo chmod go+r /usr/share/keyrings/githubcli-archive-keyring.gpg \
&& echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null \
&& sudo apt update \
&& sudo apt install gh -y
- name: Create Release with GitHub CLI
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
gh release create "${{ github.ref_name }}" \
--title "Release ${{ steps.get_version.outputs.VERSION }}" \
--notes "${{ steps.changelog.outputs.CHANGES }}" \
".pio/build/esp32dev/filaman_full.bin#filaman_full.bin" \
".pio/build/esp32dev/filaman_ota.bin#filaman_ota.bin"


@@ -1,41 +1,32 @@
name: Release Workflow
name: Release
on:
push:
tags:
- 'v*'
permissions:
contents: write
jobs:
detect-provider:
detect-and-run:
runs-on: ubuntu-latest
outputs:
provider: ${{ steps.provider.outputs.provider }}
steps:
- name: Determine CI Provider
id: provider
shell: bash
- name: Checkout code
uses: actions/checkout@v4
- name: Determine hosting platform
id: platform
run: |
if [ -n "${GITEA_ACTIONS}" ] || [ -n "${GITEA_REPOSITORY}" ] || [[ "${RUNNER_NAME}" == *"gitea"* ]]; then
echo "provider=gitea" >> "$GITHUB_OUTPUT"
if [[ "$GITHUB_SERVER_URL" == "https://github.com" ]]; then
echo "platform=github" >> $GITHUB_OUTPUT
elif [[ "$CI_SERVER_URL" == *"gitlab"* ]]; then
echo "platform=gitlab" >> $GITHUB_OUTPUT
else
echo "provider=github" >> "$GITHUB_OUTPUT"
echo "platform=gitea" >> $GITHUB_OUTPUT
fi
github-release:
needs: detect-provider
permissions:
contents: write
if: needs.detect-provider.outputs.provider == 'github'
uses: ./.github/workflows/github-release.yml
secrets:
RELEASE_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Run GitHub Release
if: steps.platform.outputs.platform == 'github'
uses: ./.github/workflows/providers/github-release.yml
gitea-release:
needs: detect-provider
if: needs.detect-provider.outputs.provider == 'gitea'
uses: ./.github/workflows/gitea-release.yml
secrets:
GITEA_TOKEN: ${{ secrets.GITEA_TOKEN }}
- name: Run Gitea Release
if: steps.platform.outputs.platform == 'gitea'
uses: ./.github/workflows/providers/gitea-release.yml

File diff suppressed because it is too large.


@@ -53,14 +53,14 @@ Deutsches Erklärvideo: [Youtube](https://youtu.be/uNDe2wh9SS8?si=b-jYx4I1w62zaO
### Komponenten
- **ESP32 Entwicklungsboard:** Jede ESP32-Variante.
[Amazon Link](https://amzn.eu/d/aXThslf)
- **HX711 5kg Wägezellen-Verstärker:** Für Gewichtsmessung.
[Amazon Link](https://amzn.eu/d/06A0DLb)
- **OLED 0.96 Zoll I2C weiß/gelb Display:** 128x64 SSD1306.
[Amazon Link](https://amzn.eu/d/0AuBp2c)
- **PN532 NFC NXP RFID-Modul V3:** Für NFC-Tag-Operationen.
[Amazon Link](https://amzn.eu/d/jfIuQXb)
- **NFC Tags Ntag215:** RFID Tag
[Amazon Link](https://amzn.eu/d/9Z6mXc1)
- **HX711 Wägezellen-Verstärker:** Für Gewichtsmessung.
[Amazon Link](https://amzn.eu/d/1wZ4v0x)
- **OLED Display:** 128x64 SSD1306.
[Amazon Link](https://amzn.eu/d/dozAYDU)
- **PN532 NFC Modul:** Für NFC-Tag-Operationen.
[Amazon Link](https://amzn.eu/d/8205DDh)
- **NFC-Tag:** NTAG215
[Amazon Link](https://amzn.eu/d/fywy4c4)
### Pin-Konfiguration
| Komponente | ESP32 Pin |
@@ -71,15 +71,10 @@ Deutsches Erklärvideo: [Youtube](https://youtu.be/uNDe2wh9SS8?si=b-jYx4I1w62zaO
| OLED SCL | 22 |
| PN532 IRQ | 32 |
| PN532 RESET | 33 |
| PN532 SDA | 21 |
| PN532 SCL | 22 |
**Achte darauf, dass am PN532 die DIP-Schalter auf I2C gestellt sind**
![Wiring](./img/Schaltplan.png)
![myWiring](./img/IMG_2589.jpeg)
![myWiring](./img/IMG_2590.jpeg)
| PN532 SCK | 14 |
| PN532 MOSI | 13 |
| PN532 MISO | 12 |
| PN532 CS/SS | 15 |
## Software-Abhängigkeiten
@@ -106,31 +101,7 @@ Deutsches Erklärvideo: [Youtube](https://youtu.be/uNDe2wh9SS8?si=b-jYx4I1w62zaO
- PN532 NFC Modul
- Verbindungskabel
## Wichtiger Hinweis
Du musst Spoolman in den DEBUG-Modus setzen, da man bisher in Spoolman keine CORS-Domains setzen kann!
```
# Enable debug mode
# If enabled, the client will accept requests from any host
# This can be useful when developing, but is also a security risk
# Default: FALSE
#SPOOLMAN_DEBUG_MODE=TRUE
```
## Schritt-für-Schritt Installation
### Einfache Installation
1. **Gehe auf [FilaMan Installer](https://www.filaman.app/installer.html)**
2. **Stecke dein ESP an den Rechner und klicke Connect**
3. **Wähle deinen Device-Port und klicke Install**
4. **Ersteinrichtung:**
- Mit dem "FilaMan" WLAN-Zugangspunkt verbinden.
- WLAN-Einstellungen über das Konfigurationsportal vornehmen.
- Weboberfläche unter `http://filaman.local` oder der IP-Adresse aufrufen.
### Compile by yourself
### Schritt-für-Schritt Installation
1. **Repository klonen:**
```bash
git clone https://github.com/ManuelW77/Filaman.git


@@ -56,14 +56,14 @@ german explanatory video: [Youtube](https://youtu.be/uNDe2wh9SS8?si=b-jYx4I1w62z
### Components
- **ESP32 Development Board:** Any ESP32 variant.
[Amazon Link](https://amzn.eu/d/aXThslf)
- **HX711 5kg Load Cell Amplifier:** For weight measurement.
[Amazon Link](https://amzn.eu/d/06A0DLb)
- **OLED 0.96 Zoll I2C white/yellow Display:** 128x64 SSD1306.
[Amazon Link](https://amzn.eu/d/0AuBp2c)
- **PN532 NFC NXP RFID-Modul V3:** For NFC tag operations.
[Amazon Link](https://amzn.eu/d/jfIuQXb)
- **NFC Tags Ntag215:** RFID Tag
[Amazon Link](https://amzn.eu/d/9Z6mXc1)
- **HX711 Load Cell Amplifier:** For weight measurement.
[Amazon Link](https://amzn.eu/d/1wZ4v0x)
- **OLED Display:** 128x64 SSD1306.
[Amazon Link](https://amzn.eu/d/dozAYDU)
- **PN532 NFC Module:** For NFC tag operations.
[Amazon Link](https://amzn.eu/d/8205DDh)
- **NFC-Tag:** NTAG215
[Amazon Link](https://amzn.eu/d/fywy4c4)
### Pin Configuration
@@ -75,15 +75,10 @@ german explanatory video: [Youtube](https://youtu.be/uNDe2wh9SS8?si=b-jYx4I1w62z
| OLED SCL | 22 |
| PN532 IRQ | 32 |
| PN532 RESET | 33 |
| PN532 SDA | 21 |
| PN532 SCL | 22 |
**Make sure that the DIP switches on the PN532 are set to I2C**
![Wiring](./img/Schaltplan.png)
![myWiring](./img/IMG_2589.jpeg)
![myWiring](./img/IMG_2590.jpeg)
| PN532 SCK | 14 |
| PN532 MOSI | 13 |
| PN532 MISO | 12 |
| PN532 CS/SS | 15 |
## Software Dependencies
@@ -96,9 +91,9 @@ german explanatory video: [Youtube](https://youtu.be/uNDe2wh9SS8?si=b-jYx4I1w62z
- `Adafruit_SSD1306`: OLED display control
- `HX711`: Load cell communication
### Installation
## Installation
## Prerequisites
### Prerequisites
- **Software:**
- [PlatformIO](https://platformio.org/) in VS Code
- [Spoolman](https://github.com/Donkie/Spoolman) instance
@@ -110,32 +105,7 @@ german explanatory video: [Youtube](https://youtu.be/uNDe2wh9SS8?si=b-jYx4I1w62z
- PN532 NFC Module
- Connecting wires
## Important Note
You have to run Spoolman in debug mode, because you cannot set CORS domains in Spoolman yet.
```
# Enable debug mode
# If enabled, the client will accept requests from any host
# This can be useful when developing, but is also a security risk
# Default: FALSE
#SPOOLMAN_DEBUG_MODE=TRUE
```
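The snippet above follows the style of Spoolman's `.env` template. A minimal sketch of enabling it, assuming a Docker Compose based Spoolman install (the file location and restart command depend on your setup):

```bash
# Hypothetical: enable Spoolman debug mode in its .env file and restart the container
echo "SPOOLMAN_DEBUG_MODE=TRUE" >> .env
docker compose up -d
```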
## Step-by-Step Installation
### Easy Installation
1. **Go to [FilaMan Installer](https://www.filaman.app/installer.html)**
2. **Plug your device in and push the Connect button**
3. **Select your device port and push Install**
4. **Initial Setup:**
- Connect to the "FilaMan" WiFi access point.
- Configure WiFi settings through the captive portal.
- Access the web interface at `http://filaman.local` or the IP address.
### Compile by yourself
### Step-by-Step Installation
1. **Clone the Repository:**
```bash
git clone https://github.com/ManuelW77/Filaman.git
@@ -154,6 +124,7 @@ You have to activate Spoolman in debug mode, because you are not able to set COR
- Configure WiFi settings through the captive portal.
- Access the web interface at `http://filaman.local` or the IP address.
## Documentation
### Relevant Links


@@ -1,31 +1,7 @@
{
"GFU99": "TPU",
"GFN99": "PA",
"GFN98": "PA-CF",
"GFL99": "PLA",
"GFL96": "PLA Silk",
"GFL98": "PLA-CF",
"GFL95": "PLA High Speed",
"GFG99": "PETG",
"GFG98": "PETG-CF",
"GFG97": "PCTG",
"GFB99": "ABS",
"GFC99": "PC",
"GFB98": "ASA",
"GFS99": "PVA",
"GFS98": "HIPS",
"GFT98": "PPS-CF",
"GFT97": "PPS",
"GFN97": "PPA-CF",
"GFN96": "PPA-GF",
"GFP99": "PE",
"GFP98": "PE-CF",
"GFP97": "PP",
"GFP96": "PP-CF",
"GFP95": "PP-GF",
"GFR99": "EVA",
"GFR98": "PHA",
"GFS97": "BVOH",
"GFU99": "Generic TPU",
"GFN99": "Generic PA",
"GFN98": "Generic PA-CF",
"GFA01": "Bambu PLA Matte",
"GFA00": "Bambu PLA Basic",
"GFA09": "Bambu PLA Tough",
@@ -37,11 +13,15 @@
"GFL03": "eSUN PLA+",
"GFL01": "PolyTerra PLA",
"GFL00": "PolyLite PLA",
"GFL99": "Generic PLA",
"GFL96": "Generic PLA Silk",
"GFL98": "Generic PLA-CF",
"GFA50": "Bambu PLA-CF",
"GFS02": "Bambu Support For PLA",
"GFA11": "Bambu PLA Aero",
"GFL04": "Overture PLA",
"GFL05": "Overture Matte PLA",
"GFL95": "Generic PLA High Speed",
"GFA12": "Bambu PLA Glow",
"GFA13": "Bambu PLA Dynamic",
"GFA15": "Bambu PLA Galaxy",
@@ -50,21 +30,41 @@
"GFU00": "Bambu TPU 95A HF",
"GFG00": "Bambu PETG Basic",
"GFT01": "Bambu PET-CF",
"GFG99": "Generic PETG",
"GFG98": "Generic PETG-CF",
"GFG50": "Bambu PETG-CF",
"GFG60": "PolyLite PETG",
"GFG01": "Bambu PETG Translucent",
"GFG97": "Generic PCTG",
"GFB00": "Bambu ABS",
"GFB99": "Generic ABS",
"GFB60": "PolyLite ABS",
"GFB50": "Bambu ABS-GF",
"GFC00": "Bambu PC",
"GFC99": "Generic PC",
"GFB98": "Generic ASA",
"GFB01": "Bambu ASA",
"GFB61": "PolyLite ASA",
"GFB02": "Bambu ASA-Aero",
"GFS99": "Generic PVA",
"GFS04": "Bambu PVA",
"GFS01": "Bambu Support G",
"GFN03": "Bambu PA-CF",
"GFN04": "Bambu PAHT-CF",
"GFS03": "Bambu Support For PA/PET",
"GFN05": "Bambu PA6-CF",
"GFN08": "Bambu PA6-GF"
"GFN08": "Bambu PA6-GF",
"GFS98": "Generic HIPS",
"GFT98": "Generic PPS-CF",
"GFT97": "Generic PPS",
"GFN97": "Generic PPA-CF",
"GFN96": "Generic PPA-GF",
"GFP99": "Generic PE",
"GFP98": "Generic PE-CF",
"GFP97": "Generic PP",
"GFP96": "Generic PP-CF",
"GFP95": "Generic PP-GF",
"GFR99": "Generic EVA",
"GFR98": "Generic PHA",
"GFS97": "Generic BVOH"
}


@@ -6,24 +6,13 @@
<title>FilaMan - Filament Management Tool</title>
<link rel="icon" type="image/png" href="/favicon.ico">
<link rel="stylesheet" href="style.css">
<script>
fetch('/api/version')
.then(response => response.json())
.then(data => {
const versionSpan = document.querySelector('.version');
if (versionSpan) {
versionSpan.textContent = 'v' + data.version;
}
})
.catch(error => console.error('Error fetching version:', error));
</script>
</head>
<body>
<div class="navbar">
<div style="display: flex; align-items: center; gap: 2rem;">
<img src="/logo.png" alt="FilaMan Logo" class="logo">
<div class="logo-text">
<h1>FilaMan<span class="version"></span></h1>
<h1>FilaMan<span class="version">v1.2.4</span></h1>
<h4>Filament Management Tool</h4>
</div>
</div>


@@ -6,24 +6,13 @@
<title>FilaMan - Filament Management Tool</title>
<link rel="icon" type="image/png" href="/favicon.ico">
<link rel="stylesheet" href="style.css">
<script>
fetch('/api/version')
.then(response => response.json())
.then(data => {
const versionSpan = document.querySelector('.version');
if (versionSpan) {
versionSpan.textContent = 'v' + data.version;
}
})
.catch(error => console.error('Error fetching version:', error));
</script>
</head>
<body>
<div class="navbar">
<div style="display: flex; align-items: center; gap: 2rem;">
<img src="/logo.png" alt="FilaMan Logo" class="logo">
<div class="logo-text">
<h1>FilaMan<span class="version"></span></h1>
<h1>FilaMan<span class="version">v1.2.4</span></h1>
<h4>Filament Management Tool</h4>
</div>
</div>
@@ -47,7 +36,7 @@
<!-- head -->
<div class="content">
<div class="container">
<h1>FilaMan</h1>
<p>Filament Management Tool</p>
<p>Your smart solution for <strong>Filament Management</strong> in 3D printing.</p>
@@ -55,11 +44,10 @@
<h2>About FilaMan</h2>
<p>
FilaMan is a tool designed to simplify filament spool management. It allows you to identify and weigh filament spools,
automatically sync data with the self-hosted <a href="https://github.com/Donkie/Spoolman" target="_blank">Spoolman</a> platform.
automatically sync data with the self-hosted <a href="https://github.com/Donkie/Spoolman" target="_blank">Spoolman</a> platform,
and ensure compatibility with <a href="https://github.com/spuder/OpenSpool" target="_blank">OpenSpool</a> for Bambu printers.
</p>
<p>Get more information at <a href="https://www.filaman.app" target="_blank">https://www.filaman.app</a> and <a href="https://github.com/ManuelW77/Filaman" target="_blank">https://github.com/ManuelW77/Filaman</a>.</p>
<div class="features">
<div class="feature">
<h3>Spool Identification</h3>
@@ -74,6 +62,12 @@
<p>Works with OpenSpool to recognize and activate spools on Bambu printers.</p>
</div>
</div>
<h2>Future Plans</h2>
<p>
We are working on expanding compatibility to support smaller NFC tags like NTag213
and developing custom software to enhance the OpenSpool experience.
</p>
</div>
</body>
</html>


@@ -6,24 +6,13 @@
<title>FilaMan - Filament Management Tool</title>
<link rel="icon" type="image/png" href="/favicon.ico">
<link rel="stylesheet" href="style.css">
<script>
fetch('/api/version')
.then(response => response.json())
.then(data => {
const versionSpan = document.querySelector('.version');
if (versionSpan) {
versionSpan.textContent = 'v' + data.version;
}
})
.catch(error => console.error('Error fetching version:', error));
</script>
</head>
<body>
<div class="navbar">
<div style="display: flex; align-items: center; gap: 2rem;">
<img src="/logo.png" alt="FilaMan Logo" class="logo">
<div class="logo-text">
<h1>FilaMan<span class="version"></span></h1>
<h1>FilaMan<span class="version">v1.2.4</span></h1>
<h4>Filament Management Tool</h4>
</div>
</div>


@@ -6,24 +6,13 @@
<title>FilaMan - Filament Management Tool</title>
<link rel="icon" type="image/png" href="/favicon.ico">
<link rel="stylesheet" href="style.css">
<script>
fetch('/api/version')
.then(response => response.json())
.then(data => {
const versionSpan = document.querySelector('.version');
if (versionSpan) {
versionSpan.textContent = 'v' + data.version;
}
})
.catch(error => console.error('Error fetching version:', error));
</script>
</head>
<body>
<div class="navbar">
<div style="display: flex; align-items: center; gap: 2rem;">
<img src="/logo.png" alt="FilaMan Logo" class="logo">
<div class="logo-text">
<h1>FilaMan<span class="version"></span></h1>
<h1>FilaMan<span class="version">v1.2.4</span></h1>
<h4>Filament Management Tool</h4>
</div>
</div>
@@ -74,9 +63,8 @@
const ip = document.getElementById('bambuIp').value;
const serial = document.getElementById('bambuSerial').value;
const code = document.getElementById('bambuCode').value;
const autoSend = document.getElementById('autoSend').checked;
fetch(`/api/bambu?bambu_ip=${encodeURIComponent(ip)}&bambu_serialnr=${encodeURIComponent(serial)}&bambu_accesscode=${encodeURIComponent(code)}&autoSend=${autoSend}`)
fetch(`/api/bambu?bambu_ip=${encodeURIComponent(ip)}&bambu_serialnr=${encodeURIComponent(serial)}&bambu_accesscode=${encodeURIComponent(code)}`)
.then(response => response.json())
.then(data => {
if (data.healthy) {
@@ -96,42 +84,27 @@
<div class="content">
<h1>Spoolman API URL / Bambu Credentials</h1>
<label for="spoolmanUrl">Set URL/IP to your Spoolman-Instanz:</label>
<input type="text" id="spoolmanUrl" placeholder="http://ip-or-url-of-your-spoolman-instanz:port">
<button onclick="checkSpoolmanInstance()">Save Spoolman URL</button>
<p id="statusMessage"></p>
<div class="card">
<div class="card-body">
<h5 class="card-title">Set URL/IP to your Spoolman-Instanz</h5>
<input type="text" id="spoolmanUrl" placeholder="http://ip-or-url-of-your-spoolman-instanz:port">
<button onclick="checkSpoolmanInstance()">Save Spoolman URL</button>
<p id="statusMessage"></p>
<h2>Bambu Lab Printer Credentials</h2>
<div class="bambu-settings">
<div class="input-group">
<label for="bambuIp">Bambu Drucker IP-Adresse:</label>
<input type="text" id="bambuIp" placeholder="192.168.1.xxx" value="{{bambuIp}}">
</div>
</div>
<div class="card">
<div class="card-body">
<h5 class="card-title">Bambu Lab Printer Credentials</h5>
<div class="bambu-settings">
<div class="input-group">
<label for="bambuIp">Bambu Drucker IP-Adresse:</label>
<input type="text" id="bambuIp" placeholder="192.168.1.xxx" value="{{bambuIp}}">
</div>
<div class="input-group">
<label for="bambuSerial">Drucker Seriennummer:</label>
<input type="text" id="bambuSerial" placeholder="BBLXXXXXXXX" value="{{bambuSerial}}">
</div>
<div class="input-group">
<label for="bambuCode">Access Code:</label>
<input type="text" id="bambuCode" placeholder="Access Code vom Drucker" value="{{bambuCode}}">
</div>
<div class="input-group">
If activated, FilaMan will automatically update the next filled tray with the last scanned and weighed spool.
<label for="autoSend">Auto Send to Bambu:</label>
<input type="checkbox" id="autoSend">
</div>
<button onclick="saveBambuCredentials()">Save Bambu Credentials</button>
<p id="bambuStatusMessage"></p>
</div>
<div class="input-group">
<label for="bambuSerial">Drucker Seriennummer:</label>
<input type="text" id="bambuSerial" placeholder="BBLXXXXXXXX" value="{{bambuSerial}}">
</div>
<div class="input-group">
<label for="bambuCode">Access Code:</label>
<input type="text" id="bambuCode" placeholder="Access Code vom Drucker" value="{{bambuCode}}">
</div>
<button onclick="saveBambuCredentials()">Save Bambu Credentials</button>
<p id="bambuStatusMessage"></p>
</div>
</div>
</body>


@@ -279,10 +279,9 @@ a:hover {
/* Card style for visual separation */
.card {
background: var(--primary-color);
width: 500px;
background: #f9f9f9;
padding: 15px;
margin: 20px auto;
margin: 20px 0;
border-radius: 8px;
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
}
@@ -960,6 +959,7 @@ input[type="submit"]:disabled,
/* Bambu settings extension */
.bambu-settings {
background: white;
padding: 20px;
border-radius: 8px;
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
@@ -1051,10 +1051,9 @@ input[type="submit"]:disabled,
}
.update-form {
background: var(--primary-color);
box-shadow: 0 4px 8px rgba(0, 0, 0, 0.05);
border: var(--glass-border);
padding: 20px;
border-radius: 8px;
box-shadow: 0 2px 4px rgba(0,0,0,0.1);
margin: 0 auto;
width: 400px;
text-align: center;
@@ -1065,7 +1064,7 @@ input[type="submit"]:disabled,
padding: 8px;
border: 1px solid #ddd;
border-radius: 4px;
background-color: #4CAF50;
background: white;
}
.update-form input[type="submit"] {
background-color: #4CAF50;
@@ -1087,66 +1086,10 @@ input[type="submit"]:disabled,
.warning {
background-color: var(--primary-color);
border: 1px solid #ffe0b2;
color: white;
padding: 15px;
margin: 20px auto;
border-radius: 4px;
max-width: 600px;
text-align: center;
color: #e65100;
padding: 15px;
}
.update-options {
display: flex;
gap: 2rem;
margin: 2rem 0;
}
.update-section {
flex: 1;
background: var(--background-green);
padding: 1.5rem;
border-radius: 8px;
}
.update-section h2 {
margin-top: 0;
color: #333;
}
.update-section p {
color: #666;
margin-bottom: 1rem;
}
.progress-container {
margin: 20px 0;
background: #f0f0f0;
border-radius: 4px;
overflow: hidden;
}
.progress-bar {
width: 0;
height: 20px;
background: #4CAF50;
transition: width 0.3s ease-in-out;
text-align: center;
line-height: 20px;
color: white;
}
.status {
margin-top: 20px;
padding: 10px;
border-radius: 4px;
display: none;
}
.status.success {
background: #e8f5e9;
color: #2e7d32;
}
.status.error {
background: #ffebee;
color: #c62828;
}
.warning {
background: #fff3e0;
color: #e65100;
padding: 15px;
border-radius: 4px;
margin-bottom: 20px;
}


@@ -6,24 +6,13 @@
<title>FilaMan - Filament Management Tool</title>
<link rel="icon" type="image/png" href="/favicon.ico">
<link rel="stylesheet" href="style.css">
<script>
fetch('/api/version')
.then(response => response.json())
.then(data => {
const versionSpan = document.querySelector('.version');
if (versionSpan) {
versionSpan.textContent = 'v' + data.version;
}
})
.catch(error => console.error('Error fetching version:', error));
</script>
</head>
<body>
<div class="navbar">
<div style="display: flex; align-items: center; gap: 2rem;">
<img src="/logo.png" alt="FilaMan Logo" class="logo">
<div class="logo-text">
<h1>FilaMan<span class="version"></span></h1>
<h1>FilaMan<span class="version">v1.2.4</span></h1>
<h4>Filament Management Tool</h4>
</div>
</div>
@@ -51,34 +40,18 @@
<h1>Firmware Upgrade</h1>
<div class="warning">
<strong>Warning:</strong> Do not power off the device during update.
<strong>Warning:</strong> Please do not turn off or restart the device during the update.
The device will restart automatically after the update.
</div>
<div class="update-options">
<div class="update-section">
<h2>Firmware Update</h2>
<p>Upload a new firmware file (filaman_*.bin)</p>
<div class="update-form">
<form id="firmwareForm" enctype='multipart/form-data' data-type="firmware">
<input type='file' name='update' accept='.bin' required>
<input type='submit' value='Start Firmware Update'>
</form>
</div>
</div>
<div class="update-section">
<h2>Webpage Update</h2>
<p>Upload a new webpage file (webpage_*.bin)</p>
<div class="update-form">
<form id="webpageForm" enctype='multipart/form-data' data-type="webpage">
<input type='file' name='update' accept='.bin' required>
<input type='submit' value='Start Webpage Update'>
</form>
</div>
</div>
<div class="update-form">
<form id="updateForm" enctype='multipart/form-data'>
<input type='file' name='update' accept='.bin' required>
<input type='submit' value='Start Firmware Update'>
</form>
</div>
<div class="progress-container" style="display: none;">
<div class="progress-container">
<div class="progress-bar">0%</div>
</div>
<div class="status"></div>
@@ -91,163 +64,91 @@
statusContainer.style.display = 'none';
}
const progress = document.querySelector('.progress-bar');
const progressContainer = document.querySelector('.progress-container');
const status = document.querySelector('.status');
let updateInProgress = false;
let lastReceivedProgress = 0;
// WebSocket Handling
let ws = null;
let wsReconnectTimer = null;
function connectWebSocket() {
ws = new WebSocket('ws://' + window.location.host + '/ws');
ws.onmessage = function(event) {
try {
const data = JSON.parse(event.data);
if (data.type === "updateProgress" && updateInProgress) {
// Show the progress bar
progressContainer.style.display = 'block';
// Only update the progress if it has increased
const newProgress = parseInt(data.progress);
if (!isNaN(newProgress) && newProgress >= lastReceivedProgress) {
progress.style.width = newProgress + '%';
progress.textContent = newProgress + '%';
lastReceivedProgress = newProgress;
}
// Show the status message
if (data.message || data.status) {
status.textContent = data.message || getStatusMessage(data.status);
status.className = 'status success';
status.style.display = 'block';
// Trigger the reload once the update succeeded
if (data.status === 'success' || lastReceivedProgress >= 98) {
clearTimeout(wsReconnectTimer);
setTimeout(() => {
window.location.href = '/';
}, 30000);
}
}
}
} catch (e) {
console.error('WebSocket message error:', e);
}
};
ws.onclose = function() {
if (updateInProgress) {
// If the progress got far enough, assume the update succeeded
if (lastReceivedProgress >= 85) {
status.textContent = "Update appears successful! Device is restarting... Page will reload in 30 seconds.";
status.className = 'status success';
status.style.display = 'block';
clearTimeout(wsReconnectTimer);
setTimeout(() => {
window.location.href = '/';
}, 30000);
} else {
// Try to reconnect if progress is still low
wsReconnectTimer = setTimeout(connectWebSocket, 1000);
}
}
};
ws.onerror = function(err) {
console.error('WebSocket error:', err);
if (updateInProgress && lastReceivedProgress >= 85) {
status.textContent = "Update appears successful! Device is restarting... Page will reload in 30 seconds.";
status.className = 'status success';
status.style.display = 'block';
setTimeout(() => {
window.location.href = '/';
}, 30000);
}
};
}
// Initial WebSocket connection
connectWebSocket();
function getStatusMessage(status) {
switch(status) {
case 'starting': return 'Starting update...';
case 'uploading': return 'Uploading...';
case 'finalizing': return 'Finalizing update...';
case 'restoring': return 'Restoring configurations...';
case 'preparing': return 'Preparing for restart...';
case 'success': return 'Update successful! Device is restarting... Page will reload in 30 seconds.';
default: return 'Updating...';
}
}
function handleUpdate(e) {
document.getElementById('updateForm').addEventListener('submit', async (e) => {
e.preventDefault();
const form = e.target;
const file = form.update.files[0];
const updateType = form.dataset.type;
if (!file) {
alert('Please select a file.');
alert('Please select a firmware file.');
return;
}
// Validate file name pattern
if (updateType === 'firmware' && !file.name.startsWith('upgrade_filaman_firmware_')) {
alert('Please select a valid firmware file (upgrade_filaman_firmware_*.bin)');
return;
}
if (updateType === 'webpage' && !file.name.startsWith('upgrade_filaman_website_')) {
alert('Please select a valid webpage file (upgrade_filaman_website_*.bin)');
return;
}
// Reset UI
updateInProgress = true;
progressContainer.style.display = 'block';
status.style.display = 'none';
status.className = 'status';
progress.style.width = '0%';
progress.textContent = '0%';
// Disable submit buttons
document.querySelectorAll('form input[type=submit]').forEach(btn => btn.disabled = true);
// Send update
const xhr = new XMLHttpRequest();
xhr.open('POST', '/update', true);
xhr.onload = function() {
if (xhr.status !== 200 && !progress.textContent.startsWith('100')) {
status.textContent = "Update failed: " + (xhr.responseText || "Unknown error");
status.className = 'status error';
status.style.display = 'block';
updateInProgress = false;
document.querySelectorAll('form input[type=submit]').forEach(btn => btn.disabled = false);
}
};
xhr.onerror = function() {
if (!progress.textContent.startsWith('100')) {
status.textContent = "Network error during update";
status.className = 'status error';
status.style.display = 'block';
updateInProgress = false;
document.querySelectorAll('form input[type=submit]').forEach(btn => btn.disabled = false);
}
};
const formData = new FormData();
formData.append('update', file);
xhr.send(formData);
}
document.getElementById('firmwareForm').addEventListener('submit', handleUpdate);
document.getElementById('webpageForm').addEventListener('submit', handleUpdate);
const progress = document.querySelector('.progress-bar');
const progressContainer = document.querySelector('.progress-container');
const status = document.querySelector('.status');
progressContainer.style.display = 'block';
status.style.display = 'none';
status.className = 'status';
form.querySelector('input[type=submit]').disabled = true;
const xhr = new XMLHttpRequest();
xhr.open('POST', '/update', true);
xhr.upload.onprogress = (e) => {
if (e.lengthComputable) {
const percentComplete = (e.loaded / e.total) * 100;
progress.style.width = percentComplete + '%';
progress.textContent = Math.round(percentComplete) + '%';
}
};
xhr.onload = function() {
try {
let response = this.responseText;
try {
const jsonResponse = JSON.parse(response);
response = jsonResponse.message;
if (jsonResponse.restart) {
status.textContent = response + " Redirecting in 20 seconds...";
let countdown = 20;
const timer = setInterval(() => {
countdown--;
if (countdown <= 0) {
clearInterval(timer);
window.location.href = '/';
} else {
status.textContent = response + ` Redirecting in ${countdown} seconds...`;
}
}, 1000);
}
} catch (e) {
if (!isNaN(response)) {
const percent = parseInt(response);
progress.style.width = percent + '%';
progress.textContent = percent + '%';
return;
}
}
status.textContent = response;
status.classList.add(xhr.status === 200 ? 'success' : 'error');
status.style.display = 'block';
if (xhr.status !== 200) {
form.querySelector('input[type=submit]').disabled = false;
}
} catch (error) {
status.textContent = 'Error: ' + error.message;
status.classList.add('error');
status.style.display = 'block';
form.querySelector('input[type=submit]').disabled = false;
}
};
xhr.onerror = function() {
status.textContent = 'Update failed: Network error';
status.classList.add('error');
status.style.display = 'block';
form.querySelector('input[type=submit]').disabled = false;
};
xhr.send(formData);
});
</script>
</body>
</html>


@@ -6,24 +6,13 @@
<title>FilaMan - Filament Management Tool</title>
<link rel="icon" type="image/png" href="/favicon.ico">
<link rel="stylesheet" href="style.css">
<script>
fetch('/api/version')
.then(response => response.json())
.then(data => {
const versionSpan = document.querySelector('.version');
if (versionSpan) {
versionSpan.textContent = 'v' + data.version;
}
})
.catch(error => console.error('Error fetching version:', error));
</script>
</head>
<body>
<div class="navbar">
<div style="display: flex; align-items: center; gap: 2rem;">
<img src="/logo.png" alt="FilaMan Logo" class="logo">
<div class="logo-text">
<h1>FilaMan<span class="version"></span></h1>
<h1>FilaMan<span class="version">v1.2.4</span></h1>
<h4>Filament Management Tool</h4>
</div>
</div>


@@ -6,24 +6,13 @@
<title>FilaMan - Filament Management Tool</title>
<link rel="icon" type="image/png" href="/favicon.ico">
<link rel="stylesheet" href="style.css">
<script>
fetch('/api/version')
.then(response => response.json())
.then(data => {
const versionSpan = document.querySelector('.version');
if (versionSpan) {
versionSpan.textContent = 'v' + data.version;
}
})
.catch(error => console.error('Error fetching version:', error));
</script>
</head>
<body>
<div class="navbar">
<div style="display: flex; align-items: center; gap: 2rem;">
<img src="/logo.png" alt="FilaMan Logo" class="logo">
<div class="logo-text">
<h1>FilaMan<span class="version"></span></h1>
<h1>FilaMan<span class="version">v1.2.4</span></h1>
<h4>Filament Management Tool</h4>
</div>
</div>

Binary image file not shown (before: 143 KiB).

Binary image file not shown (before: 136 KiB).

Binary image file not shown (before: 143 KiB).

Binary image file not shown (before: 283 KiB).


@@ -9,8 +9,8 @@
; https://docs.platformio.org/page/projectconf.html
[common]
version = "1.3.94"
##
version = "1.2.4"
[env:esp32dev]
platform = espressif32
board = esp32dev
@@ -20,10 +20,7 @@ monitor_speed = 115200
lib_deps =
tzapu/WiFiManager @ ^2.0.17
https://github.com/me-no-dev/ESPAsyncWebServer.git#master
#me-no-dev/AsyncTCP @ ^1.1.1
https://github.com/esphome/AsyncTCP.git
#mathieucarbou/ESPAsyncWebServer @ ^3.6.0
#esp32async/AsyncTCP @ ^3.3.5
me-no-dev/AsyncTCP @ ^1.1.1
bogde/HX711 @ ^0.7.5
adafruit/Adafruit SSD1306 @ ^2.5.13
adafruit/Adafruit GFX Library @ ^1.11.11
@@ -46,26 +43,34 @@ build_flags =
-fdata-sections
-DNDEBUG
-mtext-section-literals
-DVERSION=\"${common.version}\"
'-D VERSION="${common.version}"'
-DASYNCWEBSERVER_REGEX
-DCORE_DEBUG_LEVEL=3
-DCORE_DEBUG_LEVEL=1
-DCONFIG_ARDUHAL_LOG_COLORS=1
-DOTA_DEBUG=1
-DARDUINO_RUNNING_CORE=1
-DARDUINO_EVENT_RUNNING_CORE=1
-DCONFIG_OPTIMIZATION_LEVEL_DEBUG=1
-DBOOT_APP_PARTITION_OTA_0=1
-DCONFIG_LWIP_TCP_MSL=60000
-DCONFIG_LWIP_TCP_RCV_BUF_DEFAULT=4096
-DCONFIG_LWIP_MAX_ACTIVE_TCP=16
-DCONFIG_ESP32_PANIC_PRINT_REBOOT
-DCONFIG_ARDUINO_OTA_READSIZE=1024
-DCONFIG_ASYNC_TCP_RUNNING_CORE=1
-DCONFIG_ASYNC_TCP_USE_WDT=0
-DCONFIG_LWIP_TCP_MSS=1460
-DOTA_PARTITION_SUBTYPE=0x10
-DPARTITION_TABLE_OFFSET=0x8000
-DPARTITION_TABLE_SIZE=0x1000
extra_scripts =
scripts/extra_script.py
${env:buildfs.extra_scripts}
pre:scripts/pre_build.py ; runs first
pre:scripts/pre_spiffs.py ; runs second
pre:scripts/combine_html.py ; runs third
scripts/gzip_files.py
[env:buildfs]
extra_scripts =
pre:scripts/combine_html.py ; Combine header with HTML files
scripts/gzip_files.py ; Compress files for SPIFFS
; Remove or comment out the targets line
;targets = buildfs, build
; Add a custom target to build both
[platformio]
default_envs = esp32dev


@@ -1,39 +1,7 @@
Import("env")
board_config = env.BoardConfig()
# Calculate SPIFFS size based on partition table
SPIFFS_START = 0x310000 # From partitions.csv
SPIFFS_SIZE = 0xE0000 # From partitions.csv
SPIFFS_PAGE = 256
SPIFFS_BLOCK = 4096
env.Replace(
MKSPIFFSTOOL="mkspiffs",
SPIFFSBLOCKSZ=SPIFFS_BLOCK,
SPIFFSBLOCKSIZE=SPIFFS_BLOCK,
SPIFFSSTART=SPIFFS_START,
SPIFFSEND=SPIFFS_START + SPIFFS_SIZE,
SPIFFSPAGESZ=SPIFFS_PAGE,
SPIFFSSIZE=SPIFFS_SIZE
)
# Reuse the replace_version function
exec(open("./scripts/pre_build.py").read())
# Bind to SPIFFS build
env.AddPreAction("buildfs", replace_version)
import os
import shutil
from SCons.Script import DefaultEnvironment
env = DefaultEnvironment()
# Format SPIFFS partition before uploading new files
spiffs_dir = os.path.join(env.subst("$BUILD_DIR"), "spiffs")
if os.path.exists(spiffs_dir):
shutil.rmtree(spiffs_dir)
os.makedirs(spiffs_dir)
print("SPIFFS partition formatted.")
env.AddPreAction("buildfs", replace_version)


@@ -64,10 +64,29 @@ def get_changes_from_git():
return changes
def push_changes(version):
"""Push changes to upstream"""
try:
# Stage the CHANGELOG.md
subprocess.run(['git', 'add', 'CHANGELOG.md'], check=True)
# Commit the changelog
commit_msg = f"docs: update changelog for version {version}"
subprocess.run(['git', 'commit', '-m', commit_msg], check=True)
# Push to origin (local)
subprocess.run(['git', 'push', 'origin'], check=True)
print("Successfully pushed to origin")
except subprocess.CalledProcessError as e:
print(f"Error during git operations: {e}")
return False
return True
def update_changelog():
print("Starting changelog update...")
print("Starting changelog update...") # Add this line
version = get_version()
print(f"Current version: {version}")
print(f"Current version: {version}") # Add this line
today = datetime.now().strftime('%Y-%m-%d')
script_dir = os.path.dirname(os.path.abspath(__file__))
@@ -92,7 +111,7 @@ def update_changelog():
if not os.path.exists(changelog_path):
with open(changelog_path, 'w') as f:
f.write(f"# Changelog\n\n{changelog_entry}")
print(f"Created new changelog file with version {version}")
push_changes(version)
else:
with open(changelog_path, 'r') as f:
content = f.read()
@@ -101,30 +120,9 @@ def update_changelog():
updated_content = content.replace("# Changelog\n", f"# Changelog\n\n{changelog_entry}")
with open(changelog_path, 'w') as f:
f.write(updated_content)
print(f"Added new version {version} to changelog")
push_changes(version)
else:
# Version already exists, update the existing entries
version_pattern = f"## \\[{version}\\] - \\d{{4}}-\\d{{2}}-\\d{{2}}"
next_version_pattern = "## \\[.*?\\] - \\d{4}-\\d{2}-\\d{2}"
# Find the start of the current version
version_match = re.search(version_pattern, content)
if version_match:
version_start = version_match.start()
# Search for the next version
next_version_match = re.search(next_version_pattern, content[version_start + 1:])
if next_version_match:
# Replace the content between the current and the next version
next_version_pos = version_start + 1 + next_version_match.start()
updated_content = content[:version_start] + changelog_entry + content[next_version_pos:]
else:
# If no next version exists, replace up to the end
updated_content = content[:version_start] + changelog_entry + "\n"
with open(changelog_path, 'w') as f:
f.write(updated_content)
print(f"Updated entries for version {version}")
print(f"Version {version} already exists in changelog")
if __name__ == "__main__":
update_changelog()


@@ -37,9 +37,9 @@ struct SendToApiParams {
}
*/
JsonDocument fetchSingleSpoolInfo(int spoolId) {
JsonDocument fetchSpoolsForWebsite() {
HTTPClient http;
String spoolsUrl = spoolmanUrl + apiUrl + "/spool/" + spoolId;
String spoolsUrl = spoolmanUrl + apiUrl + "/spool";
Serial.print("Rufe Spool-Daten von: ");
Serial.println(spoolsUrl);
@@ -56,45 +56,84 @@ JsonDocument fetchSingleSpoolInfo(int spoolId) {
Serial.print("Fehler beim Parsen der JSON-Antwort: ");
Serial.println(error.c_str());
} else {
String filamentType = doc["filament"]["material"].as<String>();
String filamentBrand = doc["filament"]["vendor"]["name"].as<String>();
JsonArray spools = doc.as<JsonArray>();
JsonArray filteredSpools = filteredDoc.to<JsonArray>();
int nozzle_temp_min = 0;
int nozzle_temp_max = 0;
if (doc["filament"]["extra"]["nozzle_temperature"].is<String>()) {
String tempString = doc["filament"]["extra"]["nozzle_temperature"].as<String>();
tempString.replace("[", "");
tempString.replace("]", "");
int commaIndex = tempString.indexOf(',');
if (commaIndex != -1) {
nozzle_temp_min = tempString.substring(0, commaIndex).toInt();
nozzle_temp_max = tempString.substring(commaIndex + 1).toInt();
}
}
for (JsonObject spool : spools) {
JsonObject filteredSpool = filteredSpools.createNestedObject();
filteredSpool["extra"]["nfc_id"] = spool["extra"]["nfc_id"];
String filamentColor = doc["filament"]["color_hex"].as<String>();
filamentColor.toUpperCase();
JsonObject filament = filteredSpool.createNestedObject("filament");
filament["sm_id"] = spool["id"];
filament["id"] = spool["filament"]["id"];
filament["name"] = spool["filament"]["name"];
filament["material"] = spool["filament"]["material"];
filament["color_hex"] = spool["filament"]["color_hex"];
filament["nozzle_temperature"] = spool["filament"]["extra"]["nozzle_temperature"]; // [190,230]
filament["price_meter"] = spool["filament"]["extra"]["price_meter"];
filament["price_gramm"] = spool["filament"]["extra"]["price_gramm"];
String tray_info_idx = doc["filament"]["extra"]["bambu_idx"].as<String>();
tray_info_idx.replace("\"", "");
String cali_idx = doc["filament"]["extra"]["bambu_cali_id"].as<String>(); // "\"153\""
cali_idx.replace("\"", "");
String bambu_setting_id = doc["filament"]["extra"]["bambu_setting_id"].as<String>(); // "\"PFUSf40e9953b40d3d\""
bambu_setting_id.replace("\"", "");
JsonObject vendor = filament.createNestedObject("vendor");
vendor["id"] = spool["filament"]["vendor"]["id"];
vendor["name"] = spool["filament"]["vendor"]["name"];
}
}
} else {
Serial.print("Fehler beim Abrufen der Spool-Daten. HTTP-Code: ");
Serial.println(httpCode);
}
doc.clear();
http.end();
return filteredDoc;
}
filteredDoc["color"] = filamentColor;
filteredDoc["type"] = filamentType;
filteredDoc["nozzle_temp_min"] = nozzle_temp_min;
filteredDoc["nozzle_temp_max"] = nozzle_temp_max;
filteredDoc["brand"] = filamentBrand;
filteredDoc["tray_info_idx"] = tray_info_idx;
filteredDoc["cali_idx"] = cali_idx;
filteredDoc["bambu_setting_id"] = bambu_setting_id;
JsonDocument fetchAllSpoolsInfo() {
HTTPClient http;
String spoolsUrl = spoolmanUrl + apiUrl + "/spool";
Serial.print("Rufe Spool-Daten von: ");
Serial.println(spoolsUrl);
http.begin(spoolsUrl);
int httpCode = http.GET();
JsonDocument filteredDoc;
if (httpCode == HTTP_CODE_OK) {
String payload = http.getString();
JsonDocument doc;
DeserializationError error = deserializeJson(doc, payload);
if (error) {
Serial.print("Fehler beim Parsen der JSON-Antwort: ");
Serial.println(error.c_str());
} else {
JsonArray spools = doc.as<JsonArray>();
JsonArray filteredSpools = filteredDoc.to<JsonArray>();
for (JsonObject spool : spools) {
JsonObject filteredSpool = filteredSpools.createNestedObject();
filteredSpool["price"] = spool["price"];
filteredSpool["remaining_weight"] = spool["remaining_weight"];
filteredSpool["used_weight"] = spool["used_weight"];
filteredSpool["extra"]["nfc_id"] = spool["extra"]["nfc_id"];
JsonObject filament = filteredSpool.createNestedObject("filament");
filament["id"] = spool["filament"]["id"];
filament["name"] = spool["filament"]["name"];
filament["material"] = spool["filament"]["material"];
filament["density"] = spool["filament"]["density"];
filament["diameter"] = spool["filament"]["diameter"];
filament["spool_weight"] = spool["filament"]["spool_weight"];
filament["color_hex"] = spool["filament"]["color_hex"];
JsonObject vendor = filament.createNestedObject("vendor");
vendor["id"] = spool["filament"]["vendor"]["id"];
vendor["name"] = spool["filament"]["vendor"]["name"];
JsonObject extra = filament.createNestedObject("extra");
extra["nozzle_temperature"] = spool["filament"]["extra"]["nozzle_temperature"];
extra["price_gramm"] = spool["filament"]["extra"]["price_gramm"];
extra["price_meter"] = spool["filament"]["extra"]["price_meter"];
}
}
} else {
Serial.print("Fehler beim Abrufen der Spool-Daten. HTTP-Code: ");
@@ -147,7 +186,7 @@ bool updateSpoolTagId(String uidString, const char* payload) {
}
// Check that the required fields are present
if (!doc["sm_id"].is<String>() || doc["sm_id"].as<String>() == "") {
if (!doc.containsKey("sm_id") || doc["sm_id"] == "") {
Serial.println("Keine Spoolman-ID gefunden.");
return false;
}
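The two checks above are the ArduinoJson styles this compare toggles between: containsKey() and the newer doc["key"].is<T>() form. A minimal, self-contained sketch of both checks, assuming an ArduinoJson 7 JsonDocument and the sm_id key from the hunk (the sample payload and helper name are invented):

#include <Arduino.h>
#include <ArduinoJson.h>

// Returns true if the payload carries a non-empty "sm_id" field.
bool hasSpoolmanId(const char* payload) {
    JsonDocument doc;
    if (deserializeJson(doc, payload)) return false;   // parse error
    // Newer style: checks presence and that the value is a string
    bool viaIs = doc["sm_id"].is<String>() && doc["sm_id"].as<String>() != "";
    // Older style: containsKey() only checks presence, not the type
    bool viaContainsKey = doc.containsKey("sm_id") && doc["sm_id"] != "";
    return viaIs || viaContainsKey;
}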
@@ -329,7 +368,7 @@ bool checkSpoolmanExtraFields() {
for (uint8_t s = 0; s < extraLength; s++) {
bool found = false;
for (JsonObject field : doc.as<JsonArray>()) {
if (field["key"].is<String>() && field["key"] == extraFields[s]) {
if (field.containsKey("key") && field["key"] == extraFields[s]) {
Serial.println("Feld gefunden: " + extraFields[s]);
found = true;
break;
@@ -391,7 +430,7 @@ bool checkSpoolmanInstance(const String& url) {
String payload = http.getString();
JsonDocument doc;
DeserializationError error = deserializeJson(doc, payload);
if (!error && doc["status"].is<String>()) {
if (!error && doc.containsKey("status")) {
const char* status = doc["status"];
http.end();
@@ -430,7 +469,7 @@ bool saveSpoolmanUrl(const String& url) {
String loadSpoolmanUrl() {
JsonDocument doc;
if (loadJsonValue("/spoolman_url.json", doc) && doc["url"].is<String>()) {
if (loadJsonValue("/spoolman_url.json", doc) && doc.containsKey("url")) {
return doc["url"].as<String>();
}
Serial.println("Keine gültige Spoolman-URL gefunden.");


@@ -14,7 +14,8 @@ bool checkSpoolmanInstance(const String& url);
bool saveSpoolmanUrl(const String& url);
String loadSpoolmanUrl(); // New function for loading the URL
bool checkSpoolmanExtraFields(); // New function for checking the extra fields
JsonDocument fetchSingleSpoolInfo(int spoolId); // API function for the web page
JsonDocument fetchSpoolsForWebsite(); // API function for the web page
JsonDocument fetchAllSpoolsInfo();
void sendAmsData(AsyncWebSocketClient *client); // New function for sending AMS data
bool updateSpoolTagId(String uidString, const char* payload); // New function for updating a spool
uint8_t updateSpoolWeight(String spoolId, uint16_t weight); // New function for updating the weight


@@ -24,15 +24,13 @@ const char* bambu_ip = nullptr;
const char* bambu_accesscode = nullptr;
const char* bambu_serialnr = nullptr;
bool bambu_connected = false;
bool autoSendToBambu = false;
int autoSetToBambuSpoolId = 0;
// Global variables for AMS data
int ams_count = 0;
String amsJsonData; // Holds the finished JSON for WebSocket clients
AMSData ams_data[MAX_AMS]; // Definition of the array;
AMSData ams_data[MAX_AMS]; // Definition of the array
bool saveBambuCredentials(const String& ip, const String& serialnr, const String& accesscode, bool autoSend) {
bool saveBambuCredentials(const String& ip, const String& serialnr, const String& accesscode) {
if (BambuMqttTask) {
vTaskDelete(BambuMqttTask);
}
@@ -41,7 +39,6 @@ bool saveBambuCredentials(const String& ip, const String& serialnr, const String
doc["bambu_ip"] = ip;
doc["bambu_accesscode"] = accesscode;
doc["bambu_serialnr"] = serialnr;
doc["autoSendToBambu"] = autoSend;
if (!saveJsonValue("/bambu_credentials.json", doc)) {
Serial.println("Fehler beim Speichern der Bambu-Credentials.");
@@ -52,7 +49,6 @@ bool saveBambuCredentials(const String& ip, const String& serialnr, const String
bambu_ip = ip.c_str();
bambu_accesscode = accesscode.c_str();
bambu_serialnr = serialnr.c_str();
autoSendToBambu = autoSend;
vTaskDelay(100 / portTICK_PERIOD_MS);
if (!setupMqtt()) return false;
@@ -62,12 +58,11 @@ bool saveBambuCredentials(const String& ip, const String& serialnr, const String
bool loadBambuCredentials() {
JsonDocument doc;
if (loadJsonValue("/bambu_credentials.json", doc) && doc["bambu_ip"].is<String>()) {
if (loadJsonValue("/bambu_credentials.json", doc) && doc.containsKey("bambu_ip")) {
// Temporary strings for the values
String ip = doc["bambu_ip"].as<String>();
String code = doc["bambu_accesscode"].as<String>();
String serial = doc["bambu_serialnr"].as<String>();
autoSendToBambu = doc["autoSendToBambu"].as<bool>();
ip.trim();
code.trim();
@@ -86,23 +81,19 @@ bool loadBambuCredentials() {
return false;
}
struct FilamentResult {
String key;
String type;
};
FilamentResult findFilamentIdx(String brand, String type) {
String findFilamentIdx(String brand, String type) {
// Create a JSON document for the filament data
JsonDocument doc;
// Load bambu_filaments.json
if (!loadJsonValue("/bambu_filaments.json", doc)) {
Serial.println("Fehler beim Laden der Filament-Daten");
return {"GFL99", "PLA"}; // Fallback to Generic PLA
return "GFL99"; // Fallback to Generic PLA
}
// 1. First try to find the exact brand + type combination
String searchKey;
// 1. Search for the brand + type combination
if (brand == "Bambu" || brand == "Bambulab") {
searchKey = "Bambu " + type;
} else if (brand == "PolyLite") {
@@ -118,43 +109,20 @@ FilamentResult findFilamentIdx(String brand, String type) {
// Scan all entries for the brand + type combination
for (JsonPair kv : doc.as<JsonObject>()) {
if (kv.value().as<String>() == searchKey) {
return {kv.key().c_str(), kv.value().as<String>()};
return kv.key().c_str();
}
}
// 2. If not found, split the type string into words and search for each word
// Collect all known filament types from the JSON
std::vector<String> knownTypes;
// 2. If not found, search for Generic + type
searchKey = "Generic " + type;
for (JsonPair kv : doc.as<JsonObject>()) {
String value = kv.value().as<String>();
// Extract the type without the brand name
if (value.indexOf(" ") != -1) {
value = value.substring(value.indexOf(" ") + 1);
}
if (!value.isEmpty()) {
knownTypes.push_back(value);
}
}
// Split the input type into words
String typeStr = type;
typeStr.trim();
// For each known filament, check whether it occurs in the input
for (const String& knownType : knownTypes) {
if (typeStr.indexOf(knownType) != -1) {
// Search for this type in the original JSON
for (JsonPair kv : doc.as<JsonObject>()) {
String value = kv.value().as<String>();
if (value.indexOf(knownType) != -1) {
return {kv.key().c_str(), knownType};
}
}
if (kv.value().as<String>() == searchKey) {
return kv.key().c_str();
}
}
// 3. If still nothing is found, return GFL99 (Generic PLA)
return {"GFL99", "PLA"};
return "GFL99";
}
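For illustration, the lookup order above (exact brand + type first, then a generic fallback, then GFL99) can be exercised against a tiny in-memory stand-in for bambu_filaments.json; apart from the GFL99 fallback named in the function, the table entries and the helper name below are invented:

#include <Arduino.h>
#include <ArduinoJson.h>

// Tiny stand-in for the SPIFFS file /bambu_filaments.json (illustrative keys).
static const char* kFilamentTable = R"({
  "GFL99": "Generic PLA",
  "GFA00": "Bambu PLA Basic"
})";

String lookupFilamentIdx(const String& brand, const String& type) {
    JsonDocument doc;
    if (deserializeJson(doc, kFilamentTable)) return "GFL99";
    String searchKey = brand + " " + type;            // 1. exact brand + type
    for (JsonPair kv : doc.as<JsonObject>()) {
        if (kv.value().as<String>() == searchKey) return kv.key().c_str();
    }
    searchKey = "Generic " + type;                    // 2. generic fallback
    for (JsonPair kv : doc.as<JsonObject>()) {
        if (kv.value().as<String>() == searchKey) return kv.key().c_str();
    }
    return "GFL99";                                   // 3. last resort: Generic PLA
}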
bool sendMqttMessage(String payload) {
@@ -188,22 +156,15 @@ bool setBambuSpool(String payload) {
int minTemp = doc["nozzle_temp_min"];
int maxTemp = doc["nozzle_temp_max"];
String type = doc["type"].as<String>();
(type == "PLA+") ? type = "PLA" : type;
String brand = doc["brand"].as<String>();
String tray_info_idx = (doc["tray_info_idx"].as<String>() != "-1") ? doc["tray_info_idx"].as<String>() : "";
if (tray_info_idx == "") {
if (brand != "" && type != "") {
FilamentResult result = findFilamentIdx(brand, type);
tray_info_idx = result.key;
type = result.type; // Aktualisiere den type mit dem gefundenen Basistyp
}
}
if (tray_info_idx == "") tray_info_idx = (brand != "" && type != "") ? findFilamentIdx(brand, type) : "";
String setting_id = doc["bambu_setting_id"].as<String>();
String cali_idx = doc["cali_idx"].as<String>();
doc.clear();
doc["print"]["sequence_id"] = "0";
doc["print"]["sequence_id"] = 0;
doc["print"]["command"] = "ams_filament_setting";
doc["print"]["ams_id"] = amsId < 200 ? amsId : 255;
doc["print"]["tray_id"] = trayId < 200 ? trayId : 254;
@@ -211,10 +172,10 @@ bool setBambuSpool(String payload) {
doc["print"]["nozzle_temp_min"] = minTemp;
doc["print"]["nozzle_temp_max"] = maxTemp;
doc["print"]["tray_type"] = type;
//doc["print"]["cali_idx"] = (cali_idx != "") ? cali_idx : "";
doc["print"]["cali_idx"] = (cali_idx != "") ? cali_idx : "";
doc["print"]["tray_info_idx"] = tray_info_idx;
doc["print"]["setting_id"] = setting_id;
// Serialize the JSON
String output;
serializeJson(doc, output);
@@ -233,13 +194,13 @@ bool setBambuSpool(String payload) {
if (cali_idx != "") {
yield();
doc["print"]["sequence_id"] = "0";
doc["print"]["sequence_id"] = 0;
doc["print"]["command"] = "extrusion_cali_sel";
doc["print"]["filament_id"] = tray_info_idx;
doc["print"]["nozzle_diameter"] = "0.4";
doc["print"]["cali_idx"] = cali_idx.toInt();
doc["print"]["tray_id"] = trayId < 200 ? trayId : 254;
//doc["print"]["ams_id"] = amsId < 200 ? amsId : 255;
doc["print"]["ams_id"] = amsId < 200 ? amsId : 255;
// Serialize the JSON
String output;
@@ -257,34 +218,44 @@ bool setBambuSpool(String payload) {
doc.clear();
yield();
}
/*
if (setting_id != "") {
yield();
doc["print"]["sequence_id"] = 0;
doc["print"]["command"] = "ams_filament_setting";
doc["print"]["nozzle_temp_min"] = minTemp;
doc["print"]["nozzle_temp_max"] = maxTemp;
doc["print"]["setting_id"] = setting_id;
doc["print"]["tray_color"] = color.length() == 8 ? color : color+"FF";
doc["print"]["ams_id"] = amsId < 200 ? amsId : 255;
doc["print"]["tray_id"] = trayId < 200 ? trayId : 254;
doc["print"]["tray_info_idx"] = tray_info_idx;
doc["print"]["tray_type"] = type;
// Serialize the JSON
String output;
serializeJson(doc, output);
if (sendMqttMessage(output)) {
Serial.println("Filament Setting successfully set");
}
else
{
Serial.println("Failed to set Filament setting");
return false;
}
doc.clear();
yield();
}
*/
return true;
}
void autoSetSpool(int spoolId, uint8_t trayId) {
// when a new spool has been detected and autoSetToBambu > 0
JsonDocument spoolInfo = fetchSingleSpoolInfo(spoolId);
if (!spoolInfo.isNull())
{
// add AMS and tray id
spoolInfo["amsId"] = 0;
spoolInfo["trayId"] = trayId;
Serial.println("Auto set spool");
Serial.println(spoolInfo.as<String>());
setBambuSpool(spoolInfo.as<String>());
}
// reset the id to mark the operation as finished
autoSetToBambuSpoolId = 0;
}
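An illustrative payload for setBambuSpool(): the keys mirror what the function reads above (plus the amsId/trayId fields that autoSetSpool() injects), while the values and the wrapper function are invented:

#include <Arduino.h>
#include "bambu.h"   // declares setBambuSpool()

static const char* kExampleSpoolPayload = R"({
  "amsId": 0,
  "trayId": 1,
  "color": "A1B2C3FF",
  "nozzle_temp_min": 190,
  "nozzle_temp_max": 230,
  "type": "PLA",
  "brand": "Generic",
  "tray_info_idx": "GFL99",
  "cali_idx": "",
  "bambu_setting_id": ""
})";

void exampleSetSpool() {
    setBambuSpool(String(kExampleSpoolPayload));
}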
// init
void mqtt_callback(char* topic, byte* payload, unsigned int length) {
String message;
for (int i = 0; i < length; i++) {
message += (char)payload[i];
}
@@ -292,27 +263,16 @@ void mqtt_callback(char* topic, byte* payload, unsigned int length) {
// Parse the JSON document
JsonDocument doc;
DeserializationError error = deserializeJson(doc, message);
if (error)
{
if (error) {
Serial.print("Fehler beim Parsen des JSON: ");
Serial.println(error.c_str());
return;
}
// If Bambu auto-set spool is active, a spool was detected, and the MQTT message reports a new spool in the AMS
if (autoSendToBambu && autoSetToBambuSpoolId > 0 &&
doc["print"]["command"].as<String>() == "push_status" && doc["print"]["ams"]["tray_pre"].as<uint8_t>()
&& !doc["print"]["ams"]["ams"].as<JsonArray>())
{
autoSetSpool(autoSetToBambuSpoolId, doc["print"]["ams"]["tray_pre"].as<uint8_t>());
}
// Check whether "print->upgrade_state" and "print.ams.ams" exist
if (doc["print"]["upgrade_state"].is<JsonObject>())
{
if (doc["print"].containsKey("upgrade_state")) {
// Check whether AMS data is present
if (!doc["print"]["ams"].is<JsonObject>() || !doc["print"]["ams"]["ams"].is<JsonArray>())
{
if (!doc["print"].containsKey("ams") || !doc["print"]["ams"].containsKey("ams")) {
return;
}
@@ -355,7 +315,7 @@ void mqtt_callback(char* topic, byte* payload, unsigned int length) {
}
// Check the external spool
if (!hasChanges && doc["print"]["vt_tray"].is<JsonObject>()) {
if (!hasChanges && doc["print"].containsKey("vt_tray")) {
JsonObject vtTray = doc["print"]["vt_tray"];
bool foundExternal = false;
@@ -403,7 +363,7 @@ void mqtt_callback(char* topic, byte* payload, unsigned int length) {
ams_count = amsArray.size();
// If an external spool is present, add it
if (doc["print"]["vt_tray"].is<JsonObject>()) {
if (doc["print"].containsKey("vt_tray")) {
JsonObject vtTray = doc["print"]["vt_tray"];
int extIdx = ams_count; // index for the external spool
ams_data[extIdx].ams_id = 255; // special ID for the external spool
@@ -414,12 +374,8 @@ void mqtt_callback(char* topic, byte* payload, unsigned int length) {
ams_data[extIdx].trays[0].tray_color = vtTray["tray_color"].as<String>();
ams_data[extIdx].trays[0].nozzle_temp_min = vtTray["nozzle_temp_min"].as<int>();
ams_data[extIdx].trays[0].nozzle_temp_max = vtTray["nozzle_temp_max"].as<int>();
if (doc["print"]["vt_tray"]["tray_type"].as<String>() != "")
{
ams_data[extIdx].trays[0].setting_id = vtTray["setting_id"].as<String>();
ams_data[extIdx].trays[0].cali_idx = vtTray["cali_idx"].as<String>();
}
ams_data[extIdx].trays[0].setting_id = vtTray["setting_id"].as<String>();
ams_data[extIdx].trays[0].cali_idx = vtTray["cali_idx"].as<String>();
ams_count++; // increment ams_count for the external spool
}
@@ -431,14 +387,14 @@ void mqtt_callback(char* topic, byte* payload, unsigned int length) {
JsonArray wsArray = wsDoc.to<JsonArray>();
for (int i = 0; i < ams_count; i++) {
JsonObject amsObj = wsArray.add<JsonObject>();
JsonObject amsObj = wsArray.createNestedObject();
amsObj["ams_id"] = ams_data[i].ams_id;
JsonArray trays = amsObj["tray"].to<JsonArray>();
JsonArray trays = amsObj.createNestedArray("tray");
int maxTrays = (ams_data[i].ams_id == 255) ? 1 : 4;
for (int j = 0; j < maxTrays; j++) {
JsonObject trayObj = trays.add<JsonObject>();
JsonObject trayObj = trays.createNestedObject();
trayObj["id"] = ams_data[i].trays[j].id;
trayObj["tray_info_idx"] = ams_data[i].trays[j].tray_info_idx;
trayObj["tray_type"] = ams_data[i].trays[j].tray_type;
@@ -459,7 +415,7 @@ void mqtt_callback(char* topic, byte* payload, unsigned int length) {
int amsId = doc["print"]["ams_id"].as<int>();
int trayId = doc["print"]["tray_id"].as<int>();
String settingId = doc["print"]["setting_id"].as<String>();
// Find the matching AMS and tray
for (int i = 0; i < ams_count; i++) {
if (ams_data[i].ams_id == amsId) {
@@ -471,14 +427,14 @@ void mqtt_callback(char* topic, byte* payload, unsigned int length) {
JsonArray wsArray = wsDoc.to<JsonArray>();
for (int j = 0; j < ams_count; j++) {
JsonObject amsObj = wsArray.add<JsonObject>();
JsonObject amsObj = wsArray.createNestedObject();
amsObj["ams_id"] = ams_data[j].ams_id;
JsonArray trays = amsObj["tray"].to<JsonArray>();
JsonArray trays = amsObj.createNestedArray("tray");
int maxTrays = (ams_data[j].ams_id == 255) ? 1 : 4;
for (int k = 0; k < maxTrays; k++) {
JsonObject trayObj = trays.add<JsonObject>();
JsonObject trayObj = trays.createNestedObject();
trayObj["id"] = ams_data[j].trays[k].id;
trayObj["tray_info_idx"] = ams_data[j].trays[k].tray_info_idx;
trayObj["tray_type"] = ams_data[j].trays[k].tray_type;
@@ -506,7 +462,7 @@ void mqtt_callback(char* topic, byte* payload, unsigned int length) {
void reconnect() {
// Loop until we're reconnected
while (!client.connected()) {
Serial.println("Attempting MQTT connection...");
Serial.print("Attempting MQTT connection...");
bambu_connected = false;
oledShowTopRow();
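Several hunks in this file trade createNestedObject()/createNestedArray() for the ArduinoJson 7 idiom (add<JsonObject>() and to<JsonArray>()). A minimal sketch of the v7 style, building the same ams_id/tray shape as the loops above with invented values:

#include <Arduino.h>
#include <ArduinoJson.h>

String buildAmsExample() {
    JsonDocument doc;
    JsonArray amsList = doc.to<JsonArray>();
    JsonObject ams = amsList.add<JsonObject>();      // v6: createNestedObject()
    ams["ams_id"] = 0;
    JsonArray trays = ams["tray"].to<JsonArray>();   // v6: createNestedArray("tray")
    JsonObject tray = trays.add<JsonObject>();
    tray["id"] = 0;
    tray["tray_type"] = "PLA";
    String out;
    serializeJson(doc, out);                         // [{"ams_id":0,"tray":[{"id":0,"tray_type":"PLA"}]}]
    return out;
}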


@@ -28,15 +28,12 @@ extern bool bambu_connected;
extern int ams_count;
extern AMSData ams_data[MAX_AMS];
extern bool autoSendToBambu;
extern int autoSetToBambuSpoolId;
bool loadBambuCredentials();
bool saveBambuCredentials(const String& bambu_ip, const String& bambu_serialnr, const String& bambu_accesscode, const bool autoSend);
bool saveBambuCredentials(const String& bambu_ip, const String& bambu_serialnr, const String& bambu_accesscode);
bool setupMqtt();
void mqtt_loop(void * parameter);
bool setBambuSpool(String payload);
void bambu_restart();
extern TaskHandle_t BambuMqttTask;
#endif


@@ -1,5 +1,4 @@
#include "commonFS.h"
#include <SPIFFS.h>
bool saveJsonValue(const char* filename, const JsonDocument& doc) {
File file = SPIFFS.open(filename, "w");
@@ -36,12 +35,23 @@ bool loadJsonValue(const char* filename, JsonDocument& doc) {
return true;
}
void initializeSPIFFS() {
if (!SPIFFS.begin(true, "/spiffs", 10, "spiffs")) {
Serial.println("SPIFFS Mount Failed");
return;
bool initializeSPIFFS() {
// First attempt
if (SPIFFS.begin(true)) {
Serial.println("SPIFFS mounted successfully.");
return true;
}
Serial.printf("SPIFFS Total: %u bytes\n", SPIFFS.totalBytes());
Serial.printf("SPIFFS Used: %u bytes\n", SPIFFS.usedBytes());
Serial.printf("SPIFFS Free: %u bytes\n", SPIFFS.totalBytes() - SPIFFS.usedBytes());
// Try formatting
Serial.println("Failed to mount SPIFFS. Formatting...");
SPIFFS.format();
// Second attempt after formatting
if (SPIFFS.begin(true)) {
Serial.println("SPIFFS formatted and mounted successfully.");
return true;
}
Serial.println("SPIFFS initialization failed completely.");
return false;
}
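The switch from void to bool lets callers react to a mount failure; a hypothetical caller (not the project's main.cpp) might look like this, assuming the bool-returning variant:

#include <Arduino.h>
#include "commonFS.h"   // assumed to declare bool initializeSPIFFS();

void setup() {
    Serial.begin(115200);
    if (!initializeSPIFFS()) {
        Serial.println("SPIFFS unavailable, restarting in 5 s...");
        delay(5000);
        ESP.restart();
    }
}

void loop() {}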


@@ -7,6 +7,6 @@
bool saveJsonValue(const char* filename, const JsonDocument& doc);
bool loadJsonValue(const char* filename, JsonDocument& doc);
void initializeSPIFFS();
bool initializeSPIFFS();
#endif
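A small usage sketch for the two JSON helpers declared above; the file name /spoolman_url.json and the url field follow the pattern used elsewhere in this compare, while the function names are invented:

#include <Arduino.h>
#include <ArduinoJson.h>
#include "commonFS.h"

bool persistSpoolmanUrl(const String& url) {
    JsonDocument doc;
    doc["url"] = url;
    return saveJsonValue("/spoolman_url.json", doc);
}

String readSpoolmanUrl() {
    JsonDocument doc;
    if (loadJsonValue("/spoolman_url.json", doc) && doc["url"].is<String>()) {
        return doc["url"].as<String>();
    }
    return "";
}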


@@ -40,10 +40,6 @@ const uint8_t webserverPort = 80;
const char* apiUrl = "/api/v1";
// ***** API
// ***** Bambu Auto Set Spool
uint8_t autoSetBambuAmsCounter = 60;
// ***** Bambu Auto Set Spool
// ***** Task Prios
uint8_t rfidTaskCore = 1;
uint8_t rfidTaskPrio = 1;


@@ -23,8 +23,6 @@ extern const uint8_t OLED_DATA_END;
extern const char* apiUrl;
extern const uint8_t webserverPort;
extern uint8_t autoSetBambuAmsCounter;
extern const unsigned char wifi_on[];
extern const unsigned char wifi_off[];
extern const unsigned char cloud_on[];


@@ -117,6 +117,7 @@ std::vector<String> splitTextIntoLines(String text, uint8_t textSize) {
lines.push_back(currentLine);
}
Serial.println(lines.size());
return lines;
}
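A hypothetical caller for splitTextIntoLines() as shown in the hunk header above; the including header and the text size of 1 are assumptions:

#include <Arduino.h>
#include <vector>
#include "display.h"   // assumed to declare splitTextIntoLines()

void printWrapped(const String& text) {
    std::vector<String> lines = splitTextIntoLines(text, 1);
    for (const String& line : lines) {
        Serial.println(line);
    }
}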


@@ -19,12 +19,6 @@
void setup() {
Serial.begin(115200);
uint64_t chipid;
chipid = ESP.getEfuseMac(); // The chip ID is essentially its MAC address (length: 6 bytes).
Serial.printf("ESP32 Chip ID = %04X", (uint16_t)(chipid >> 32)); // print high 2 bytes
Serial.printf("%08X\n", (uint32_t)chipid); // print low 4 bytes
// Initialize SPIFFS
initializeSPIFFS();
@@ -58,22 +52,7 @@ void setup() {
startNfc();
uint8_t scaleCalibrated = start_scale();
if (scaleCalibrated == 3) {
oledShowMessage("Scale not calibrated!");
for (uint16_t i = 0; i < 50000; i++) {
yield();
vTaskDelay(pdMS_TO_TICKS(1));
esp_task_wdt_reset();
}
} else if (scaleCalibrated == 0) {
oledShowMessage("HX711 not found");
for (uint16_t i = 0; i < 50000; i++) {
yield();
vTaskDelay(pdMS_TO_TICKS(1));
esp_task_wdt_reset();
}
}
start_scale();
// Initialize the WDT with a 10 second timeout
bool panic = true; // If true, a WDT timeout triggers a system panic
@@ -90,10 +69,6 @@ void setup() {
unsigned long lastWeightReadTime = 0;
const unsigned long weightReadInterval = 1000; // 1 second
unsigned long lastAutoSetBambuAmsTime = 0;
const unsigned long autoSetBambuAmsInterval = 1000; // 1 second
uint8_t autoAmsCounter = 0;
unsigned long lastAmsSendTime = 0;
const unsigned long amsSendInterval = 60000; // 1 minute
@@ -103,49 +78,30 @@ uint8_t wifiErrorCounter = 0;
// ##### PROGRAM START #####
void loop() {
// Check the WiFi status
if (WiFi.status() != WL_CONNECTED) {
wifiErrorCounter++;
wifiOn = false;
} else {
wifiErrorCounter = 0;
wifiOn = true;
}
if (wifiErrorCounter > 20) ESP.restart();
unsigned long currentMillis = millis();
// Send AMS data at least once a minute
if (currentMillis - lastAmsSendTime >= amsSendInterval)
{
if (currentMillis - lastAmsSendTime >= amsSendInterval) {
lastAmsSendTime = currentMillis;
sendAmsData(nullptr);
}
// If Bambu auto-set spool is active
if (autoSendToBambu && autoSetToBambuSpoolId > 0 && currentMillis - lastAutoSetBambuAmsTime >= autoSetBambuAmsInterval)
{
lastAutoSetBambuAmsTime = currentMillis;
oledShowMessage("Auto Set " + String(autoSetBambuAmsCounter - autoAmsCounter) + "s");
autoAmsCounter++;
if (autoAmsCounter >= autoSetBambuAmsCounter)
{
autoSetToBambuSpoolId = 0;
autoAmsCounter = 0;
oledShowWeight(weight);
}
}
// If the scale is not calibrated
if (scaleCalibrated == 3)
{
oledShowMessage("Scale not calibrated!");
vTaskDelay(5000 / portTICK_PERIOD_MS);
yield();
esp_task_wdt_reset();
return;
}
// Show the scale reading on the display
if (pauseMainTask == 0 && weight != lastWeight && hasReadRfidTag == 0 && (!autoSendToBambu || autoSetToBambuSpoolId == 0))
if (pauseMainTask == 0 && weight != lastWeight && hasReadRfidTag == 0)
{
(weight < 2) ? ((weight < -2) ? oledShowMessage("!! -0") : oledShowWeight(0)) : oledShowWeight(weight);
(weight < 0) ? oledShowMessage("!! -1") : oledShowWeight(weight);
}
// When the timer has elapsed and no RFID tag is currently being written
if (currentMillis - lastWeightReadTime >= weightReadInterval && hasReadRfidTag < 3)
{
@@ -189,7 +145,6 @@ void loop() {
oledShowIcon("success");
vTaskDelay(2000 / portTICK_PERIOD_MS);
weightSend = 1;
autoSetToBambuSpoolId = spoolId.toInt();
}
else
{


@@ -420,7 +420,7 @@ void scanRfidTask(void * parameter) {
//uidString = "";
nfcJsonData = "";
Serial.println("Tag entfernt");
if (!autoSendToBambu) oledShowWeight(weight);
oledShowWeight(0);
}
// update the website when the status changes

src/ota.cpp (new file)

@@ -0,0 +1,46 @@
#include <Arduino.h>
#include "ota.h"
#include <Update.h>
#include <SPIFFS.h>
#include "commonFS.h"
void handleOTAUpload(AsyncWebServerRequest *request, String filename, size_t index, uint8_t *data, size_t len, bool final) {
static size_t contentLength = 0;
if (!index) {
contentLength = request->contentLength();
Serial.printf("Update size: %u bytes\n", contentLength);
if (contentLength == 0) {
request->send(400, "application/json", "{\"status\":\"error\",\"message\":\"Invalid file size\"}");
return;
}
if (!Update.begin(contentLength)) {
Serial.printf("Not enough space: %u required\n", contentLength);
request->send(400, "application/json", "{\"status\":\"error\",\"message\":\"Not enough space available\"}");
return;
}
Serial.println("Update started");
}
if (Update.write(data, len) != len) {
Update.printError(Serial);
request->send(400, "application/json", "{\"status\":\"error\",\"message\":\"Error writing update\"}");
return;
}
if (final) {
if (Update.end(true)) {
Serial.println("Update complete");
request->send(200, "application/json", "{\"status\":\"success\",\"message\":\"Update successful! Device will restart...\",\"restart\":true}");
delay(1000);
ESP.restart();
} else {
Update.printError(Serial);
request->send(400, "application/json", "{\"status\":\"error\",\"message\":\"Update failed\"}");
}
}
}
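One way a handler with this signature can be wired into ESPAsyncWebServer; the project's actual registration appears further down in webserver.cpp, so this is only an isolated sketch with an assumed port and server name:

#include <ESPAsyncWebServer.h>
#include "ota.h"

AsyncWebServer demoServer(80);

void setupOtaRoute() {
    demoServer.on("/update", HTTP_POST,
        [](AsyncWebServerRequest *request) {
            // the final response is sent from handleOTAUpload() once the upload ends
        },
        [](AsyncWebServerRequest *request, const String& filename, size_t index,
           uint8_t *data, size_t len, bool final) {
            handleOTAUpload(request, filename, index, data, len, final);
        });
    demoServer.begin();
}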

src/ota.h (new file)

@@ -0,0 +1,8 @@
#ifndef OTA_H
#define OTA_H
#include <ESPAsyncWebServer.h>
void handleOTAUpload(AsyncWebServerRequest *request, String filename, size_t index, uint8_t *data, size_t len, bool final);
#endif


@@ -3,9 +3,9 @@
#include <ArduinoJson.h>
#include "config.h"
#include "HX711.h"
#include <EEPROM.h>
#include "display.h"
#include "esp_task_wdt.h"
#include <Preferences.h>
HX711 scale;
@@ -16,11 +16,6 @@ int16_t weight = 0;
uint8_t weigthCouterToApi = 0;
uint8_t scale_tare_counter = 0;
uint8_t pauseMainTask = 0;
uint8_t scaleCalibrated = 1;
Preferences preferences;
const char* NVS_NAMESPACE = "scale";
const char* NVS_KEY_CALIBRATION = "cal_value";
// ##### Scale functions #####
uint8_t tareScale() {
@@ -51,24 +46,22 @@ void scale_loop(void * parameter) {
}
}
uint8_t start_scale() {
void start_scale() {
Serial.println("Prüfe Calibration Value");
long calibrationValue;
long calibrationValue; // calibration value (see example file "Calibration.ino")
//calibrationValue = 696.0; // uncomment this if you want to set the calibration value in the sketch
// Read from NVS
preferences.begin(NVS_NAMESPACE, true); // true = readonly
calibrationValue = preferences.getLong(NVS_KEY_CALIBRATION, defaultScaleCalibrationValue);
preferences.end();
EEPROM.begin(512);
EEPROM.get(calVal_eepromAdress, calibrationValue); // uncomment this if you want to fetch the calibration value from eeprom
//calibrationValue = EEPROM.read(calVal_eepromAdress);
Serial.print("Read Scale Calibration Value ");
Serial.println(calibrationValue);
scale.begin(LOADCELL_DOUT_PIN, LOADCELL_SCK_PIN);
if (isnan(calibrationValue) || calibrationValue < 1) {
calibrationValue = defaultScaleCalibrationValue;
scaleCalibrated = 0;
}
if (isnan(calibrationValue) || calibrationValue < 1) calibrationValue = defaultScaleCalibrationValue;
oledShowMessage("Scale Tare Please remove all");
for (uint16_t i = 0; i < 2000; i++) {
@@ -101,8 +94,6 @@ uint8_t start_scale() {
} else {
Serial.println("ScaleLoop-Task erfolgreich erstellt");
}
return (scaleCalibrated == 1) ? 1 : 3;
}
uint8_t calibrate_scale() {
@@ -146,19 +137,18 @@ uint8_t calibrate_scale() {
{
Serial.print("New calibration value has been set to: ");
Serial.println(newCalibrationValue);
Serial.print("Save this value to EEPROM adress ");
Serial.println(calVal_eepromAdress);
// Save via NVS
preferences.begin(NVS_NAMESPACE, false); // false = readwrite
preferences.putLong(NVS_KEY_CALIBRATION, newCalibrationValue);
preferences.end();
//EEPROM.put(calVal_eepromAdress, newCalibrationValue);
EEPROM.put(calVal_eepromAdress, newCalibrationValue);
EEPROM.commit();
// Verify
preferences.begin(NVS_NAMESPACE, true);
long verifyValue = preferences.getLong(NVS_KEY_CALIBRATION, 0);
preferences.end();
EEPROM.get(calVal_eepromAdress, newCalibrationValue);
//newCalibrationValue = EEPROM.read(calVal_eepromAdress);
Serial.print("Verified stored value: ");
Serial.println(verifyValue);
Serial.print("Read Value ");
Serial.println(newCalibrationValue);
Serial.println("End calibration, revome weight");


@@ -5,7 +5,7 @@
#include "HX711.h"
uint8_t start_scale();
void start_scale();
uint8_t calibrate_scale();
uint8_t tareScale();
@@ -14,8 +14,5 @@ extern int16_t weight;
extern uint8_t weigthCouterToApi;
extern uint8_t scale_tare_counter;
extern uint8_t pauseMainTask;
extern uint8_t scaleCalibrated;
extern TaskHandle_t ScaleTask;
#endif


@@ -7,15 +7,10 @@
#include "nfc.h"
#include "scale.h"
#include "esp_task_wdt.h"
#include <Update.h>
#include "display.h"
#ifndef VERSION
#define VERSION "1.1.0"
#endif
#include "ota.h"
// Define the Cache-Control header
#define CACHE_CONTROL "max-age=604800" // cache for 1 week
#define CACHE_CONTROL "max-age=31536000" // cache for 1 year
AsyncWebServer server(webserverPort);
AsyncWebSocket ws("/ws");
@@ -23,49 +18,6 @@ AsyncWebSocket ws("/ws");
uint8_t lastSuccess = 0;
uint8_t lastHasReadRfidTag = 0;
// Global variables for config backups
String bambuCredentialsBackup;
String spoolmanUrlBackup;
// Global variable for the update type
static int currentUpdateCommand = 0;
// Global update variables
static size_t updateTotalSize = 0;
static size_t updateWritten = 0;
static bool isSpiffsUpdate = false;
void sendUpdateProgress(int progress, const char* status = nullptr, const char* message = nullptr) {
static int lastSentProgress = -1;
// Avoid overly frequent updates
if (progress == lastSentProgress && !status && !message) {
return;
}
String progressMsg = "{\"type\":\"updateProgress\",\"progress\":" + String(progress);
if (status) {
progressMsg += ",\"status\":\"" + String(status) + "\"";
}
if (message) {
progressMsg += ",\"message\":\"" + String(message) + "\"";
}
progressMsg += "}";
// Send the message several times with a delay for important updates
if (status || abs(progress - lastSentProgress) >= 10 || progress == 100) {
for (int i = 0; i < 2; i++) {
ws.textAll(progressMsg);
delay(100); // longer delay between messages
}
} else {
ws.textAll(progressMsg);
delay(50);
}
lastSentProgress = progress;
}
void onWsEvent(AsyncWebSocket *server, AsyncWebSocketClient *client, AwsEventType type, void *arg, uint8_t *data, size_t len) {
if (type == WS_EVT_CONNECT) {
Serial.println("Neuer Client verbunden!");
@@ -76,15 +28,11 @@ void onWsEvent(AsyncWebSocket *server, AsyncWebSocketClient *client, AwsEventTyp
sendWriteResult(client, 3);
} else if (type == WS_EVT_DISCONNECT) {
Serial.println("Client getrennt.");
} else if (type == WS_EVT_ERROR) {
Serial.printf("WebSocket Client #%u error(%u): %s\n", client->id(), *((uint16_t*)arg), (char*)data);
} else if (type == WS_EVT_PONG) {
Serial.printf("WebSocket Client #%u pong\n", client->id());
} else if (type == WS_EVT_DATA) {
String message = String((char*)data);
JsonDocument doc;
deserializeJson(doc, message);
if (doc["type"] == "heartbeat") {
// Send heartbeat response
ws.text(client->id(), "{"
@@ -96,7 +44,7 @@ void onWsEvent(AsyncWebSocket *server, AsyncWebSocketClient *client, AwsEventTyp
}
else if (doc["type"] == "writeNfcTag") {
if (doc["payload"].is<JsonObject>()) {
if (doc.containsKey("payload")) {
// Try to write the NFC data
String payloadString;
serializeJson(doc["payload"], payloadString);
@@ -203,121 +151,11 @@ void sendNfcData(AsyncWebSocketClient *client) {
void sendAmsData(AsyncWebSocketClient *client) {
if (ams_count > 0) {
ws.textAll("{\"type\":\"amsData\",\"payload\":" + amsJsonData + "}");
ws.textAll("{\"type\":\"amsData\", \"payload\":" + amsJsonData + "}");
}
}
void handleUpdate(AsyncWebServer &server) {
AsyncCallbackWebHandler* updateHandler = new AsyncCallbackWebHandler();
updateHandler->setUri("/update");
updateHandler->setMethod(HTTP_POST);
updateHandler->onUpload([](AsyncWebServerRequest *request, String filename,
size_t index, uint8_t *data, size_t len, bool final) {
if (!index) {
updateTotalSize = request->contentLength();
updateWritten = 0;
isSpiffsUpdate = (filename.indexOf("website") > -1);
if (isSpiffsUpdate) {
// Back up configurations before the update
sendUpdateProgress(0, "backup", "Backing up configurations...");
delay(200);
backupJsonConfigs();
delay(200);
const esp_partition_t *partition = esp_partition_find_first(ESP_PARTITION_TYPE_DATA, ESP_PARTITION_SUBTYPE_DATA_SPIFFS, NULL);
if (!partition || !Update.begin(partition->size, U_SPIFFS)) {
request->send(400, "application/json", "{\"success\":false,\"message\":\"Update initialization failed\"}");
return;
}
sendUpdateProgress(5, "starting", "Starting SPIFFS update...");
delay(200);
} else {
if (!Update.begin(updateTotalSize)) {
request->send(400, "application/json", "{\"success\":false,\"message\":\"Update initialization failed\"}");
return;
}
sendUpdateProgress(0, "starting", "Starting firmware update...");
delay(200);
}
}
if (len) {
if (Update.write(data, len) != len) {
request->send(400, "application/json", "{\"success\":false,\"message\":\"Write failed\"}");
return;
}
updateWritten += len;
int currentProgress;
// Calculate the progress based on the update type
if (isSpiffsUpdate) {
// SPIFFS: 5-75% for the upload
currentProgress = 5 + (updateWritten * 100) / updateTotalSize;
} else {
// Firmware: 0-100% for the upload
currentProgress = 1 + (updateWritten * 100) / updateTotalSize;
}
static int lastProgress = -1;
if (currentProgress != lastProgress && (currentProgress % 10 == 0 || final)) {
sendUpdateProgress(currentProgress, "uploading");
oledShowMessage("Update: " + String(currentProgress) + "%");
delay(50);
lastProgress = currentProgress;
}
}
if (final) {
if (Update.end(true)) {
if (isSpiffsUpdate) {
restoreJsonConfigs();
}
} else {
request->send(400, "application/json", "{\"success\":false,\"message\":\"Update finalization failed\"}");
}
}
});
updateHandler->onRequest([](AsyncWebServerRequest *request) {
if (Update.hasError()) {
request->send(400, "application/json", "{\"success\":false,\"message\":\"Update failed\"}");
return;
}
// First 100% message
ws.textAll("{\"type\":\"updateProgress\",\"progress\":100,\"status\":\"success\",\"message\":\"Update successful! Restarting device...\"}");
delay(2000); // longer delay for the first message
AsyncWebServerResponse *response = request->beginResponse(200, "application/json",
"{\"success\":true,\"message\":\"Update successful! Restarting device...\"}");
response->addHeader("Connection", "close");
request->send(response);
// Second 100% message, just to be safe
ws.textAll("{\"type\":\"updateProgress\",\"progress\":100,\"status\":\"success\",\"message\":\"Update successful! Restarting device...\"}");
delay(3000); // even longer delay before the restart
ESP.restart();
});
server.addHandler(updateHandler);
}
void setupWebserver(AsyncWebServer &server) {
// Disable all debug output
Serial.setDebugOutput(false);
// WebSocket optimizations
ws.onEvent(onWsEvent);
ws.enable(true);
// Configure the server for large uploads
server.onRequestBody([](AsyncWebServerRequest *request, uint8_t *data, size_t len, size_t index, size_t total){});
server.onFileUpload([](AsyncWebServerRequest *request, const String& filename, size_t index, uint8_t *data, size_t len, bool final){});
// Load the Spoolman URL at boot
spoolmanUrl = loadSpoolmanUrl();
Serial.print("Geladene Spoolman-URL: ");
@@ -351,6 +189,17 @@ void setupWebserver(AsyncWebServer &server) {
Serial.println("RFID-Seite gesendet");
});
/*
// New API route for fetching the spool data
server.on("/api/spools", HTTP_GET, [](AsyncWebServerRequest *request){
Serial.println("API-Aufruf: /api/spools");
JsonDocument spoolsData = fetchSpoolsForWebsite();
String response;
serializeJson(spoolsData, response);
request->send(200, "application/json", response);
});
*/
server.on("/api/url", HTTP_GET, [](AsyncWebServerRequest *request){
Serial.println("API-Aufruf: /api/url");
String jsonResponse = "{\"spoolman_url\": \"" + String(spoolmanUrl) + "\"}";
@@ -373,12 +222,10 @@ void setupWebserver(AsyncWebServer &server) {
html.replace("{{spoolmanUrl}}", spoolmanUrl);
JsonDocument doc;
if (loadJsonValue("/bambu_credentials.json", doc) && doc["bambu_ip"].is<String>())
{
if (loadJsonValue("/bambu_credentials.json", doc) && doc.containsKey("bambu_ip")) {
String bambuIp = doc["bambu_ip"].as<String>();
String bambuSerial = doc["bambu_serialnr"].as<String>();
String bambuCode = doc["bambu_accesscode"].as<String>();
autoSendToBambu = doc["autoSendToBambu"].as<bool>();
bambuIp.trim();
bambuSerial.trim();
bambuCode.trim();
@@ -386,15 +233,7 @@ void setupWebserver(AsyncWebServer &server) {
html.replace("{{bambuIp}}", bambuIp ? bambuIp : "");
html.replace("{{bambuSerial}}", bambuSerial ? bambuSerial : "");
html.replace("{{bambuCode}}", bambuCode ? bambuCode : "");
html.replace("{{autoSendToBambu}}", autoSendToBambu ? "checked" : "");
}
else
{
html.replace("{{bambuIp}}", "");
html.replace("{{bambuSerial}}", "");
html.replace("{{bambuCode}}", "");
html.replace("{{autoSendToBambu}}", "");
}
}
request->send(200, "text/html", html);
});
@@ -425,8 +264,6 @@ void setupWebserver(AsyncWebServer &server) {
String bambu_ip = request->getParam("bambu_ip")->value();
String bambu_serialnr = request->getParam("bambu_serialnr")->value();
String bambu_accesscode = request->getParam("bambu_accesscode")->value();
bool autoSend = (request->getParam("autoSend")->value() == "true") ? true : false;
Serial.println(autoSend);
bambu_ip.trim();
bambu_serialnr.trim();
bambu_accesscode.trim();
@@ -436,7 +273,7 @@ void setupWebserver(AsyncWebServer &server) {
return;
}
bool success = saveBambuCredentials(bambu_ip, bambu_serialnr, bambu_accesscode, autoSend);
bool success = saveBambuCredentials(bambu_ip, bambu_serialnr, bambu_accesscode);
request->send(200, "application/json", "{\"healthy\": " + String(success ? "true" : "false") + "}");
});
@@ -501,22 +338,30 @@ void setupWebserver(AsyncWebServer &server) {
Serial.println("RFID.js gesendet");
});
// Simplified update handler
// Route for Firmware Update
server.on("/upgrade", HTTP_GET, [](AsyncWebServerRequest *request) {
// During OTA, reduce memory usage
ws.enable(false); // Temporarily disable WebSocket
ws.cleanupClients();
Serial.println("Request for /upgrade received");
AsyncWebServerResponse *response = request->beginResponse(SPIFFS, "/upgrade.html.gz", "text/html");
response->addHeader("Content-Encoding", "gzip");
response->addHeader("Cache-Control", "no-store");
response->addHeader("Cache-Control", CACHE_CONTROL);
request->send(response);
});
// Register the update handler
handleUpdate(server);
server.on("/api/version", HTTP_GET, [](AsyncWebServerRequest *request){
String fm_version = VERSION;
String jsonResponse = "{\"version\": \""+ fm_version +"\"}";
request->send(200, "application/json", jsonResponse);
});
server.on("/update", HTTP_POST,
[](AsyncWebServerRequest *request) {
// The response will be sent from handleOTAUpload when the upload is complete
},
[](AsyncWebServerRequest *request, const String& filename, size_t index, uint8_t *data, size_t len, bool final) {
// Free memory before handling update
ws.enable(false);
ws.cleanupClients();
handleOTAUpload(request, filename, index, data, len, final);
}
);
// Error handling for pages that are not found
server.onNotFound([](AsyncWebServerRequest *request){
@@ -534,50 +379,3 @@ void setupWebserver(AsyncWebServer &server) {
server.begin();
Serial.println("Webserver gestartet");
}
void backupJsonConfigs() {
// Bambu Credentials backup
if (SPIFFS.exists("/bambu_credentials.json")) {
File file = SPIFFS.open("/bambu_credentials.json", "r");
if (file) {
bambuCredentialsBackup = file.readString();
file.close();
Serial.println("Bambu credentials backed up");
}
}
// Spoolman URL backup
if (SPIFFS.exists("/spoolman_url.json")) {
File file = SPIFFS.open("/spoolman_url.json", "r");
if (file) {
spoolmanUrlBackup = file.readString();
file.close();
Serial.println("Spoolman URL backed up");
}
}
}
void restoreJsonConfigs() {
// Restore Bambu credentials
if (bambuCredentialsBackup.length() > 0) {
File file = SPIFFS.open("/bambu_credentials.json", "w");
if (file) {
file.print(bambuCredentialsBackup);
file.close();
Serial.println("Bambu credentials restored");
}
bambuCredentialsBackup = ""; // Clear backup
}
// Restore Spoolman URL
if (spoolmanUrlBackup.length() > 0) {
File file = SPIFFS.open("/spoolman_url.json", "w");
if (file) {
file.print(spoolmanUrlBackup);
file.close();
Serial.println("Spoolman URL restored");
}
spoolmanUrlBackup = ""; // Clear backup
}
}


@@ -6,8 +6,8 @@
#include "commonFS.h"
#include "api.h"
#include <ArduinoJson.h>
#include <Update.h>
#include <AsyncTCP.h>
#include <ESPAsyncWebServer.h>
#include <AsyncWebSocket.h>
#include "bambu.h"
#include "nfc.h"
#include "scale.h"
@@ -17,20 +17,10 @@ extern String spoolmanUrl;
extern AsyncWebServer server;
extern AsyncWebSocket ws;
// Server initialization and handlers
void initWebServer();
void handleUpload(AsyncWebServerRequest *request, String filename, size_t index, uint8_t *data, size_t len, bool final);
void handleBody(AsyncWebServerRequest *request, uint8_t *data, size_t len, size_t index, size_t total);
void setupWebserver(AsyncWebServer &server);
// WebSocket functions
void sendAmsData(AsyncWebSocketClient *client);
void sendNfcData(AsyncWebSocketClient *client);
void foundNfcTag(AsyncWebSocketClient *client, uint8_t success);
void sendWriteResult(AsyncWebSocketClient *client, uint8_t success);
// Upgrade functions
void backupJsonConfigs();
void restoreJsonConfigs();
#endif


@@ -10,19 +10,9 @@ WiFiManager wm;
bool wm_nonblocking = false;
void initWiFi() {
// Optimized WiFi settings
WiFi.mode(WIFI_STA); // explicitly set mode, esp defaults to STA+AP
WiFi.setSleep(false); // disable sleep mode
esp_wifi_set_ps(WIFI_PS_NONE);
// Maximum transmit power
WiFi.setTxPower(WIFI_POWER_19_5dBm); // Set maximum transmit power
// Optimize the TCP/IP stack
esp_wifi_set_protocol(WIFI_IF_STA, WIFI_PROTOCOL_11B | WIFI_PROTOCOL_11G | WIFI_PROTOCOL_11N);
// Enable WiFi roaming for better stability
esp_wifi_set_rssi_threshold(-80);
esp_wifi_set_max_tx_power(72); // set maximum transmit power (value in 0.25 dBm steps)
if(wm_nonblocking) wm.setConfigPortalBlocking(false);
wm.setConfigPortalTimeout(320); // close the portal after ~5 minutes