Migrate to Poetry packaging and restructure codebase #21
```diff
@@ -3,24 +3,36 @@ README.md
 LICENSE
 donate.md
 screenshots/
+docs/
+
 # Development files
 .github/
 electron/
+scripts/
+Makefile
+
 # Build artifacts and cache
+build/
+dist/
 public/
+meshchatx/public/
 node_modules/
 __pycache__/
-*.pyc
-*.pyo
-*.pyd
+*.py[cod]
+*$py.class
+*.so
 .Python
+*.egg-info/
+*.egg
+python-dist/
+
+# Virtual environments
 env/
 venv/
 ENV/
 env.bak/
 venv.bak/
+.venv/
+
 # IDE and editor files
 .vscode/
@@ -47,9 +59,19 @@ Dockerfile*
 docker-compose*.yml
 .dockerignore
 
+# Local storage and runtime data
+storage/
+testing/
+telemetry_test_lxmf/
+
 # Logs
 *.log
 
 # Temporary files
 *.tmp
 *.temp
+
+# Environment variables
+.env
+.env.local
+.env.*.local
```
.github/workflows/build.yml — 290 changed lines (vendored)

```diff
@@ -27,96 +27,14 @@ on:
         default: 'true'
         type: boolean
 
+permissions:
+  contents: read
+
 jobs:
-  build_windows:
+  build_frontend:
-    runs-on: windows-latest
-    if: github.event_name == 'push' || (github.event_name == 'workflow_dispatch' && github.event.inputs.build_windows == 'true')
-    permissions:
-      contents: write
-    steps:
-      - name: Clone Repo
-        uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e # v1
-
-      - name: Install NodeJS
-        uses: actions/setup-node@f1f314fca9dfce2769ece7d933488f076716723e # v1
-        with:
-          node-version: 22
-
-      - name: Install Python
-        uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5
-        with:
-          python-version: "3.12"
-
-      - name: Install Python Deps
-        run: |
-          python -m venv venv
-          venv\Scripts\pip install --upgrade pip
-          venv\Scripts\pip install -r requirements.txt
-
-      - name: Install NodeJS Deps
-        run: npm install
-
-      - name: Build Electron App
-        run: npm run dist
-
-      - name: Create Release
-        id: create_release
-        uses: ncipollo/release-action@b7eabc95ff50cbeeedec83973935c8f306dfcd0b # v1
-        with:
-          draft: true
-          allowUpdates: true
-          replacesArtifacts: true
-          omitDraftDuringUpdate: true
-          omitNameDuringUpdate: true
-          artifacts: "dist/*-win-installer.exe,dist/*-win-portable.exe"
-
-  build_mac:
-    runs-on: macos-13
-    if: github.event_name == 'push' || (github.event_name == 'workflow_dispatch' && github.event.inputs.build_mac == 'true')
-    permissions:
-      contents: write
-    steps:
-      - name: Clone Repo
-        uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e # v1
-
-      - name: Install NodeJS
-        uses: actions/setup-node@f1f314fca9dfce2769ece7d933488f076716723e # v1
-        with:
-          node-version: 18
-
-      - name: Install Python
-        uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5
-        with:
-          python-version: "3.11"
-
-      - name: Install Python Deps
-        run: |
-          python3 -m venv venv
-          venv/bin/pip install --upgrade pip
-          venv/bin/pip install -r requirements.txt
-
-      - name: Install NodeJS Deps
-        run: npm install
-
-      - name: Build Electron App
-        run: npm run dist
-
-      - name: Create Release
-        id: create_release
-        uses: ncipollo/release-action@b7eabc95ff50cbeeedec83973935c8f306dfcd0b # v1
-        with:
-          draft: true
-          allowUpdates: true
-          replacesArtifacts: true
-          omitDraftDuringUpdate: true
-          omitNameDuringUpdate: true
-          artifacts: "dist/*-mac.dmg"
-
-  build_linux:
     runs-on: ubuntu-latest
-    if: github.event_name == 'push' || (github.event_name == 'workflow_dispatch' && github.event.inputs.build_linux == 'true')
     permissions:
-      contents: write
+      contents: read
     steps:
       - name: Clone Repo
         uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e # v1
@@ -131,31 +49,201 @@ jobs:
         with:
           python-version: "3.12"
 
+      - name: Sync versions
+        run: python scripts/sync_version.py
+
+      - name: Install NodeJS Deps
+        run: npm install
+
+      - name: Build Frontend
+        run: npm run build-frontend
+
+      - name: Upload frontend artifact
+        uses: actions/upload-artifact@v4
+        with:
+          name: frontend-build
+          path: meshchatx/public
+          if-no-files-found: error
+
+  build_desktop:
+    name: Build Desktop (${{ matrix.name }})
+    needs: build_frontend
+    runs-on: ${{ matrix.os }}
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+          - name: windows
+            os: windows-latest
+            node: 22
+            python: "3.12"
+            release_artifacts: "dist/*-win-installer.exe,dist/*-win-portable.exe"
+            build_input: build_windows
+          - name: mac
+            os: macos-13
+            node: 18
+            python: "3.11"
+            release_artifacts: "dist/*-mac.dmg"
+            build_input: build_mac
+          - name: linux
+            os: ubuntu-latest
+            node: 22
+            python: "3.12"
+            release_artifacts: "dist/*-linux.AppImage,dist/*-linux.deb,python-dist/*.whl"
+            build_input: build_linux
+    permissions:
+      contents: write
+    steps:
+      - name: Clone Repo
+        if: |
+          github.event_name == 'push' ||
+          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
+        uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e # v1
+
+      - name: Install NodeJS
+        if: |
+          github.event_name == 'push' ||
+          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
+        uses: actions/setup-node@f1f314fca9dfce2769ece7d933488f076716723e # v1
+        with:
+          node-version: ${{ matrix.node }}
+
+      - name: Install Python
+        if: |
+          github.event_name == 'push' ||
+          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
+        uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5
+        with:
+          python-version: ${{ matrix.python }}
+
+      - name: Install Poetry
+        if: |
+          github.event_name == 'push' ||
+          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
+        run: python -m pip install --upgrade pip poetry
+
+      - name: Sync versions
+        if: |
+          github.event_name == 'push' ||
+          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
+        run: python scripts/sync_version.py
+
+      - name: Install Python Deps
+        if: |
+          github.event_name == 'push' ||
+          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
+        run: python -m poetry install
+
+      - name: Install NodeJS Deps
+        if: |
+          github.event_name == 'push' ||
+          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
+        run: npm install
+
+      - name: Prepare frontend directory
+        if: |
+          github.event_name == 'push' ||
+          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
+        run: python scripts/prepare_frontend_dir.py
+
+      - name: Download frontend artifact
+        if: |
+          github.event_name == 'push' ||
+          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
+        uses: actions/download-artifact@v4
+        with:
+          name: frontend-build
+          path: meshchatx/public
 
       - name: Install patchelf
+        if: |
+          matrix.name == 'linux' &&
+          (github.event_name == 'push' ||
+          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true))
         run: sudo apt-get update && sudo apt-get install -y patchelf
 
-      - name: Install Python Deps
+      - name: Build Python wheel
+        if: |
+          matrix.name == 'linux' &&
+          (github.event_name == 'push' ||
+          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true))
         run: |
-          python3 -m venv venv
-          venv/bin/pip install --upgrade pip
-          venv/bin/pip install -r requirements.txt
+          python -m poetry build -f wheel
+          mkdir -p python-dist
+          mv dist/*.whl python-dist/
+          rm -rf dist
 
-      - name: Install NodeJS Deps
-        run: npm install
-
       - name: Build Electron App
-        run: npm run dist
+        if: |
+          github.event_name == 'push' ||
+          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
+        run: npm run dist-prebuilt
+
+      - name: Upload build artifacts
+        if: |
+          github.event_name == 'push' ||
+          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
+        uses: actions/upload-artifact@v4
+        with:
+          name: build-${{ matrix.name }}
+          path: |
+            dist/*-win-installer.exe
+            dist/*-win-portable.exe
+            dist/*-mac.dmg
+            dist/*-linux.AppImage
+            dist/*-linux.deb
+            python-dist/*.whl
+          if-no-files-found: ignore
+
+  create_release:
+    name: Create Release
+    needs: build_desktop
+    runs-on: ubuntu-latest
+    if: github.event_name == 'push'
+    permissions:
+      contents: write
+    steps:
+      - name: Download all artifacts
+        uses: actions/download-artifact@v4
+        with:
+          path: artifacts
+
+      - name: Display structure of downloaded files
+        run: ls -R artifacts
+
+      - name: Prepare release assets
+        run: |
+          mkdir -p release-assets
+          find artifacts -type f \( -name "*.exe" -o -name "*.dmg" -o -name "*.AppImage" -o -name "*.deb" -o -name "*.whl" \) -exec cp {} release-assets/ \;
+          ls -lh release-assets/
+
+      - name: Generate SHA256 checksums
+        run: |
+          cd release-assets
+          echo "## SHA256 Checksums" > release-body.md
+          echo "" >> release-body.md
+
+          for file in *.exe *.dmg *.AppImage *.deb *.whl; do
+            if [ -f "$file" ]; then
+              sha256sum "$file" | tee "${file}.sha256"
+              echo "\`$(cat "${file}.sha256")\`" >> release-body.md
+            fi
+          done
+
+          echo "" >> release-body.md
+          echo "Individual \`.sha256\` files are included for each artifact." >> release-body.md
+
+          cat release-body.md
+          echo ""
+          echo "Generated .sha256 files:"
+          ls -1 *.sha256 2>/dev/null || echo "No .sha256 files found"
 
       - name: Create Release
-        id: create_release
         uses: ncipollo/release-action@b7eabc95ff50cbeeedec83973935c8f306dfcd0b # v1
         with:
           draft: true
-          allowUpdates: true
-          replacesArtifacts: true
-          omitDraftDuringUpdate: true
-          omitNameDuringUpdate: true
-          artifacts: "dist/*-linux.AppImage,dist/*-linux.deb"
+          artifacts: "release-assets/*"
+          bodyFile: "release-assets/release-body.md"
 
   build_docker:
     runs-on: ubuntu-latest
```
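For reference, the per-platform `workflow_dispatch` inputs that the `build_desktop` matrix reads through `inputs[matrix.build_input]` can be exercised from the GitHub CLI; a minimal sketch (assumes an authenticated `gh` against this repository and that the input names match the workflow above):

```bash
# Dispatch the workflow with only the Linux desktop build enabled.
# The matrix steps check inputs[matrix.build_input], so the Windows
# and macOS legs are skipped when their inputs are false.
gh workflow run build.yml -f build_linux=true -f build_windows=false -f build_mac=false

# Follow the run that was just dispatched.
gh run watch
```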
.gitignore — 54 changed lines (vendored)

```diff
@@ -1,13 +1,57 @@
+# IDE and editor files
 .idea
-node_modules
+.vscode/
+*.swp
+*.swo
+*~
+
-# build files
+# Dependencies
+node_modules/
+
+# Python
+__pycache__/
+*.py[cod]
+*$py.class
+*.so
+.Python
+*.egg-info/
+dist/
+*.egg
+
+# Virtual environments
+venv/
+env/
+ENV/
+env.bak/
+venv.bak/
+.venv/
+
+# Build files
 /build/
 /dist/
-/public/
+/meshchatx/public/
+public/
 /electron/build/exe/
+python-dist/
+
-# local storage
+# Local storage and runtime data
 storage/
+testing/
+telemetry_test_lxmf/
+
-*.pyc
+# Logs
+*.log
+
+# OS files
+.DS_Store
+.DS_Store?
+._*
+.Spotlight-V100
+.Trashes
+ehthumbs.db
+Thumbs.db
+
+# Environment variables
+.env
+.env.local
+.env.*.local
```
Dockerfile — 15 changed lines

```diff
@@ -10,9 +10,8 @@ FROM node:${NODE_VERSION}-alpine@${NODE_ALPINE_SHA256} AS build-frontend
 WORKDIR /src
 
 # Copy required source files
-COPY *.json .
-COPY *.js .
-COPY src/frontend ./src/frontend
+COPY package*.json vite.config.js ./
+COPY meshchatx ./meshchatx
 
 # Install NodeJS deps, exluding electron
 RUN npm install --omit=dev && \
@@ -34,12 +33,10 @@ RUN apk add --no-cache --virtual .build-deps \
     apk del .build-deps
 
 # Copy prebuilt frontend
-COPY --from=build-frontend /src/public public
+COPY --from=build-frontend /src/meshchatx/public meshchatx/public
 
 # Copy other required source files
-COPY *.py .
-COPY src/__init__.py ./src/__init__.py
-COPY src/backend ./src/backend
-COPY *.json .
+COPY meshchatx ./meshchatx
+COPY pyproject.toml poetry.lock ./
 
-CMD ["python", "meshchat.py", "--host=0.0.0.0", "--reticulum-config-dir=/config/.reticulum", "--storage-dir=/config/.meshchat", "--headless"]
+CMD ["python", "-m", "meshchatx.meshchat", "--host=0.0.0.0", "--reticulum-config-dir=/config/.reticulum", "--storage-dir=/config/.meshchat", "--headless"]
```
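The new CMD starts the backend as a package module rather than a top-level script. Outside the container the same entry point can be run in the same way; a sketch (assumes the Poetry environment from this PR is installed and the frontend assets have been built into `meshchatx/public`; the directories below are illustrative):

```bash
# Run the backend as a module, mirroring the container CMD,
# but bound to localhost and using local storage directories.
python -m meshchatx.meshchat \
  --host=127.0.0.1 \
  --reticulum-config-dir=./storage/.reticulum \
  --storage-dir=./storage/.meshchat \
  --headless
```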
Makefile — 53 changed lines

```diff
@@ -1,26 +1,39 @@
-.PHONY: install run clean build build-appimage build-exe dist
+.PHONY: install run develop clean build build-appimage build-exe dist sync-version wheel node_modules python build-docker run-docker
 
-VENV = venv
-PYTHON = $(VENV)/bin/python
-PIP = $(VENV)/bin/pip
+PYTHON ?= python
+POETRY = $(PYTHON) -m poetry
 NPM = npm
 
-install: $(VENV) node_modules
+DOCKER_COMPOSE_CMD ?= docker compose
+DOCKER_COMPOSE_FILE ?= docker-compose.yml
+DOCKER_IMAGE ?= reticulum-meshchatx:local
+DOCKER_BUILDER ?= meshchatx-builder
+DOCKER_PLATFORMS ?= linux/amd64
+DOCKER_BUILD_FLAGS ?= --load
+DOCKER_BUILD_ARGS ?=
+DOCKER_CONTEXT ?= .
+DOCKERFILE ?= Dockerfile
 
-$(VENV):
-	python3 -m venv $(VENV)
-	$(PIP) install --upgrade pip
-	$(PIP) install -r requirements.txt
+install: sync-version node_modules python
 
 node_modules:
 	$(NPM) install
 
+python:
+	$(POETRY) install
+
 run: install
-	$(PYTHON) meshchat.py
+	$(POETRY) run meshchat
 
+develop: run
+
 build: install
 	$(NPM) run build
 
+wheel: install
+	$(POETRY) build -f wheel
+	$(PYTHON) scripts/move_wheels.py
+
 build-appimage: build
 	$(NPM) run electron-postinstall
 	$(NPM) run dist -- --linux AppImage
@@ -32,10 +45,28 @@ build-exe: build
 dist: build-appimage
 
 clean:
-	rm -rf $(VENV)
 	rm -rf node_modules
 	rm -rf build
 	rm -rf dist
+	rm -rf python-dist
+	rm -rf meshchatx/public
+
+sync-version:
+	$(PYTHON) scripts/sync_version.py
+
+build-docker:
+	@if ! docker buildx inspect $(DOCKER_BUILDER) >/dev/null 2>&1; then \
+		docker buildx create --name $(DOCKER_BUILDER) --use >/dev/null; \
+	else \
+		docker buildx use $(DOCKER_BUILDER); \
+	fi
+	docker buildx build --builder $(DOCKER_BUILDER) --platform $(DOCKER_PLATFORMS) \
+		$(DOCKER_BUILD_FLAGS) \
+		-t $(DOCKER_IMAGE) \
+		$(DOCKER_BUILD_ARGS) \
+		-f $(DOCKERFILE) \
+		$(DOCKER_CONTEXT)
+
+run-docker:
+	MESHCHAT_IMAGE="$(DOCKER_IMAGE)" \
+	$(DOCKER_COMPOSE_CMD) -f $(DOCKER_COMPOSE_FILE) up --remove-orphans --pull never reticulum-meshchatx
```
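Because every Docker knob above is a `?=` default, a multi-arch push build can be requested purely through make variables; a sketch (assumes a buildx-capable Docker daemon and push rights to the target registry — the image tag below is an example, not something this PR defines):

```bash
# Build and push a multi-arch image via the overridable Makefile variables.
# --load only works for single-platform builds, so switch to --push here.
make build-docker \
  DOCKER_IMAGE=ghcr.io/example/reticulum-meshchatx:dev \
  DOCKER_PLATFORMS=linux/amd64,linux/arm64 \
  DOCKER_BUILD_FLAGS=--push

# Run a locally built tag through docker compose instead of the published image.
make run-docker DOCKER_IMAGE=reticulum-meshchatx:local
```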
README.md — 73 changed lines

````diff
@@ -8,22 +8,22 @@ A heavily customized fork of [Reticulum MeshChat](https://github.com/liamcottle/
 - [x] Ability to set inbound and propagation node stamps.
 - [x] Better config parsing.
 - [x] Cancel page fetching or file downloads
-- [x] Block recieving messages from users.
+- [x] Block receiving messages from users.
 - [ ] Spam filter (based on keywords)
 - [ ] Multi-identity support.
 - [ ] Multi-language support
 - [ ] Offline Reticulum documentation tool
-- [ ] More tools (translate, LoRa calculator, LXMFy bots, etc)
+- [ ] More tools (translate, LoRa calculator, LXMFy bots, etc.)
-- [ ] Codebase reorginization and cleanup.
+- [x] Codebase reorganization and cleanup.
 - [ ] Tests and proper CI/CD pipeline.
 - [ ] RNS hot reload
 - [ ] Backup/Import identities, messages and interfaces.
 - [ ] Full LXST support.
-- [ ] Move to Poetry and pyproject.toml for Python packaging.
+- [x] Poetry for packaging and dependency management.
 - [x] More stats on about page.
 - [x] Actions are pinned to full-length SHA hashes.
 - [x] Docker images are smaller and use SHA256 hashes for the images.
-- [x] Electron improvements.
+- [x] Electron improvements (ASAR and security).
 - [x] Latest updates for NPM and Python dependencies (bleeding edge)
 - [x] Numerous Ruff, Deepsource, CodeQL Advanced and Bearer Linting/SAST fixes.
 - [x] Some performance improvements.
@@ -35,25 +35,76 @@ Check [releases](https://github.com/Sudo-Ivan/reticulum-meshchatX/releases) for
 ## Building
 
 ```bash
-make install
+make install # installs Python deps via Poetry and Node deps via npm
 make build
 ```
 
+You can run `make run` or `make develop` (a thin alias) to start the backend + frontend loop locally through `poetry run meshchat`.
+
+### Python packaging
+
+The Python build is driven entirely by Poetry now. Run `python3 scripts/sync_version.py` or `make sync-version` before packaging so `pyproject.toml` and `src/version.py` match `package.json`. After that:
+
+```bash
+python -m poetry install
+make wheel # produces a wheel in python-dist/ that bundles the public assets
+```
+
+The wheel includes the frontend `public/` assets, `logo/`, and the CLI entry point, and `python-dist/` keeps the artifact separate from the Electron `dist/` output.
+
 ### Building in Docker
 
 ```bash
-make docker-build
+make build-docker
 ```
 
-The build will be in the `dist` directory.
+`build-docker` creates `reticulum-meshchatx:local` (or `$(DOCKER_IMAGE)` if you override it) via `docker buildx`. Set `DOCKER_PLATFORMS` to `linux/amd64,linux/arm64` when you need multi-arch images, and adjust `DOCKER_BUILD_FLAGS`/`DOCKER_BUILD_ARGS` to control `--load`/`--push`.
 
-## Development
+### Running with Docker Compose
 
 ```bash
-make develop
+make run-docker
 ```
 
+`run-docker` feeds the locally-built image into `docker compose -f docker-compose.yml up --remove-orphans --pull never reticulum-meshchatx`. The compose file uses the `MESHCHAT_IMAGE` env var so you can override the target image without editing the YAML (the default still points at `ghcr.io/sudo-ivan/reticulum-meshchatx:latest`). Use `docker compose down` or `Ctrl+C` to stop the container.
+
+The Electron build artifacts will still live under `dist/` for releases.
+
 ## Python packaging
 
-The backend now provides `pyproject.toml` so you can build/install a wheel with `pip install .` or `python -m build`. Before packaging, run `python3 scripts/sync_version.py` (or `make sync-version`) so the generated `src/version.py` reflects the `package.json` version that the Electron artifacts use. The same version helper drives `meshchat.get_app_version()` and `setup.py`, so the CLI release metadata, wheel and AppImage/NSIS bundles stay aligned.
+The backend uses Poetry with `pyproject.toml` for dependency management and packaging. Before building, run `python3 scripts/sync_version.py` (or `make sync-version`) to ensure the generated `src/version.py` reflects the version from `package.json` that the Electron artifacts use. This keeps the CLI release metadata, wheel packages, and other bundles aligned.
+
+### Build artifact locations
+
+Both `poetry build` and `python -m build` generate wheels inside the default `dist/` directory. The `make wheel` shortcut wraps `poetry build -f wheel` and then runs `python scripts/move_wheels.py` to relocate the generated `.whl` files into `python-dist/` (the layout expected by `scripts/test_wheel.sh` and the release automation). Use `make wheel` if you need the artifacts in `python-dist/`; `poetry build` or `python -m build` alone will leave them in `dist/`.
+
+### Building with Poetry
+
+```bash
+# Install dependencies
+poetry install
+
+# Build the package (wheels land in dist/)
+poetry build
+
+# Install locally for testing (consumes dist/)
+pip install dist/*.whl
+```
+
+### Building with pip (alternative)
+
+If you prefer pip, you can build/install directly:
+
+```bash
+# Build the wheel
+pip install build
+python -m build
+
+# Install locally
+pip install .
+```
+
+### cx_Freeze (for AppImage/NSIS)
+
+The `cx_setup.py` script uses cx_Freeze for creating standalone executables (AppImage for Linux, NSIS for Windows). This is separate from the Poetry/pip packaging workflow.
````

Review comments on README.md (condensed):

- ⚠️ Minor (CodeRabbit, addressed in commits ac839df to c98131f): "etc" should be "etc." — in American English the abbreviation requires a period (LanguageTool ETC_PERIOD, line 16).
- ⚠️ Minor (CodeRabbit, addressed in commits ac839df to c98131f): the Python packaging section still referenced `setup.py` after the migration to Poetry/`pyproject.toml`; `cx_setup.py` is for cx_Freeze (AppImage/NSIS), not pip packaging, and should be documented separately.
- 🛠️ Major (CodeRabbit, addressed in commits 284517b to 3848613): inconsistent interpreter invocation — one line used `python scripts/sync_version.py` while another used `python3` for the same script; the README should consistently use `python3`.
cx_setup.py — new file, 47 lines

```diff
@@ -0,0 +1,47 @@
+from pathlib import Path
+
+from cx_Freeze import Executable, setup
+
+from meshchatx.src.version import __version__
+
+ROOT = Path(__file__).resolve().parent
+PUBLIC_DIR = ROOT / "meshchatx" / "public"
+
+include_files = [
+    (str(PUBLIC_DIR), "public"),
+    ("logo", "logo"),
+]
+
+setup(
+    name="ReticulumMeshChatX",
+    version=__version__,
+    description="A simple mesh network communications app powered by the Reticulum Network Stack",
+    executables=[
+        Executable(
+            script="meshchatx/meshchat.py",
+            base=None,
+            target_name="ReticulumMeshChatX",
+            shortcut_name="ReticulumMeshChatX",
+            shortcut_dir="ProgramMenuFolder",
+            icon="logo/icon.ico",
+        ),
+    ],
+    options={
+        "build_exe": {
+            "packages": [
+                "RNS",
+                "RNS.Interfaces",
+                "LXMF",
+            ],
+            "include_files": include_files,
+            "excludes": [
+                "PIL",
+            ],
+            "optimize": 2,
+            "build_exe": "build/exe",
+            "replace_paths": [
+                ("*", ""),
+            ],
+        },
+    },
+)
```
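cx_Freeze setup scripts are normally driven through their `build` command; a sketch of how this one might be invoked (the exact command behind the repo's `build-exe`/`build-appimage` targets is not shown in this diff, so treat this as illustrative):

```bash
# Freeze the backend into build/exe, as configured by the
# "build_exe": "build/exe" option in cx_setup.py above.
python cx_setup.py build

# Inspect the frozen output directory.
ls build/exe
```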
```diff
@@ -1,7 +1,7 @@
 services:
   reticulum-meshchatx:
     container_name: reticulum-meshchatx
-    image: ghcr.io/sudo-ivan/reticulum-meshchatx:latest
+    image: ${MESHCHAT_IMAGE:-ghcr.io/sudo-ivan/reticulum-meshchatx:latest}
     pull_policy: always
     restart: unless-stopped
     # Make the meshchat web interface accessible from the host on port 8000
```
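The `${MESHCHAT_IMAGE:-…}` default means the compose file can be pointed at any image through the environment, which is what the Makefile's `run-docker` target does; a hand-run equivalent (a sketch — the local tag is the Makefile's default, adjust as needed):

```bash
# Without MESHCHAT_IMAGE set, compose falls back to the published GHCR image.
docker compose up reticulum-meshchatx

# Point the same service at a locally built tag instead, skipping the pull.
MESHCHAT_IMAGE=reticulum-meshchatx:local \
  docker compose up --remove-orphans --pull never reticulum-meshchatx
```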
```diff
@@ -133,6 +133,14 @@ app.whenReady().then(async () => {
         webPreferences: {
             // used to inject logging over ipc
             preload: path.join(__dirname, 'preload.js'),
+            // Security: disable node integration in renderer
+            nodeIntegration: false,
+            // Security: enable context isolation (default in Electron 12+)
+            contextIsolation: true,
+            // Security: enable sandbox for additional protection
+            sandbox: true,
+            // Security: disable remote module (deprecated but explicit)
+            enableRemoteModule: false,
         },
     });
```
meshchatx/__init__.py — new file, 3 lines

```diff
@@ -0,0 +1,3 @@
+"""Reticulum MeshChatX - A mesh network communications app."""
+
+__version__ = "2.41.0"
```
```diff
@@ -1,4 +1,4 @@
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 
 from peewee import *  # noqa: F403
 from playhouse.migrate import SqliteMigrator
@@ -68,8 +68,8 @@ class Config(BaseModel):
     id = BigAutoField()  # noqa: F405
     key = CharField(unique=True)  # noqa: F405
     value = TextField()  # noqa: F405
-    created_at = DateTimeField(default=lambda: datetime.now(timezone.utc))  # noqa: F405
-    updated_at = DateTimeField(default=lambda: datetime.now(timezone.utc))  # noqa: F405
+    created_at = DateTimeField(default=lambda: datetime.now(UTC))  # noqa: F405
+    updated_at = DateTimeField(default=lambda: datetime.now(UTC))  # noqa: F405
 
     # define table name
     class Meta:
@@ -95,8 +95,8 @@ class Announce(BaseModel):
     snr = FloatField(null=True)  # noqa: F405
     quality = FloatField(null=True)  # noqa: F405
 
-    created_at = DateTimeField(default=lambda: datetime.now(timezone.utc))  # noqa: F405
-    updated_at = DateTimeField(default=lambda: datetime.now(timezone.utc))  # noqa: F405
+    created_at = DateTimeField(default=lambda: datetime.now(UTC))  # noqa: F405
+    updated_at = DateTimeField(default=lambda: datetime.now(UTC))  # noqa: F405
 
     # define table name
     class Meta:
@@ -108,8 +108,8 @@ class CustomDestinationDisplayName(BaseModel):
     destination_hash = CharField(unique=True)  # noqa: F405 # unique destination hash
     display_name = CharField()  # noqa: F405 # custom display name for the destination hash
 
-    created_at = DateTimeField(default=lambda: datetime.now(timezone.utc))  # noqa: F405
-    updated_at = DateTimeField(default=lambda: datetime.now(timezone.utc))  # noqa: F405
+    created_at = DateTimeField(default=lambda: datetime.now(UTC))  # noqa: F405
+    updated_at = DateTimeField(default=lambda: datetime.now(UTC))  # noqa: F405
 
     # define table name
     class Meta:
@@ -122,8 +122,8 @@ class FavouriteDestination(BaseModel):
     display_name = CharField()  # noqa: F405 # custom display name for the destination hash
     aspect = CharField()  # noqa: F405 # e.g: nomadnetwork.node
 
-    created_at = DateTimeField(default=lambda: datetime.now(timezone.utc))  # noqa: F405
-    updated_at = DateTimeField(default=lambda: datetime.now(timezone.utc))  # noqa: F405
+    created_at = DateTimeField(default=lambda: datetime.now(UTC))  # noqa: F405
+    updated_at = DateTimeField(default=lambda: datetime.now(UTC))  # noqa: F405
 
     # define table name
     class Meta:
@@ -159,8 +159,8 @@ class LxmfMessage(BaseModel):
     snr = FloatField(null=True)  # noqa: F405
     quality = FloatField(null=True)  # noqa: F405
     is_spam = BooleanField(default=False)  # noqa: F405 # if true, message is marked as spam
-    created_at = DateTimeField(default=lambda: datetime.now(timezone.utc))  # noqa: F405
-    updated_at = DateTimeField(default=lambda: datetime.now(timezone.utc))  # noqa: F405
+    created_at = DateTimeField(default=lambda: datetime.now(UTC))  # noqa: F405
+    updated_at = DateTimeField(default=lambda: datetime.now(UTC))  # noqa: F405
 
     # define table name
     class Meta:
@@ -172,8 +172,8 @@ class LxmfConversationReadState(BaseModel):
     destination_hash = CharField(unique=True)  # noqa: F405 # unique destination hash
     last_read_at = DateTimeField()  # noqa: F405
 
-    created_at = DateTimeField(default=lambda: datetime.now(timezone.utc))  # noqa: F405
-    updated_at = DateTimeField(default=lambda: datetime.now(timezone.utc))  # noqa: F405
+    created_at = DateTimeField(default=lambda: datetime.now(UTC))  # noqa: F405
+    updated_at = DateTimeField(default=lambda: datetime.now(UTC))  # noqa: F405
 
     # define table name
     class Meta:
@@ -189,8 +189,8 @@ class LxmfUserIcon(BaseModel):
         CharField()  # noqa: F405
     )  # hex colour to use for background (background colour)
 
-    created_at = DateTimeField(default=lambda: datetime.now(timezone.utc))  # noqa: F405
-    updated_at = DateTimeField(default=lambda: datetime.now(timezone.utc))  # noqa: F405
+    created_at = DateTimeField(default=lambda: datetime.now(UTC))  # noqa: F405
+    updated_at = DateTimeField(default=lambda: datetime.now(UTC))  # noqa: F405
 
     # define table name
     class Meta:
@@ -203,8 +203,8 @@ class BlockedDestination(BaseModel):
         unique=True,
         index=True,
     )  # unique destination hash that is blocked
-    created_at = DateTimeField(default=lambda: datetime.now(timezone.utc))  # noqa: F405
-    updated_at = DateTimeField(default=lambda: datetime.now(timezone.utc))  # noqa: F405
+    created_at = DateTimeField(default=lambda: datetime.now(UTC))  # noqa: F405
+    updated_at = DateTimeField(default=lambda: datetime.now(UTC))  # noqa: F405
 
     # define table name
     class Meta:
@@ -217,8 +217,8 @@ class SpamKeyword(BaseModel):
         unique=True,
         index=True,
    )  # keyword to match against message content
-    created_at = DateTimeField(default=lambda: datetime.now(timezone.utc))  # noqa: F405
-    updated_at = DateTimeField(default=lambda: datetime.now(timezone.utc))  # noqa: F405
+    created_at = DateTimeField(default=lambda: datetime.now(UTC))  # noqa: F405
+    updated_at = DateTimeField(default=lambda: datetime.now(UTC))  # noqa: F405
 
     # define table name
     class Meta:
```
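The `timezone.utc` → `UTC` swap above relies on `datetime.UTC`, an alias introduced in Python 3.11 (the same floor the updated CI matrix uses). A quick check that the two spellings are interchangeable, as a sketch:

```bash
# datetime.UTC is defined as an alias of timezone.utc on Python 3.11+,
# so both default factories produce identical aware datetimes.
python3 -c 'from datetime import UTC, timezone; print(UTC == timezone.utc)'   # True
```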
```diff
@@ -13,7 +13,7 @@ import threading
 import time
 import webbrowser
 from collections.abc import Callable
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 
 import LXMF
 import psutil
@@ -24,31 +24,45 @@ from LXMF import LXMRouter
 from peewee import SqliteDatabase
 from serial.tools import list_ports
 
-import database
-from src.backend.announce_handler import AnnounceHandler
-from src.backend.async_utils import AsyncUtils
-from src.backend.audio_call_manager import AudioCall, AudioCallManager
-from src.backend.colour_utils import ColourUtils
-from src.backend.interface_config_parser import InterfaceConfigParser
-from src.backend.interface_editor import InterfaceEditor
-from src.backend.lxmf_message_fields import (
+from meshchatx import database
+from meshchatx.src.backend.announce_handler import AnnounceHandler
+from meshchatx.src.backend.async_utils import AsyncUtils
+from meshchatx.src.backend.audio_call_manager import AudioCall, AudioCallManager
+from meshchatx.src.backend.colour_utils import ColourUtils
+from meshchatx.src.backend.interface_config_parser import InterfaceConfigParser
+from meshchatx.src.backend.interface_editor import InterfaceEditor
+from meshchatx.src.backend.lxmf_message_fields import (
     LxmfAudioField,
     LxmfFileAttachment,
     LxmfFileAttachmentsField,
     LxmfImageField,
 )
-from src.backend.sideband_commands import SidebandCommands
+from meshchatx.src.backend.sideband_commands import SidebandCommands
+from meshchatx.src.version import __version__ as app_version
 
 
 # NOTE: this is required to be able to pack our app with cxfreeze as an exe, otherwise it can't access bundled assets
 # this returns a file path based on if we are running meshchat.py directly, or if we have packed it as an exe with cxfreeze
 # https://cx-freeze.readthedocs.io/en/latest/faq.html#using-data-files
+# bearer:disable python_lang_path_traversal
 def get_file_path(filename):
     if getattr(sys, "frozen", False):
         datadir = os.path.dirname(sys.executable)
-    else:
-        datadir = os.path.dirname(__file__)
-    return os.path.join(datadir, filename)
+        return os.path.join(datadir, filename)
+
+    # Assets live inside the meshchatx package when installed from a wheel
+    package_dir = os.path.dirname(__file__)
+    package_path = os.path.join(package_dir, filename)
+    if os.path.exists(package_path):
+        return package_path
+
+    # When running from the repository, fall back to the project root
+    repo_root = os.path.dirname(package_dir)
+    repo_path = os.path.join(repo_root, filename)
+    if os.path.exists(repo_path):
+        return repo_path
+
+    return package_path
 
 
 class ReticulumMeshChat:
@@ -225,12 +239,10 @@ class ReticulumMeshChat:
         thread.daemon = True
         thread.start()
 
-    # gets app version from package.json
+    # gets app version from the synchronized Python version helper
     @staticmethod
     def get_app_version() -> str:
-        with open(get_file_path("package.json")) as f:
-            package_json = json.load(f)
-            return package_json["version"]
+        return app_version
 
     # automatically announces based on user config
     async def announce_loop(self):
@@ -1104,10 +1116,10 @@ class ReticulumMeshChat:
             interfaces[interface_name] = interface_details
 
         # handle SerialInterface, KISSInterface, and AX25KISSInterface
-        if (
-            interface_type == "SerialInterface"
-            or interface_type == "KISSInterface"
-            or interface_type == "AX25KISSInterface"
-        ):
+        if interface_type in (
+            "SerialInterface",
+            "KISSInterface",
+            "AX25KISSInterface",
+        ):
             # ensure port provided
             interface_port = data.get("port")
@@ -1129,10 +1141,7 @@ class ReticulumMeshChat:
             InterfaceEditor.update_value(interface_details, data, "stopbits")
 
             # Handle KISS and AX25KISS specific options
-            if (
-                interface_type == "KISSInterface"
-                or interface_type == "AX25KISSInterface"
-            ):
+            if interface_type in ("KISSInterface", "AX25KISSInterface"):
                 # set optional options
                 InterfaceEditor.update_value(interface_details, data, "preamble")
                 InterfaceEditor.update_value(interface_details, data, "txtail")
@@ -1379,7 +1388,7 @@ class ReticulumMeshChat:
                     print(e)
             elif msg.type == WSMsgType.ERROR:
                 # ignore errors while handling message
-                print("ws connection error %s" % websocket_response.exception())
+                print(f"ws connection error {websocket_response.exception()}")
 
         # websocket closed
         self.websocket_clients.remove(websocket_response)
@@ -1702,7 +1711,7 @@ class ReticulumMeshChat:
                     print(e)
             elif msg.type == WSMsgType.ERROR:
                 # ignore errors while handling message
-                print("ws connection error %s" % websocket_response.exception())
+                print(f"ws connection error {websocket_response.exception()}")
 
         # unregister audio packet handler now that the websocket has been closed
         audio_call.register_audio_packet_listener(on_audio_packet)
@@ -2052,9 +2061,7 @@ class ReticulumMeshChat:
 
         # check if user wants to request the path from the network right now
         request_query_param = request.query.get("request", "false")
-        should_request_now = (
-            request_query_param == "true" or request_query_param == "1"
-        )
+        should_request_now = request_query_param in ("true", "1")
         if should_request_now:
             # determine how long we should wait for a path response
             timeout_seconds = int(request.query.get("timeout", 15))
@@ -3011,13 +3018,38 @@ class ReticulumMeshChat:
                 )
                 if message:
                     message.is_spam = is_spam
-                    message.updated_at = datetime.now(timezone.utc)
+                    message.updated_at = datetime.now(UTC)
                     message.save()
                     return web.json_response({"message": "ok"})
                 return web.json_response({"error": "Message not found"}, status=404)
             except Exception as e:
                 return web.json_response({"error": str(e)}, status=500)
 
+        # security headers middleware
+        @web.middleware
+        async def security_middleware(request, handler):
+            response = await handler(request)
+            # Add security headers to all responses
+            response.headers["X-Content-Type-Options"] = "nosniff"
+            response.headers["X-Frame-Options"] = "DENY"
+            response.headers["X-XSS-Protection"] = "1; mode=block"
+            response.headers["Referrer-Policy"] = "strict-origin-when-cross-origin"
+            # CSP: allow localhost for development and Electron, websockets, and blob URLs
+            csp = (
+                "default-src 'self'; "
+                "script-src 'self' 'unsafe-inline' 'unsafe-eval'; "
+                "style-src 'self' 'unsafe-inline'; "
+                "img-src 'self' data: blob:; "
+                "font-src 'self' data:; "
+                "connect-src 'self' ws://localhost:* wss://localhost:* blob:; "
+                "media-src 'self' blob:; "
+                "worker-src 'self' blob:; "
+                "object-src 'none'; "
+                "base-uri 'self';"
+            )
+            response.headers["Content-Security-Policy"] = csp
+            return response
+
         # called when web app has started
         async def on_startup(app):
             # remember main event loop
@@ -3033,6 +3065,7 @@ class ReticulumMeshChat:
         # create and run web app
         app = web.Application(
             client_max_size=1024 * 1024 * 50,
+            middlewares=[security_middleware],
         )  # allow uploading files up to 50mb
         app.add_routes(routes)
         app.add_routes(
@@ -3886,7 +3919,7 @@ class ReticulumMeshChat:
             "icon_name": icon_name,
             "foreground_colour": foreground_colour,
             "background_colour": background_colour,
-            "updated_at": datetime.now(timezone.utc),
+            "updated_at": datetime.now(UTC),
         }
 
         # upsert to database
@@ -4108,7 +4141,7 @@ class ReticulumMeshChat:
             "snr": lxmf_message_dict["snr"],
             "quality": lxmf_message_dict["quality"],
             "is_spam": is_spam,
-            "updated_at": datetime.now(timezone.utc),
+            "updated_at": datetime.now(UTC),
         }
 
         # upsert to database
@@ -4144,7 +4177,7 @@ class ReticulumMeshChat:
             "rssi": rssi,
             "snr": snr,
             "quality": quality,
-            "updated_at": datetime.now(timezone.utc),
+            "updated_at": datetime.now(UTC),
         }
 
         # only set app data if provided, as we don't want to wipe existing data when we request keys from the network
@@ -4170,7 +4203,7 @@ class ReticulumMeshChat:
         data = {
             "destination_hash": destination_hash,
             "display_name": display_name,
-            "updated_at": datetime.now(timezone.utc),
+            "updated_at": datetime.now(UTC),
         }
 
         # upsert to database
@@ -4193,7 +4226,7 @@ class ReticulumMeshChat:
            "destination_hash": destination_hash,
            "display_name": display_name,
            "aspect": aspect,
-            "updated_at": datetime.now(timezone.utc),
+            "updated_at": datetime.now(UTC),
         }
 
         # upsert to database
@@ -4210,8 +4243,8 @@ class ReticulumMeshChat:
         # prepare data to insert or update
         data = {
             "destination_hash": destination_hash,
-            "last_read_at": datetime.now(timezone.utc),
-            "updated_at": datetime.now(timezone.utc),
+            "last_read_at": datetime.now(UTC),
+            "updated_at": datetime.now(UTC),
         }
 
         # upsert to database
@@ -4878,7 +4911,7 @@ class Config:
         data = {
             "key": key,
             "value": value,
-            "updated_at": datetime.now(timezone.utc),
+            "updated_at": datetime.now(UTC),
         }
 
         # upsert to database
@@ -5022,8 +5055,8 @@ class NomadnetDownloader:
         self.path = path
         self.data = data
         self.timeout = timeout
-        self.on_download_success = on_download_success
-        self.on_download_failure = on_download_failure
+        self._download_success_callback = on_download_success
+        self._download_failure_callback = on_download_failure
         self.on_progress_update = on_progress_update
         self.request_receipt = None
         self.is_cancelled = False
@@ -5048,7 +5081,7 @@ class NomadnetDownloader:
                 pass
 
         # notify that download was cancelled
-        self.on_download_failure("cancelled")
+        self._download_failure_callback("cancelled")
 
     # setup link to destination and request download
     async def download(
@@ -5088,7 +5121,7 @@ class NomadnetDownloader:
 
         # if we still don't have a path, we can't establish a link, so bail out
         if not RNS.Transport.has_path(self.destination_hash):
-            self.on_download_failure("Could not find path to destination.")
+            self._download_failure_callback("Could not find path to destination.")
             return
 
         # check if cancelled before establishing link
@@ -5124,7 +5157,7 @@ class NomadnetDownloader:
 
         # if we still haven't established a link, bail out
         if link.status is not RNS.Link.ACTIVE:
-            self.on_download_failure("Could not establish link to destination.")
+            self._download_failure_callback("Could not establish link to destination.")
 
     # link to destination was established, we should now request the download
     def link_established(self, link):
@@ -5147,11 +5180,11 @@ class NomadnetDownloader:
 
     # handle successful download
     def on_response(self, request_receipt: RNS.RequestReceipt):
-        self.on_download_success(request_receipt)
+        self._download_success_callback(request_receipt)
 
     # handle failure
     def on_failed(self, request_receipt=None):
-        self.on_download_failure("request_failed")
+        self._download_failure_callback("request_failed")
 
     # handle download progress
     def on_progress(self, request_receipt):
```
meshchatx/src/backend/__init__.py — new file, 1 line

```diff
@@ -0,0 +1 @@
+"""Backend utilities shared by the Reticulum MeshChatX CLI."""
```
```diff
@@ -3,9 +3,8 @@ import time
 
 import RNS
 from RNS.Interfaces.Interface import Interface
-from websockets.sync.server import Server, ServerConnection, serve
 
 from src.backend.interfaces.WebsocketClientInterface import WebsocketClientInterface
+from websockets.sync.server import Server, ServerConnection, serve
 
 
 class WebsocketServerInterface(Interface):
```
meshchatx/src/backend/interfaces/__init__.py — new file, 1 line

```diff
@@ -0,0 +1 @@
+"""Shared transport interfaces for MeshChatX."""
```
Ten screenshot and logo image assets were moved to new paths with no content changes; before/after dimensions and sizes are identical (4.4 KiB to 109 KiB).
⚠️ Critical (CodeRabbit): fix shell syntax error in the SHA256 checksum generation script. The `for` loop in the "Generate SHA256 checksums" step placed `2>/dev/null` after the glob pattern list of the `for` statement, which is invalid shell syntax and breaks shellcheck parsing (SC1058 "Expected 'do'", SC1072, SC1073 "Couldn't parse this for loop"). Since the `if [ -f "$file" ]` conditional already handles globs that match nothing, the redirection is unnecessary and should be dropped.
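A corrected version of the loop, per the review above (a sketch of the fix — it simply drops the stray redirection, since the `-f` test already skips unmatched globs):

```bash
# Generate per-file .sha256 files and a markdown body for the release notes.
cd release-assets
echo "## SHA256 Checksums" > release-body.md
echo "" >> release-body.md

for file in *.exe *.dmg *.AppImage *.deb *.whl; do
  # unmatched glob patterns stay literal, so the -f test filters them out
  if [ -f "$file" ]; then
    sha256sum "$file" | tee "${file}.sha256"
    echo "\`$(cat "${file}.sha256")\`" >> release-body.md
  fi
done
```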