Compare commits: deepsource...master (51 commits)

Commit SHAs in this range:
1f15a67b8e, 675975bafc, 62d6cce914, ab6099b0fb, d97676ad27, 4200e43618,
c5ae53bf55, bf8c22c31a, 9a9022ffb0, 0443734ee3, 6efac94f58, 9e1a8ce180,
f2ab1ad067, 365531be9b, 96f4fc8735, d69a3e8522, c95d2fd71c, a74a6869ea,
d8419990b1, 085385a182, f8b0dd18c5, 3231afb84d, 3848613a41, 284517bdfa,
5fc13dc61a, f989295773, d06ede8c5e, a0047ea8fb, c98131f76b, 9b4b8fdfeb,
48a0d8697e, 5627ae1640, 94d91c4934, ac839df357, cfad1ddc5f, 398ab570df,
50bc2cbfc8, fe3a01c3c6, 0b0a39ea86, 2e001006c9, 0beaaaf4b1, 84f887df90,
80cf812e54, 19854e59da, ba47e16b75, 578e80023f, b7dcee4c06, e44ec59b6e,
45379e6df1, 308f1f6459, 424ff116d1
@@ -3,24 +3,36 @@ README.md
LICENSE
donate.md
screenshots/
docs/

# Development files
.github/
electron/
scripts/
Makefile

# Build artifacts and cache
build/
dist/
public/
meshchatx/public/
node_modules/
__pycache__/
*.pyc
*.pyo
*.pyd
*.py[cod]
*$py.class
*.so
.Python
*.egg-info/
*.egg
python-dist/

# Virtual environments
env/
venv/
ENV/
env.bak/
venv.bak/
.venv/

# IDE and editor files
.vscode/
@@ -47,9 +59,19 @@ Dockerfile*
docker-compose*.yml
.dockerignore

# Local storage and runtime data
storage/
testing/
telemetry_test_lxmf/

# Logs
*.log

# Temporary files
*.tmp
*.temp

# Environment variables
.env
.env.local
.env.*.local
.github/workflows/build.yml (vendored, 344 changed lines)
@@ -27,96 +27,14 @@ on:
      default: 'true'
      type: boolean

permissions:
  contents: read

jobs:
  build_windows:
    runs-on: windows-latest
    if: github.event_name == 'push' || (github.event_name == 'workflow_dispatch' && github.event.inputs.build_windows == 'true')
    permissions:
      contents: write
    steps:
      - name: Clone Repo
        uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e # v1

      - name: Install NodeJS
        uses: actions/setup-node@f1f314fca9dfce2769ece7d933488f076716723e # v1
        with:
          node-version: 22

      - name: Install Python
        uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5
        with:
          python-version: "3.12"

      - name: Install Python Deps
        run: |
          python -m venv venv
          venv\Scripts\pip install --upgrade pip
          venv\Scripts\pip install -r requirements.txt

      - name: Install NodeJS Deps
        run: npm install

      - name: Build Electron App
        run: npm run dist

      - name: Create Release
        id: create_release
        uses: ncipollo/release-action@b7eabc95ff50cbeeedec83973935c8f306dfcd0b # v1
        with:
          draft: true
          allowUpdates: true
          replacesArtifacts: true
          omitDraftDuringUpdate: true
          omitNameDuringUpdate: true
          artifacts: "dist/*-win-installer.exe,dist/*-win-portable.exe"

  build_mac:
    runs-on: macos-13
    if: github.event_name == 'push' || (github.event_name == 'workflow_dispatch' && github.event.inputs.build_mac == 'true')
    permissions:
      contents: write
    steps:
      - name: Clone Repo
        uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e # v1

      - name: Install NodeJS
        uses: actions/setup-node@f1f314fca9dfce2769ece7d933488f076716723e # v1
        with:
          node-version: 18

      - name: Install Python
        uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5
        with:
          python-version: "3.11"

      - name: Install Python Deps
        run: |
          python3 -m venv venv
          venv/bin/pip install --upgrade pip
          venv/bin/pip install -r requirements.txt

      - name: Install NodeJS Deps
        run: npm install

      - name: Build Electron App
        run: npm run dist

      - name: Create Release
        id: create_release
        uses: ncipollo/release-action@b7eabc95ff50cbeeedec83973935c8f306dfcd0b # v1
        with:
          draft: true
          allowUpdates: true
          replacesArtifacts: true
          omitDraftDuringUpdate: true
          omitNameDuringUpdate: true
          artifacts: "dist/*-mac.dmg"

  build_linux:
  build_frontend:
    runs-on: ubuntu-latest
    if: github.event_name == 'push' || (github.event_name == 'workflow_dispatch' && github.event.inputs.build_linux == 'true')
    permissions:
      contents: write
      contents: read
    steps:
      - name: Clone Repo
        uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e # v1
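The `workflow_dispatch` inputs gated by the `if:` conditions above can also be set when triggering the workflow manually. A minimal sketch using the GitHub CLI, assuming `gh` is authenticated against this repository; the input names come from the workflow, the chosen values are illustrative:

```bash
# Run the build workflow manually and pick which platform jobs should execute.
# Each -f flag sets one workflow_dispatch input checked by the jobs' `if:` conditions.
gh workflow run build.yml \
  -f build_windows=true \
  -f build_mac=false \
  -f build_linux=true
```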
@@ -131,31 +49,245 @@ jobs:
|
||||
with:
|
||||
python-version: "3.12"
|
||||
|
||||
- name: Sync versions
|
||||
run: python scripts/sync_version.py
|
||||
|
||||
- name: Install NodeJS Deps
|
||||
run: npm install
|
||||
|
||||
- name: Build Frontend
|
||||
run: npm run build-frontend
|
||||
|
||||
- name: Upload frontend artifact
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
||||
with:
|
||||
name: frontend-build
|
||||
path: meshchatx/public
|
||||
if-no-files-found: error
|
||||
|
||||
build_desktop:
|
||||
name: Build Desktop (${{ matrix.name }})
|
||||
needs: build_frontend
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- name: windows
|
||||
os: windows-latest
|
||||
node: 22
|
||||
python: "3.13"
|
||||
release_artifacts: "dist/*-win-installer.exe,dist/*-win-portable.exe"
|
||||
build_input: build_windows
|
||||
dist_script: dist-prebuilt
|
||||
variant: standard
|
||||
electron_version: "39.2.4"
|
||||
- name: mac
|
||||
os: macos-14
|
||||
node: 22
|
||||
python: "3.13"
|
||||
release_artifacts: "dist/*-mac-*.dmg"
|
||||
build_input: build_mac
|
||||
dist_script: dist:mac-universal
|
||||
variant: standard
|
||||
electron_version: "39.2.4"
|
||||
- name: linux
|
||||
os: ubuntu-latest
|
||||
node: 22
|
||||
python: "3.13"
|
||||
release_artifacts: "dist/*-linux.AppImage,dist/*-linux.deb,python-dist/*.whl"
|
||||
build_input: build_linux
|
||||
dist_script: dist-prebuilt
|
||||
variant: standard
|
||||
electron_version: "39.2.4"
|
||||
- name: windows-legacy
|
||||
os: windows-latest
|
||||
node: 18
|
||||
python: "3.11"
|
||||
release_artifacts: "dist/*-win-installer*.exe,dist/*-win-portable*.exe"
|
||||
build_input: build_windows
|
||||
dist_script: dist-prebuilt
|
||||
variant: legacy
|
||||
electron_version: "30.0.8"
|
||||
- name: linux-legacy
|
||||
os: ubuntu-latest
|
||||
node: 18
|
||||
python: "3.11"
|
||||
release_artifacts: "dist/*-linux*.AppImage,dist/*-linux*.deb,python-dist/*.whl"
|
||||
build_input: build_linux
|
||||
dist_script: dist-prebuilt
|
||||
variant: legacy
|
||||
electron_version: "30.0.8"
|
||||
permissions:
|
||||
contents: write
|
||||
steps:
|
||||
- name: Clone Repo
|
||||
if: |
|
||||
github.event_name == 'push' ||
|
||||
(github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
|
||||
uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e # v1
|
||||
|
||||
- name: Set legacy Electron version
|
||||
if: |
|
||||
matrix.variant == 'legacy' &&
|
||||
(github.event_name == 'push' ||
|
||||
(github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true))
|
||||
shell: bash
|
||||
run: |
|
||||
node -e "const fs=require('fs');const pkg=require('./package.json');pkg.devDependencies.electron='${{ matrix.electron_version }}';fs.writeFileSync('package.json', JSON.stringify(pkg,null,2));"
|
||||
if [ -f package-lock.json ]; then rm package-lock.json; fi
|
||||
|
||||
- name: Install NodeJS
|
||||
if: |
|
||||
github.event_name == 'push' ||
|
||||
(github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
|
||||
uses: actions/setup-node@f1f314fca9dfce2769ece7d933488f076716723e # v1
|
||||
with:
|
||||
node-version: ${{ matrix.node }}
|
||||
|
||||
- name: Install Python
|
||||
if: |
|
||||
github.event_name == 'push' ||
|
||||
(github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
|
||||
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5
|
||||
with:
|
||||
python-version: ${{ matrix.python }}
|
||||
|
||||
- name: Install Poetry
|
||||
if: |
|
||||
github.event_name == 'push' ||
|
||||
(github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
|
||||
run: python -m pip install --upgrade pip poetry
|
||||
|
||||
- name: Sync versions
|
||||
if: |
|
||||
github.event_name == 'push' ||
|
||||
(github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
|
||||
run: python scripts/sync_version.py
|
||||
|
||||
- name: Install Python Deps
|
||||
if: |
|
||||
github.event_name == 'push' ||
|
||||
(github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
|
||||
run: python -m poetry install
|
||||
|
||||
- name: Install NodeJS Deps
|
||||
if: |
|
||||
github.event_name == 'push' ||
|
||||
(github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
|
||||
run: npm install
|
||||
|
||||
- name: Prepare frontend directory
|
||||
if: |
|
||||
github.event_name == 'push' ||
|
||||
(github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
|
||||
run: python scripts/prepare_frontend_dir.py
|
||||
|
||||
- name: Download frontend artifact
|
||||
if: |
|
||||
github.event_name == 'push' ||
|
||||
(github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
|
||||
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4
|
||||
with:
|
||||
name: frontend-build
|
||||
path: meshchatx/public
|
||||
|
||||
- name: Install patchelf
|
||||
if: |
|
||||
startsWith(matrix.name, 'linux') &&
|
||||
(github.event_name == 'push' ||
|
||||
(github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true))
|
||||
run: sudo apt-get update && sudo apt-get install -y patchelf
|
||||
|
||||
- name: Install Python Deps
|
||||
- name: Build Python wheel
|
||||
if: |
|
||||
startsWith(matrix.name, 'linux') &&
|
||||
(github.event_name == 'push' ||
|
||||
(github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true))
|
||||
run: |
|
||||
python3 -m venv venv
|
||||
venv/bin/pip install --upgrade pip
|
||||
venv/bin/pip install -r requirements.txt
|
||||
python -m poetry build -f wheel
|
||||
mkdir -p python-dist
|
||||
mv dist/*.whl python-dist/
|
||||
rm -rf dist
|
||||
|
||||
- name: Install NodeJS Deps
|
||||
run: npm install
|
||||
- name: Build Electron App (Universal)
|
||||
if: |
|
||||
github.event_name == 'push' ||
|
||||
(github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
|
||||
run: npm run ${{ matrix.dist_script }}
|
||||
|
||||
- name: Build Electron App
|
||||
run: npm run dist
|
||||
- name: Rename artifacts for legacy build
|
||||
if: |
|
||||
matrix.variant == 'legacy' &&
|
||||
(github.event_name == 'push' ||
|
||||
(github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true))
|
||||
run: ./scripts/rename_legacy_artifacts.sh
|
||||
|
||||
- name: Upload build artifacts
|
||||
if: |
|
||||
github.event_name == 'push' ||
|
||||
(github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: build-${{ matrix.name }}
|
||||
path: |
|
||||
dist/*-win-installer*.exe
|
||||
dist/*-win-portable*.exe
|
||||
dist/*-mac-*.dmg
|
||||
dist/*-linux*.AppImage
|
||||
dist/*-linux*.deb
|
||||
python-dist/*.whl
|
||||
if-no-files-found: ignore
|
||||
|
||||
create_release:
|
||||
name: Create Release
|
||||
needs: build_desktop
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event_name == 'push'
|
||||
permissions:
|
||||
contents: write
|
||||
steps:
|
||||
- name: Download all artifacts
|
||||
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4
|
||||
with:
|
||||
path: artifacts
|
||||
|
||||
- name: Display structure of downloaded files
|
||||
run: ls -R artifacts
|
||||
|
||||
- name: Prepare release assets
|
||||
run: |
|
||||
mkdir -p release-assets
|
||||
find artifacts -type f \( -name "*.exe" -o -name "*.dmg" -o -name "*.AppImage" -o -name "*.deb" -o -name "*.whl" \) -exec cp {} release-assets/ \;
|
||||
ls -lh release-assets/
|
||||
|
||||
- name: Generate SHA256 checksums
|
||||
run: |
|
||||
cd release-assets
|
||||
echo "## SHA256 Checksums" > release-body.md
|
||||
echo "" >> release-body.md
|
||||
|
||||
for file in *.exe *.dmg *.AppImage *.deb *.whl; do
|
||||
if [ -f "$file" ]; then
|
||||
sha256sum "$file" | tee "${file}.sha256"
|
||||
echo "\`$(cat "${file}.sha256")\`" >> release-body.md
|
||||
fi
|
||||
done
|
||||
|
||||
echo "" >> release-body.md
|
||||
echo "Individual \`.sha256\` files are included for each artifact." >> release-body.md
|
||||
|
||||
cat release-body.md
|
||||
echo ""
|
||||
echo "Generated .sha256 files:"
|
||||
ls -1 *.sha256 2>/dev/null || echo "No .sha256 files found"
|
||||
|
||||
- name: Create Release
|
||||
id: create_release
|
||||
uses: ncipollo/release-action@b7eabc95ff50cbeeedec83973935c8f306dfcd0b # v1
|
||||
with:
|
||||
draft: true
|
||||
allowUpdates: true
|
||||
replacesArtifacts: true
|
||||
omitDraftDuringUpdate: true
|
||||
omitNameDuringUpdate: true
|
||||
artifacts: "dist/*-linux.AppImage,dist/*-linux.deb"
|
||||
artifacts: "release-assets/*"
|
||||
bodyFile: "release-assets/release-body.md"
|
||||
|
||||
build_docker:
|
||||
runs-on: ubuntu-latest
|
||||
@@ -190,9 +322,9 @@ jobs:
          platforms: linux/amd64,linux/arm64
          push: true
          tags: >-
            ghcr.io/${{ env.REPO_OWNER_LC }}/reticulum-meshchat:latest,
            ghcr.io/${{ env.REPO_OWNER_LC }}/reticulum-meshchat:${{ github.ref_name }}
            ghcr.io/${{ env.REPO_OWNER_LC }}/reticulum-meshchatx:latest,
            ghcr.io/${{ env.REPO_OWNER_LC }}/reticulum-meshchatx:${{ github.ref_name }}
          labels: >-
            org.opencontainers.image.title=Reticulum MeshChat,
            org.opencontainers.image.description=Docker image for Reticulum MeshChat,
            org.opencontainers.image.url=https://github.com/${{ github.repository }}/pkgs/container/reticulum-meshchat/
            org.opencontainers.image.title=Reticulum MeshChatX,
            org.opencontainers.image.description=Docker image for Reticulum MeshChatX,
            org.opencontainers.image.url=https://github.com/${{ github.repository }}/pkgs/container/reticulum-meshchatx/
.github/workflows/dependency-review.yml (vendored, new file, 22 lines)
@@ -0,0 +1,22 @@
name: 'Dependency review'

on:
  pull_request:
    branches: [ "master" ]

permissions:
  contents: read
  pull-requests: write

jobs:
  dependency-review:
    runs-on: ubuntu-latest

    steps:
      - name: 'Checkout repository'
        uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4

      - name: 'Dependency Review'
        uses: actions/dependency-review-action@3c4e3dcb1aa7874d2c16be7d79418e9b7efd6261 # v4
        with:
          comment-summary-in-pr: always
.gitignore (vendored, 54 changed lines)
@@ -1,13 +1,57 @@
# IDE and editor files
.idea
node_modules
.vscode/
*.swp
*.swo
*~

# build files
# Dependencies
node_modules/

# Python
__pycache__/
*.py[cod]
*$py.class
*.so
.Python
*.egg-info/
dist/
*.egg

# Virtual environments
venv/
env/
ENV/
env.bak/
venv.bak/
.venv/

# Build files
/build/
/dist/
/public/
/meshchatx/public/
public/
/electron/build/exe/
python-dist/

# local storage
# Local storage and runtime data
storage/
testing/
telemetry_test_lxmf/

*.pyc
# Logs
*.log

# OS files
.DS_Store
.DS_Store?
._*
.Spotlight-V100
.Trashes
ehthumbs.db
Thumbs.db

# Environment variables
.env
.env.local
.env.*.local
Dockerfile (15 changed lines)
@@ -10,9 +10,8 @@ FROM node:${NODE_VERSION}-alpine@${NODE_ALPINE_SHA256} AS build-frontend
WORKDIR /src

# Copy required source files
COPY *.json .
COPY *.js .
COPY src/frontend ./src/frontend
COPY package*.json vite.config.js ./
COPY meshchatx ./meshchatx

# Install NodeJS deps, exluding electron
RUN npm install --omit=dev && \
@@ -34,12 +33,10 @@ RUN apk add --no-cache --virtual .build-deps \
    apk del .build-deps

# Copy prebuilt frontend
COPY --from=build-frontend /src/public public
COPY --from=build-frontend /src/meshchatx/public meshchatx/public

# Copy other required source files
COPY *.py .
COPY src/__init__.py ./src/__init__.py
COPY src/backend ./src/backend
COPY *.json .
COPY meshchatx ./meshchatx
COPY pyproject.toml poetry.lock ./

CMD ["python", "meshchat.py", "--host=0.0.0.0", "--reticulum-config-dir=/config/.reticulum", "--storage-dir=/config/.meshchat", "--headless"]
CMD ["python", "-m", "meshchatx.meshchat", "--host=0.0.0.0", "--reticulum-config-dir=/config/.reticulum", "--storage-dir=/config/.meshchat", "--headless"]
Makefile (68 changed lines)
@@ -1,26 +1,40 @@
.PHONY: install run clean build build-appimage build-exe dist
.PHONY: install run develop clean build build-appimage build-exe dist sync-version wheel node_modules python build-docker run-docker electron-legacy build-appimage-legacy build-exe-legacy

VENV = venv
PYTHON = $(VENV)/bin/python
PIP = $(VENV)/bin/pip
PYTHON ?= python
POETRY = $(PYTHON) -m poetry
NPM = npm
LEGACY_ELECTRON_VERSION ?= 30.0.8

install: $(VENV) node_modules
DOCKER_COMPOSE_CMD ?= docker compose
DOCKER_COMPOSE_FILE ?= docker-compose.yml
DOCKER_IMAGE ?= reticulum-meshchatx:local
DOCKER_BUILDER ?= meshchatx-builder
DOCKER_PLATFORMS ?= linux/amd64
DOCKER_BUILD_FLAGS ?= --load
DOCKER_BUILD_ARGS ?=
DOCKER_CONTEXT ?= .
DOCKERFILE ?= Dockerfile

$(VENV):
    python3 -m venv $(VENV)
    $(PIP) install --upgrade pip
    $(PIP) install -r requirements.txt
install: sync-version node_modules python

node_modules:
    $(NPM) install

python:
    $(POETRY) install

run: install
    $(PYTHON) meshchat.py
    $(POETRY) run meshchat

develop: run

build: install
    $(NPM) run build

wheel: install
    $(POETRY) build -f wheel
    $(PYTHON) scripts/move_wheels.py

build-appimage: build
    $(NPM) run electron-postinstall
    $(NPM) run dist -- --linux AppImage
@@ -31,11 +45,43 @@ build-exe: build

dist: build-appimage

electron-legacy:
    $(NPM) install --no-save electron@$(LEGACY_ELECTRON_VERSION)

# Legacy targets intended for manual/local builds; CI uses workflow jobs.
build-appimage-legacy: build electron-legacy
    $(NPM) run electron-postinstall
    $(NPM) run dist -- --linux AppImage
    ./scripts/rename_legacy_artifacts.sh

build-exe-legacy: build electron-legacy
    $(NPM) run electron-postinstall
    $(NPM) run dist -- --win portable
    ./scripts/rename_legacy_artifacts.sh

clean:
    rm -rf $(VENV)
    rm -rf node_modules
    rm -rf build
    rm -rf dist
    rm -rf python-dist
    rm -rf meshchatx/public

sync-version:
    $(PYTHON) scripts/sync_version.py

build-docker:
    @if ! docker buildx inspect $(DOCKER_BUILDER) >/dev/null 2>&1; then \
        docker buildx create --name $(DOCKER_BUILDER) --use >/dev/null; \
    else \
        docker buildx use $(DOCKER_BUILDER); \
    fi
    docker buildx build --builder $(DOCKER_BUILDER) --platform $(DOCKER_PLATFORMS) \
        $(DOCKER_BUILD_FLAGS) \
        -t $(DOCKER_IMAGE) \
        $(DOCKER_BUILD_ARGS) \
        -f $(DOCKERFILE) \
        $(DOCKER_CONTEXT)

run-docker:
    MESHCHAT_IMAGE="$(DOCKER_IMAGE)" \
    $(DOCKER_COMPOSE_CMD) -f $(DOCKER_COMPOSE_FILE) up --remove-orphans --pull never reticulum-meshchatx
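A minimal sketch of running the legacy targets above locally; `LEGACY_ELECTRON_VERSION` already defaults to 30.0.8 in the Makefile, so the override shown here is only illustrative:

```bash
# Legacy Linux AppImage against the pinned older Electron release.
make build-appimage-legacy LEGACY_ELECTRON_VERSION=30.0.8

# Legacy Windows portable build (run where the Windows Electron toolchain is available).
make build-exe-legacy
```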
README.md (81 changed lines)
@@ -8,15 +8,25 @@ A heavily customized fork of [Reticulum MeshChat](https://github.com/liamcottle/
- [x] Ability to set inbound and propagation node stamps.
- [x] Better config parsing.
- [x] Cancel page fetching or file downloads
- [x] Block recieving messages from users.
- [x] Block receiving messages from users.
- [ ] Spam filter (based on keywords)
- [ ] Multi-identity support.
- [ ] Multi-language support
- [ ] Offline Reticulum documentation tool
- [ ] More tools (translate, LoRa calculator, LXMFy bots, etc.)
- [x] Codebase reorganization and cleanup.
- [ ] Tests and proper CI/CD pipeline.
- [ ] RNS hot reload
- [ ] Backup/Import identities, messages and interfaces.
- [ ] Full LXST support.
- [x] Poetry for packaging and dependency management.
- [x] More stats on about page.
- [x] Actions are pinned to full-length SHA hashes.
- [x] Docker images are smaller and use SHA256 hashes for the images.
- [x] Electron improvements.
- [x] Electron improvements (ASAR and security).
- [x] Latest updates for NPM and Python dependencies (bleeding edge)
- [x] Ruff linting, CodeQL Advanced and Bearer SAST fixes.
- [x] Numerous Ruff, Deepsource, CodeQL Advanced and Bearer Linting/SAST fixes.
- [x] Some performance improvements.

## Usage

@@ -25,21 +35,76 @@ Check [releases](https://github.com/Sudo-Ivan/reticulum-meshchatX/releases) for
## Building

```bash
make install
make install # installs Python deps via Poetry and Node deps via npm
make build
```

You can run `make run` or `make develop` (a thin alias) to start the backend + frontend loop locally through `poetry run meshchat`.
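A quick sketch of the equivalent invocations, assuming dependencies are already installed:

```bash
make run              # runs the install targets if needed, then starts the app via Poetry
make develop          # thin alias for `make run`
poetry run meshchat   # what both targets ultimately invoke
```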
### Python packaging

The Python build is driven entirely by Poetry now. Run `python3 scripts/sync_version.py` or `make sync-version` before packaging so `pyproject.toml` and `src/version.py` match `package.json`. After that:

```bash
python -m poetry install
make wheel # produces a wheel in python-dist/ that bundles the public assets
```

The wheel includes the frontend `public/` assets, `logo/`, and the CLI entry point, and `python-dist/` keeps the artifact separate from the Electron `dist/` output.

### Building in Docker

```bash
make docker-build
make build-docker
```

The build will be in the `dist` directory.
`build-docker` creates `reticulum-meshchatx:local` (or `$(DOCKER_IMAGE)` if you override it) via `docker buildx`. Set `DOCKER_PLATFORMS` to `linux/amd64,linux/arm64` when you need multi-arch images, and adjust `DOCKER_BUILD_FLAGS`/`DOCKER_BUILD_ARGS` to control `--load`/`--push`.
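A sketch of common overrides for `make build-docker`; the variables come from the Makefile above, while the tag and registry shown here are only examples:

```bash
# Single-arch local image, loaded into the local Docker daemon (the default behaviour).
make build-docker

# Multi-arch image pushed to a registry instead of loaded locally.
make build-docker \
  DOCKER_IMAGE=ghcr.io/example/reticulum-meshchatx:dev \
  DOCKER_PLATFORMS=linux/amd64,linux/arm64 \
  DOCKER_BUILD_FLAGS=--push
```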
## Development
### Running with Docker Compose

```bash
make develop
make run-docker
```

`run-docker` feeds the locally-built image into `docker compose -f docker-compose.yml up --remove-orphans --pull never reticulum-meshchatx`. The compose file uses the `MESHCHAT_IMAGE` env var so you can override the target image without editing the YAML (the default still points at `ghcr.io/sudo-ivan/reticulum-meshchatx:latest`). Use `docker compose down` or `Ctrl+C` to stop the container.
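A short sketch of overriding the image without touching the Makefile or the compose file; the local tag is an example:

```bash
# Run the compose service against a locally built image instead of the GHCR default.
MESHCHAT_IMAGE=reticulum-meshchatx:local docker compose -f docker-compose.yml up \
  --remove-orphans --pull never reticulum-meshchatx

# Or use the Makefile wrapper, which sets MESHCHAT_IMAGE from DOCKER_IMAGE for you.
make run-docker DOCKER_IMAGE=reticulum-meshchatx:local
```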
The Electron build artifacts will still live under `dist/` for releases.

## Python packaging

The backend uses Poetry with `pyproject.toml` for dependency management and packaging. Before building, run `python3 scripts/sync_version.py` (or `make sync-version`) to ensure the generated `src/version.py` reflects the version from `package.json` that the Electron artifacts use. This keeps the CLI release metadata, wheel packages, and other bundles aligned.

### Build artifact locations

Both `poetry build` and `python -m build` generate wheels inside the default `dist/` directory. The `make wheel` shortcut wraps `poetry build -f wheel` and then runs `python scripts/move_wheels.py` to relocate the generated `.whl` files into `python-dist/` (the layout expected by `scripts/test_wheel.sh` and the release automation). Use `make wheel` if you need the artifacts in `python-dist/`; `poetry build` or `python -m build` alone will leave them in `dist/`.
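A short sketch of the two flows and where the wheels end up:

```bash
# Poetry alone: the wheel stays in dist/
poetry build -f wheel && ls dist/*.whl

# Makefile shortcut: the wheel is moved into python-dist/ for the release tooling
make wheel && ls python-dist/*.whl
```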
### Building with Poetry

```bash
# Install dependencies
poetry install

# Build the package (wheels land in dist/)
poetry build

# Install locally for testing (consumes dist/)
pip install dist/*.whl
```

### Building with pip (alternative)

If you prefer pip, you can build/install directly:

```bash
# Build the wheel
pip install build
python -m build

# Install locally
pip install .
```

### cx_Freeze (for AppImage/NSIS)

The `cx_setup.py` script uses cx_Freeze for creating standalone executables (AppImage for Linux, NSIS for Windows). This is separate from the Poetry/pip packaging workflow.
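A minimal sketch of invoking it directly through cx_Freeze's standard build command, run from the repository root after the frontend assets have been built:

```bash
# Freeze the app into build/exe/, as configured by the build_exe options in cx_setup.py.
python cx_setup.py build_exe
```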
TODO.md (31 lines, deleted)
@@ -1,31 +0,0 @@
1. for messages fix:

convo goes off edge, near edge should be ... 3 dots

long names push the last message/announced seconds/time to right and nearly off the side, fix please

2. interfaces:

3 dots background circle is a oval, fix to be circle

on 3 dots clicked there is still white background the buttons have dark backgrounds though but main dropdown window is white fix depdning on theme

also on 3 dots drop down it still makes me scroll down in that interfaces window, we can expand that interfaces box os something so this crap doesnt hapen or if dropdown is above it

rework propagation nodes page with new UI/UX please like rest of app.

1. the attachment dropups/popups are white on dark mode, they need a ui/ux rework.

2. for settings add ability to set inbound stamp, ref lxmf via python -c if needed.

3. add multi-identity / account suport and a switcher at bottom with ability to create, delete or import/export identies from other apps.

for all this you will likely need to look at my ren chat app for stamps, multi-identity, /mnt/projects/ren-messenger/

its pretty simple.

translator tool
reticulum documentation tool
lxmfy bot tool
page downloader tool
page snapshots
cx_setup.py (new file, 47 lines)
@@ -0,0 +1,47 @@
from pathlib import Path

from cx_Freeze import Executable, setup

from meshchatx.src.version import __version__

ROOT = Path(__file__).resolve().parent
PUBLIC_DIR = ROOT / "meshchatx" / "public"

include_files = [
    (str(PUBLIC_DIR), "public"),
    ("logo", "logo"),
]

setup(
    name="ReticulumMeshChatX",
    version=__version__,
    description="A simple mesh network communications app powered by the Reticulum Network Stack",
    executables=[
        Executable(
            script="meshchatx/meshchat.py",
            base=None,
            target_name="ReticulumMeshChatX",
            shortcut_name="ReticulumMeshChatX",
            shortcut_dir="ProgramMenuFolder",
            icon="logo/icon.ico",
        ),
    ],
    options={
        "build_exe": {
            "packages": [
                "RNS",
                "RNS.Interfaces",
                "LXMF",
            ],
            "include_files": include_files,
            "excludes": [
                "PIL",
            ],
            "optimize": 2,
            "build_exe": "build/exe",
            "replace_paths": [
                ("*", ""),
            ],
        },
    },
)
@@ -1,7 +1,7 @@
services:
  reticulum-meshchat:
    container_name: reticulum-meshchat
    image: ghcr.io/sudo-ivan/reticulum-meshchat:latest
  reticulum-meshchatx:
    container_name: reticulum-meshchatx
    image: ${MESHCHAT_IMAGE:-ghcr.io/sudo-ivan/reticulum-meshchatx:latest}
    pull_policy: always
    restart: unless-stopped
    # Make the meshchat web interface accessible from the host on port 8000
@@ -133,6 +133,14 @@ app.whenReady().then(async () => {
        webPreferences: {
            // used to inject logging over ipc
            preload: path.join(__dirname, 'preload.js'),
            // Security: disable node integration in renderer
            nodeIntegration: false,
            // Security: enable context isolation (default in Electron 12+)
            contextIsolation: true,
            // Security: enable sandbox for additional protection
            sandbox: true,
            // Security: disable remote module (deprecated but explicit)
            enableRemoteModule: false,
        },
    });
meshchatx/__init__.py (new file, 3 lines)
@@ -0,0 +1,3 @@
"""Reticulum MeshChatX - A mesh network communications app."""

__version__ = "2.50.0"
@@ -1,4 +1,4 @@
from datetime import datetime, timezone
from datetime import UTC, datetime

from peewee import * # noqa: F403
from playhouse.migrate import SqliteMigrator
@@ -17,7 +17,9 @@ def migrate(current_version):
    if current_version < 2:
        migrate_database(
            migrator.add_column(
                "lxmf_messages", "delivery_attempts", LxmfMessage.delivery_attempts,
                "lxmf_messages",
                "delivery_attempts",
                LxmfMessage.delivery_attempts,
            ),
            migrator.add_column(
                "lxmf_messages",
@@ -66,8 +68,8 @@ class Config(BaseModel):
    id = BigAutoField() # noqa: F405
    key = CharField(unique=True) # noqa: F405
    value = TextField() # noqa: F405
    created_at = DateTimeField(default=lambda: datetime.now(timezone.utc)) # noqa: F405
    updated_at = DateTimeField(default=lambda: datetime.now(timezone.utc)) # noqa: F405
    created_at = DateTimeField(default=lambda: datetime.now(UTC)) # noqa: F405
    updated_at = DateTimeField(default=lambda: datetime.now(UTC)) # noqa: F405

    # define table name
    class Meta:
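The `timezone.utc` to `UTC` rename above relies on `datetime.UTC`, which is available from Python 3.11 onward; a quick sanity check on the interpreter in use:

```bash
# Prints a timezone-aware timestamp if datetime.UTC is available (Python 3.11+).
python3 -c "from datetime import UTC, datetime; print(datetime.now(UTC).isoformat())"
```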
@@ -85,7 +87,7 @@ class Announce(BaseModel):
|
||||
identity_hash = CharField( # noqa: F405
|
||||
index=True,
|
||||
) # identity hash that announced the destination
|
||||
identity_public_key = ( # noqa: F405
|
||||
identity_public_key = (
|
||||
CharField() # noqa: F405
|
||||
) # base64 encoded public key, incase we want to recreate the identity manually
|
||||
app_data = TextField(null=True) # noqa: F405 # base64 encoded app data bytes
|
||||
@@ -93,8 +95,8 @@ class Announce(BaseModel):
|
||||
snr = FloatField(null=True) # noqa: F405
|
||||
quality = FloatField(null=True) # noqa: F405
|
||||
|
||||
created_at = DateTimeField(default=lambda: datetime.now(timezone.utc)) # noqa: F405
|
||||
updated_at = DateTimeField(default=lambda: datetime.now(timezone.utc)) # noqa: F405
|
||||
created_at = DateTimeField(default=lambda: datetime.now(UTC)) # noqa: F405
|
||||
updated_at = DateTimeField(default=lambda: datetime.now(UTC)) # noqa: F405
|
||||
|
||||
# define table name
|
||||
class Meta:
|
||||
@@ -106,8 +108,8 @@ class CustomDestinationDisplayName(BaseModel):
|
||||
destination_hash = CharField(unique=True) # noqa: F405 # unique destination hash
|
||||
display_name = CharField() # noqa: F405 # custom display name for the destination hash
|
||||
|
||||
created_at = DateTimeField(default=lambda: datetime.now(timezone.utc)) # noqa: F405
|
||||
updated_at = DateTimeField(default=lambda: datetime.now(timezone.utc)) # noqa: F405
|
||||
created_at = DateTimeField(default=lambda: datetime.now(UTC)) # noqa: F405
|
||||
updated_at = DateTimeField(default=lambda: datetime.now(UTC)) # noqa: F405
|
||||
|
||||
# define table name
|
||||
class Meta:
|
||||
@@ -120,8 +122,8 @@ class FavouriteDestination(BaseModel):
|
||||
display_name = CharField() # noqa: F405 # custom display name for the destination hash
|
||||
aspect = CharField() # noqa: F405 # e.g: nomadnetwork.node
|
||||
|
||||
created_at = DateTimeField(default=lambda: datetime.now(timezone.utc)) # noqa: F405
|
||||
updated_at = DateTimeField(default=lambda: datetime.now(timezone.utc)) # noqa: F405
|
||||
created_at = DateTimeField(default=lambda: datetime.now(UTC)) # noqa: F405
|
||||
updated_at = DateTimeField(default=lambda: datetime.now(UTC)) # noqa: F405
|
||||
|
||||
# define table name
|
||||
class Meta:
|
||||
@@ -133,7 +135,7 @@ class LxmfMessage(BaseModel):
|
||||
hash = CharField(unique=True) # noqa: F405 # unique lxmf message hash
|
||||
source_hash = CharField(index=True) # noqa: F405
|
||||
destination_hash = CharField(index=True) # noqa: F405
|
||||
state = ( # noqa: F405
|
||||
state = (
|
||||
CharField() # noqa: F405
|
||||
) # state is converted from internal int to a human friendly string
|
||||
progress = FloatField() # noqa: F405 # progress is converted from internal float 0.00-1.00 to float between 0.00/100 (2 decimal places)
|
||||
@@ -150,15 +152,15 @@ class LxmfMessage(BaseModel):
|
||||
title = TextField() # noqa: F405
|
||||
content = TextField() # noqa: F405
|
||||
fields = TextField() # noqa: F405 # json string
|
||||
timestamp = ( # noqa: F405
|
||||
timestamp = (
|
||||
FloatField() # noqa: F405
|
||||
) # timestamp of when the message was originally created (before ever being sent)
|
||||
rssi = IntegerField(null=True) # noqa: F405
|
||||
snr = FloatField(null=True) # noqa: F405
|
||||
quality = FloatField(null=True) # noqa: F405
|
||||
is_spam = BooleanField(default=False) # noqa: F405 # if true, message is marked as spam
|
||||
created_at = DateTimeField(default=lambda: datetime.now(timezone.utc)) # noqa: F405
|
||||
updated_at = DateTimeField(default=lambda: datetime.now(timezone.utc)) # noqa: F405
|
||||
created_at = DateTimeField(default=lambda: datetime.now(UTC)) # noqa: F405
|
||||
updated_at = DateTimeField(default=lambda: datetime.now(UTC)) # noqa: F405
|
||||
|
||||
# define table name
|
||||
class Meta:
|
||||
@@ -170,8 +172,8 @@ class LxmfConversationReadState(BaseModel):
|
||||
destination_hash = CharField(unique=True) # noqa: F405 # unique destination hash
|
||||
last_read_at = DateTimeField() # noqa: F405
|
||||
|
||||
created_at = DateTimeField(default=lambda: datetime.now(timezone.utc)) # noqa: F405
|
||||
updated_at = DateTimeField(default=lambda: datetime.now(timezone.utc)) # noqa: F405
|
||||
created_at = DateTimeField(default=lambda: datetime.now(UTC)) # noqa: F405
|
||||
updated_at = DateTimeField(default=lambda: datetime.now(UTC)) # noqa: F405
|
||||
|
||||
# define table name
|
||||
class Meta:
|
||||
@@ -183,12 +185,12 @@ class LxmfUserIcon(BaseModel):
|
||||
destination_hash = CharField(unique=True) # noqa: F405 # unique destination hash
|
||||
icon_name = CharField() # noqa: F405 # material design icon name for the destination hash
|
||||
foreground_colour = CharField() # noqa: F405 # hex colour to use for foreground (icon colour)
|
||||
background_colour = ( # noqa: F405
|
||||
background_colour = (
|
||||
CharField() # noqa: F405
|
||||
) # hex colour to use for background (background colour)
|
||||
|
||||
created_at = DateTimeField(default=lambda: datetime.now(timezone.utc)) # noqa: F405
|
||||
updated_at = DateTimeField(default=lambda: datetime.now(timezone.utc)) # noqa: F405
|
||||
created_at = DateTimeField(default=lambda: datetime.now(UTC)) # noqa: F405
|
||||
updated_at = DateTimeField(default=lambda: datetime.now(UTC)) # noqa: F405
|
||||
|
||||
# define table name
|
||||
class Meta:
|
||||
@@ -198,10 +200,11 @@ class LxmfUserIcon(BaseModel):
|
||||
class BlockedDestination(BaseModel):
|
||||
id = BigAutoField() # noqa: F405
|
||||
destination_hash = CharField( # noqa: F405
|
||||
unique=True, index=True,
|
||||
unique=True,
|
||||
index=True,
|
||||
) # unique destination hash that is blocked
|
||||
created_at = DateTimeField(default=lambda: datetime.now(timezone.utc)) # noqa: F405
|
||||
updated_at = DateTimeField(default=lambda: datetime.now(timezone.utc)) # noqa: F405
|
||||
created_at = DateTimeField(default=lambda: datetime.now(UTC)) # noqa: F405
|
||||
updated_at = DateTimeField(default=lambda: datetime.now(UTC)) # noqa: F405
|
||||
|
||||
# define table name
|
||||
class Meta:
|
||||
@@ -211,10 +214,11 @@ class BlockedDestination(BaseModel):
|
||||
class SpamKeyword(BaseModel):
|
||||
id = BigAutoField() # noqa: F405
|
||||
keyword = CharField( # noqa: F405
|
||||
unique=True, index=True,
|
||||
unique=True,
|
||||
index=True,
|
||||
) # keyword to match against message content
|
||||
created_at = DateTimeField(default=lambda: datetime.now(timezone.utc)) # noqa: F405
|
||||
updated_at = DateTimeField(default=lambda: datetime.now(timezone.utc)) # noqa: F405
|
||||
created_at = DateTimeField(default=lambda: datetime.now(UTC)) # noqa: F405
|
||||
updated_at = DateTimeField(default=lambda: datetime.now(UTC)) # noqa: F405
|
||||
|
||||
# define table name
|
||||
class Meta:
|
||||
@@ -13,7 +13,7 @@ import threading
import time
import webbrowser
from collections.abc import Callable
from datetime import datetime, timezone
from datetime import UTC, datetime

import LXMF
import psutil
@@ -24,31 +24,45 @@ from LXMF import LXMRouter
from peewee import SqliteDatabase
from serial.tools import list_ports

import database
from src.backend.announce_handler import AnnounceHandler
from src.backend.async_utils import AsyncUtils
from src.backend.audio_call_manager import AudioCall, AudioCallManager
from src.backend.colour_utils import ColourUtils
from src.backend.interface_config_parser import InterfaceConfigParser
from src.backend.interface_editor import InterfaceEditor
from src.backend.lxmf_message_fields import (
from meshchatx import database
from meshchatx.src.backend.announce_handler import AnnounceHandler
from meshchatx.src.backend.async_utils import AsyncUtils
from meshchatx.src.backend.audio_call_manager import AudioCall, AudioCallManager
from meshchatx.src.backend.colour_utils import ColourUtils
from meshchatx.src.backend.interface_config_parser import InterfaceConfigParser
from meshchatx.src.backend.interface_editor import InterfaceEditor
from meshchatx.src.backend.lxmf_message_fields import (
    LxmfAudioField,
    LxmfFileAttachment,
    LxmfFileAttachmentsField,
    LxmfImageField,
)
from src.backend.sideband_commands import SidebandCommands
from meshchatx.src.backend.sideband_commands import SidebandCommands
from meshchatx.src.version import __version__ as app_version


# NOTE: this is required to be able to pack our app with cxfreeze as an exe, otherwise it can't access bundled assets
# this returns a file path based on if we are running meshchat.py directly, or if we have packed it as an exe with cxfreeze
# https://cx-freeze.readthedocs.io/en/latest/faq.html#using-data-files
# bearer:disable python_lang_path_traversal
def get_file_path(filename):
    if getattr(sys, "frozen", False):
        datadir = os.path.dirname(sys.executable)
    else:
        datadir = os.path.dirname(__file__)
    return os.path.join(datadir, filename)
    return os.path.join(datadir, filename)

    # Assets live inside the meshchatx package when installed from a wheel
    package_dir = os.path.dirname(__file__)
    package_path = os.path.join(package_dir, filename)
    if os.path.exists(package_path):
        return package_path

    # When running from the repository, fall back to the project root
    repo_root = os.path.dirname(package_dir)
    repo_path = os.path.join(repo_root, filename)
    if os.path.exists(repo_path):
        return repo_path

    return package_path


class ReticulumMeshChat:
@@ -64,7 +78,9 @@ class ReticulumMeshChat:
|
||||
# ensure a storage path exists for the loaded identity
|
||||
self.storage_dir = storage_dir or os.path.join("storage")
|
||||
self.storage_path = os.path.join(
|
||||
self.storage_dir, "identities", identity.hash.hex(),
|
||||
self.storage_dir,
|
||||
"identities",
|
||||
identity.hash.hex(),
|
||||
)
|
||||
print(f"Using Storage Path: {self.storage_path}")
|
||||
os.makedirs(self.storage_path, exist_ok=True)
|
||||
@@ -177,12 +193,14 @@ class ReticulumMeshChat:
|
||||
)
|
||||
RNS.Transport.register_announce_handler(
|
||||
AnnounceHandler(
|
||||
"lxmf.propagation", self.on_lxmf_propagation_announce_received,
|
||||
"lxmf.propagation",
|
||||
self.on_lxmf_propagation_announce_received,
|
||||
),
|
||||
)
|
||||
RNS.Transport.register_announce_handler(
|
||||
AnnounceHandler(
|
||||
"nomadnetwork.node", self.on_nomadnet_node_announce_received,
|
||||
"nomadnetwork.node",
|
||||
self.on_nomadnet_node_announce_received,
|
||||
),
|
||||
)
|
||||
|
||||
@@ -215,17 +233,16 @@ class ReticulumMeshChat:
|
||||
|
||||
# start background thread for auto syncing propagation nodes
|
||||
thread = threading.Thread(
|
||||
target=asyncio.run, args=(self.announce_sync_propagation_nodes(),),
|
||||
target=asyncio.run,
|
||||
args=(self.announce_sync_propagation_nodes(),),
|
||||
)
|
||||
thread.daemon = True
|
||||
thread.start()
|
||||
|
||||
# gets app version from package.json
|
||||
# gets app version from the synchronized Python version helper
|
||||
@staticmethod
|
||||
def get_app_version() -> str:
|
||||
with open(get_file_path("package.json")) as f:
|
||||
package_json = json.load(f)
|
||||
return package_json["version"]
|
||||
return app_version
|
||||
|
||||
# automatically announces based on user config
|
||||
async def announce_loop(self):
|
||||
@@ -299,7 +316,7 @@ class ReticulumMeshChat:
|
||||
self.message_router.set_outbound_propagation_node(
|
||||
bytes.fromhex(destination_hash),
|
||||
)
|
||||
except Exception: # noqa: E722
|
||||
except Exception:
|
||||
# failed to set propagation node, clear it to ensure we don't use an old one by mistake
|
||||
self.remove_active_propagation_node()
|
||||
|
||||
@@ -326,7 +343,7 @@ class ReticulumMeshChat:
|
||||
self.message_router.enable_propagation()
|
||||
else:
|
||||
self.message_router.disable_propagation()
|
||||
except Exception: # noqa: E722
|
||||
except Exception:
|
||||
print("failed to enable or disable propagation node")
|
||||
|
||||
def _get_reticulum_section(self):
|
||||
@@ -459,7 +476,8 @@ class ReticulumMeshChat:
|
||||
if "image" in fields or "audio" in fields:
|
||||
return True
|
||||
if "file_attachments" in fields and isinstance(
|
||||
fields["file_attachments"], list,
|
||||
fields["file_attachments"],
|
||||
list,
|
||||
):
|
||||
return len(fields["file_attachments"]) > 0
|
||||
return False
|
||||
@@ -473,7 +491,8 @@ class ReticulumMeshChat:
|
||||
|
||||
matches = set()
|
||||
query = database.LxmfMessage.select(
|
||||
database.LxmfMessage.source_hash, database.LxmfMessage.destination_hash,
|
||||
database.LxmfMessage.source_hash,
|
||||
database.LxmfMessage.destination_hash,
|
||||
).where(
|
||||
(
|
||||
(database.LxmfMessage.source_hash == local_hash)
|
||||
@@ -810,7 +829,9 @@ class ReticulumMeshChat:
|
||||
# set optional AutoInterface options
|
||||
InterfaceEditor.update_value(interface_details, data, "group_id")
|
||||
InterfaceEditor.update_value(
|
||||
interface_details, data, "multicast_address_type",
|
||||
interface_details,
|
||||
data,
|
||||
"multicast_address_type",
|
||||
)
|
||||
InterfaceEditor.update_value(interface_details, data, "devices")
|
||||
InterfaceEditor.update_value(interface_details, data, "ignored_devices")
|
||||
@@ -1008,10 +1029,14 @@ class ReticulumMeshChat:
|
||||
InterfaceEditor.update_value(interface_details, data, "callsign")
|
||||
InterfaceEditor.update_value(interface_details, data, "id_interval")
|
||||
InterfaceEditor.update_value(
|
||||
interface_details, data, "airtime_limit_long",
|
||||
interface_details,
|
||||
data,
|
||||
"airtime_limit_long",
|
||||
)
|
||||
InterfaceEditor.update_value(
|
||||
interface_details, data, "airtime_limit_short",
|
||||
interface_details,
|
||||
data,
|
||||
"airtime_limit_short",
|
||||
)
|
||||
|
||||
# handle RNodeMultiInterface
|
||||
@@ -1091,10 +1116,10 @@ class ReticulumMeshChat:
|
||||
interfaces[interface_name] = interface_details
|
||||
|
||||
# handle SerialInterface, KISSInterface, and AX25KISSInterface
|
||||
if (
|
||||
interface_type == "SerialInterface"
|
||||
or interface_type == "KISSInterface"
|
||||
or interface_type == "AX25KISSInterface"
|
||||
if interface_type in (
|
||||
"SerialInterface",
|
||||
"KISSInterface",
|
||||
"AX25KISSInterface",
|
||||
):
|
||||
# ensure port provided
|
||||
interface_port = data.get("port")
|
||||
@@ -1116,10 +1141,7 @@ class ReticulumMeshChat:
|
||||
InterfaceEditor.update_value(interface_details, data, "stopbits")
|
||||
|
||||
# Handle KISS and AX25KISS specific options
|
||||
if (
|
||||
interface_type == "KISSInterface"
|
||||
or interface_type == "AX25KISSInterface"
|
||||
):
|
||||
if interface_type in ("KISSInterface", "AX25KISSInterface"):
|
||||
# set optional options
|
||||
InterfaceEditor.update_value(interface_details, data, "preamble")
|
||||
InterfaceEditor.update_value(interface_details, data, "txtail")
|
||||
@@ -1200,7 +1222,7 @@ class ReticulumMeshChat:
|
||||
try:
|
||||
data = await request.json()
|
||||
selected_interface_names = data.get("selected_interface_names")
|
||||
except Exception: # noqa: E722
|
||||
except Exception:
|
||||
# request data was not json, but we don't care
|
||||
pass
|
||||
|
||||
@@ -1366,7 +1388,7 @@ class ReticulumMeshChat:
|
||||
print(e)
|
||||
elif msg.type == WSMsgType.ERROR:
|
||||
# ignore errors while handling message
|
||||
print("ws connection error %s" % websocket_response.exception())
|
||||
print(f"ws connection error {websocket_response.exception()}")
|
||||
|
||||
# websocket closed
|
||||
self.websocket_clients.remove(websocket_response)
|
||||
@@ -1609,7 +1631,8 @@ class ReticulumMeshChat:
|
||||
# initiate audio call
|
||||
try:
|
||||
audio_call = await self.audio_call_manager.initiate(
|
||||
destination_hash, timeout_seconds,
|
||||
destination_hash,
|
||||
timeout_seconds,
|
||||
)
|
||||
return web.json_response(
|
||||
{
|
||||
@@ -1652,7 +1675,7 @@ class ReticulumMeshChat:
|
||||
if websocket_response.closed is False:
|
||||
try:
|
||||
AsyncUtils.run_async(websocket_response.send_bytes(data))
|
||||
except Exception: # noqa: E722
|
||||
except Exception:
|
||||
# ignore errors sending audio packets to websocket
|
||||
pass
|
||||
|
||||
@@ -1663,7 +1686,7 @@ class ReticulumMeshChat:
|
||||
AsyncUtils.run_async(
|
||||
websocket_response.close(code=WSCloseCode.GOING_AWAY),
|
||||
)
|
||||
except Exception: # noqa: E722
|
||||
except Exception:
|
||||
# ignore errors closing websocket
|
||||
pass
|
||||
|
||||
@@ -1688,7 +1711,7 @@ class ReticulumMeshChat:
|
||||
print(e)
|
||||
elif msg.type == WSMsgType.ERROR:
|
||||
# ignore errors while handling message
|
||||
print("ws connection error %s" % websocket_response.exception())
|
||||
print(f"ws connection error {websocket_response.exception()}")
|
||||
|
||||
# unregister audio packet handler now that the websocket has been closed
|
||||
audio_call.register_audio_packet_listener(on_audio_packet)
|
||||
@@ -1983,21 +2006,27 @@ class ReticulumMeshChat:
|
||||
and lxmf_delivery_announce.app_data is not None
|
||||
):
|
||||
operator_display_name = self.parse_lxmf_display_name(
|
||||
lxmf_delivery_announce.app_data, None,
|
||||
lxmf_delivery_announce.app_data,
|
||||
None,
|
||||
)
|
||||
elif (
|
||||
nomadnetwork_node_announce is not None
|
||||
and nomadnetwork_node_announce.app_data is not None
|
||||
):
|
||||
operator_display_name = ReticulumMeshChat.parse_nomadnetwork_node_display_name(
|
||||
nomadnetwork_node_announce.app_data, None,
|
||||
operator_display_name = (
|
||||
ReticulumMeshChat.parse_nomadnetwork_node_display_name(
|
||||
nomadnetwork_node_announce.app_data,
|
||||
None,
|
||||
)
|
||||
)
|
||||
|
||||
# parse app_data so we can see if propagation is enabled or disabled for this node
|
||||
is_propagation_enabled = None
|
||||
per_transfer_limit = None
|
||||
propagation_node_data = ReticulumMeshChat.parse_lxmf_propagation_node_app_data(
|
||||
announce.app_data,
|
||||
propagation_node_data = (
|
||||
ReticulumMeshChat.parse_lxmf_propagation_node_app_data(
|
||||
announce.app_data,
|
||||
)
|
||||
)
|
||||
if propagation_node_data is not None:
|
||||
is_propagation_enabled = propagation_node_data["enabled"]
|
||||
@@ -2032,9 +2061,7 @@ class ReticulumMeshChat:
|
||||
|
||||
# check if user wants to request the path from the network right now
|
||||
request_query_param = request.query.get("request", "false")
|
||||
should_request_now = (
|
||||
request_query_param == "true" or request_query_param == "1"
|
||||
)
|
||||
should_request_now = request_query_param in ("true", "1")
|
||||
if should_request_now:
|
||||
# determine how long we should wait for a path response
|
||||
timeout_seconds = int(request.query.get("timeout", 15))
|
||||
@@ -2312,7 +2339,8 @@ class ReticulumMeshChat:
|
||||
# update display name if provided
|
||||
if len(display_name) > 0:
|
||||
self.db_upsert_custom_destination_display_name(
|
||||
destination_hash, display_name,
|
||||
destination_hash,
|
||||
display_name,
|
||||
)
|
||||
return web.json_response(
|
||||
{
|
||||
@@ -2756,7 +2784,9 @@ class ReticulumMeshChat:
|
||||
other_user_hash,
|
||||
),
|
||||
"destination_hash": other_user_hash,
|
||||
"is_unread": ReticulumMeshChat.is_lxmf_conversation_unread(other_user_hash),
|
||||
"is_unread": ReticulumMeshChat.is_lxmf_conversation_unread(
|
||||
other_user_hash,
|
||||
),
|
||||
"failed_messages_count": ReticulumMeshChat.lxmf_conversation_failed_messages_count(
|
||||
other_user_hash,
|
||||
),
|
||||
@@ -2878,7 +2908,8 @@ class ReticulumMeshChat:
|
||||
destination_hash = data.get("destination_hash", "")
|
||||
if not destination_hash or len(destination_hash) != 32:
|
||||
return web.json_response(
|
||||
{"error": "Invalid destination hash"}, status=400,
|
||||
{"error": "Invalid destination hash"},
|
||||
status=400,
|
||||
)
|
||||
|
||||
try:
|
||||
@@ -2886,12 +2917,13 @@ class ReticulumMeshChat:
|
||||
# drop any existing paths to this destination
|
||||
try:
|
||||
RNS.Transport.drop_path(bytes.fromhex(destination_hash))
|
||||
except Exception: # noqa: E722
|
||||
except Exception:
|
||||
pass
|
||||
return web.json_response({"message": "ok"})
|
||||
except Exception: # noqa: E722
|
||||
except Exception:
|
||||
return web.json_response(
|
||||
{"error": "Destination already blocked"}, status=400,
|
||||
{"error": "Destination already blocked"},
|
||||
status=400,
|
||||
)
|
||||
|
||||
# remove blocked destination
|
||||
@@ -2900,7 +2932,8 @@ class ReticulumMeshChat:
|
||||
destination_hash = request.match_info.get("destination_hash", "")
|
||||
if not destination_hash or len(destination_hash) != 32:
|
||||
return web.json_response(
|
||||
{"error": "Invalid destination hash"}, status=400,
|
||||
{"error": "Invalid destination hash"},
|
||||
status=400,
|
||||
)
|
||||
|
||||
try:
|
||||
@@ -2911,7 +2944,8 @@ class ReticulumMeshChat:
|
||||
blocked.delete_instance()
|
||||
return web.json_response({"message": "ok"})
|
||||
return web.json_response(
|
||||
{"error": "Destination not blocked"}, status=404,
|
||||
{"error": "Destination not blocked"},
|
||||
status=404,
|
||||
)
|
||||
except Exception as e:
|
||||
return web.json_response({"error": str(e)}, status=500)
|
||||
@@ -2945,9 +2979,10 @@ class ReticulumMeshChat:
|
||||
try:
|
||||
database.SpamKeyword.create(keyword=keyword)
|
||||
return web.json_response({"message": "ok"})
|
||||
except Exception: # noqa: E722
|
||||
except Exception:
|
||||
return web.json_response(
|
||||
{"error": "Keyword already exists"}, status=400,
|
||||
{"error": "Keyword already exists"},
|
||||
status=400,
|
||||
)
|
||||
|
||||
# remove spam keyword
|
||||
@@ -2956,7 +2991,7 @@ class ReticulumMeshChat:
|
||||
keyword_id = request.match_info.get("keyword_id", "")
|
||||
try:
|
||||
keyword_id = int(keyword_id)
|
||||
except (ValueError, TypeError): # noqa: E722
|
||||
except (ValueError, TypeError):
|
||||
return web.json_response({"error": "Invalid keyword ID"}, status=400)
|
||||
|
||||
try:
|
||||
@@ -2983,13 +3018,38 @@ class ReticulumMeshChat:
|
||||
)
|
||||
if message:
|
||||
message.is_spam = is_spam
|
||||
message.updated_at = datetime.now(timezone.utc)
|
||||
message.updated_at = datetime.now(UTC)
|
||||
message.save()
|
||||
return web.json_response({"message": "ok"})
|
||||
return web.json_response({"error": "Message not found"}, status=404)
|
||||
except Exception as e:
|
||||
return web.json_response({"error": str(e)}, status=500)
|
||||
|
||||
# security headers middleware
|
||||
@web.middleware
|
||||
async def security_middleware(request, handler):
|
||||
response = await handler(request)
|
||||
# Add security headers to all responses
|
||||
response.headers["X-Content-Type-Options"] = "nosniff"
|
||||
response.headers["X-Frame-Options"] = "DENY"
|
||||
response.headers["X-XSS-Protection"] = "1; mode=block"
|
||||
response.headers["Referrer-Policy"] = "strict-origin-when-cross-origin"
|
||||
# CSP: allow localhost for development and Electron, websockets, and blob URLs
|
||||
csp = (
|
||||
"default-src 'self'; "
|
||||
"script-src 'self' 'unsafe-inline' 'unsafe-eval'; "
|
||||
"style-src 'self' 'unsafe-inline'; "
|
||||
"img-src 'self' data: blob:; "
|
||||
"font-src 'self' data:; "
|
||||
"connect-src 'self' ws://localhost:* wss://localhost:* blob:; "
|
||||
"media-src 'self' blob:; "
|
||||
"worker-src 'self' blob:; "
|
||||
"object-src 'none'; "
|
||||
"base-uri 'self';"
|
||||
)
|
||||
response.headers["Content-Security-Policy"] = csp
|
||||
return response
|
||||
|
||||
# called when web app has started
|
||||
async def on_startup(app):
|
||||
# remember main event loop
|
@@ -2999,12 +3059,13 @@ class ReticulumMeshChat:
if launch_browser:
try:
webbrowser.open(f"http://127.0.0.1:{port}")
except Exception: # noqa: E722
except Exception:
print("failed to launch web browser")

# create and run web app
app = web.Application(
client_max_size=1024 * 1024 * 50,
middlewares=[security_middleware],
) # allow uploading files up to 50mb
app.add_routes(routes)
app.add_routes(
@@ -3106,7 +3167,8 @@ class ReticulumMeshChat:
self.config.lxmf_inbound_stamp_cost.set(value)
# update the inbound stamp cost on the delivery destination
self.message_router.set_inbound_stamp_cost(
self.local_lxmf_destination.hash, value,
self.local_lxmf_destination.hash,
value,
)
# re-announce to update the stamp cost in announces
self.local_lxmf_destination.display_name = self.config.display_name.get()
@@ -3504,7 +3566,7 @@ class ReticulumMeshChat:
for websocket_client in self.websocket_clients:
try:
await websocket_client.send_str(data)
except Exception: # noqa: E722
except Exception:
# do nothing if failed to broadcast to a specific websocket client
pass

@@ -3571,7 +3633,9 @@ class ReticulumMeshChat:
remote_destination_hash_hex = None
if remote_identity is not None:
remote_destination_hash = RNS.Destination.hash(
remote_identity, "call", "audio",
remote_identity,
"call",
"audio",
)
remote_destination_hash_hex = remote_destination_hash.hex()

@@ -3674,7 +3738,9 @@ class ReticulumMeshChat:
"method": self.convert_lxmf_method_to_string(lxmf_message),
"delivery_attempts": lxmf_message.delivery_attempts,
"next_delivery_attempt_at": getattr(
lxmf_message, "next_delivery_attempt", None,
lxmf_message,
"next_delivery_attempt",
None,
), # attribute may not exist yet
"title": lxmf_message.title.decode("utf-8"),
"content": lxmf_message.content.decode("utf-8"),
@@ -3757,7 +3823,9 @@ class ReticulumMeshChat:
if announce.aspect == "lxmf.delivery":
display_name = self.parse_lxmf_display_name(announce.app_data)
elif announce.aspect == "nomadnetwork.node":
display_name = ReticulumMeshChat.parse_nomadnetwork_node_display_name(announce.app_data)
display_name = ReticulumMeshChat.parse_nomadnetwork_node_display_name(
announce.app_data,
)

# find lxmf user icon from database
lxmf_user_icon = None
@@ -3851,13 +3919,14 @@ class ReticulumMeshChat:
"icon_name": icon_name,
"foreground_colour": foreground_colour,
"background_colour": background_colour,
"updated_at": datetime.now(timezone.utc),
"updated_at": datetime.now(UTC),
}

# upsert to database
query = database.LxmfUserIcon.insert(data)
query = query.on_conflict(
conflict_target=[database.LxmfUserIcon.destination_hash], update=data,
conflict_target=[database.LxmfUserIcon.destination_hash],
update=data,
)
query.execute()

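The reflowed `insert(...).on_conflict(...)` calls above use peewee's upsert API: `conflict_target` names the unique column and `update` is applied when a row with that key already exists. A rough sketch of the same pattern against a throwaway in-memory model (the `UserIcon` table here is hypothetical, not the app's schema):

```python
from datetime import UTC, datetime

from peewee import CharField, DateTimeField, Model, SqliteDatabase

db = SqliteDatabase(":memory:")


class UserIcon(Model):
    destination_hash = CharField(unique=True)
    icon_name = CharField()
    updated_at = DateTimeField()

    class Meta:
        database = db


db.create_tables([UserIcon])

data = {
    "destination_hash": "abc123",
    "icon_name": "account",
    "updated_at": datetime.now(UTC),
}

# insert a new row, or update the existing one when destination_hash is already present
query = UserIcon.insert(data).on_conflict(
    conflict_target=[UserIcon.destination_hash],
    update=data,
)
query.execute()
```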
@@ -3869,7 +3938,7 @@ class ReticulumMeshChat:
database.BlockedDestination.destination_hash == destination_hash,
)
return blocked is not None
except Exception: # noqa: E722
except Exception:
return False

# check if message content matches spam keywords
@@ -3882,7 +3951,7 @@ class ReticulumMeshChat:
if keyword.keyword.lower() in search_text:
return True
return False
except Exception: # noqa: E722
except Exception:
return False

# check if message has attachments and should be rejected
@@ -3896,7 +3965,7 @@ class ReticulumMeshChat:
if LXMF.FIELD_AUDIO in lxmf_fields:
return True
return False
except Exception: # noqa: E722
except Exception:
return False

# handle an lxmf delivery from reticulum
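Several handlers above keep `except Exception:` but drop the `# noqa: E722` comment. E722 only flags a bare `except:` clause, so once the handler names `Exception` explicitly the suppression does nothing, which is presumably why these commits strip it. A small contrast sketch:

```python
def risky() -> None:
    raise ValueError("boom")


try:
    risky()
except:  # noqa: E722 -- a bare except is what flake8/ruff E722 actually flags
    pass

try:
    risky()
except Exception:  # explicit exception type: E722 never fires, so no noqa is needed
    pass
```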
@@ -4046,7 +4115,9 @@ class ReticulumMeshChat:

# upserts the provided lxmf message to the database
def db_upsert_lxmf_message(
self, lxmf_message: LXMF.LXMessage, is_spam: bool = False,
self,
lxmf_message: LXMF.LXMessage,
is_spam: bool = False,
):
# convert lxmf message to dict
lxmf_message_dict = self.convert_lxmf_message_to_dict(lxmf_message)
@@ -4070,13 +4141,14 @@ class ReticulumMeshChat:
"snr": lxmf_message_dict["snr"],
"quality": lxmf_message_dict["quality"],
"is_spam": is_spam,
"updated_at": datetime.now(timezone.utc),
"updated_at": datetime.now(UTC),
}

# upsert to database
query = database.LxmfMessage.insert(data)
query = query.on_conflict(
conflict_target=[database.LxmfMessage.hash], update=data,
conflict_target=[database.LxmfMessage.hash],
update=data,
)
query.execute()

@@ -4105,7 +4177,7 @@ class ReticulumMeshChat:
"rssi": rssi,
"snr": snr,
"quality": quality,
"updated_at": datetime.now(timezone.utc),
"updated_at": datetime.now(UTC),
}

# only set app data if provided, as we don't want to wipe existing data when we request keys from the network
@@ -4116,20 +4188,22 @@ class ReticulumMeshChat:
# upsert to database
query = database.Announce.insert(data)
query = query.on_conflict(
conflict_target=[database.Announce.destination_hash], update=data,
conflict_target=[database.Announce.destination_hash],
update=data,
)
query.execute()

# upserts a custom destination display name to the database
@staticmethod
def db_upsert_custom_destination_display_name(
destination_hash: str, display_name: str,
destination_hash: str,
display_name: str,
):
# prepare data to insert or update
data = {
"destination_hash": destination_hash,
"display_name": display_name,
"updated_at": datetime.now(timezone.utc),
"updated_at": datetime.now(UTC),
}

# upsert to database
@@ -4143,14 +4217,16 @@ class ReticulumMeshChat:
# upserts a custom destination display name to the database
@staticmethod
def db_upsert_favourite(
destination_hash: str, display_name: str, aspect: str,
destination_hash: str,
display_name: str,
aspect: str,
):
# prepare data to insert or update
data = {
"destination_hash": destination_hash,
"display_name": display_name,
"aspect": aspect,
"updated_at": datetime.now(timezone.utc),
"updated_at": datetime.now(UTC),
}

# upsert to database
@@ -4167,8 +4243,8 @@ class ReticulumMeshChat:
# prepare data to insert or update
data = {
"destination_hash": destination_hash,
"last_read_at": datetime.now(timezone.utc),
"updated_at": datetime.now(timezone.utc),
"last_read_at": datetime.now(UTC),
"updated_at": datetime.now(UTC),
}

# upsert to database
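The repeated `datetime.now(timezone.utc)` → `datetime.now(UTC)` swaps in these upsert helpers rely on `datetime.UTC`, added in Python 3.11 as an alias of `timezone.utc`; both spellings yield the same timezone-aware timestamp. A quick sketch:

```python
from datetime import UTC, datetime, timezone

# UTC (Python 3.11+) is the same object as timezone.utc
assert UTC is timezone.utc

old_style = datetime.now(timezone.utc)
new_style = datetime.now(UTC)
print(old_style.tzinfo == new_style.tzinfo)  # True
```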
@@ -4397,7 +4473,11 @@ class ReticulumMeshChat:

# upsert announce to database
self.db_upsert_announce(
announced_identity, destination_hash, aspect, app_data, announce_packet_hash,
announced_identity,
destination_hash,
aspect,
app_data,
announce_packet_hash,
)

# find announce from database
@@ -4448,7 +4528,11 @@ class ReticulumMeshChat:

# upsert announce to database
self.db_upsert_announce(
announced_identity, destination_hash, aspect, app_data, announce_packet_hash,
announced_identity,
destination_hash,
aspect,
app_data,
announce_packet_hash,
)

# find announce from database
@@ -4498,7 +4582,11 @@ class ReticulumMeshChat:

# upsert announce to database
self.db_upsert_announce(
announced_identity, destination_hash, aspect, app_data, announce_packet_hash,
announced_identity,
destination_hash,
aspect,
app_data,
announce_packet_hash,
)

# find announce from database
@@ -4626,7 +4714,11 @@ class ReticulumMeshChat:

# upsert announce to database
self.db_upsert_announce(
announced_identity, destination_hash, aspect, app_data, announce_packet_hash,
announced_identity,
destination_hash,
aspect,
app_data,
announce_packet_hash,
)

# find announce from database
@@ -4676,7 +4768,9 @@ class ReticulumMeshChat:
# if app data is available in database, it should be base64 encoded text that was announced
# we will return the parsed lxmf display name as the conversation name
if lxmf_announce is not None and lxmf_announce.app_data is not None:
return ReticulumMeshChat.parse_lxmf_display_name(app_data_base64=lxmf_announce.app_data)
return ReticulumMeshChat.parse_lxmf_display_name(
app_data_base64=lxmf_announce.app_data,
)

# announce did not have app data, so provide a fallback name
return "Anonymous Peer"
@@ -4684,14 +4778,15 @@ class ReticulumMeshChat:
# reads the lxmf display name from the provided base64 app data
@staticmethod
def parse_lxmf_display_name(
app_data_base64: str, default_value: str | None = "Anonymous Peer",
app_data_base64: str,
default_value: str | None = "Anonymous Peer",
):
try:
app_data_bytes = base64.b64decode(app_data_base64)
display_name = LXMF.display_name_from_app_data(app_data_bytes)
if display_name is not None:
return display_name
except Exception: # noqa: E722
except Exception:
pass

return default_value
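For readability, the reflowed fragments of `parse_lxmf_display_name` above reassemble into the following shape, pulled out as a free function here purely for illustration (the original is a `@staticmethod` on `ReticulumMeshChat`):

```python
import base64

import LXMF


def parse_lxmf_display_name(
    app_data_base64: str,
    default_value: str | None = "Anonymous Peer",
):
    # decode the announce app data stored as base64 text and let LXMF extract the name
    try:
        app_data_bytes = base64.b64decode(app_data_base64)
        display_name = LXMF.display_name_from_app_data(app_data_bytes)
        if display_name is not None:
            return display_name
    except Exception:
        pass

    # no usable app data: fall back to the default, e.g. "Anonymous Peer"
    return default_value


# callers pass the base64 app data straight from a stored announce row,
# e.g. parse_lxmf_display_name(lxmf_announce.app_data)
```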
@@ -4702,18 +4797,19 @@ class ReticulumMeshChat:
try:
app_data_bytes = base64.b64decode(app_data_base64)
return LXMF.stamp_cost_from_app_data(app_data_bytes)
except Exception: # noqa: E722
except Exception:
return None

# reads the nomadnetwork node display name from the provided base64 app data
@staticmethod
def parse_nomadnetwork_node_display_name(
app_data_base64: str, default_value: str | None = "Anonymous Node",
app_data_base64: str,
default_value: str | None = "Anonymous Node",
):
try:
app_data_bytes = base64.b64decode(app_data_base64)
return app_data_bytes.decode("utf-8")
except Exception: # noqa: E722
except Exception:
return default_value

# parses lxmf propagation node app data
@@ -4727,7 +4823,7 @@ class ReticulumMeshChat:
"timebase": int(data[1]),
"per_transfer_limit": int(data[3]),
}
except Exception: # noqa: E722
except Exception:
return None

# returns true if the conversation has messages newer than the last read at timestamp
@@ -4760,10 +4856,12 @@ class ReticulumMeshChat:

# conversation is unread if last read at is before the latest incoming message creation date
conversation_last_read_at = datetime.strptime(
lxmf_conversation_read_state.last_read_at, "%Y-%m-%d %H:%M:%S.%f%z",
lxmf_conversation_read_state.last_read_at,
"%Y-%m-%d %H:%M:%S.%f%z",
)
conversation_latest_message_at = datetime.strptime(
latest_incoming_lxmf_message.created_at, "%Y-%m-%d %H:%M:%S.%f%z",
latest_incoming_lxmf_message.created_at,
"%Y-%m-%d %H:%M:%S.%f%z",
)
return conversation_last_read_at < conversation_latest_message_at

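The unread check above parses the stored text timestamps with the `%Y-%m-%d %H:%M:%S.%f%z` format and then compares the two aware datetimes directly. A small sketch with made-up values in that same format:

```python
from datetime import datetime

TIMESTAMP_FORMAT = "%Y-%m-%d %H:%M:%S.%f%z"

# hypothetical stored values, shaped like the text the database rows hold
last_read_at = datetime.strptime("2024-05-01 10:00:00.000000+00:00", TIMESTAMP_FORMAT)
latest_message_at = datetime.strptime("2024-05-01 10:05:00.000000+00:00", TIMESTAMP_FORMAT)

# the conversation counts as unread when something arrived after the last-read marker
print(last_read_at < latest_message_at)  # True
```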
@@ -4813,7 +4911,7 @@ class Config:
data = {
"key": key,
"value": value,
"updated_at": datetime.now(timezone.utc),
"updated_at": datetime.now(UTC),
}

# upsert to database
@@ -4882,44 +4980,57 @@ class Config:
last_announced_at = IntConfig("last_announced_at", None)
theme = StringConfig("theme", "light")
auto_resend_failed_messages_when_announce_received = BoolConfig(
"auto_resend_failed_messages_when_announce_received", True,
"auto_resend_failed_messages_when_announce_received",
True,
)
allow_auto_resending_failed_messages_with_attachments = BoolConfig(
"allow_auto_resending_failed_messages_with_attachments", False,
"allow_auto_resending_failed_messages_with_attachments",
False,
)
auto_send_failed_messages_to_propagation_node = BoolConfig(
"auto_send_failed_messages_to_propagation_node", False,
"auto_send_failed_messages_to_propagation_node",
False,
)
show_suggested_community_interfaces = BoolConfig(
"show_suggested_community_interfaces", True,
"show_suggested_community_interfaces",
True,
)
lxmf_delivery_transfer_limit_in_bytes = IntConfig(
"lxmf_delivery_transfer_limit_in_bytes", 1000 * 1000 * 10,
"lxmf_delivery_transfer_limit_in_bytes",
1000 * 1000 * 10,
) # 10MB
lxmf_preferred_propagation_node_destination_hash = StringConfig(
"lxmf_preferred_propagation_node_destination_hash", None,
"lxmf_preferred_propagation_node_destination_hash",
None,
)
lxmf_preferred_propagation_node_auto_sync_interval_seconds = IntConfig(
"lxmf_preferred_propagation_node_auto_sync_interval_seconds", 0,
"lxmf_preferred_propagation_node_auto_sync_interval_seconds",
0,
)
lxmf_preferred_propagation_node_last_synced_at = IntConfig(
"lxmf_preferred_propagation_node_last_synced_at", None,
"lxmf_preferred_propagation_node_last_synced_at",
None,
)
lxmf_local_propagation_node_enabled = BoolConfig(
"lxmf_local_propagation_node_enabled", False,
"lxmf_local_propagation_node_enabled",
False,
)
lxmf_user_icon_name = StringConfig("lxmf_user_icon_name", None)
lxmf_user_icon_foreground_colour = StringConfig(
"lxmf_user_icon_foreground_colour", None,
"lxmf_user_icon_foreground_colour",
None,
)
lxmf_user_icon_background_colour = StringConfig(
"lxmf_user_icon_background_colour", None,
"lxmf_user_icon_background_colour",
None,
)
lxmf_inbound_stamp_cost = IntConfig(
"lxmf_inbound_stamp_cost", 8,
"lxmf_inbound_stamp_cost",
8,
) # for direct delivery messages
lxmf_propagation_node_stamp_cost = IntConfig(
"lxmf_propagation_node_stamp_cost", 16,
"lxmf_propagation_node_stamp_cost",
16,
) # for propagation node messages

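Each of these entries pairs a config key with its default, and elsewhere in the diff they are read and written through `.get()` and `.set(...)` (for example `self.config.lxmf_inbound_stamp_cost.set(value)` and `self.config.display_name.get()`). A hypothetical stand-in with that shape, only to illustrate the key/default pairing; it is not the app's actual `IntConfig` implementation:

```python
class IntConfig:
    """Toy stand-in: a named integer setting with a default, backed by a shared dict."""

    _store: dict[str, int] = {}

    def __init__(self, key: str, default):
        self.key = key
        self.default = default

    def get(self):
        return self._store.get(self.key, self.default)

    def set(self, value) -> None:
        self._store[self.key] = value


lxmf_inbound_stamp_cost = IntConfig("lxmf_inbound_stamp_cost", 8)
print(lxmf_inbound_stamp_cost.get())  # 8, the default
lxmf_inbound_stamp_cost.set(16)
print(lxmf_inbound_stamp_cost.get())  # 16
```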
@@ -4944,8 +5055,8 @@ class NomadnetDownloader:
self.path = path
self.data = data
self.timeout = timeout
self.on_download_success = on_download_success
self.on_download_failure = on_download_failure
self._download_success_callback = on_download_success
self._download_failure_callback = on_download_failure
self.on_progress_update = on_progress_update
self.request_receipt = None
self.is_cancelled = False
@@ -4959,22 +5070,24 @@ class NomadnetDownloader:
if self.request_receipt is not None:
try:
self.request_receipt.cancel()
except Exception: # noqa: E722
except Exception:
pass

# clean up the link if we created it
if self.link is not None:
try:
self.link.teardown()
except Exception: # noqa: E722
except Exception:
pass

# notify that download was cancelled
self.on_download_failure("cancelled")
self._download_failure_callback("cancelled")

# setup link to destination and request download
async def download(
self, path_lookup_timeout: int = 15, link_establishment_timeout: int = 15,
self,
path_lookup_timeout: int = 15,
link_establishment_timeout: int = 15,
):
# check if cancelled before starting
if self.is_cancelled:
@@ -5008,7 +5121,7 @@ class NomadnetDownloader:

# if we still don't have a path, we can't establish a link, so bail out
if not RNS.Transport.has_path(self.destination_hash):
self.on_download_failure("Could not find path to destination.")
self._download_failure_callback("Could not find path to destination.")
return

# check if cancelled before establishing link
@@ -5044,7 +5157,7 @@ class NomadnetDownloader:

# if we still haven't established a link, bail out
if link.status is not RNS.Link.ACTIVE:
self.on_download_failure("Could not establish link to destination.")
self._download_failure_callback("Could not establish link to destination.")

# link to destination was established, we should now request the download
def link_established(self, link):
@@ -5067,11 +5180,11 @@ class NomadnetDownloader:

# handle successful download
def on_response(self, request_receipt: RNS.RequestReceipt):
self.on_download_success(request_receipt)
self._download_success_callback(request_receipt)

# handle failure
def on_failed(self, request_receipt=None):
self.on_download_failure("request_failed")
self._download_failure_callback("request_failed")

# handle download progress
def on_progress(self, request_receipt):
@@ -5175,7 +5288,7 @@ class NomadnetFileDownloader(NomadnetDownloader):
file_name: str = response[0]
file_data: bytes = response[1]
self.on_file_download_success(file_name, file_data)
except Exception: # noqa: E722
except Exception:
self.on_download_failure("unsupported_response")

# page download failed, send error to provided callback
@@ -5241,7 +5354,8 @@ def main():
help="Throws an exception. Used for testing the electron error dialog",
)
parser.add_argument(
"args", nargs=argparse.REMAINDER,
"args",
nargs=argparse.REMAINDER,
) # allow unknown command line args
args = parser.parse_args()

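The `args` option above uses `nargs=argparse.REMAINDER`, which sweeps the rest of the command line into a list instead of rejecting tokens the parser does not recognise (the extra flags Electron passes along, for example). A tiny sketch with made-up flags:

```python
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--host", default="127.0.0.1")
parser.add_argument(
    "args",
    nargs=argparse.REMAINDER,
)  # everything from the first leftover token onward lands here

parsed = parser.parse_args(["--host", "0.0.0.0", "extra-token", "--unknown-flag"])
print(parsed.host)  # 0.0.0.0
print(parsed.args)  # ['extra-token', '--unknown-flag']
```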
@@ -5313,7 +5427,9 @@ def main():

# init app
reticulum_meshchat = ReticulumMeshChat(
identity, args.storage_dir, args.reticulum_config_dir,
identity,
args.storage_dir,
args.reticulum_config_dir,
)
reticulum_meshchat.run(args.host, args.port, launch_browser=args.headless is False)

1
meshchatx/src/backend/__init__.py
Normal file
@@ -0,0 +1 @@
"""Backend utilities shared by the Reticulum MeshChatX CLI."""
@@ -7,7 +7,11 @@ class AnnounceHandler:

# we will just pass the received announce back to the provided callback
def received_announce(
self, destination_hash, announced_identity, app_data, announce_packet_hash,
self,
destination_hash,
announced_identity,
app_data,
announce_packet_hash,
):
try:
# handle received announce
@@ -18,6 +22,6 @@ class AnnounceHandler:
app_data,
announce_packet_hash,
)
except Exception: # noqa: E722
except Exception:
# ignore failure to handle received announce
pass
@@ -146,7 +146,9 @@ class AudioCallManager:

# attempts to initiate a call to the provided destination and returns the link hash on success
async def initiate(
self, destination_hash: bytes, timeout_seconds: int = 15,
self,
destination_hash: bytes,
timeout_seconds: int = 15,
) -> AudioCall:
# determine when to timeout
timeout_after_seconds = time.time() + timeout_seconds
@@ -240,7 +242,7 @@ class AudioCallReceiver:
)
link.teardown()
return
except Exception: # noqa: E722
except Exception:
# if we can't get identity yet, we'll check later
pass

@@ -71,7 +71,8 @@ class WebsocketClientInterface(Interface):
self.websocket.send(data)
except Exception as e:
RNS.log(
f"Exception occurred while transmitting via {self!s}", RNS.LOG_ERROR,
f"Exception occurred while transmitting via {self!s}",
RNS.LOG_ERROR,
)
RNS.log(f"The contained exception was: {e!s}", RNS.LOG_ERROR)
return
@@ -93,7 +94,9 @@ class WebsocketClientInterface(Interface):
try:
RNS.log(f"Connecting to Websocket for {self!s}...", RNS.LOG_DEBUG)
self.websocket = connect(
f"{self.target_url}", max_size=None, compression=None,
f"{self.target_url}",
max_size=None,
compression=None,
)
RNS.log(f"Connected to Websocket for {self!s}", RNS.LOG_DEBUG)
self.read_loop()
@@ -3,9 +3,8 @@ import time

import RNS
from RNS.Interfaces.Interface import Interface
from websockets.sync.server import Server, ServerConnection, serve

from src.backend.interfaces.WebsocketClientInterface import WebsocketClientInterface
from websockets.sync.server import Server, ServerConnection, serve


class WebsocketServerInterface(Interface):
1
meshchatx/src/backend/interfaces/__init__.py
Normal file
@@ -0,0 +1 @@
"""Shared transport interfaces for MeshChatX."""
Binary image files: 10 entries, before/after sizes unchanged (109 KiB, 80 KiB, 4.6 KiB, 4.4 KiB, 8.2 KiB, 8.1 KiB, 8.0 KiB, 8.1 KiB, 85 KiB, 12 KiB).