Compare commits

44 commits (SHA1):

1f15a67b8e, 675975bafc, 62d6cce914, ab6099b0fb, d97676ad27, 4200e43618,
c5ae53bf55, bf8c22c31a, 9a9022ffb0, 0443734ee3, 6efac94f58, 9e1a8ce180,
f2ab1ad067, 365531be9b, 96f4fc8735, d69a3e8522, c95d2fd71c, a74a6869ea,
d8419990b1, 085385a182, f8b0dd18c5, 3231afb84d, 3848613a41, 284517bdfa,
5fc13dc61a, f989295773, d06ede8c5e, a0047ea8fb, c98131f76b, 9b4b8fdfeb,
48a0d8697e, 5627ae1640, 94d91c4934, ac839df357, cfad1ddc5f, 398ab570df,
50bc2cbfc8, fe3a01c3c6, 0b0a39ea86, 2e001006c9, 0beaaaf4b1, 84f887df90,
80cf812e54, 19854e59da
@@ -3,24 +3,36 @@ README.md
 LICENSE
 donate.md
 screenshots/
+docs/
 
 # Development files
 .github/
 electron/
+scripts/
+Makefile
 
 # Build artifacts and cache
+build/
+dist/
 public/
+meshchatx/public/
 node_modules/
 __pycache__/
-*.pyc
-*.pyo
-*.pyd
+*.py[cod]
+*$py.class
+*.so
 .Python
+*.egg-info/
+*.egg
+python-dist/
+
+# Virtual environments
 env/
 venv/
 ENV/
 env.bak/
 venv.bak/
+.venv/
 
 # IDE and editor files
 .vscode/
@@ -47,9 +59,19 @@ Dockerfile*
 docker-compose*.yml
 .dockerignore
 
+# Local storage and runtime data
+storage/
+testing/
+telemetry_test_lxmf/
+
 # Logs
 *.log
 
 # Temporary files
 *.tmp
 *.temp
+
+# Environment variables
+.env
+.env.local
+.env.*.local
.github/workflows/build.yml (vendored, 334 lines changed)

@@ -27,96 +27,14 @@ on:
         default: 'true'
         type: boolean
 
+permissions:
+  contents: read
+
 jobs:
-  build_windows:
-    runs-on: windows-latest
-    if: github.event_name == 'push' || (github.event_name == 'workflow_dispatch' && github.event.inputs.build_windows == 'true')
-    permissions:
-      contents: write
-    steps:
-      - name: Clone Repo
-        uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e # v1
-
-      - name: Install NodeJS
-        uses: actions/setup-node@f1f314fca9dfce2769ece7d933488f076716723e # v1
-        with:
-          node-version: 22
-
-      - name: Install Python
-        uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5
-        with:
-          python-version: "3.12"
-
-      - name: Install Python Deps
-        run: |
-          python -m venv venv
-          venv\Scripts\pip install --upgrade pip
-          venv\Scripts\pip install -r requirements.txt
-
-      - name: Install NodeJS Deps
-        run: npm install
-
-      - name: Build Electron App
-        run: npm run dist
-
-      - name: Create Release
-        id: create_release
-        uses: ncipollo/release-action@b7eabc95ff50cbeeedec83973935c8f306dfcd0b # v1
-        with:
-          draft: true
-          allowUpdates: true
-          replacesArtifacts: true
-          omitDraftDuringUpdate: true
-          omitNameDuringUpdate: true
-          artifacts: "dist/*-win-installer.exe,dist/*-win-portable.exe"
-
-  build_mac:
-    runs-on: macos-13
-    if: github.event_name == 'push' || (github.event_name == 'workflow_dispatch' && github.event.inputs.build_mac == 'true')
-    permissions:
-      contents: write
-    steps:
-      - name: Clone Repo
-        uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e # v1
-
-      - name: Install NodeJS
-        uses: actions/setup-node@f1f314fca9dfce2769ece7d933488f076716723e # v1
-        with:
-          node-version: 18
-
-      - name: Install Python
-        uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5
-        with:
-          python-version: "3.11"
-
-      - name: Install Python Deps
-        run: |
-          python3 -m venv venv
-          venv/bin/pip install --upgrade pip
-          venv/bin/pip install -r requirements.txt
-
-      - name: Install NodeJS Deps
-        run: npm install
-
-      - name: Build Electron App
-        run: npm run dist
-
-      - name: Create Release
-        id: create_release
-        uses: ncipollo/release-action@b7eabc95ff50cbeeedec83973935c8f306dfcd0b # v1
-        with:
-          draft: true
-          allowUpdates: true
-          replacesArtifacts: true
-          omitDraftDuringUpdate: true
-          omitNameDuringUpdate: true
-          artifacts: "dist/*-mac.dmg"
-
-  build_linux:
+  build_frontend:
     runs-on: ubuntu-latest
-    if: github.event_name == 'push' || (github.event_name == 'workflow_dispatch' && github.event.inputs.build_linux == 'true')
     permissions:
-      contents: write
+      contents: read
    steps:
      - name: Clone Repo
        uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e # v1
@@ -131,31 +49,245 @@ jobs:
         with:
           python-version: "3.12"
 
+      - name: Sync versions
+        run: python scripts/sync_version.py
+
+      - name: Install NodeJS Deps
+        run: npm install
+
+      - name: Build Frontend
+        run: npm run build-frontend
+
+      - name: Upload frontend artifact
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
+        with:
+          name: frontend-build
+          path: meshchatx/public
+          if-no-files-found: error
+
+  build_desktop:
+    name: Build Desktop (${{ matrix.name }})
+    needs: build_frontend
+    runs-on: ${{ matrix.os }}
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+          - name: windows
+            os: windows-latest
+            node: 22
+            python: "3.13"
+            release_artifacts: "dist/*-win-installer.exe,dist/*-win-portable.exe"
+            build_input: build_windows
+            dist_script: dist-prebuilt
+            variant: standard
+            electron_version: "39.2.4"
+          - name: mac
+            os: macos-14
+            node: 22
+            python: "3.13"
+            release_artifacts: "dist/*-mac-*.dmg"
+            build_input: build_mac
+            dist_script: dist:mac-universal
+            variant: standard
+            electron_version: "39.2.4"
+          - name: linux
+            os: ubuntu-latest
+            node: 22
+            python: "3.13"
+            release_artifacts: "dist/*-linux.AppImage,dist/*-linux.deb,python-dist/*.whl"
+            build_input: build_linux
+            dist_script: dist-prebuilt
+            variant: standard
+            electron_version: "39.2.4"
+          - name: windows-legacy
+            os: windows-latest
+            node: 18
+            python: "3.11"
+            release_artifacts: "dist/*-win-installer*.exe,dist/*-win-portable*.exe"
+            build_input: build_windows
+            dist_script: dist-prebuilt
+            variant: legacy
+            electron_version: "30.0.8"
+          - name: linux-legacy
+            os: ubuntu-latest
+            node: 18
+            python: "3.11"
+            release_artifacts: "dist/*-linux*.AppImage,dist/*-linux*.deb,python-dist/*.whl"
+            build_input: build_linux
+            dist_script: dist-prebuilt
+            variant: legacy
+            electron_version: "30.0.8"
+    permissions:
+      contents: write
+    steps:
+      - name: Clone Repo
+        if: |
+          github.event_name == 'push' ||
+          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
+        uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e # v1
+
+      - name: Set legacy Electron version
+        if: |
+          matrix.variant == 'legacy' &&
+          (github.event_name == 'push' ||
+          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true))
+        shell: bash
+        run: |
+          node -e "const fs=require('fs');const pkg=require('./package.json');pkg.devDependencies.electron='${{ matrix.electron_version }}';fs.writeFileSync('package.json', JSON.stringify(pkg,null,2));"
+          if [ -f package-lock.json ]; then rm package-lock.json; fi
+
+      - name: Install NodeJS
+        if: |
+          github.event_name == 'push' ||
+          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
+        uses: actions/setup-node@f1f314fca9dfce2769ece7d933488f076716723e # v1
+        with:
+          node-version: ${{ matrix.node }}
+
+      - name: Install Python
+        if: |
+          github.event_name == 'push' ||
+          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
+        uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5
+        with:
+          python-version: ${{ matrix.python }}
+
+      - name: Install Poetry
+        if: |
+          github.event_name == 'push' ||
+          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
+        run: python -m pip install --upgrade pip poetry
+
+      - name: Sync versions
+        if: |
+          github.event_name == 'push' ||
+          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
+        run: python scripts/sync_version.py
+
+      - name: Install Python Deps
+        if: |
+          github.event_name == 'push' ||
+          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
+        run: python -m poetry install
+
+      - name: Install NodeJS Deps
+        if: |
+          github.event_name == 'push' ||
+          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
+        run: npm install
+
+      - name: Prepare frontend directory
+        if: |
+          github.event_name == 'push' ||
+          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
+        run: python scripts/prepare_frontend_dir.py
+
+      - name: Download frontend artifact
+        if: |
+          github.event_name == 'push' ||
+          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
+        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4
+        with:
+          name: frontend-build
+          path: meshchatx/public
+
       - name: Install patchelf
+        if: |
+          startsWith(matrix.name, 'linux') &&
+          (github.event_name == 'push' ||
+          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true))
         run: sudo apt-get update && sudo apt-get install -y patchelf
 
-      - name: Install Python Deps
+      - name: Build Python wheel
+        if: |
+          startsWith(matrix.name, 'linux') &&
+          (github.event_name == 'push' ||
+          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true))
         run: |
-          python3 -m venv venv
-          venv/bin/pip install --upgrade pip
-          venv/bin/pip install -r requirements.txt
+          python -m poetry build -f wheel
+          mkdir -p python-dist
+          mv dist/*.whl python-dist/
+          rm -rf dist
 
-      - name: Install NodeJS Deps
-        run: npm install
+      - name: Build Electron App (Universal)
+        if: |
+          github.event_name == 'push' ||
+          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
+        run: npm run ${{ matrix.dist_script }}
 
-      - name: Build Electron App
-        run: npm run dist
+      - name: Rename artifacts for legacy build
+        if: |
+          matrix.variant == 'legacy' &&
+          (github.event_name == 'push' ||
+          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true))
+        run: ./scripts/rename_legacy_artifacts.sh
+
+      - name: Upload build artifacts
+        if: |
+          github.event_name == 'push' ||
+          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
+        uses: actions/upload-artifact@v4
+        with:
+          name: build-${{ matrix.name }}
+          path: |
+            dist/*-win-installer*.exe
+            dist/*-win-portable*.exe
+            dist/*-mac-*.dmg
+            dist/*-linux*.AppImage
+            dist/*-linux*.deb
+            python-dist/*.whl
+          if-no-files-found: ignore
+
+  create_release:
+    name: Create Release
+    needs: build_desktop
+    runs-on: ubuntu-latest
+    if: github.event_name == 'push'
+    permissions:
+      contents: write
+    steps:
+      - name: Download all artifacts
+        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4
+        with:
+          path: artifacts
+
+      - name: Display structure of downloaded files
+        run: ls -R artifacts
+
+      - name: Prepare release assets
+        run: |
+          mkdir -p release-assets
+          find artifacts -type f \( -name "*.exe" -o -name "*.dmg" -o -name "*.AppImage" -o -name "*.deb" -o -name "*.whl" \) -exec cp {} release-assets/ \;
+          ls -lh release-assets/
+
+      - name: Generate SHA256 checksums
+        run: |
+          cd release-assets
+          echo "## SHA256 Checksums" > release-body.md
+          echo "" >> release-body.md
+
+          for file in *.exe *.dmg *.AppImage *.deb *.whl; do
+            if [ -f "$file" ]; then
+              sha256sum "$file" | tee "${file}.sha256"
+              echo "\`$(cat "${file}.sha256")\`" >> release-body.md
+            fi
+          done
+
+          echo "" >> release-body.md
+          echo "Individual \`.sha256\` files are included for each artifact." >> release-body.md
+
+          cat release-body.md
+          echo ""
+          echo "Generated .sha256 files:"
+          ls -1 *.sha256 2>/dev/null || echo "No .sha256 files found"
 
       - name: Create Release
-        id: create_release
         uses: ncipollo/release-action@b7eabc95ff50cbeeedec83973935c8f306dfcd0b # v1
         with:
           draft: true
-          allowUpdates: true
-          replacesArtifacts: true
-          omitDraftDuringUpdate: true
-          omitNameDuringUpdate: true
-          artifacts: "dist/*-linux.AppImage,dist/*-linux.deb"
+          artifacts: "release-assets/*"
+          bodyFile: "release-assets/release-body.md"
 
   build_docker:
     runs-on: ubuntu-latest
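The release job writes a `<artifact>.sha256` file next to each binary in `sha256sum` output format (`<hex digest>  <filename>`). A minimal sketch of how a downloader could verify an artifact against one of those files; the file names in the usage comment are examples only:

```python
import hashlib
from pathlib import Path


def verify_sha256(artifact: str, checksum_file: str) -> bool:
    # The workflow's .sha256 files contain "<hex digest>  <filename>".
    expected = Path(checksum_file).read_text().split()[0]
    digest = hashlib.sha256(Path(artifact).read_bytes()).hexdigest()
    return digest == expected


# Example usage (hypothetical file names):
# verify_sha256("ReticulumMeshChatX-linux.AppImage",
#               "ReticulumMeshChatX-linux.AppImage.sha256")
```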
.github/workflows/dependency-review.yml (vendored, new file, 22 lines)

@@ -0,0 +1,22 @@
+name: 'Dependency review'
+
+on:
+  pull_request:
+    branches: [ "master" ]
+
+permissions:
+  contents: read
+  pull-requests: write
+
+jobs:
+  dependency-review:
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: 'Checkout repository'
+        uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
+
+      - name: 'Dependency Review'
+        uses: actions/dependency-review-action@3c4e3dcb1aa7874d2c16be7d79418e9b7efd6261 # v4
+        with:
+          comment-summary-in-pr: always
.gitignore (vendored, 54 lines changed)

@@ -1,13 +1,57 @@
+# IDE and editor files
 .idea
-node_modules
+.vscode/
+*.swp
+*.swo
+*~
 
-# build files
+# Dependencies
+node_modules/
+
+# Python
+__pycache__/
+*.py[cod]
+*$py.class
+*.so
+.Python
+*.egg-info/
+dist/
+*.egg
+
+# Virtual environments
+venv/
+env/
+ENV/
+env.bak/
+venv.bak/
+.venv/
+
+# Build files
 /build/
 /dist/
-/public/
+/meshchatx/public/
+public/
 /electron/build/exe/
+python-dist/
 
-# local storage
+# Local storage and runtime data
 storage/
+testing/
+telemetry_test_lxmf/
 
-*.pyc
+# Logs
+*.log
+
+# OS files
+.DS_Store
+.DS_Store?
+._*
+.Spotlight-V100
+.Trashes
+ehthumbs.db
+Thumbs.db
+
+# Environment variables
+.env
+.env.local
+.env.*.local
Dockerfile (15 lines changed)

@@ -10,9 +10,8 @@ FROM node:${NODE_VERSION}-alpine@${NODE_ALPINE_SHA256} AS build-frontend
 WORKDIR /src
 
 # Copy required source files
-COPY *.json .
-COPY *.js .
-COPY src/frontend ./src/frontend
+COPY package*.json vite.config.js ./
+COPY meshchatx ./meshchatx
 
 # Install NodeJS deps, exluding electron
 RUN npm install --omit=dev && \
@@ -34,12 +33,10 @@ RUN apk add --no-cache --virtual .build-deps \
     apk del .build-deps
 
 # Copy prebuilt frontend
-COPY --from=build-frontend /src/public public
+COPY --from=build-frontend /src/meshchatx/public meshchatx/public
 
 # Copy other required source files
-COPY *.py .
-COPY src/__init__.py ./src/__init__.py
-COPY src/backend ./src/backend
-COPY *.json .
+COPY meshchatx ./meshchatx
+COPY pyproject.toml poetry.lock ./
 
-CMD ["python", "meshchat.py", "--host=0.0.0.0", "--reticulum-config-dir=/config/.reticulum", "--storage-dir=/config/.meshchat", "--headless"]
+CMD ["python", "-m", "meshchatx.meshchat", "--host=0.0.0.0", "--reticulum-config-dir=/config/.reticulum", "--storage-dir=/config/.meshchat", "--headless"]
Makefile (68 lines changed)

@@ -1,26 +1,40 @@
-.PHONY: install run clean build build-appimage build-exe dist
+.PHONY: install run develop clean build build-appimage build-exe dist sync-version wheel node_modules python build-docker run-docker electron-legacy build-appimage-legacy build-exe-legacy
 
-VENV = venv
-PYTHON = $(VENV)/bin/python
-PIP = $(VENV)/bin/pip
+PYTHON ?= python
+POETRY = $(PYTHON) -m poetry
 NPM = npm
+LEGACY_ELECTRON_VERSION ?= 30.0.8
 
-install: $(VENV) node_modules
+DOCKER_COMPOSE_CMD ?= docker compose
+DOCKER_COMPOSE_FILE ?= docker-compose.yml
+DOCKER_IMAGE ?= reticulum-meshchatx:local
+DOCKER_BUILDER ?= meshchatx-builder
+DOCKER_PLATFORMS ?= linux/amd64
+DOCKER_BUILD_FLAGS ?= --load
+DOCKER_BUILD_ARGS ?=
+DOCKER_CONTEXT ?= .
+DOCKERFILE ?= Dockerfile
 
-$(VENV):
-	python3 -m venv $(VENV)
-	$(PIP) install --upgrade pip
-	$(PIP) install -r requirements.txt
+install: sync-version node_modules python
 
 node_modules:
 	$(NPM) install
 
+python:
+	$(POETRY) install
+
 run: install
-	$(PYTHON) meshchat.py
+	$(POETRY) run meshchat
+
+develop: run
 
 build: install
 	$(NPM) run build
 
+wheel: install
+	$(POETRY) build -f wheel
+	$(PYTHON) scripts/move_wheels.py
+
 build-appimage: build
 	$(NPM) run electron-postinstall
 	$(NPM) run dist -- --linux AppImage
@@ -31,11 +45,43 @@ build-exe: build
 
 dist: build-appimage
 
+electron-legacy:
+	$(NPM) install --no-save electron@$(LEGACY_ELECTRON_VERSION)
+
+# Legacy targets intended for manual/local builds; CI uses workflow jobs.
+build-appimage-legacy: build electron-legacy
+	$(NPM) run electron-postinstall
+	$(NPM) run dist -- --linux AppImage
+	./scripts/rename_legacy_artifacts.sh
+
+build-exe-legacy: build electron-legacy
+	$(NPM) run electron-postinstall
+	$(NPM) run dist -- --win portable
+	./scripts/rename_legacy_artifacts.sh
+
 clean:
-	rm -rf $(VENV)
 	rm -rf node_modules
 	rm -rf build
 	rm -rf dist
+	rm -rf python-dist
+	rm -rf meshchatx/public
+
+sync-version:
+	$(PYTHON) scripts/sync_version.py
+
+build-docker:
+	@if ! docker buildx inspect $(DOCKER_BUILDER) >/dev/null 2>&1; then \
+		docker buildx create --name $(DOCKER_BUILDER) --use >/dev/null; \
+	else \
+		docker buildx use $(DOCKER_BUILDER); \
+	fi
+	docker buildx build --builder $(DOCKER_BUILDER) --platform $(DOCKER_PLATFORMS) \
+		$(DOCKER_BUILD_FLAGS) \
+		-t $(DOCKER_IMAGE) \
+		$(DOCKER_BUILD_ARGS) \
+		-f $(DOCKERFILE) \
+		$(DOCKER_CONTEXT)
+
+run-docker:
+	MESHCHAT_IMAGE="$(DOCKER_IMAGE)" \
+	$(DOCKER_COMPOSE_CMD) -f $(DOCKER_COMPOSE_FILE) up --remove-orphans --pull never reticulum-meshchatx
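The new `wheel` target calls `scripts/move_wheels.py`, which is not included in this comparison. A minimal sketch of what such a helper would need to do, assuming it only relocates freshly built wheels from Poetry's default `dist/` output into `python-dist/`:

```python
"""Hypothetical sketch of scripts/move_wheels.py; the real script is not shown in this diff."""
import shutil
from pathlib import Path


def move_wheels(src: str = "dist", dest: str = "python-dist") -> None:
    # Poetry writes wheels to dist/ by default; the Makefile and CI expect them in python-dist/.
    dest_dir = Path(dest)
    dest_dir.mkdir(parents=True, exist_ok=True)
    for wheel in Path(src).glob("*.whl"):
        shutil.move(str(wheel), dest_dir / wheel.name)


if __name__ == "__main__":
    move_wheels()
```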
README.md (78 lines changed)

@@ -8,13 +8,22 @@ A heavily customized fork of [Reticulum MeshChat](https://github.com/liamcottle/
 - [x] Ability to set inbound and propagation node stamps.
 - [x] Better config parsing.
 - [x] Cancel page fetching or file downloads
-- [x] Block recieving messages from users.
+- [x] Block receiving messages from users.
 - [ ] Spam filter (based on keywords)
 - [ ] Multi-identity support.
+- [ ] Multi-language support
+- [ ] Offline Reticulum documentation tool
+- [ ] More tools (translate, LoRa calculator, LXMFy bots, etc.)
+- [x] Codebase reorganization and cleanup.
+- [ ] Tests and proper CI/CD pipeline.
+- [ ] RNS hot reload
+- [ ] Backup/Import identities, messages and interfaces.
+- [ ] Full LXST support.
+- [x] Poetry for packaging and dependency management.
 - [x] More stats on about page.
 - [x] Actions are pinned to full-length SHA hashes.
 - [x] Docker images are smaller and use SHA256 hashes for the images.
-- [x] Electron improvements.
+- [x] Electron improvements (ASAR and security).
 - [x] Latest updates for NPM and Python dependencies (bleeding edge)
 - [x] Numerous Ruff, Deepsource, CodeQL Advanced and Bearer Linting/SAST fixes.
 - [x] Some performance improvements.
@@ -26,21 +35,76 @@ Check [releases](https://github.com/Sudo-Ivan/reticulum-meshchatX/releases) for
 ## Building
 
 ```bash
-make install
+make install # installs Python deps via Poetry and Node deps via npm
 make build
 ```
 
+You can run `make run` or `make develop` (a thin alias) to start the backend + frontend loop locally through `poetry run meshchat`.
+
+### Python packaging
+
+The Python build is driven entirely by Poetry now. Run `python3 scripts/sync_version.py` or `make sync-version` before packaging so `pyproject.toml` and `src/version.py` match `package.json`. After that:
+
+```bash
+python -m poetry install
+make wheel # produces a wheel in python-dist/ that bundles the public assets
+```
+
+The wheel includes the frontend `public/` assets, `logo/`, and the CLI entry point, and `python-dist/` keeps the artifact separate from the Electron `dist/` output.
+
 ### Building in Docker
 
 ```bash
-make docker-build
+make build-docker
 ```
 
-The build will be in the `dist` directory.
+`build-docker` creates `reticulum-meshchatx:local` (or `$(DOCKER_IMAGE)` if you override it) via `docker buildx`. Set `DOCKER_PLATFORMS` to `linux/amd64,linux/arm64` when you need multi-arch images, and adjust `DOCKER_BUILD_FLAGS`/`DOCKER_BUILD_ARGS` to control `--load`/`--push`.
 
-## Development
+### Running with Docker Compose
 
 ```bash
-make develop
+make run-docker
 ```
 
+`run-docker` feeds the locally-built image into `docker compose -f docker-compose.yml up --remove-orphans --pull never reticulum-meshchatx`. The compose file uses the `MESHCHAT_IMAGE` env var so you can override the target image without editing the YAML (the default still points at `ghcr.io/sudo-ivan/reticulum-meshchatx:latest`). Use `docker compose down` or `Ctrl+C` to stop the container.
+
+The Electron build artifacts will still live under `dist/` for releases.
+
+## Python packaging
+
+The backend uses Poetry with `pyproject.toml` for dependency management and packaging. Before building, run `python3 scripts/sync_version.py` (or `make sync-version`) to ensure the generated `src/version.py` reflects the version from `package.json` that the Electron artifacts use. This keeps the CLI release metadata, wheel packages, and other bundles aligned.
+
+### Build artifact locations
+
+Both `poetry build` and `python -m build` generate wheels inside the default `dist/` directory. The `make wheel` shortcut wraps `poetry build -f wheel` and then runs `python scripts/move_wheels.py` to relocate the generated `.whl` files into `python-dist/` (the layout expected by `scripts/test_wheel.sh` and the release automation). Use `make wheel` if you need the artifacts in `python-dist/`; `poetry build` or `python -m build` alone will leave them in `dist/`.
+
+### Building with Poetry
+
+```bash
+# Install dependencies
+poetry install
+
+# Build the package (wheels land in dist/)
+poetry build
+
+# Install locally for testing (consumes dist/)
+pip install dist/*.whl
+```
+
+### Building with pip (alternative)
+
+If you prefer pip, you can build/install directly:
+
+```bash
+# Build the wheel
+pip install build
+python -m build
+
+# Install locally
+pip install .
+```
+
+### cx_Freeze (for AppImage/NSIS)
+
+The `cx_setup.py` script uses cx_Freeze for creating standalone executables (AppImage for Linux, NSIS for Windows). This is separate from the Poetry/pip packaging workflow.
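The README leans on `scripts/sync_version.py`, which is not included in this comparison. A minimal sketch of what a version-sync helper along those lines might look like, assuming it simply copies the `version` field from `package.json` into a generated `src/version.py` (the real script may also update `pyproject.toml`):

```python
"""Hypothetical sketch of scripts/sync_version.py; the actual script is not shown in this diff."""
import json
from pathlib import Path


def sync_version() -> str:
    # package.json is treated as the single source of truth for the app version.
    version = json.loads(Path("package.json").read_text())["version"]
    # Regenerate the Python-side version module so wheels and the CLI agree with Electron.
    Path("src/version.py").write_text(f'__version__ = "{version}"\n')
    return version


if __name__ == "__main__":
    print(sync_version())
```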
cx_setup.py (new file, 47 lines)

@@ -0,0 +1,47 @@
+from pathlib import Path
+
+from cx_Freeze import Executable, setup
+
+from meshchatx.src.version import __version__
+
+ROOT = Path(__file__).resolve().parent
+PUBLIC_DIR = ROOT / "meshchatx" / "public"
+
+include_files = [
+    (str(PUBLIC_DIR), "public"),
+    ("logo", "logo"),
+]
+
+setup(
+    name="ReticulumMeshChatX",
+    version=__version__,
+    description="A simple mesh network communications app powered by the Reticulum Network Stack",
+    executables=[
+        Executable(
+            script="meshchatx/meshchat.py",
+            base=None,
+            target_name="ReticulumMeshChatX",
+            shortcut_name="ReticulumMeshChatX",
+            shortcut_dir="ProgramMenuFolder",
+            icon="logo/icon.ico",
+        ),
+    ],
+    options={
+        "build_exe": {
+            "packages": [
+                "RNS",
+                "RNS.Interfaces",
+                "LXMF",
+            ],
+            "include_files": include_files,
+            "excludes": [
+                "PIL",
+            ],
+            "optimize": 2,
+            "build_exe": "build/exe",
+            "replace_paths": [
+                ("*", ""),
+            ],
+        },
+    },
+)
@@ -1,7 +1,7 @@
 services:
   reticulum-meshchatx:
     container_name: reticulum-meshchatx
-    image: ghcr.io/sudo-ivan/reticulum-meshchatx:latest
+    image: ${MESHCHAT_IMAGE:-ghcr.io/sudo-ivan/reticulum-meshchatx:latest}
     pull_policy: always
     restart: unless-stopped
     # Make the meshchat web interface accessible from the host on port 8000
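The `${MESHCHAT_IMAGE:-...}` syntax is Compose's standard environment-variable substitution with a default: the override wins when set and non-empty, otherwise the GHCR image is used. A small Python illustration of that resolution rule (illustrative only; Compose does this itself while parsing the file):

```python
import os

# Mirrors ${MESHCHAT_IMAGE:-ghcr.io/sudo-ivan/reticulum-meshchatx:latest}:
# use the env var when it is set and non-empty, otherwise fall back to the default.
default_image = "ghcr.io/sudo-ivan/reticulum-meshchatx:latest"
image = os.environ.get("MESHCHAT_IMAGE") or default_image
print(image)
```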
@@ -133,6 +133,14 @@ app.whenReady().then(async () => {
         webPreferences: {
             // used to inject logging over ipc
             preload: path.join(__dirname, 'preload.js'),
+            // Security: disable node integration in renderer
+            nodeIntegration: false,
+            // Security: enable context isolation (default in Electron 12+)
+            contextIsolation: true,
+            // Security: enable sandbox for additional protection
+            sandbox: true,
+            // Security: disable remote module (deprecated but explicit)
+            enableRemoteModule: false,
         },
     });
 
meshchatx/__init__.py (new file, 3 lines)

@@ -0,0 +1,3 @@
+"""Reticulum MeshChatX - A mesh network communications app."""
+
+__version__ = "2.50.0"
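With the package-level `__init__.py` in place, the version string is importable directly from the package. A quick check, assuming the wheel or a development install is on `sys.path`:

```python
import meshchatx

# Should print the version synced from package.json, e.g. "2.50.0".
print(meshchatx.__version__)
```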
@@ -1,4 +1,4 @@
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 
 from peewee import *  # noqa: F403
 from playhouse.migrate import SqliteMigrator
@@ -17,7 +17,9 @@ def migrate(current_version):
     if current_version < 2:
         migrate_database(
             migrator.add_column(
-                "lxmf_messages", "delivery_attempts", LxmfMessage.delivery_attempts,
+                "lxmf_messages",
+                "delivery_attempts",
+                LxmfMessage.delivery_attempts,
             ),
             migrator.add_column(
                 "lxmf_messages",
@@ -66,8 +68,8 @@ class Config(BaseModel):
     id = BigAutoField()  # noqa: F405
     key = CharField(unique=True)  # noqa: F405
     value = TextField()  # noqa: F405
-    created_at = DateTimeField(default=lambda: datetime.now(timezone.utc))  # noqa: F405
-    updated_at = DateTimeField(default=lambda: datetime.now(timezone.utc))  # noqa: F405
+    created_at = DateTimeField(default=lambda: datetime.now(UTC))  # noqa: F405
+    updated_at = DateTimeField(default=lambda: datetime.now(UTC))  # noqa: F405
 
     # define table name
     class Meta:
@@ -85,7 +87,7 @@ class Announce(BaseModel):
     identity_hash = CharField(  # noqa: F405
         index=True,
     )  # identity hash that announced the destination
-    identity_public_key = (  # noqa: F405
+    identity_public_key = (
         CharField()  # noqa: F405
     )  # base64 encoded public key, incase we want to recreate the identity manually
     app_data = TextField(null=True)  # noqa: F405 # base64 encoded app data bytes
@@ -93,8 +95,8 @@ class Announce(BaseModel):
     snr = FloatField(null=True)  # noqa: F405
     quality = FloatField(null=True)  # noqa: F405
 
-    created_at = DateTimeField(default=lambda: datetime.now(timezone.utc))  # noqa: F405
-    updated_at = DateTimeField(default=lambda: datetime.now(timezone.utc))  # noqa: F405
+    created_at = DateTimeField(default=lambda: datetime.now(UTC))  # noqa: F405
+    updated_at = DateTimeField(default=lambda: datetime.now(UTC))  # noqa: F405
 
     # define table name
     class Meta:
@@ -106,8 +108,8 @@ class CustomDestinationDisplayName(BaseModel):
     destination_hash = CharField(unique=True)  # noqa: F405 # unique destination hash
     display_name = CharField()  # noqa: F405 # custom display name for the destination hash
 
-    created_at = DateTimeField(default=lambda: datetime.now(timezone.utc))  # noqa: F405
-    updated_at = DateTimeField(default=lambda: datetime.now(timezone.utc))  # noqa: F405
+    created_at = DateTimeField(default=lambda: datetime.now(UTC))  # noqa: F405
+    updated_at = DateTimeField(default=lambda: datetime.now(UTC))  # noqa: F405
 
     # define table name
     class Meta:
@@ -120,8 +122,8 @@ class FavouriteDestination(BaseModel):
     display_name = CharField()  # noqa: F405 # custom display name for the destination hash
     aspect = CharField()  # noqa: F405 # e.g: nomadnetwork.node
 
-    created_at = DateTimeField(default=lambda: datetime.now(timezone.utc))  # noqa: F405
-    updated_at = DateTimeField(default=lambda: datetime.now(timezone.utc))  # noqa: F405
+    created_at = DateTimeField(default=lambda: datetime.now(UTC))  # noqa: F405
+    updated_at = DateTimeField(default=lambda: datetime.now(UTC))  # noqa: F405
 
     # define table name
     class Meta:
@@ -133,7 +135,7 @@ class LxmfMessage(BaseModel):
     hash = CharField(unique=True)  # noqa: F405 # unique lxmf message hash
     source_hash = CharField(index=True)  # noqa: F405
     destination_hash = CharField(index=True)  # noqa: F405
-    state = (  # noqa: F405
+    state = (
         CharField()  # noqa: F405
     )  # state is converted from internal int to a human friendly string
     progress = FloatField()  # noqa: F405 # progress is converted from internal float 0.00-1.00 to float between 0.00/100 (2 decimal places)
@@ -150,15 +152,15 @@ class LxmfMessage(BaseModel):
     title = TextField()  # noqa: F405
     content = TextField()  # noqa: F405
     fields = TextField()  # noqa: F405 # json string
-    timestamp = (  # noqa: F405
+    timestamp = (
         FloatField()  # noqa: F405
     )  # timestamp of when the message was originally created (before ever being sent)
     rssi = IntegerField(null=True)  # noqa: F405
     snr = FloatField(null=True)  # noqa: F405
     quality = FloatField(null=True)  # noqa: F405
     is_spam = BooleanField(default=False)  # noqa: F405 # if true, message is marked as spam
-    created_at = DateTimeField(default=lambda: datetime.now(timezone.utc))  # noqa: F405
-    updated_at = DateTimeField(default=lambda: datetime.now(timezone.utc))  # noqa: F405
+    created_at = DateTimeField(default=lambda: datetime.now(UTC))  # noqa: F405
+    updated_at = DateTimeField(default=lambda: datetime.now(UTC))  # noqa: F405
 
     # define table name
     class Meta:
@@ -170,8 +172,8 @@ class LxmfConversationReadState(BaseModel):
     destination_hash = CharField(unique=True)  # noqa: F405 # unique destination hash
     last_read_at = DateTimeField()  # noqa: F405
 
-    created_at = DateTimeField(default=lambda: datetime.now(timezone.utc))  # noqa: F405
-    updated_at = DateTimeField(default=lambda: datetime.now(timezone.utc))  # noqa: F405
+    created_at = DateTimeField(default=lambda: datetime.now(UTC))  # noqa: F405
+    updated_at = DateTimeField(default=lambda: datetime.now(UTC))  # noqa: F405
 
     # define table name
     class Meta:
@@ -183,12 +185,12 @@ class LxmfUserIcon(BaseModel):
     destination_hash = CharField(unique=True)  # noqa: F405 # unique destination hash
     icon_name = CharField()  # noqa: F405 # material design icon name for the destination hash
     foreground_colour = CharField()  # noqa: F405 # hex colour to use for foreground (icon colour)
-    background_colour = (  # noqa: F405
+    background_colour = (
         CharField()  # noqa: F405
     )  # hex colour to use for background (background colour)
 
-    created_at = DateTimeField(default=lambda: datetime.now(timezone.utc))  # noqa: F405
-    updated_at = DateTimeField(default=lambda: datetime.now(timezone.utc))  # noqa: F405
+    created_at = DateTimeField(default=lambda: datetime.now(UTC))  # noqa: F405
+    updated_at = DateTimeField(default=lambda: datetime.now(UTC))  # noqa: F405
 
     # define table name
     class Meta:
@@ -198,10 +200,11 @@ class LxmfUserIcon(BaseModel):
 class BlockedDestination(BaseModel):
     id = BigAutoField()  # noqa: F405
     destination_hash = CharField(  # noqa: F405
-        unique=True, index=True,
+        unique=True,
+        index=True,
     )  # unique destination hash that is blocked
-    created_at = DateTimeField(default=lambda: datetime.now(timezone.utc))  # noqa: F405
-    updated_at = DateTimeField(default=lambda: datetime.now(timezone.utc))  # noqa: F405
+    created_at = DateTimeField(default=lambda: datetime.now(UTC))  # noqa: F405
+    updated_at = DateTimeField(default=lambda: datetime.now(UTC))  # noqa: F405
 
     # define table name
     class Meta:
@@ -211,10 +214,11 @@ class BlockedDestination(BaseModel):
 class SpamKeyword(BaseModel):
     id = BigAutoField()  # noqa: F405
     keyword = CharField(  # noqa: F405
-        unique=True, index=True,
+        unique=True,
+        index=True,
     )  # keyword to match against message content
-    created_at = DateTimeField(default=lambda: datetime.now(timezone.utc))  # noqa: F405
-    updated_at = DateTimeField(default=lambda: datetime.now(timezone.utc))  # noqa: F405
+    created_at = DateTimeField(default=lambda: datetime.now(UTC))  # noqa: F405
+    updated_at = DateTimeField(default=lambda: datetime.now(UTC))  # noqa: F405
 
     # define table name
     class Meta:
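The model changes above swap `datetime.now(timezone.utc)` for `datetime.now(UTC)`. `datetime.UTC` is an alias of `datetime.timezone.utc` that was added in Python 3.11, so both spellings produce identical timezone-aware timestamps; a quick check:

```python
from datetime import UTC, datetime, timezone

now_alias = datetime.now(UTC)          # Python 3.11+ spelling used in the new models
now_long = datetime.now(timezone.utc)  # older spelling that still works everywhere

# Both are timezone-aware and share the same tzinfo singleton.
assert UTC is timezone.utc
assert now_alias.tzinfo is now_long.tzinfo
```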
@@ -13,7 +13,7 @@ import threading
 import time
 import webbrowser
 from collections.abc import Callable
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 
 import LXMF
 import psutil
@@ -24,31 +24,45 @@ from LXMF import LXMRouter
 from peewee import SqliteDatabase
 from serial.tools import list_ports
 
-import database
-from src.backend.announce_handler import AnnounceHandler
-from src.backend.async_utils import AsyncUtils
-from src.backend.audio_call_manager import AudioCall, AudioCallManager
-from src.backend.colour_utils import ColourUtils
-from src.backend.interface_config_parser import InterfaceConfigParser
-from src.backend.interface_editor import InterfaceEditor
-from src.backend.lxmf_message_fields import (
+from meshchatx import database
+from meshchatx.src.backend.announce_handler import AnnounceHandler
+from meshchatx.src.backend.async_utils import AsyncUtils
+from meshchatx.src.backend.audio_call_manager import AudioCall, AudioCallManager
+from meshchatx.src.backend.colour_utils import ColourUtils
+from meshchatx.src.backend.interface_config_parser import InterfaceConfigParser
+from meshchatx.src.backend.interface_editor import InterfaceEditor
+from meshchatx.src.backend.lxmf_message_fields import (
     LxmfAudioField,
     LxmfFileAttachment,
     LxmfFileAttachmentsField,
     LxmfImageField,
 )
-from src.backend.sideband_commands import SidebandCommands
+from meshchatx.src.backend.sideband_commands import SidebandCommands
+from meshchatx.src.version import __version__ as app_version
 
 
 # NOTE: this is required to be able to pack our app with cxfreeze as an exe, otherwise it can't access bundled assets
 # this returns a file path based on if we are running meshchat.py directly, or if we have packed it as an exe with cxfreeze
 # https://cx-freeze.readthedocs.io/en/latest/faq.html#using-data-files
+# bearer:disable python_lang_path_traversal
 def get_file_path(filename):
     if getattr(sys, "frozen", False):
         datadir = os.path.dirname(sys.executable)
-    else:
-        datadir = os.path.dirname(__file__)
-    return os.path.join(datadir, filename)
+        return os.path.join(datadir, filename)
+
+    # Assets live inside the meshchatx package when installed from a wheel
+    package_dir = os.path.dirname(__file__)
+    package_path = os.path.join(package_dir, filename)
+    if os.path.exists(package_path):
+        return package_path
+
+    # When running from the repository, fall back to the project root
+    repo_root = os.path.dirname(package_dir)
+    repo_path = os.path.join(repo_root, filename)
+    if os.path.exists(repo_path):
+        return repo_path
+
+    return package_path
 
 
 class ReticulumMeshChat:
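A short, hedged illustration of how the reworked `get_file_path` above resolves a bundled asset in the three layouts it now supports (frozen executable, installed wheel, repository checkout); the concrete asset name is only an example:

```python
# Illustration only - the asset name is hypothetical.
index_html = get_file_path("public/index.html")

# Frozen by cx_Freeze:   <directory of the executable>/public/index.html
# Installed from wheel:  <site-packages>/meshchatx/public/index.html (package_path)
# Repository checkout:   falls back to <repo root>/public/index.html when the
#                        file is not found inside the meshchatx package
print(index_html)
```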
@@ -64,7 +78,9 @@ class ReticulumMeshChat:
         # ensure a storage path exists for the loaded identity
         self.storage_dir = storage_dir or os.path.join("storage")
         self.storage_path = os.path.join(
-            self.storage_dir, "identities", identity.hash.hex(),
+            self.storage_dir,
+            "identities",
+            identity.hash.hex(),
         )
         print(f"Using Storage Path: {self.storage_path}")
         os.makedirs(self.storage_path, exist_ok=True)
@@ -177,12 +193,14 @@ class ReticulumMeshChat:
         )
         RNS.Transport.register_announce_handler(
             AnnounceHandler(
-                "lxmf.propagation", self.on_lxmf_propagation_announce_received,
+                "lxmf.propagation",
+                self.on_lxmf_propagation_announce_received,
             ),
         )
         RNS.Transport.register_announce_handler(
             AnnounceHandler(
-                "nomadnetwork.node", self.on_nomadnet_node_announce_received,
+                "nomadnetwork.node",
+                self.on_nomadnet_node_announce_received,
             ),
         )
 
@@ -215,17 +233,16 @@ class ReticulumMeshChat:
 
         # start background thread for auto syncing propagation nodes
         thread = threading.Thread(
-            target=asyncio.run, args=(self.announce_sync_propagation_nodes(),),
+            target=asyncio.run,
+            args=(self.announce_sync_propagation_nodes(),),
         )
         thread.daemon = True
         thread.start()
 
-    # gets app version from package.json
+    # gets app version from the synchronized Python version helper
     @staticmethod
     def get_app_version() -> str:
-        with open(get_file_path("package.json")) as f:
-            package_json = json.load(f)
-            return package_json["version"]
+        return app_version
 
     # automatically announces based on user config
     async def announce_loop(self):
@@ -299,7 +316,7 @@ class ReticulumMeshChat:
             self.message_router.set_outbound_propagation_node(
                 bytes.fromhex(destination_hash),
             )
-        except Exception:  # noqa: E722
+        except Exception:
             # failed to set propagation node, clear it to ensure we don't use an old one by mistake
             self.remove_active_propagation_node()
 
@@ -326,7 +343,7 @@ class ReticulumMeshChat:
                 self.message_router.enable_propagation()
             else:
                 self.message_router.disable_propagation()
-        except Exception:  # noqa: E722
+        except Exception:
             print("failed to enable or disable propagation node")
 
     def _get_reticulum_section(self):
@@ -459,7 +476,8 @@ class ReticulumMeshChat:
         if "image" in fields or "audio" in fields:
             return True
         if "file_attachments" in fields and isinstance(
-            fields["file_attachments"], list,
+            fields["file_attachments"],
+            list,
         ):
             return len(fields["file_attachments"]) > 0
         return False
@@ -473,7 +491,8 @@ class ReticulumMeshChat:
 
         matches = set()
         query = database.LxmfMessage.select(
-            database.LxmfMessage.source_hash, database.LxmfMessage.destination_hash,
+            database.LxmfMessage.source_hash,
+            database.LxmfMessage.destination_hash,
         ).where(
             (
                 (database.LxmfMessage.source_hash == local_hash)
@@ -810,7 +829,9 @@ class ReticulumMeshChat:
             # set optional AutoInterface options
             InterfaceEditor.update_value(interface_details, data, "group_id")
             InterfaceEditor.update_value(
-                interface_details, data, "multicast_address_type",
+                interface_details,
+                data,
+                "multicast_address_type",
             )
             InterfaceEditor.update_value(interface_details, data, "devices")
             InterfaceEditor.update_value(interface_details, data, "ignored_devices")
@@ -1008,10 +1029,14 @@ class ReticulumMeshChat:
             InterfaceEditor.update_value(interface_details, data, "callsign")
             InterfaceEditor.update_value(interface_details, data, "id_interval")
             InterfaceEditor.update_value(
-                interface_details, data, "airtime_limit_long",
+                interface_details,
+                data,
+                "airtime_limit_long",
             )
             InterfaceEditor.update_value(
-                interface_details, data, "airtime_limit_short",
+                interface_details,
+                data,
+                "airtime_limit_short",
             )
 
         # handle RNodeMultiInterface
@@ -1091,10 +1116,10 @@ class ReticulumMeshChat:
             interfaces[interface_name] = interface_details
 
         # handle SerialInterface, KISSInterface, and AX25KISSInterface
-        if (
-            interface_type == "SerialInterface"
-            or interface_type == "KISSInterface"
-            or interface_type == "AX25KISSInterface"
+        if interface_type in (
+            "SerialInterface",
+            "KISSInterface",
+            "AX25KISSInterface",
         ):
             # ensure port provided
             interface_port = data.get("port")
@@ -1116,10 +1141,7 @@ class ReticulumMeshChat:
             InterfaceEditor.update_value(interface_details, data, "stopbits")
 
             # Handle KISS and AX25KISS specific options
-            if (
-                interface_type == "KISSInterface"
-                or interface_type == "AX25KISSInterface"
-            ):
+            if interface_type in ("KISSInterface", "AX25KISSInterface"):
                 # set optional options
                 InterfaceEditor.update_value(interface_details, data, "preamble")
                 InterfaceEditor.update_value(interface_details, data, "txtail")
@@ -1200,7 +1222,7 @@ class ReticulumMeshChat:
         try:
             data = await request.json()
             selected_interface_names = data.get("selected_interface_names")
-        except Exception:  # noqa: E722
+        except Exception:
            # request data was not json, but we don't care
             pass
 
@@ -1366,7 +1388,7 @@ class ReticulumMeshChat:
                     print(e)
             elif msg.type == WSMsgType.ERROR:
                 # ignore errors while handling message
-                print("ws connection error %s" % websocket_response.exception())
+                print(f"ws connection error {websocket_response.exception()}")
 
         # websocket closed
         self.websocket_clients.remove(websocket_response)
@@ -1609,7 +1631,8 @@ class ReticulumMeshChat:
        # initiate audio call
         try:
             audio_call = await self.audio_call_manager.initiate(
-                destination_hash, timeout_seconds,
+                destination_hash,
+                timeout_seconds,
             )
             return web.json_response(
                 {
@@ -1652,7 +1675,7 @@ class ReticulumMeshChat:
             if websocket_response.closed is False:
                 try:
                     AsyncUtils.run_async(websocket_response.send_bytes(data))
-                except Exception:  # noqa: E722
+                except Exception:
                     # ignore errors sending audio packets to websocket
                     pass
 
@@ -1663,7 +1686,7 @@ class ReticulumMeshChat:
                 AsyncUtils.run_async(
                     websocket_response.close(code=WSCloseCode.GOING_AWAY),
                 )
-            except Exception:  # noqa: E722
+            except Exception:
                 # ignore errors closing websocket
                 pass
 
@@ -1688,7 +1711,7 @@ class ReticulumMeshChat:
                     print(e)
             elif msg.type == WSMsgType.ERROR:
                 # ignore errors while handling message
-                print("ws connection error %s" % websocket_response.exception())
+                print(f"ws connection error {websocket_response.exception()}")
 
         # unregister audio packet handler now that the websocket has been closed
         audio_call.register_audio_packet_listener(on_audio_packet)
@@ -1983,21 +2006,27 @@ class ReticulumMeshChat:
             and lxmf_delivery_announce.app_data is not None
         ):
             operator_display_name = self.parse_lxmf_display_name(
-                lxmf_delivery_announce.app_data, None,
+                lxmf_delivery_announce.app_data,
+                None,
             )
         elif (
             nomadnetwork_node_announce is not None
             and nomadnetwork_node_announce.app_data is not None
         ):
-            operator_display_name = ReticulumMeshChat.parse_nomadnetwork_node_display_name(
-                nomadnetwork_node_announce.app_data, None,
+            operator_display_name = (
+                ReticulumMeshChat.parse_nomadnetwork_node_display_name(
+                    nomadnetwork_node_announce.app_data,
+                    None,
+                )
             )
 
         # parse app_data so we can see if propagation is enabled or disabled for this node
         is_propagation_enabled = None
         per_transfer_limit = None
-        propagation_node_data = ReticulumMeshChat.parse_lxmf_propagation_node_app_data(
-            announce.app_data,
+        propagation_node_data = (
+            ReticulumMeshChat.parse_lxmf_propagation_node_app_data(
+                announce.app_data,
+            )
         )
         if propagation_node_data is not None:
             is_propagation_enabled = propagation_node_data["enabled"]
|
is_propagation_enabled = propagation_node_data["enabled"]
|
||||||
@@ -2032,9 +2061,7 @@ class ReticulumMeshChat:
|
|||||||
|
|
||||||
# check if user wants to request the path from the network right now
|
# check if user wants to request the path from the network right now
|
||||||
request_query_param = request.query.get("request", "false")
|
request_query_param = request.query.get("request", "false")
|
||||||
should_request_now = (
|
should_request_now = request_query_param in ("true", "1")
|
||||||
request_query_param == "true" or request_query_param == "1"
|
|
||||||
)
|
|
||||||
if should_request_now:
|
if should_request_now:
|
||||||
# determine how long we should wait for a path response
|
# determine how long we should wait for a path response
|
||||||
timeout_seconds = int(request.query.get("timeout", 15))
|
timeout_seconds = int(request.query.get("timeout", 15))
|
||||||
@@ -2312,7 +2339,8 @@ class ReticulumMeshChat:
|
|||||||
# update display name if provided
|
# update display name if provided
|
||||||
if len(display_name) > 0:
|
if len(display_name) > 0:
|
||||||
self.db_upsert_custom_destination_display_name(
|
self.db_upsert_custom_destination_display_name(
|
||||||
destination_hash, display_name,
|
destination_hash,
|
||||||
|
display_name,
|
||||||
)
|
)
|
||||||
return web.json_response(
|
return web.json_response(
|
||||||
{
|
{
|
||||||
@@ -2756,7 +2784,9 @@ class ReticulumMeshChat:
|
|||||||
other_user_hash,
|
other_user_hash,
|
||||||
),
|
),
|
||||||
"destination_hash": other_user_hash,
|
"destination_hash": other_user_hash,
|
||||||
"is_unread": ReticulumMeshChat.is_lxmf_conversation_unread(other_user_hash),
|
"is_unread": ReticulumMeshChat.is_lxmf_conversation_unread(
|
||||||
|
other_user_hash,
|
||||||
|
),
|
||||||
"failed_messages_count": ReticulumMeshChat.lxmf_conversation_failed_messages_count(
|
"failed_messages_count": ReticulumMeshChat.lxmf_conversation_failed_messages_count(
|
||||||
other_user_hash,
|
other_user_hash,
|
||||||
),
|
),
|
||||||
@@ -2878,7 +2908,8 @@ class ReticulumMeshChat:
|
|||||||
destination_hash = data.get("destination_hash", "")
|
destination_hash = data.get("destination_hash", "")
|
||||||
if not destination_hash or len(destination_hash) != 32:
|
if not destination_hash or len(destination_hash) != 32:
|
||||||
return web.json_response(
|
return web.json_response(
|
||||||
{"error": "Invalid destination hash"}, status=400,
|
{"error": "Invalid destination hash"},
|
||||||
|
status=400,
|
||||||
)
|
)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@@ -2886,12 +2917,13 @@ class ReticulumMeshChat:
|
|||||||
# drop any existing paths to this destination
|
# drop any existing paths to this destination
|
||||||
try:
|
try:
|
||||||
RNS.Transport.drop_path(bytes.fromhex(destination_hash))
|
RNS.Transport.drop_path(bytes.fromhex(destination_hash))
|
||||||
except Exception: # noqa: E722
|
except Exception:
|
||||||
pass
|
pass
|
||||||
return web.json_response({"message": "ok"})
|
return web.json_response({"message": "ok"})
|
||||||
except Exception: # noqa: E722
|
except Exception:
|
||||||
return web.json_response(
|
return web.json_response(
|
||||||
{"error": "Destination already blocked"}, status=400,
|
{"error": "Destination already blocked"},
|
||||||
|
status=400,
|
||||||
)
|
)
|
||||||
|
|
||||||
# remove blocked destination
|
# remove blocked destination
|
||||||
@@ -2900,7 +2932,8 @@ class ReticulumMeshChat:
|
|||||||
destination_hash = request.match_info.get("destination_hash", "")
|
destination_hash = request.match_info.get("destination_hash", "")
|
||||||
if not destination_hash or len(destination_hash) != 32:
|
if not destination_hash or len(destination_hash) != 32:
|
||||||
return web.json_response(
|
return web.json_response(
|
||||||
{"error": "Invalid destination hash"}, status=400,
|
{"error": "Invalid destination hash"},
|
||||||
|
status=400,
|
||||||
)
|
)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@@ -2911,7 +2944,8 @@ class ReticulumMeshChat:
|
|||||||
blocked.delete_instance()
|
blocked.delete_instance()
|
||||||
return web.json_response({"message": "ok"})
|
return web.json_response({"message": "ok"})
|
||||||
return web.json_response(
|
return web.json_response(
|
||||||
{"error": "Destination not blocked"}, status=404,
|
{"error": "Destination not blocked"},
|
||||||
|
status=404,
|
||||||
)
|
)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
return web.json_response({"error": str(e)}, status=500)
|
return web.json_response({"error": str(e)}, status=500)
|
||||||
@@ -2945,9 +2979,10 @@ class ReticulumMeshChat:
|
|||||||
try:
|
try:
|
||||||
database.SpamKeyword.create(keyword=keyword)
|
database.SpamKeyword.create(keyword=keyword)
|
||||||
return web.json_response({"message": "ok"})
|
return web.json_response({"message": "ok"})
|
||||||
except Exception: # noqa: E722
|
except Exception:
|
||||||
return web.json_response(
|
return web.json_response(
|
||||||
{"error": "Keyword already exists"}, status=400,
|
{"error": "Keyword already exists"},
|
||||||
|
status=400,
|
||||||
)
|
)
|
||||||
|
|
||||||
# remove spam keyword
|
# remove spam keyword
|
||||||
@@ -2956,7 +2991,7 @@ class ReticulumMeshChat:
|
|||||||
keyword_id = request.match_info.get("keyword_id", "")
|
keyword_id = request.match_info.get("keyword_id", "")
|
||||||
try:
|
try:
|
||||||
keyword_id = int(keyword_id)
|
keyword_id = int(keyword_id)
|
||||||
except (ValueError, TypeError): # noqa: E722
|
except (ValueError, TypeError):
|
||||||
return web.json_response({"error": "Invalid keyword ID"}, status=400)
|
return web.json_response({"error": "Invalid keyword ID"}, status=400)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@@ -2983,13 +3018,38 @@ class ReticulumMeshChat:
|
|||||||
)
|
)
|
||||||
if message:
|
if message:
|
||||||
message.is_spam = is_spam
|
message.is_spam = is_spam
|
||||||
message.updated_at = datetime.now(timezone.utc)
|
message.updated_at = datetime.now(UTC)
|
||||||
message.save()
|
message.save()
|
||||||
return web.json_response({"message": "ok"})
|
return web.json_response({"message": "ok"})
|
||||||
return web.json_response({"error": "Message not found"}, status=404)
|
return web.json_response({"error": "Message not found"}, status=404)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
return web.json_response({"error": str(e)}, status=500)
|
return web.json_response({"error": str(e)}, status=500)
|
||||||
|
|
||||||
|
# security headers middleware
|
||||||
|
@web.middleware
|
||||||
|
async def security_middleware(request, handler):
|
||||||
|
response = await handler(request)
|
||||||
|
# Add security headers to all responses
|
||||||
|
response.headers["X-Content-Type-Options"] = "nosniff"
|
||||||
|
response.headers["X-Frame-Options"] = "DENY"
|
||||||
|
response.headers["X-XSS-Protection"] = "1; mode=block"
|
||||||
|
response.headers["Referrer-Policy"] = "strict-origin-when-cross-origin"
|
||||||
|
# CSP: allow localhost for development and Electron, websockets, and blob URLs
|
||||||
|
csp = (
|
||||||
|
"default-src 'self'; "
|
||||||
|
"script-src 'self' 'unsafe-inline' 'unsafe-eval'; "
|
||||||
|
"style-src 'self' 'unsafe-inline'; "
|
||||||
|
"img-src 'self' data: blob:; "
|
||||||
|
"font-src 'self' data:; "
|
||||||
|
"connect-src 'self' ws://localhost:* wss://localhost:* blob:; "
|
||||||
|
"media-src 'self' blob:; "
|
||||||
|
"worker-src 'self' blob:; "
|
||||||
|
"object-src 'none'; "
|
||||||
|
"base-uri 'self';"
|
||||||
|
)
|
||||||
|
response.headers["Content-Security-Policy"] = csp
|
||||||
|
return response
|
||||||
|
|
||||||
# called when web app has started
|
# called when web app has started
|
||||||
async def on_startup(app):
|
async def on_startup(app):
|
||||||
# remember main event loop
|
# remember main event loop
|
||||||
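
The security_middleware introduced in the hunk above follows aiohttp's standard middleware pattern: a coroutine decorated with @web.middleware that awaits the wrapped handler and then decorates its response. As a rough, self-contained sketch of that pattern (the route, handler, and port below are placeholders for illustration, not code from this changeset):

from aiohttp import web


@web.middleware
async def security_middleware(request, handler):
    # run the wrapped handler first, then attach headers to its response
    response = await handler(request)
    response.headers["X-Content-Type-Options"] = "nosniff"
    response.headers["X-Frame-Options"] = "DENY"
    return response


async def index(request):
    # placeholder handler used only for this sketch
    return web.Response(text="ok")


app = web.Application(middlewares=[security_middleware])
app.add_routes([web.get("/", index)])

if __name__ == "__main__":
    # headers can then be inspected with e.g. `curl -I http://127.0.0.1:8080/`
    web.run_app(app, host="127.0.0.1", port=8080)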
@@ -2999,12 +3059,13 @@ class ReticulumMeshChat:
 if launch_browser:
 try:
 webbrowser.open(f"http://127.0.0.1:{port}")
-except Exception: # noqa: E722
+except Exception:
 print("failed to launch web browser")

 # create and run web app
 app = web.Application(
 client_max_size=1024 * 1024 * 50,
+middlewares=[security_middleware],
 ) # allow uploading files up to 50mb
 app.add_routes(routes)
 app.add_routes(
@@ -3106,7 +3167,8 @@ class ReticulumMeshChat:
 self.config.lxmf_inbound_stamp_cost.set(value)
 # update the inbound stamp cost on the delivery destination
 self.message_router.set_inbound_stamp_cost(
-self.local_lxmf_destination.hash, value,
+self.local_lxmf_destination.hash,
+value,
 )
 # re-announce to update the stamp cost in announces
 self.local_lxmf_destination.display_name = self.config.display_name.get()
@@ -3504,7 +3566,7 @@ class ReticulumMeshChat:
 for websocket_client in self.websocket_clients:
 try:
 await websocket_client.send_str(data)
-except Exception: # noqa: E722
+except Exception:
 # do nothing if failed to broadcast to a specific websocket client
 pass

@@ -3571,7 +3633,9 @@ class ReticulumMeshChat:
 remote_destination_hash_hex = None
 if remote_identity is not None:
 remote_destination_hash = RNS.Destination.hash(
-remote_identity, "call", "audio",
+remote_identity,
+"call",
+"audio",
 )
 remote_destination_hash_hex = remote_destination_hash.hex()

@@ -3674,7 +3738,9 @@ class ReticulumMeshChat:
 "method": self.convert_lxmf_method_to_string(lxmf_message),
 "delivery_attempts": lxmf_message.delivery_attempts,
 "next_delivery_attempt_at": getattr(
-lxmf_message, "next_delivery_attempt", None,
+lxmf_message,
+"next_delivery_attempt",
+None,
 ), # attribute may not exist yet
 "title": lxmf_message.title.decode("utf-8"),
 "content": lxmf_message.content.decode("utf-8"),
@@ -3757,7 +3823,9 @@ class ReticulumMeshChat:
 if announce.aspect == "lxmf.delivery":
 display_name = self.parse_lxmf_display_name(announce.app_data)
 elif announce.aspect == "nomadnetwork.node":
-display_name = ReticulumMeshChat.parse_nomadnetwork_node_display_name(announce.app_data)
+display_name = ReticulumMeshChat.parse_nomadnetwork_node_display_name(
+announce.app_data,
+)

 # find lxmf user icon from database
 lxmf_user_icon = None
@@ -3851,13 +3919,14 @@ class ReticulumMeshChat:
 "icon_name": icon_name,
 "foreground_colour": foreground_colour,
 "background_colour": background_colour,
-"updated_at": datetime.now(timezone.utc),
+"updated_at": datetime.now(UTC),
 }

 # upsert to database
 query = database.LxmfUserIcon.insert(data)
 query = query.on_conflict(
-conflict_target=[database.LxmfUserIcon.destination_hash], update=data,
+conflict_target=[database.LxmfUserIcon.destination_hash],
+update=data,
 )
 query.execute()

@@ -3869,7 +3938,7 @@ class ReticulumMeshChat:
 database.BlockedDestination.destination_hash == destination_hash,
 )
 return blocked is not None
-except Exception: # noqa: E722
+except Exception:
 return False

 # check if message content matches spam keywords
@@ -3882,7 +3951,7 @@ class ReticulumMeshChat:
 if keyword.keyword.lower() in search_text:
 return True
 return False
-except Exception: # noqa: E722
+except Exception:
 return False

 # check if message has attachments and should be rejected
@@ -3896,7 +3965,7 @@ class ReticulumMeshChat:
 if LXMF.FIELD_AUDIO in lxmf_fields:
 return True
 return False
-except Exception: # noqa: E722
+except Exception:
 return False

 # handle an lxmf delivery from reticulum
@@ -4046,7 +4115,9 @@ class ReticulumMeshChat:

 # upserts the provided lxmf message to the database
 def db_upsert_lxmf_message(
-self, lxmf_message: LXMF.LXMessage, is_spam: bool = False,
+self,
+lxmf_message: LXMF.LXMessage,
+is_spam: bool = False,
 ):
 # convert lxmf message to dict
 lxmf_message_dict = self.convert_lxmf_message_to_dict(lxmf_message)
@@ -4070,13 +4141,14 @@ class ReticulumMeshChat:
 "snr": lxmf_message_dict["snr"],
 "quality": lxmf_message_dict["quality"],
 "is_spam": is_spam,
-"updated_at": datetime.now(timezone.utc),
+"updated_at": datetime.now(UTC),
 }

 # upsert to database
 query = database.LxmfMessage.insert(data)
 query = query.on_conflict(
-conflict_target=[database.LxmfMessage.hash], update=data,
+conflict_target=[database.LxmfMessage.hash],
+update=data,
 )
 query.execute()

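The insert-plus-on_conflict pairs in these hunks are peewee's usual upsert idiom: try to insert the row, and if the conflict_target column already holds that value, update the existing row with the same data instead. A minimal sketch of the idiom with a hypothetical model (not one from this codebase), assuming SQLite 3.24+ for ON CONFLICT DO UPDATE support:

from datetime import UTC, datetime

from peewee import CharField, DateTimeField, Model, SqliteDatabase

db = SqliteDatabase(":memory:")


class Contact(Model):
    # hypothetical model used only to illustrate the upsert pattern
    destination_hash = CharField(unique=True)
    display_name = CharField()
    updated_at = DateTimeField()

    class Meta:
        database = db


db.connect()
db.create_tables([Contact])

data = {
    "destination_hash": "abc123",
    "display_name": "Example Peer",
    "updated_at": datetime.now(UTC),
}

# insert a new row, or update the existing one when destination_hash already exists
query = Contact.insert(data).on_conflict(
    conflict_target=[Contact.destination_hash],
    update=data,
)
query.execute()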
@@ -4105,7 +4177,7 @@ class ReticulumMeshChat:
 "rssi": rssi,
 "snr": snr,
 "quality": quality,
-"updated_at": datetime.now(timezone.utc),
+"updated_at": datetime.now(UTC),
 }

 # only set app data if provided, as we don't want to wipe existing data when we request keys from the network
@@ -4116,20 +4188,22 @@ class ReticulumMeshChat:
 # upsert to database
 query = database.Announce.insert(data)
 query = query.on_conflict(
-conflict_target=[database.Announce.destination_hash], update=data,
+conflict_target=[database.Announce.destination_hash],
+update=data,
 )
 query.execute()

 # upserts a custom destination display name to the database
 @staticmethod
 def db_upsert_custom_destination_display_name(
-destination_hash: str, display_name: str,
+destination_hash: str,
+display_name: str,
 ):
 # prepare data to insert or update
 data = {
 "destination_hash": destination_hash,
 "display_name": display_name,
-"updated_at": datetime.now(timezone.utc),
+"updated_at": datetime.now(UTC),
 }

 # upsert to database
@@ -4143,14 +4217,16 @@ class ReticulumMeshChat:
 # upserts a custom destination display name to the database
 @staticmethod
 def db_upsert_favourite(
-destination_hash: str, display_name: str, aspect: str,
+destination_hash: str,
+display_name: str,
+aspect: str,
 ):
 # prepare data to insert or update
 data = {
 "destination_hash": destination_hash,
 "display_name": display_name,
 "aspect": aspect,
-"updated_at": datetime.now(timezone.utc),
+"updated_at": datetime.now(UTC),
 }

 # upsert to database
@@ -4167,8 +4243,8 @@ class ReticulumMeshChat:
 # prepare data to insert or update
 data = {
 "destination_hash": destination_hash,
-"last_read_at": datetime.now(timezone.utc),
-"updated_at": datetime.now(timezone.utc),
+"last_read_at": datetime.now(UTC),
+"updated_at": datetime.now(UTC),
 }

 # upsert to database
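
The recurring datetime.now(timezone.utc) to datetime.now(UTC) substitutions rely on datetime.UTC, which Python 3.11 added as an alias for datetime.timezone.utc; both spellings produce the same timezone-aware timestamp, assuming the module's imports were updated to pull in UTC (the import change is not visible in these hunks):

from datetime import UTC, datetime, timezone

# UTC is an alias for timezone.utc on Python 3.11+, so both calls are equivalent
assert UTC is timezone.utc
print(datetime.now(UTC).isoformat())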
@@ -4397,7 +4473,11 @@ class ReticulumMeshChat:

 # upsert announce to database
 self.db_upsert_announce(
-announced_identity, destination_hash, aspect, app_data, announce_packet_hash,
+announced_identity,
+destination_hash,
+aspect,
+app_data,
+announce_packet_hash,
 )

 # find announce from database
@@ -4448,7 +4528,11 @@ class ReticulumMeshChat:

 # upsert announce to database
 self.db_upsert_announce(
-announced_identity, destination_hash, aspect, app_data, announce_packet_hash,
+announced_identity,
+destination_hash,
+aspect,
+app_data,
+announce_packet_hash,
 )

 # find announce from database
@@ -4498,7 +4582,11 @@ class ReticulumMeshChat:

 # upsert announce to database
 self.db_upsert_announce(
-announced_identity, destination_hash, aspect, app_data, announce_packet_hash,
+announced_identity,
+destination_hash,
+aspect,
+app_data,
+announce_packet_hash,
 )

 # find announce from database
@@ -4626,7 +4714,11 @@ class ReticulumMeshChat:

 # upsert announce to database
 self.db_upsert_announce(
-announced_identity, destination_hash, aspect, app_data, announce_packet_hash,
+announced_identity,
+destination_hash,
+aspect,
+app_data,
+announce_packet_hash,
 )

 # find announce from database
@@ -4676,7 +4768,9 @@ class ReticulumMeshChat:
 # if app data is available in database, it should be base64 encoded text that was announced
 # we will return the parsed lxmf display name as the conversation name
 if lxmf_announce is not None and lxmf_announce.app_data is not None:
-return ReticulumMeshChat.parse_lxmf_display_name(app_data_base64=lxmf_announce.app_data)
+return ReticulumMeshChat.parse_lxmf_display_name(
+app_data_base64=lxmf_announce.app_data,
+)

 # announce did not have app data, so provide a fallback name
 return "Anonymous Peer"
@@ -4684,14 +4778,15 @@ class ReticulumMeshChat:
 # reads the lxmf display name from the provided base64 app data
 @staticmethod
 def parse_lxmf_display_name(
-app_data_base64: str, default_value: str | None = "Anonymous Peer",
+app_data_base64: str,
+default_value: str | None = "Anonymous Peer",
 ):
 try:
 app_data_bytes = base64.b64decode(app_data_base64)
 display_name = LXMF.display_name_from_app_data(app_data_bytes)
 if display_name is not None:
 return display_name
-except Exception: # noqa: E722
+except Exception:
 pass

 return default_value
@@ -4702,18 +4797,19 @@ class ReticulumMeshChat:
 try:
 app_data_bytes = base64.b64decode(app_data_base64)
 return LXMF.stamp_cost_from_app_data(app_data_bytes)
-except Exception: # noqa: E722
+except Exception:
 return None

 # reads the nomadnetwork node display name from the provided base64 app data
 @staticmethod
 def parse_nomadnetwork_node_display_name(
-app_data_base64: str, default_value: str | None = "Anonymous Node",
+app_data_base64: str,
+default_value: str | None = "Anonymous Node",
 ):
 try:
 app_data_bytes = base64.b64decode(app_data_base64)
 return app_data_bytes.decode("utf-8")
-except Exception: # noqa: E722
+except Exception:
 return default_value

 # parses lxmf propagation node app data
@@ -4727,7 +4823,7 @@ class ReticulumMeshChat:
 "timebase": int(data[1]),
 "per_transfer_limit": int(data[3]),
 }
-except Exception: # noqa: E722
+except Exception:
 return None

 # returns true if the conversation has messages newer than the last read at timestamp
@@ -4760,10 +4856,12 @@ class ReticulumMeshChat:

 # conversation is unread if last read at is before the latest incoming message creation date
 conversation_last_read_at = datetime.strptime(
-lxmf_conversation_read_state.last_read_at, "%Y-%m-%d %H:%M:%S.%f%z",
+lxmf_conversation_read_state.last_read_at,
+"%Y-%m-%d %H:%M:%S.%f%z",
 )
 conversation_latest_message_at = datetime.strptime(
-latest_incoming_lxmf_message.created_at, "%Y-%m-%d %H:%M:%S.%f%z",
+latest_incoming_lxmf_message.created_at,
+"%Y-%m-%d %H:%M:%S.%f%z",
 )
 return conversation_last_read_at < conversation_latest_message_at

@@ -4813,7 +4911,7 @@ class Config:
 data = {
 "key": key,
 "value": value,
-"updated_at": datetime.now(timezone.utc),
+"updated_at": datetime.now(UTC),
 }

 # upsert to database
@@ -4882,44 +4980,57 @@ class Config:
 last_announced_at = IntConfig("last_announced_at", None)
 theme = StringConfig("theme", "light")
 auto_resend_failed_messages_when_announce_received = BoolConfig(
-"auto_resend_failed_messages_when_announce_received", True,
+"auto_resend_failed_messages_when_announce_received",
+True,
 )
 allow_auto_resending_failed_messages_with_attachments = BoolConfig(
-"allow_auto_resending_failed_messages_with_attachments", False,
+"allow_auto_resending_failed_messages_with_attachments",
+False,
 )
 auto_send_failed_messages_to_propagation_node = BoolConfig(
-"auto_send_failed_messages_to_propagation_node", False,
+"auto_send_failed_messages_to_propagation_node",
+False,
 )
 show_suggested_community_interfaces = BoolConfig(
-"show_suggested_community_interfaces", True,
+"show_suggested_community_interfaces",
+True,
 )
 lxmf_delivery_transfer_limit_in_bytes = IntConfig(
-"lxmf_delivery_transfer_limit_in_bytes", 1000 * 1000 * 10,
+"lxmf_delivery_transfer_limit_in_bytes",
+1000 * 1000 * 10,
 ) # 10MB
 lxmf_preferred_propagation_node_destination_hash = StringConfig(
-"lxmf_preferred_propagation_node_destination_hash", None,
+"lxmf_preferred_propagation_node_destination_hash",
+None,
 )
 lxmf_preferred_propagation_node_auto_sync_interval_seconds = IntConfig(
-"lxmf_preferred_propagation_node_auto_sync_interval_seconds", 0,
+"lxmf_preferred_propagation_node_auto_sync_interval_seconds",
+0,
 )
 lxmf_preferred_propagation_node_last_synced_at = IntConfig(
-"lxmf_preferred_propagation_node_last_synced_at", None,
+"lxmf_preferred_propagation_node_last_synced_at",
+None,
 )
 lxmf_local_propagation_node_enabled = BoolConfig(
-"lxmf_local_propagation_node_enabled", False,
+"lxmf_local_propagation_node_enabled",
+False,
 )
 lxmf_user_icon_name = StringConfig("lxmf_user_icon_name", None)
 lxmf_user_icon_foreground_colour = StringConfig(
-"lxmf_user_icon_foreground_colour", None,
+"lxmf_user_icon_foreground_colour",
+None,
 )
 lxmf_user_icon_background_colour = StringConfig(
-"lxmf_user_icon_background_colour", None,
+"lxmf_user_icon_background_colour",
+None,
 )
 lxmf_inbound_stamp_cost = IntConfig(
-"lxmf_inbound_stamp_cost", 8,
+"lxmf_inbound_stamp_cost",
+8,
 ) # for direct delivery messages
 lxmf_propagation_node_stamp_cost = IntConfig(
-"lxmf_propagation_node_stamp_cost", 16,
+"lxmf_propagation_node_stamp_cost",
+16,
 ) # for propagation node messages


@@ -4944,8 +5055,8 @@ class NomadnetDownloader:
 self.path = path
 self.data = data
 self.timeout = timeout
-self.on_download_success = on_download_success
-self.on_download_failure = on_download_failure
+self._download_success_callback = on_download_success
+self._download_failure_callback = on_download_failure
 self.on_progress_update = on_progress_update
 self.request_receipt = None
 self.is_cancelled = False
@@ -4959,22 +5070,24 @@ class NomadnetDownloader:
 if self.request_receipt is not None:
 try:
 self.request_receipt.cancel()
-except Exception: # noqa: E722
+except Exception:
 pass

 # clean up the link if we created it
 if self.link is not None:
 try:
 self.link.teardown()
-except Exception: # noqa: E722
+except Exception:
 pass

 # notify that download was cancelled
-self.on_download_failure("cancelled")
+self._download_failure_callback("cancelled")

 # setup link to destination and request download
 async def download(
-self, path_lookup_timeout: int = 15, link_establishment_timeout: int = 15,
+self,
+path_lookup_timeout: int = 15,
+link_establishment_timeout: int = 15,
 ):
 # check if cancelled before starting
 if self.is_cancelled:
@@ -5008,7 +5121,7 @@ class NomadnetDownloader:

 # if we still don't have a path, we can't establish a link, so bail out
 if not RNS.Transport.has_path(self.destination_hash):
-self.on_download_failure("Could not find path to destination.")
+self._download_failure_callback("Could not find path to destination.")
 return

 # check if cancelled before establishing link
@@ -5044,7 +5157,7 @@ class NomadnetDownloader:

 # if we still haven't established a link, bail out
 if link.status is not RNS.Link.ACTIVE:
-self.on_download_failure("Could not establish link to destination.")
+self._download_failure_callback("Could not establish link to destination.")

 # link to destination was established, we should now request the download
 def link_established(self, link):
@@ -5067,11 +5180,11 @@ class NomadnetDownloader:

 # handle successful download
 def on_response(self, request_receipt: RNS.RequestReceipt):
-self.on_download_success(request_receipt)
+self._download_success_callback(request_receipt)

 # handle failure
 def on_failed(self, request_receipt=None):
-self.on_download_failure("request_failed")
+self._download_failure_callback("request_failed")

 # handle download progress
 def on_progress(self, request_receipt):
@@ -5175,7 +5288,7 @@ class NomadnetFileDownloader(NomadnetDownloader):
 file_name: str = response[0]
 file_data: bytes = response[1]
 self.on_file_download_success(file_name, file_data)
-except Exception: # noqa: E722
+except Exception:
 self.on_download_failure("unsupported_response")

 # page download failed, send error to provided callback
@@ -5241,7 +5354,8 @@ def main():
 help="Throws an exception. Used for testing the electron error dialog",
 )
 parser.add_argument(
-"args", nargs=argparse.REMAINDER,
+"args",
+nargs=argparse.REMAINDER,
 ) # allow unknown command line args
 args = parser.parse_args()

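nargs=argparse.REMAINDER makes the args positional swallow every remaining token, flags included, once positional parsing begins, which is how the CLI tolerates extra arguments it does not recognise. A standalone illustration (the option names and values below are made up for this sketch):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--port", type=int, default=8000)
# collect everything from the first positional token onward, flags included
parser.add_argument("args", nargs=argparse.REMAINDER)

ns = parser.parse_args(["--port", "9000", "extra", "--unknown-flag", "value"])
print(ns.port)  # 9000
print(ns.args)  # ['extra', '--unknown-flag', 'value']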
@@ -5313,7 +5427,9 @@ def main():

 # init app
 reticulum_meshchat = ReticulumMeshChat(
-identity, args.storage_dir, args.reticulum_config_dir,
+identity,
+args.storage_dir,
+args.reticulum_config_dir,
 )
 reticulum_meshchat.run(args.host, args.port, launch_browser=args.headless is False)

1 meshchatx/src/backend/__init__.py Normal file
@@ -0,0 +1 @@
+"""Backend utilities shared by the Reticulum MeshChatX CLI."""
@@ -7,7 +7,11 @@ class AnnounceHandler:

 # we will just pass the received announce back to the provided callback
 def received_announce(
-self, destination_hash, announced_identity, app_data, announce_packet_hash,
+self,
+destination_hash,
+announced_identity,
+app_data,
+announce_packet_hash,
 ):
 try:
 # handle received announce
@@ -18,6 +22,6 @@ class AnnounceHandler:
 app_data,
 announce_packet_hash,
 )
-except Exception: # noqa: E722
+except Exception:
 # ignore failure to handle received announce
 pass
@@ -146,7 +146,9 @@ class AudioCallManager:

 # attempts to initiate a call to the provided destination and returns the link hash on success
 async def initiate(
-self, destination_hash: bytes, timeout_seconds: int = 15,
+self,
+destination_hash: bytes,
+timeout_seconds: int = 15,
 ) -> AudioCall:
 # determine when to timeout
 timeout_after_seconds = time.time() + timeout_seconds
@@ -240,7 +242,7 @@ class AudioCallReceiver:
 )
 link.teardown()
 return
-except Exception: # noqa: E722
+except Exception:
 # if we can't get identity yet, we'll check later
 pass

@@ -71,7 +71,8 @@ class WebsocketClientInterface(Interface):
 self.websocket.send(data)
 except Exception as e:
 RNS.log(
-f"Exception occurred while transmitting via {self!s}", RNS.LOG_ERROR,
+f"Exception occurred while transmitting via {self!s}",
+RNS.LOG_ERROR,
 )
 RNS.log(f"The contained exception was: {e!s}", RNS.LOG_ERROR)
 return
@@ -93,7 +94,9 @@ class WebsocketClientInterface(Interface):
 try:
 RNS.log(f"Connecting to Websocket for {self!s}...", RNS.LOG_DEBUG)
 self.websocket = connect(
-f"{self.target_url}", max_size=None, compression=None,
+f"{self.target_url}",
+max_size=None,
+compression=None,
 )
 RNS.log(f"Connected to Websocket for {self!s}", RNS.LOG_DEBUG)
 self.read_loop()
@@ -3,9 +3,8 @@ import time

 import RNS
 from RNS.Interfaces.Interface import Interface
-from websockets.sync.server import Server, ServerConnection, serve

 from src.backend.interfaces.WebsocketClientInterface import WebsocketClientInterface
+from websockets.sync.server import Server, ServerConnection, serve


 class WebsocketServerInterface(Interface):
1 meshchatx/src/backend/interfaces/__init__.py Normal file
@@ -0,0 +1 @@
+"""Shared transport interfaces for MeshChatX."""
[10 screenshot image files also appear in this comparison as before/after previews only; their sizes are unchanged (109 KiB, 80 KiB, 4.6 KiB, 4.4 KiB, 8.2 KiB, 8.1 KiB, 8.0 KiB, 8.1 KiB, 85 KiB, 12 KiB).]