1.0.0
@@ -1,20 +1,20 @@
name: Bearer PR Check

on:
  pull_request:
    types: [opened, synchronize, reopened]

permissions:
  security-events: write

jobs:
  rule_check:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Bearer
        uses: bearer/bearer-action@828eeb928ce2f4a7ca5ed57fb8b59508cb8c79bc # v2
        with:
          diff: true
@@ -1,343 +1,343 @@
name: Build and Release

on:
  push:
    tags:
      - "*"
  workflow_dispatch:
    inputs:
      build_windows:
        description: "Build Windows"
        required: false
        default: "true"
        type: boolean
      build_mac:
        description: "Build macOS"
        required: false
        default: "true"
        type: boolean
      build_linux:
        description: "Build Linux"
        required: false
        default: "true"
        type: boolean
      build_docker:
        description: "Build Docker"
        required: false
        default: "true"
        type: boolean

permissions:
  contents: read

jobs:
  build_frontend:
    runs-on: ubuntu-latest
    permissions:
      contents: read
    steps:
      - name: Clone Repo
        uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e # v1

      - name: Install NodeJS
        uses: actions/setup-node@f1f314fca9dfce2769ece7d933488f076716723e # v1
        with:
          node-version: 22

      - name: Install Python
        uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5
        with:
          python-version: "3.12"

      - name: Sync versions
        run: python scripts/sync_version.py

      - name: Install pnpm
        uses: pnpm/action-setup@v4
        with:
          version: 9

      - name: Install NodeJS Deps
        run: pnpm install

      - name: Build Frontend
        run: pnpm run build-frontend

      - name: Upload frontend artifact
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
        with:
          name: frontend-build
          path: meshchatx/public
          if-no-files-found: error

  build_desktop:
    name: Build Desktop (${{ matrix.name }})
    needs: build_frontend
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        include:
          - name: windows
            os: windows-latest
            node: 22
            python: "3.13"
            release_artifacts: "dist/*-win-installer.exe,dist/*-win-portable.exe"
            build_input: build_windows
            dist_script: dist-prebuilt
            variant: standard
            electron_version: "39.2.4"
          - name: mac
            os: macos-14
            node: 22
            python: "3.13"
            release_artifacts: "dist/*-mac-*.dmg"
            build_input: build_mac
            dist_script: dist:mac-universal
            variant: standard
            electron_version: "39.2.4"
          - name: linux
            os: ubuntu-latest
            node: 22
            python: "3.13"
            release_artifacts: "dist/*-linux.AppImage,dist/*-linux.deb,python-dist/*.whl"
            build_input: build_linux
            dist_script: dist-prebuilt
            variant: standard
            electron_version: "39.2.4"
          - name: windows-legacy
            os: windows-latest
            node: 18
            python: "3.11"
            release_artifacts: "dist/*-win-installer*.exe,dist/*-win-portable*.exe"
            build_input: build_windows
            dist_script: dist-prebuilt
            variant: legacy
            electron_version: "30.0.8"
          - name: linux-legacy
            os: ubuntu-latest
            node: 18
            python: "3.11"
            release_artifacts: "dist/*-linux*.AppImage,dist/*-linux*.deb,python-dist/*.whl"
            build_input: build_linux
            dist_script: dist-prebuilt
            variant: legacy
            electron_version: "30.0.8"
    permissions:
      contents: write
    steps:
      - name: Clone Repo
        if: |
          github.event_name == 'push' ||
          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
        uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e # v1

      - name: Set legacy Electron version
        if: |
          matrix.variant == 'legacy' &&
          (github.event_name == 'push' ||
          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true))
        shell: bash
        run: |
          node -e "const fs=require('fs');const pkg=require('./package.json');pkg.devDependencies.electron='${{ matrix.electron_version }}';fs.writeFileSync('package.json', JSON.stringify(pkg,null,2));"
          if [ -f pnpm-lock.yaml ]; then rm pnpm-lock.yaml; fi

      - name: Install NodeJS
        if: |
          github.event_name == 'push' ||
          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
        uses: actions/setup-node@f1f314fca9dfce2769ece7d933488f076716723e # v1
        with:
          node-version: ${{ matrix.node }}

      - name: Install Python
        if: |
          github.event_name == 'push' ||
          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
        uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5
        with:
          python-version: ${{ matrix.python }}

      - name: Install Poetry
        if: |
          github.event_name == 'push' ||
          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
        run: python -m pip install --upgrade pip poetry

      - name: Sync versions
        if: |
          github.event_name == 'push' ||
          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
        run: python scripts/sync_version.py

      - name: Install Python Deps
        if: |
          github.event_name == 'push' ||
          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
        run: python -m poetry install

      - name: Install pnpm
        if: |
          github.event_name == 'push' ||
          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
        uses: pnpm/action-setup@v4
        with:
          version: 9

      - name: Install NodeJS Deps
        if: |
          github.event_name == 'push' ||
          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
        run: pnpm install

      - name: Prepare frontend directory
        if: |
          github.event_name == 'push' ||
          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
        run: python scripts/prepare_frontend_dir.py

      - name: Download frontend artifact
        if: |
          github.event_name == 'push' ||
          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4
        with:
          name: frontend-build
          path: meshchatx/public

      - name: Install patchelf
        if: |
          startsWith(matrix.name, 'linux') &&
          (github.event_name == 'push' ||
          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true))
        run: sudo apt-get update && sudo apt-get install -y patchelf

      - name: Build Python wheel
        if: |
          startsWith(matrix.name, 'linux') &&
          (github.event_name == 'push' ||
          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true))
        run: |
          python -m poetry build -f wheel
          mkdir -p python-dist
          mv dist/*.whl python-dist/
          rm -rf dist

      - name: Build Electron App (Universal)
        if: |
          github.event_name == 'push' ||
          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
        run: pnpm run ${{ matrix.dist_script }}

      - name: Rename artifacts for legacy build
        if: |
          matrix.variant == 'legacy' &&
          (github.event_name == 'push' ||
          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true))
        run: ./scripts/rename_legacy_artifacts.sh

      - name: Upload build artifacts
        if: |
          github.event_name == 'push' ||
          (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)
        uses: actions/upload-artifact@v4
        with:
          name: build-${{ matrix.name }}
          path: |
            dist/*-win-installer*.exe
            dist/*-win-portable*.exe
            dist/*-mac-*.dmg
            dist/*-linux*.AppImage
            dist/*-linux*.deb
            python-dist/*.whl
          if-no-files-found: ignore

  create_release:
    name: Create Release
    needs: build_desktop
    runs-on: ubuntu-latest
    if: github.event_name == 'push'
    permissions:
      contents: write
    steps:
      - name: Download all artifacts
        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4
        with:
          path: artifacts

      - name: Display structure of downloaded files
        run: ls -R artifacts

      - name: Prepare release assets
        run: |
          mkdir -p release-assets
          find artifacts -type f \( -name "*.exe" -o -name "*.dmg" -o -name "*.AppImage" -o -name "*.deb" -o -name "*.whl" \) -exec cp {} release-assets/ \;
          ls -lh release-assets/

      - name: Generate SHA256 checksums
        run: |
          cd release-assets
          echo "## SHA256 Checksums" > release-body.md
          echo "" >> release-body.md

          for file in *.exe *.dmg *.AppImage *.deb *.whl; do
            if [ -f "$file" ]; then
              sha256sum "$file" | tee "${file}.sha256"
              echo "\`$(cat "${file}.sha256")\`" >> release-body.md
            fi
          done

          echo "" >> release-body.md
          echo "Individual \`.sha256\` files are included for each artifact." >> release-body.md

          cat release-body.md
          echo ""
          echo "Generated .sha256 files:"
          ls -1 *.sha256 2>/dev/null || echo "No .sha256 files found"

      - name: Create Release
        uses: ncipollo/release-action@b7eabc95ff50cbeeedec83973935c8f306dfcd0b # v1
        with:
          draft: true
          artifacts: "release-assets/*"
          bodyFile: "release-assets/release-body.md"

  build_docker:
    runs-on: ubuntu-latest
    if: github.event_name == 'push' || (github.event_name == 'workflow_dispatch' && github.event.inputs.build_docker == 'true')
    permissions:
      packages: write
      contents: read
    steps:
      - name: Clone Repo
        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4

      - name: Set lowercase repository owner
        run: echo "REPO_OWNER_LC=${GITHUB_REPOSITORY_OWNER,,}" >> $GITHUB_ENV

      - name: Set up QEMU
        uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3

      - name: Log in to the GitHub Container registry
        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Build and push Docker images
        uses: docker/build-push-action@ca052bb54ab0790a636c9b5f226502c73d547a25 # v5
        with:
          context: .
          platforms: linux/amd64,linux/arm64
          push: true
          tags: >-
            ghcr.io/${{ env.REPO_OWNER_LC }}/reticulum-meshchatx:latest,
            ghcr.io/${{ env.REPO_OWNER_LC }}/reticulum-meshchatx:${{ github.ref_name }}
          labels: >-
            org.opencontainers.image.title=Reticulum MeshChatX,
            org.opencontainers.image.description=Docker image for Reticulum MeshChatX,
            org.opencontainers.image.url=https://github.com/${{ github.repository }}/pkgs/container/reticulum-meshchatx/
@@ -1,22 +1,22 @@
name: "Dependency review"

on:
  pull_request:
    branches: ["master"]

permissions:
  contents: read
  pull-requests: write

jobs:
  dependency-review:
    runs-on: ubuntu-latest

    steps:
      - name: "Checkout repository"
        uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4

      - name: "Dependency Review"
        uses: actions/dependency-review-action@3c4e3dcb1aa7874d2c16be7d79418e9b7efd6261 # v4
        with:
          comment-summary-in-pr: always
.prettierrc (13 changed lines)
@@ -1,9 +1,8 @@
{
    "semi": true,
    "tabWidth": 4,
    "singleQuote": false,
    "printWidth": 120,
    "trailingComma": "es5",
    "endOfLine": "auto"
}
README.md (54 changed lines)
@@ -2,11 +2,13 @@

A heavily customized and updated fork of [Reticulum MeshChat](https://github.com/liamcottle/reticulum-meshchat).

This project is separate from the original Reticulum MeshChat project, and is not affiliated with the original project.

## Features of this Fork

### Major

- Full LXST support w/ custom voicemail support.
- Map (w/ MBTiles support for offline)
- Security improvements
- Custom UI/UX

@@ -27,12 +29,12 @@ A heavily customized and updated fork of [Reticulum MeshChat](https://github.com

- [ ] Spam filter (based on keywords)
- [ ] Multi-identity support.
- [ ] TAK tool/integration
- [ ] RNS Tunnel - tunnel your regular services over RNS to another MeshchatX user.
- [ ] RNS Filesync - P2P file sync

## Usage

Check [releases](https://git.quad4.io/Ivan/MeshChatX/releases) for pre-built binaries or appimages.

## Building

@@ -47,28 +49,29 @@ You can run `task run` or `task develop` (a thin alias) to start the backend + f

### Available Tasks

| Task | Description |
| ---- | ----------- |
| `task install` | Install all dependencies (syncs version, installs node modules and python deps) |
| `task node_modules` | Install Node.js dependencies only |
| `task python` | Install Python dependencies using Poetry only |
| `task sync-version` | Sync version numbers across project files |
| `task run` | Run the application |
| `task develop` | Run the application in development mode (alias for `run`) |
| `task build` | Build the application (frontend and backend) |
| `task build-frontend` | Build only the frontend |
| `task clean` | Clean build artifacts and dependencies |
| `task wheel` | Build Python wheel package (outputs to `python-dist/`) |
| `task build-appimage` | Build Linux AppImage |
| `task build-exe` | Build Windows portable executable |
| `task dist` | Build distribution (defaults to AppImage) |
| `task electron-legacy` | Install legacy Electron version |
| `task build-appimage-legacy` | Build Linux AppImage with legacy Electron version |
| `task build-exe-legacy` | Build Windows portable executable with legacy Electron version |
| `task build-docker` | Build Docker image using buildx |
| `task run-docker` | Run Docker container using docker-compose |

All tasks support environment variable overrides. For example:

- `PYTHON=python3.12 task install`
- `DOCKER_PLATFORMS=linux/amd64,linux/arm64 task build-docker`

@@ -130,11 +133,12 @@ The `cx_setup.py` script uses cx_Freeze for creating standalone executables (App

## Internationalization (i18n)

Multi-language support is in progress. We use `vue-i18n` for the frontend.

Translation files are located in `meshchatx/src/frontend/locales/`.

Currently supported languages:

- English (Primary)
- Russian
- German
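As a rough illustration of how a `vue-i18n` setup like the one described above is typically wired, here is a minimal sketch. The locale file names, the `App.vue` entry point, and the mount selector are assumptions for illustration only, not the project's actual code.

```js
// Minimal vue-i18n bootstrap sketch (assumed file names and entry point).
import { createApp } from "vue";
import { createI18n } from "vue-i18n";
import App from "./App.vue";

// Locale JSON files would live under meshchatx/src/frontend/locales/;
// en.json, ru.json, and de.json are assumed names for the supported languages.
import en from "./locales/en.json";
import ru from "./locales/ru.json";
import de from "./locales/de.json";

const i18n = createI18n({
    legacy: false, // Composition API style
    locale: "en", // English is the primary language
    fallbackLocale: "en",
    messages: { en, ru, de },
});

createApp(App).use(i18n).mount("#app");
```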
Taskfile.yml (262 changed lines)
@@ -1,153 +1,153 @@
version: "3"

vars:
  PYTHON:
    sh: echo "${PYTHON:-python}"
  NPM:
    sh: echo "${NPM:-pnpm}"
  LEGACY_ELECTRON_VERSION:
    sh: echo "${LEGACY_ELECTRON_VERSION:-30.0.8}"
  DOCKER_COMPOSE_CMD:
    sh: echo "${DOCKER_COMPOSE_CMD:-docker compose}"
  DOCKER_COMPOSE_FILE:
    sh: echo "${DOCKER_COMPOSE_FILE:-docker-compose.yml}"
  DOCKER_IMAGE:
    sh: echo "${DOCKER_IMAGE:-reticulum-meshchatx:local}"
  DOCKER_BUILDER:
    sh: echo "${DOCKER_BUILDER:-meshchatx-builder}"
  DOCKER_PLATFORMS:
    sh: echo "${DOCKER_PLATFORMS:-linux/amd64}"
  DOCKER_BUILD_FLAGS:
    sh: echo "${DOCKER_BUILD_FLAGS:---load}"
  DOCKER_BUILD_ARGS:
    sh: echo "${DOCKER_BUILD_ARGS:-}"
  DOCKER_CONTEXT:
    sh: echo "${DOCKER_CONTEXT:-.}"
  DOCKERFILE:
    sh: echo "${DOCKERFILE:-Dockerfile}"

tasks:
  default:
    desc: Show available tasks
    cmds:
      - task --list

  install:
    desc: Install all dependencies (syncs version, installs node modules and python deps)
    deps: [sync-version, node_modules, python]

  node_modules:
    desc: Install Node.js dependencies
    cmds:
      - "{{.NPM}} install"

  python:
    desc: Install Python dependencies using Poetry
    cmds:
      - "{{.PYTHON}} -m poetry install"

  run:
    desc: Run the application
    deps: [install]
    cmds:
      - "{{.PYTHON}} -m poetry run meshchat"

  develop:
    desc: Run the application in development mode
    cmds:
      - task: run

  build:
    desc: Build the application (frontend and backend)
    deps: [install]
    cmds:
      - "{{.NPM}} run build"

  build-frontend:
    desc: Build only the frontend
    deps: [node_modules]
    cmds:
      - "{{.NPM}} run build-frontend"

  wheel:
    desc: Build Python wheel package
    deps: [install]
    cmds:
      - "{{.PYTHON}} -m poetry build -f wheel"
      - "{{.PYTHON}} scripts/move_wheels.py"

  build-appimage:
    desc: Build Linux AppImage
    deps: [build]
    cmds:
      - "{{.NPM}} run electron-postinstall"
      - "{{.NPM}} run dist -- --linux AppImage"

  build-exe:
    desc: Build Windows portable executable
    deps: [build]
    cmds:
      - "{{.NPM}} run electron-postinstall"
      - "{{.NPM}} run dist -- --win portable"

  dist:
    desc: Build distribution (defaults to AppImage)
    cmds:
      - task: build-appimage

  electron-legacy:
    desc: Install legacy Electron version
    cmds:
      - "{{.NPM}} install --no-save electron@{{.LEGACY_ELECTRON_VERSION}}"

  build-appimage-legacy:
    desc: Build Linux AppImage with legacy Electron version
    deps: [build, electron-legacy]
    cmds:
      - "{{.NPM}} run electron-postinstall"
      - "{{.NPM}} run dist -- --linux AppImage"
      - "./scripts/rename_legacy_artifacts.sh"

  build-exe-legacy:
    desc: Build Windows portable executable with legacy Electron version
    deps: [build, electron-legacy]
    cmds:
      - "{{.NPM}} run electron-postinstall"
      - "{{.NPM}} run dist -- --win portable"
      - "./scripts/rename_legacy_artifacts.sh"

  clean:
    desc: Clean build artifacts and dependencies
    cmds:
      - rm -rf node_modules
      - rm -rf build
      - rm -rf dist
      - rm -rf python-dist
      - rm -rf meshchatx/public

  sync-version:
    desc: Sync version numbers across project files
    cmds:
      - "{{.PYTHON}} scripts/sync_version.py"

  build-docker:
    desc: Build Docker image using buildx
    cmds:
      - |
        if ! docker buildx inspect {{.DOCKER_BUILDER}} >/dev/null 2>&1; then
          docker buildx create --name {{.DOCKER_BUILDER}} --use >/dev/null
        else
          docker buildx use {{.DOCKER_BUILDER}}
        fi
      - |
        docker buildx build --builder {{.DOCKER_BUILDER}} --platform {{.DOCKER_PLATFORMS}} \
          {{.DOCKER_BUILD_FLAGS}} \
          -t {{.DOCKER_IMAGE}} \
          {{.DOCKER_BUILD_ARGS}} \
          -f {{.DOCKERFILE}} \
          {{.DOCKER_CONTEXT}}

  run-docker:
    desc: Run Docker container using docker-compose
    cmds:
      - 'MESHCHAT_IMAGE="{{.DOCKER_IMAGE}}" {{.DOCKER_COMPOSE_CMD}} -f {{.DOCKER_COMPOSE_FILE}} up --remove-orphans --pull never reticulum-meshchatx'
@@ -1,17 +1,17 @@
services:
  reticulum-meshchatx:
    container_name: reticulum-meshchatx
    image: ${MESHCHAT_IMAGE:-ghcr.io/sudo-ivan/reticulum-meshchatx:latest}
    pull_policy: always
    restart: unless-stopped
    # Make the meshchat web interface accessible from the host on port 8000
    ports:
      - 127.0.0.1:8000:8000
    volumes:
      - meshchat-config:/config
    # Uncomment if you have a USB device connected, such as an RNode
    # devices:
    #   - /dev/ttyUSB0:/dev/ttyUSB0

volumes:
  meshchat-config:
@@ -96,4 +96,4 @@ sudo systemctl status reticulum-meshchat.service

You should now be able to access MeshChat via your Pi's IP address.

> Note: Don't forget to include the default port `8000`
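For example, with a Pi reachable at an assumed address of 192.168.1.50, the web interface would be at http://192.168.1.50:8000.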
@@ -1,161 +1,205 @@
<html lang="en">
    <head>
        <meta charset="UTF-8" />
        <meta http-equiv="X-UA-Compatible" content="IE=edge" />
        <meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1" />
        <meta name="color-scheme" content="light dark" />
        <title>MeshChatX</title>
        <script src="./assets/js/tailwindcss/tailwind-v3.4.3-forms-v0.5.7.js"></script>
    </head>
    <body
        class="min-h-screen bg-slate-100 text-gray-900 antialiased dark:bg-zinc-950 dark:text-zinc-50 transition-colors"
    >
        <div class="absolute inset-0 -z-10 overflow-hidden">
            <div
                class="absolute -left-32 -top-40 h-80 w-80 rounded-full bg-gradient-to-br from-blue-500/30 via-indigo-500/20 to-purple-500/30 blur-3xl dark:from-blue-600/25 dark:via-indigo-600/25 dark:to-purple-600/25"
            ></div>
            <div
                class="absolute -right-24 top-20 h-64 w-64 rounded-full bg-gradient-to-br from-emerald-400/30 via-cyan-500/20 to-blue-500/30 blur-3xl dark:from-emerald-500/25 dark:via-cyan-500/25 dark:to-blue-500/25"
            ></div>
        </div>

        <main class="relative flex min-h-screen items-center justify-center px-6 py-10">
            <div class="w-full max-w-xl">
                <div
                    class="rounded-3xl border border-slate-200/80 bg-white/80 shadow-2xl backdrop-blur-xl ring-1 ring-white/60 dark:border-zinc-800/70 dark:bg-zinc-900/70 dark:ring-zinc-800/70 transition-colors"
                >
                    <div class="p-8 space-y-6">
                        <div class="flex items-center gap-4">
                            <div
                                class="flex h-16 w-16 items-center justify-center rounded-2xl bg-gradient-to-br from-blue-500 via-indigo-500 to-purple-500 shadow-lg ring-4 ring-white/60 dark:ring-zinc-800/70"
                            >
                                <img
                                    class="h-10 w-10 object-contain"
                                    src="./assets/images/logo.png"
                                    alt="MeshChatX logo"
                                />
                            </div>
                            <div class="space-y-1">
                                <p class="text-xs uppercase tracking-[0.2em] text-blue-600 dark:text-blue-300">
                                    MeshChatX
                                </p>
                                <div class="text-2xl font-semibold tracking-tight text-gray-900 dark:text-white">
                                    MeshChatX
                                </div>
                                <div class="text-sm text-gray-600 dark:text-gray-300">Custom fork by Sudo-Ivan</div>
                            </div>
                        </div>

                        <div
                            class="flex items-center justify-between rounded-2xl border border-dashed border-slate-200/90 bg-slate-50/70 px-4 py-3 text-sm text-gray-700 dark:border-zinc-800/80 dark:bg-zinc-900/70 dark:text-gray-200 transition-colors"
                        >
                            <div class="flex items-center gap-2">
                                <span class="h-2 w-2 rounded-full bg-blue-500 animate-pulse"></span>
                                <span>Preparing your node</span>
                            </div>
                            <div
                                class="inline-flex items-center gap-2 rounded-full bg-blue-100/80 px-3 py-1 text-xs font-semibold text-blue-700 shadow-sm dark:bg-blue-900/50 dark:text-blue-200"
                            >
                                <span class="h-2 w-2 rounded-full bg-blue-500"></span>
                                <span id="status-text">Starting services</span>
                            </div>
                        </div>

                        <div class="flex items-center gap-4">
                            <div class="relative inline-flex h-14 w-14 items-center justify-center">
                                <span
                                    class="absolute inset-0 rounded-full border-4 border-blue-500/25 dark:border-blue-500/20"
                                ></span>
                                <span
                                    class="absolute inset-0 animate-spin rounded-full border-4 border-transparent border-t-blue-500 dark:border-t-blue-400"
                                ></span>
                                <span class="absolute inset-2 rounded-full bg-blue-500/10 dark:bg-blue-500/15"></span>
                            </div>
                            <div class="flex-1 space-y-1">
                                <div class="text-base font-medium text-gray-900 dark:text-white">Loading services</div>
                                <div class="text-sm text-gray-600 dark:text-gray-400">
                                    Waiting for the MeshChatX API to come online.
                                </div>
                            </div>
                        </div>

                        <div class="grid grid-cols-2 gap-4 text-sm">
                            <div
                                class="rounded-2xl border border-slate-200/90 bg-white/70 p-4 dark:border-zinc-800/80 dark:bg-zinc-900/70 transition-colors"
                            >
                                <div class="text-xs uppercase tracking-wide text-gray-500 dark:text-gray-400">
                                    Version
                                </div>
                                <div class="mt-1 text-lg font-semibold text-gray-900 dark:text-white" id="app-version">
                                    v0.0.0
                                </div>
                            </div>
                            <div
                                class="rounded-2xl border border-slate-200/90 bg-white/70 p-4 text-right dark:border-zinc-800/80 dark:bg-zinc-900/70 transition-colors"
                            >
                                <div class="text-xs uppercase tracking-wide text-gray-500 dark:text-gray-400">
                                    Status
                                </div>
                                <div
                                    class="mt-1 text-lg font-semibold text-emerald-600 dark:text-emerald-300"
                                    id="status-badge"
                                >
                                    Booting
                                </div>
                            </div>
                        </div>
                    </div>
                </div>
            </div>
        </main>

        <script>
            const statusText = document.getElementById("status-text");
            const statusBadge = document.getElementById("status-badge");

            applyTheme(detectPreferredTheme());
            showAppVersion();
            check();
            listenForSystemThemeChanges();

            async function showAppVersion() {
                const appVersion = await window.electron.appVersion();
                document.getElementById("app-version").innerText = "v" + appVersion;
            }

            function detectPreferredTheme() {
                try {
                    const storedTheme =
                        localStorage.getItem("meshchat.theme") || localStorage.getItem("meshchatx.theme");
                    if (storedTheme === "dark" || storedTheme === "light") {
                        return storedTheme;
                    }
                } catch (e) {}
                return window.matchMedia && window.matchMedia("(prefers-color-scheme: dark)").matches
                    ? "dark"
                    : "light";
            }

            function applyTheme(theme) {
                const isDark = theme === "dark";
                document.documentElement.classList.toggle("dark", isDark);
                document.body.dataset.theme = isDark ? "dark" : "light";
            }

            function listenForSystemThemeChanges() {
                if (!window.matchMedia) {
                    return;
                }
                const media = window.matchMedia("(prefers-color-scheme: dark)");
                media.addEventListener("change", (event) => {
                    applyTheme(event.matches ? "dark" : "light");
                });
            }

            let detectedProtocol = "http";

            async function check() {
                const protocols = ["https", "http"];
                for (const protocol of protocols) {
                    try {
                        const result = await fetch(`${protocol}://localhost:9337/api/v1/status`, {
                            cache: "no-store",
                        });
                        const status = result.status;
                        const data = await result.json();
                        if (status === 200 && data.status === "ok") {
                            detectedProtocol = protocol;
                            statusText.innerText = "Launching UI";
                            statusBadge.innerText = "Ready";
                            syncThemeFromConfig();
                            setTimeout(onReady, 200);
                            return;
                        }
                    } catch (e) {
                        continue;
                    }
                }
                setTimeout(check, 300);
            }

            function onReady() {
                const timestamp = new Date().getTime();
                window.location.href = `${detectedProtocol}://localhost:9337/?nocache=${timestamp}`;
            }

            async function syncThemeFromConfig() {
                try {
                    const response = await fetch(`${detectedProtocol}://localhost:9337/api/v1/config`, {
                        cache: "no-store",
                    });
                    if (!response.ok) {
                        return;
                    }
                    const config = await response.json();
                    if (config && (config.theme === "dark" || config.theme === "light")) {
                        applyTheme(config.theme);
                        try {
                            localStorage.setItem("meshchat.theme", config.theme);
                        } catch (e) {}
                    }
                } catch (e) {}
            }
        </script>
    </body>
</html>
logo/icon.ico: binary file changed, not shown (127 KiB before, 110 KiB after)
logo/logo.png: binary file changed, not shown (80 KiB before, 289 KiB after)
@@ -63,6 +63,7 @@ from meshchatx.src.backend.rnstatus_handler import RNStatusHandler
from meshchatx.src.backend.sideband_commands import SidebandCommands
from meshchatx.src.backend.telephone_manager import TelephoneManager
from meshchatx.src.backend.translator_handler import TranslatorHandler
from meshchatx.src.backend.voicemail_manager import VoicemailManager
from meshchatx.src.version import __version__ as app_version

@@ -193,7 +194,9 @@ class ReticulumMeshChat:

        # init database
        self.database = Database(self.database_path)
        self.db = (
            self.database
        )  # keep for compatibility with parts I haven't changed yet

        try:
            self.database.initialize()

@@ -218,7 +221,7 @@ class ReticulumMeshChat:
        self.announce_manager = AnnounceManager(self.database)
        self.archiver_manager = ArchiverManager(self.database)
        self.map_manager = MapManager(self.config, self.storage_dir)
        self.forwarding_manager = None  # will init after lxmf router

        # remember if authentication is enabled
        self.auth_enabled = auth_enabled or self.config.auth_enabled.get()

@@ -327,6 +330,17 @@ class ReticulumMeshChat:
        )
        self.telephone_manager.init_telephone()

        # init Voicemail Manager
        self.voicemail_manager = VoicemailManager(
            db=self.database,
            telephone_manager=self.telephone_manager,
            storage_dir=self.storage_path,
        )
        # Monkey patch VoicemailManager to use our get_name_for_identity_hash
        self.voicemail_manager.get_name_for_identity_hash = (
            self.get_name_for_identity_hash
        )

        # init RNCP handler
        self.rncp_handler = RNCPHandler(
            reticulum_instance=self.reticulum,

@@ -345,7 +359,9 @@ class ReticulumMeshChat:

        # init Translator handler
        libretranslate_url = self.config.get("libretranslate_url", None)
        self.translator_handler = TranslatorHandler(
            libretranslate_url=libretranslate_url
        )

        # start background thread for auto announce loop
        thread = threading.Thread(target=asyncio.run, args=(self.announce_loop(),))

@@ -552,7 +568,8 @@ class ReticulumMeshChat:
    def backup_identity(self):
        identity_bytes = self._get_identity_bytes()
        target_path = self.identity_file_path or os.path.join(
            self.storage_dir,
            "identity",
        )
        os.makedirs(os.path.dirname(target_path), exist_ok=True)
        with open(target_path, "wb") as f:

@@ -567,7 +584,8 @@ class ReticulumMeshChat:

    def restore_identity_from_bytes(self, identity_bytes: bytes):
        target_path = self.identity_file_path or os.path.join(
            self.storage_dir,
            "identity",
        )
        os.makedirs(os.path.dirname(target_path), exist_ok=True)
        with open(target_path, "wb") as f:

@@ -690,9 +708,13 @@ class ReticulumMeshChat:
        if self.config.crawler_enabled.get():
            # Proactively queue any known nodes from the database that haven't been queued yet
            # get known propagation nodes from database
            known_nodes = self.database.announces.get_announces(
                aspect="nomadnetwork.node"
            )
            for node in known_nodes:
                self.queue_crawler_task(
                    node["destination_hash"], "/page/index.mu"
                )

        # process pending or failed tasks
        # ensure we handle potential string comparison issues in SQLite

@@ -702,7 +724,9 @@ class ReticulumMeshChat:
        )

        # process tasks concurrently up to the limit
        await asyncio.gather(*[self.process_crawler_task(task) for task in tasks])
||||
*[self.process_crawler_task(task) for task in tasks]
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
print(f"Error in crawler loop: {e}")
|
||||
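The loop above pulls a capped batch of pending or failed tasks and awaits them together with asyncio.gather. A minimal standalone sketch of that pattern follows; the task shape and function bodies are placeholders, not the real DAO or download logic:

import asyncio

async def process_crawler_task(task):
    # placeholder for the real download-and-archive work (hypothetical)
    await asyncio.sleep(0.1)
    print("archived", task["destination_hash"], task["page_path"])

async def crawler_batch(pending_tasks, max_concurrent=1):
    # mirror of the loop above: fetch at most `max_concurrent` tasks, then await them together
    batch = pending_tasks[:max_concurrent]
    await asyncio.gather(*[process_crawler_task(t) for t in batch])

asyncio.run(crawler_batch([{"destination_hash": "abcd", "page_path": "/page/index.mu"}]))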
@@ -713,12 +737,16 @@ class ReticulumMeshChat:
|
||||
async def process_crawler_task(self, task):
|
||||
# mark as crawling
|
||||
task_id = task["id"]
|
||||
self.database.misc.update_crawl_task(task_id, status="crawling", last_retry_at=datetime.now(UTC))
|
||||
self.database.misc.update_crawl_task(
|
||||
task_id, status="crawling", last_retry_at=datetime.now(UTC)
|
||||
)
|
||||
|
||||
destination_hash = task["destination_hash"]
|
||||
page_path = task["page_path"]
|
||||
|
||||
print(f"Crawler: Archiving {destination_hash}:{page_path} (Attempt {task['retry_count'] + 1})")
|
||||
print(
|
||||
f"Crawler: Archiving {destination_hash}:{page_path} (Attempt {task['retry_count'] + 1})"
|
||||
)
|
||||
|
||||
# completion event
|
||||
done_event = asyncio.Event()
|
||||
@@ -762,17 +790,23 @@ class ReticulumMeshChat:
|
||||
|
||||
await download_task
|
||||
except Exception as e:
|
||||
print(f"Crawler: Error during download for {destination_hash}:{page_path}: {e}")
|
||||
print(
|
||||
f"Crawler: Error during download for {destination_hash}:{page_path}: {e}"
|
||||
)
|
||||
failure_reason[0] = str(e)
|
||||
done_event.set()
|
||||
|
||||
if success[0]:
|
||||
print(f"Crawler: Successfully archived {destination_hash}:{page_path}")
|
||||
self.archive_page(destination_hash, page_path, content_received[0], is_manual=False)
|
||||
self.archive_page(
|
||||
destination_hash, page_path, content_received[0], is_manual=False
|
||||
)
|
||||
task.status = "completed"
|
||||
task.save()
|
||||
else:
|
||||
print(f"Crawler: Failed to archive {destination_hash}:{page_path} - {failure_reason[0]}")
|
||||
print(
|
||||
f"Crawler: Failed to archive {destination_hash}:{page_path} - {failure_reason[0]}"
|
||||
)
|
||||
task.retry_count += 1
|
||||
task.status = "failed"
|
||||
|
||||
@@ -911,13 +945,17 @@ class ReticulumMeshChat:
|
||||
# returns the latest message for the provided destination hash
|
||||
def get_conversation_latest_message(self, destination_hash: str):
|
||||
local_hash = self.identity.hexhash
|
||||
messages = self.message_handler.get_conversation_messages(local_hash, destination_hash, limit=1)
|
||||
messages = self.message_handler.get_conversation_messages(
|
||||
local_hash, destination_hash, limit=1
|
||||
)
|
||||
return messages[0] if messages else None
|
||||
|
||||
# returns true if the conversation with the provided destination hash has any attachments
|
||||
def conversation_has_attachments(self, destination_hash: str):
|
||||
local_hash = self.identity.hexhash
|
||||
messages = self.message_handler.get_conversation_messages(local_hash, destination_hash)
|
||||
messages = self.message_handler.get_conversation_messages(
|
||||
local_hash, destination_hash
|
||||
)
|
||||
for message in messages:
|
||||
if self.message_fields_have_attachments(message["fields"]):
|
||||
return True
|
||||
@@ -957,9 +995,13 @@ class ReticulumMeshChat:
|
||||
matches.add(message["source_hash"])
|
||||
|
||||
# also check custom display names
|
||||
custom_names = self.database.announces.get_announces() # Or more specific if needed
|
||||
custom_names = (
|
||||
self.database.announces.get_announces()
|
||||
) # Or more specific if needed
|
||||
for announce in custom_names:
|
||||
custom_name = self.database.announces.get_custom_display_name(announce["destination_hash"])
|
||||
custom_name = self.database.announces.get_custom_display_name(
|
||||
announce["destination_hash"]
|
||||
)
|
||||
if custom_name and search_term.lower() in custom_name.lower():
|
||||
matches.add(announce["destination_hash"])
|
||||
|
||||
@@ -974,6 +1016,9 @@ class ReticulumMeshChat:
|
||||
|
||||
# handle receiving a new audio call
|
||||
def on_incoming_telephone_call(self, caller_identity: RNS.Identity):
|
||||
# Trigger voicemail handling
|
||||
self.voicemail_manager.handle_incoming_call(caller_identity)
|
||||
|
||||
print(f"on_incoming_telephone_call: {caller_identity.hash.hex()}")
|
||||
AsyncUtils.run_async(
|
||||
self.websocket_broadcast(
|
||||
@@ -998,7 +1043,12 @@ class ReticulumMeshChat:
|
||||
)
|
||||
|
||||
def on_telephone_call_ended(self, caller_identity: RNS.Identity):
|
||||
print(f"on_telephone_call_ended: {caller_identity.hash.hex() if caller_identity else 'Unknown'}")
|
||||
# Stop voicemail recording if active
|
||||
self.voicemail_manager.stop_recording()
|
||||
|
||||
print(
|
||||
f"on_telephone_call_ended: {caller_identity.hash.hex() if caller_identity else 'Unknown'}"
|
||||
)
|
||||
|
||||
# Record call history
|
||||
if caller_identity:
|
||||
@@ -2474,7 +2524,9 @@ class ReticulumMeshChat:
|
||||
"remote_identity_hash": remote_identity_hash,
|
||||
"remote_identity_name": remote_identity_name,
|
||||
"audio_profile_id": self.telephone_manager.telephone.transmit_codec.profile
|
||||
if hasattr(self.telephone_manager.telephone.transmit_codec, "profile")
|
||||
if hasattr(
|
||||
self.telephone_manager.telephone.transmit_codec, "profile"
|
||||
)
|
||||
else None,
|
||||
"tx_packets": getattr(telephone_active_call, "tx", 0),
|
||||
"rx_packets": getattr(telephone_active_call, "rx", 0),
|
||||
@@ -2482,6 +2534,7 @@ class ReticulumMeshChat:
|
||||
"rx_bytes": getattr(telephone_active_call, "rxbytes", 0),
|
||||
"is_mic_muted": self.telephone_manager.telephone.transmit_muted,
|
||||
"is_speaker_muted": self.telephone_manager.telephone.receive_muted,
|
||||
"is_voicemail": self.voicemail_manager.is_recording,
|
||||
}
|
||||
|
||||
return web.json_response(
|
||||
@@ -2492,6 +2545,10 @@ class ReticulumMeshChat:
|
||||
"active_call": active_call,
|
||||
"is_mic_muted": self.telephone_manager.telephone.transmit_muted,
|
||||
"is_speaker_muted": self.telephone_manager.telephone.receive_muted,
|
||||
"voicemail": {
|
||||
"is_recording": self.voicemail_manager.is_recording,
|
||||
"unread_count": self.database.voicemails.get_unread_count(),
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
@@ -2506,7 +2563,9 @@ class ReticulumMeshChat:
|
||||
caller_identity = active_call.get_remote_identity()
|
||||
|
||||
# answer call
|
||||
await asyncio.to_thread(self.telephone_manager.telephone.answer, caller_identity)
|
||||
await asyncio.to_thread(
|
||||
self.telephone_manager.telephone.answer, caller_identity
|
||||
)
|
||||
|
||||
return web.json_response(
|
||||
{
|
||||
@@ -2563,9 +2622,12 @@ class ReticulumMeshChat:
|
||||
profile_id = request.match_info.get("profile_id")
|
||||
try:
|
||||
await asyncio.to_thread(
|
||||
self.telephone_manager.telephone.switch_profile, int(profile_id)
|
||||
self.telephone_manager.telephone.switch_profile,
|
||||
int(profile_id),
|
||||
)
|
||||
return web.json_response(
|
||||
{"message": f"Switched to profile {profile_id}"}
|
||||
)
|
||||
return web.json_response({"message": f"Switched to profile {profile_id}"})
|
||||
except Exception as e:
|
||||
return web.json_response({"message": str(e)}, status=500)
|
||||
|
||||
@@ -2602,9 +2664,11 @@ class ReticulumMeshChat:
|
||||
identity_hash_bytes = bytes.fromhex(announce["identity_hash"])
|
||||
|
||||
# calculate telephony destination hash
|
||||
telephony_destination_hash = RNS.Destination.hash_from_name_and_identity(
|
||||
f"{LXST.APP_NAME}.telephony",
|
||||
identity_hash_bytes,
|
||||
telephony_destination_hash = (
|
||||
RNS.Destination.hash_from_name_and_identity(
|
||||
f"{LXST.APP_NAME}.telephony",
|
||||
identity_hash_bytes,
|
||||
)
|
||||
)
|
||||
|
||||
# request path to telephony destination
|
||||
@@ -2673,6 +2737,83 @@ class ReticulumMeshChat:
|
||||
},
|
||||
)
|
||||
|
||||
# voicemail status
|
||||
@routes.get("/api/v1/telephone/voicemail/status")
|
||||
async def telephone_voicemail_status(request):
|
||||
return web.json_response(
|
||||
{
|
||||
"has_espeak": self.voicemail_manager.has_espeak,
|
||||
"has_ffmpeg": self.voicemail_manager.has_ffmpeg,
|
||||
"is_recording": self.voicemail_manager.is_recording,
|
||||
},
|
||||
)
|
||||
|
||||
# list voicemails
|
||||
@routes.get("/api/v1/telephone/voicemails")
|
||||
async def telephone_voicemails(request):
|
||||
limit = int(request.query.get("limit", 50))
|
||||
offset = int(request.query.get("offset", 0))
|
||||
voicemails = self.database.voicemails.get_voicemails(
|
||||
limit=limit, offset=offset
|
||||
)
|
||||
return web.json_response(
|
||||
{
|
||||
"voicemails": [dict(row) for row in voicemails],
|
||||
"unread_count": self.database.voicemails.get_unread_count(),
|
||||
},
|
||||
)
|
||||
|
||||
# mark voicemail as read
|
||||
@routes.post("/api/v1/telephone/voicemails/{id}/read")
|
||||
async def telephone_voicemail_mark_read(request):
|
||||
voicemail_id = request.match_info.get("id")
|
||||
self.database.voicemails.mark_as_read(voicemail_id)
|
||||
return web.json_response({"message": "Voicemail marked as read"})
|
||||
|
||||
# delete voicemail
|
||||
@routes.delete("/api/v1/telephone/voicemails/{id}")
|
||||
async def telephone_voicemail_delete(request):
|
||||
voicemail_id = request.match_info.get("id")
|
||||
voicemail = self.database.voicemails.get_voicemail(voicemail_id)
|
||||
if voicemail:
|
||||
filepath = os.path.join(
|
||||
self.voicemail_manager.recordings_dir, voicemail["filename"]
|
||||
)
|
||||
if os.path.exists(filepath):
|
||||
os.remove(filepath)
|
||||
self.database.voicemails.delete_voicemail(voicemail_id)
|
||||
return web.json_response({"message": "Voicemail deleted"})
|
||||
return web.json_response({"message": "Voicemail not found"}, status=404)
|
||||
|
||||
# serve voicemail audio
|
||||
@routes.get("/api/v1/telephone/voicemails/{id}/audio")
|
||||
async def telephone_voicemail_audio(request):
|
||||
voicemail_id = request.match_info.get("id")
|
||||
voicemail = self.database.voicemails.get_voicemail(voicemail_id)
|
||||
if voicemail:
|
||||
filepath = os.path.join(
|
||||
self.voicemail_manager.recordings_dir, voicemail["filename"]
|
||||
)
|
||||
if os.path.exists(filepath):
|
||||
return web.FileResponse(filepath)
|
||||
return web.json_response(
|
||||
{"message": "Voicemail audio not found"}, status=404
|
||||
)
|
||||
|
||||
# generate greeting
|
||||
@routes.post("/api/v1/telephone/voicemail/generate-greeting")
|
||||
async def telephone_voicemail_generate_greeting(request):
|
||||
try:
|
||||
text = self.config.voicemail_greeting.get()
|
||||
path = await asyncio.to_thread(
|
||||
self.voicemail_manager.generate_greeting, text
|
||||
)
|
||||
return web.json_response(
|
||||
{"message": "Greeting generated", "path": path}
|
||||
)
|
||||
except Exception as e:
|
||||
return web.json_response({"message": str(e)}, status=500)
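The voicemail routes registered above form a small REST surface. A usage sketch with the requests library, assuming the default localhost:9337 address used elsewhere in this changeset, plain HTTP, no auth session, and that each voicemail row carries an "id" column:

import requests

BASE = "http://localhost:9337"  # adjust host/port/scheme to your deployment

# capability and recording status
print(requests.get(f"{BASE}/api/v1/telephone/voicemail/status").json())

# list voicemails (paginated), then mark each as read, fetch the audio, and delete it
listing = requests.get(f"{BASE}/api/v1/telephone/voicemails", params={"limit": 10, "offset": 0}).json()
for vm in listing["voicemails"]:
    requests.post(f"{BASE}/api/v1/telephone/voicemails/{vm['id']}/read")
    audio = requests.get(f"{BASE}/api/v1/telephone/voicemails/{vm['id']}/audio")
    requests.delete(f"{BASE}/api/v1/telephone/voicemails/{vm['id']}")

# regenerate the greeting from the configured text (requires the espeak/ffmpeg flags above)
requests.post(f"{BASE}/api/v1/telephone/voicemail/generate-greeting")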
|
||||
|
||||
# announce
|
||||
@routes.get("/api/v1/announce")
|
||||
async def announce_trigger(request):
|
||||
@@ -2694,7 +2835,9 @@ class ReticulumMeshChat:
|
||||
search_query = request.query.get("search", None)
|
||||
limit = request.query.get("limit", None)
|
||||
offset = request.query.get("offset", None)
|
||||
include_blocked = request.query.get("include_blocked", "false").lower() == "true"
|
||||
include_blocked = (
|
||||
request.query.get("include_blocked", "false").lower() == "true"
|
||||
)
|
||||
|
||||
blocked_identity_hashes = None
|
||||
if not include_blocked:
|
||||
@@ -2721,7 +2864,8 @@ class ReticulumMeshChat:
|
||||
|
||||
# process announces
|
||||
announces = [
|
||||
self.convert_db_announce_to_dict(announce) for announce in paginated_results
|
||||
self.convert_db_announce_to_dict(announce)
|
||||
for announce in paginated_results
|
||||
]
|
||||
|
||||
return web.json_response(
|
||||
@@ -2742,8 +2886,7 @@ class ReticulumMeshChat:
|
||||
|
||||
# process favourites
|
||||
favourites = [
|
||||
self.convert_db_favourite_to_dict(favourite)
|
||||
for favourite in results
|
||||
self.convert_db_favourite_to_dict(favourite) for favourite in results
|
||||
]
|
||||
|
||||
return web.json_response(
|
||||
@@ -2789,7 +2932,9 @@ class ReticulumMeshChat:
|
||||
)
|
||||
|
||||
# upsert favourite
|
||||
self.database.announces.upsert_favourite(destination_hash, display_name, aspect)
|
||||
self.database.announces.upsert_favourite(
|
||||
destination_hash, display_name, aspect
|
||||
)
|
||||
return web.json_response(
|
||||
{
|
||||
"message": "Favourite has been added!",
|
||||
@@ -2808,7 +2953,9 @@ class ReticulumMeshChat:
|
||||
|
||||
# update display name if provided
|
||||
if len(display_name) > 0:
|
||||
self.database.announces.upsert_custom_display_name(destination_hash, display_name)
|
||||
self.database.announces.upsert_custom_display_name(
|
||||
destination_hash, display_name
|
||||
)
|
||||
|
||||
return web.json_response(
|
||||
{
|
||||
@@ -2853,31 +3000,43 @@ class ReticulumMeshChat:
|
||||
archives = []
|
||||
for archive in archives_results:
|
||||
# find node name from announces or custom display names
|
||||
node_name = self.get_custom_destination_display_name(archive["destination_hash"])
|
||||
node_name = self.get_custom_destination_display_name(
|
||||
archive["destination_hash"]
|
||||
)
|
||||
if not node_name:
|
||||
db_announce = self.database.announces.get_announce_by_hash(archive["destination_hash"])
|
||||
db_announce = self.database.announces.get_announce_by_hash(
|
||||
archive["destination_hash"]
|
||||
)
|
||||
if db_announce and db_announce["aspect"] == "nomadnetwork.node":
|
||||
node_name = ReticulumMeshChat.parse_nomadnetwork_node_display_name(db_announce["app_data"])
|
||||
node_name = (
|
||||
ReticulumMeshChat.parse_nomadnetwork_node_display_name(
|
||||
db_announce["app_data"]
|
||||
)
|
||||
)
|
||||
|
||||
archives.append({
|
||||
"id": archive["id"],
|
||||
"destination_hash": archive["destination_hash"],
|
||||
"node_name": node_name or "Unknown Node",
|
||||
"page_path": archive["page_path"],
|
||||
"content": archive["content"],
|
||||
"hash": archive["hash"],
|
||||
"created_at": archive["created_at"],
|
||||
})
|
||||
archives.append(
|
||||
{
|
||||
"id": archive["id"],
|
||||
"destination_hash": archive["destination_hash"],
|
||||
"node_name": node_name or "Unknown Node",
|
||||
"page_path": archive["page_path"],
|
||||
"content": archive["content"],
|
||||
"hash": archive["hash"],
|
||||
"created_at": archive["created_at"],
|
||||
}
|
||||
)
|
||||
|
||||
return web.json_response({
|
||||
"archives": archives,
|
||||
"pagination": {
|
||||
"page": page,
|
||||
"limit": limit,
|
||||
"total_count": total_count,
|
||||
"total_pages": total_pages,
|
||||
},
|
||||
})
|
||||
return web.json_response(
|
||||
{
|
||||
"archives": archives,
|
||||
"pagination": {
|
||||
"page": page,
|
||||
"limit": limit,
|
||||
"total_count": total_count,
|
||||
"total_pages": total_pages,
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
@routes.get("/api/v1/lxmf/propagation-node/status")
|
||||
async def propagation_node_status(request):
|
||||
@@ -2937,7 +3096,7 @@ class ReticulumMeshChat:
|
||||
|
||||
# limit results
|
||||
if limit is not None:
|
||||
results = results[:int(limit)]
|
||||
results = results[: int(limit)]
|
||||
|
||||
# process announces
|
||||
lxmf_propagation_nodes = []
|
||||
@@ -2947,14 +3106,20 @@ class ReticulumMeshChat:
|
||||
aspect="lxmf.delivery",
|
||||
identity_hash=announce["identity_hash"],
|
||||
)
|
||||
lxmf_delivery_announce = lxmf_delivery_results[0] if lxmf_delivery_results else None
|
||||
lxmf_delivery_announce = (
|
||||
lxmf_delivery_results[0] if lxmf_delivery_results else None
|
||||
)
|
||||
|
||||
# find a nomadnetwork.node announce for the same identity hash, so we can use that as an "operated by" name
|
||||
nomadnetwork_node_results = self.database.announces.get_filtered_announces(
|
||||
aspect="nomadnetwork.node",
|
||||
identity_hash=announce["identity_hash"],
|
||||
nomadnetwork_node_results = (
|
||||
self.database.announces.get_filtered_announces(
|
||||
aspect="nomadnetwork.node",
|
||||
identity_hash=announce["identity_hash"],
|
||||
)
|
||||
)
|
||||
nomadnetwork_node_announce = (
|
||||
nomadnetwork_node_results[0] if nomadnetwork_node_results else None
|
||||
)
|
||||
nomadnetwork_node_announce = nomadnetwork_node_results[0] if nomadnetwork_node_results else None
|
||||
|
||||
# get a display name from other announces belonging to the propagation nodes identity
|
||||
operator_display_name = None
|
||||
@@ -2970,9 +3135,11 @@ class ReticulumMeshChat:
|
||||
nomadnetwork_node_announce is not None
|
||||
and nomadnetwork_node_announce["app_data"] is not None
|
||||
):
|
||||
operator_display_name = ReticulumMeshChat.parse_nomadnetwork_node_display_name(
|
||||
nomadnetwork_node_announce["app_data"],
|
||||
None,
|
||||
operator_display_name = (
|
||||
ReticulumMeshChat.parse_nomadnetwork_node_display_name(
|
||||
nomadnetwork_node_announce["app_data"],
|
||||
None,
|
||||
)
|
||||
)
|
||||
|
||||
# parse app_data so we can see if propagation is enabled or disabled for this node
|
||||
@@ -3097,18 +3264,26 @@ class ReticulumMeshChat:
|
||||
updated_at = None
|
||||
|
||||
# get latest announce from database for the provided destination hash
|
||||
latest_announce = self.database.announces.get_announce_by_hash(destination_hash)
|
||||
latest_announce = self.database.announces.get_announce_by_hash(
|
||||
destination_hash
|
||||
)
|
||||
|
||||
# get latest lxmf message from database sent to us from the provided destination hash
|
||||
local_hash = self.local_lxmf_destination.hexhash
|
||||
messages = self.message_handler.get_conversation_messages(local_hash, destination_hash, limit=1)
|
||||
messages = self.message_handler.get_conversation_messages(
|
||||
local_hash, destination_hash, limit=1
|
||||
)
|
||||
# Filter for incoming messages only
|
||||
latest_lxmf_message = next((m for m in messages if m["source_hash"] == destination_hash), None)
|
||||
latest_lxmf_message = next(
|
||||
(m for m in messages if m["source_hash"] == destination_hash), None
|
||||
)
|
||||
|
||||
# determine when latest announce was received
|
||||
latest_announce_at = None
|
||||
if latest_announce is not None:
|
||||
latest_announce_at = datetime.fromisoformat(latest_announce["updated_at"])
|
||||
latest_announce_at = datetime.fromisoformat(
|
||||
latest_announce["updated_at"]
|
||||
)
|
||||
if latest_announce_at.tzinfo is not None:
|
||||
latest_announce_at = latest_announce_at.replace(tzinfo=None)
|
||||
|
||||
@@ -3392,7 +3567,10 @@ class ReticulumMeshChat:
|
||||
|
||||
@routes.get("/api/v1/rnstatus")
|
||||
async def rnstatus(request):
|
||||
include_link_stats = request.query.get("include_link_stats", "false") in ("true", "1")
|
||||
include_link_stats = request.query.get("include_link_stats", "false") in (
|
||||
"true",
|
||||
"1",
|
||||
)
|
||||
sorting = request.query.get("sorting")
|
||||
sort_reverse = request.query.get("sort_reverse", "false") in ("true", "1")
|
||||
|
||||
@@ -3453,13 +3631,17 @@ class ReticulumMeshChat:
|
||||
async def translator_languages(request):
|
||||
try:
|
||||
libretranslate_url = request.query.get("libretranslate_url")
|
||||
languages = self.translator_handler.get_supported_languages(libretranslate_url=libretranslate_url)
|
||||
return web.json_response({
|
||||
"languages": languages,
|
||||
"has_argos": self.translator_handler.has_argos,
|
||||
"has_argos_lib": self.translator_handler.has_argos_lib,
|
||||
"has_argos_cli": self.translator_handler.has_argos_cli,
|
||||
})
|
||||
languages = self.translator_handler.get_supported_languages(
|
||||
libretranslate_url=libretranslate_url
|
||||
)
|
||||
return web.json_response(
|
||||
{
|
||||
"languages": languages,
|
||||
"has_argos": self.translator_handler.has_argos,
|
||||
"has_argos_lib": self.translator_handler.has_argos_lib,
|
||||
"has_argos_cli": self.translator_handler.has_argos_cli,
|
||||
}
|
||||
)
|
||||
except Exception as e:
|
||||
return web.json_response(
|
||||
{"message": str(e)},
|
||||
@@ -3575,7 +3757,9 @@ class ReticulumMeshChat:
|
||||
lxmf_stamp_cost = None
|
||||
announce = self.database.announces.get_announce_by_hash(destination_hash)
|
||||
if announce is not None:
|
||||
lxmf_stamp_cost = ReticulumMeshChat.parse_lxmf_stamp_cost(announce["app_data"])
|
||||
lxmf_stamp_cost = ReticulumMeshChat.parse_lxmf_stamp_cost(
|
||||
announce["app_data"]
|
||||
)
|
||||
|
||||
# get outbound ticket expiry for this lxmf destination
|
||||
lxmf_outbound_ticket_expiry = (
|
||||
@@ -3760,7 +3944,9 @@ class ReticulumMeshChat:
|
||||
|
||||
# get lxmf message from database
|
||||
lxmf_message = None
|
||||
db_lxmf_message = self.database.messages.get_lxmf_message_by_hash(message_hash)
|
||||
db_lxmf_message = self.database.messages.get_lxmf_message_by_hash(
|
||||
message_hash
|
||||
)
|
||||
if db_lxmf_message is not None:
|
||||
lxmf_message = self.convert_db_lxmf_message_to_dict(db_lxmf_message)
|
||||
|
||||
@@ -3864,7 +4050,9 @@ class ReticulumMeshChat:
|
||||
file_index = request.query.get("file_index")
|
||||
|
||||
# find message from database
|
||||
db_lxmf_message = self.database.messages.get_lxmf_message_by_hash(message_hash)
|
||||
db_lxmf_message = self.database.messages.get_lxmf_message_by_hash(
|
||||
message_hash
|
||||
)
|
||||
if db_lxmf_message is None:
|
||||
return web.json_response({"message": "Message not found"}, status=404)
|
||||
|
||||
@@ -3959,14 +4147,16 @@ class ReticulumMeshChat:
|
||||
latest_message_title = db_message["title"]
|
||||
latest_message_preview = db_message["content"]
|
||||
latest_message_timestamp = db_message["timestamp"]
|
||||
latest_message_has_attachments = (
|
||||
self.message_fields_have_attachments(db_message["fields"])
|
||||
latest_message_has_attachments = self.message_fields_have_attachments(
|
||||
db_message["fields"]
|
||||
)
|
||||
|
||||
# using timestamp (sent time) for updated_at as it is more reliable across restarts
|
||||
# and represents the actual time the message was created by the sender.
|
||||
# we convert it to ISO format for the frontend.
|
||||
updated_at = datetime.fromtimestamp(latest_message_timestamp, UTC).isoformat()
|
||||
updated_at = datetime.fromtimestamp(
|
||||
latest_message_timestamp, UTC
|
||||
).isoformat()
|
||||
|
||||
# check if conversation has attachments
|
||||
has_attachments = self.conversation_has_attachments(other_user_hash)
|
||||
@@ -4260,7 +4450,12 @@ class ReticulumMeshChat:
|
||||
self.map_manager.close()
|
||||
self.config.map_offline_path.set(file_path)
|
||||
self.config.map_offline_enabled.set(True)
|
||||
return web.json_response({"message": "Active map updated", "metadata": self.map_manager.get_metadata()})
|
||||
return web.json_response(
|
||||
{
|
||||
"message": "Active map updated",
|
||||
"metadata": self.map_manager.get_metadata(),
|
||||
}
|
||||
)
|
||||
return web.json_response({"error": "File not found"}, status=404)
|
||||
|
||||
# upload offline map
|
||||
@@ -4274,7 +4469,9 @@ class ReticulumMeshChat:
|
||||
|
||||
filename = field.filename
|
||||
if not filename.endswith(".mbtiles"):
|
||||
return web.json_response({"error": "Invalid file format, must be .mbtiles"}, status=400)
|
||||
return web.json_response(
|
||||
{"error": "Invalid file format, must be .mbtiles"}, status=400
|
||||
)
|
||||
|
||||
# save to mbtiles dir
|
||||
mbtiles_dir = self.map_manager.get_mbtiles_dir()
|
||||
@@ -4307,12 +4504,19 @@ class ReticulumMeshChat:
|
||||
os.remove(dest_path)
|
||||
self.config.map_offline_path.set(None)
|
||||
self.config.map_offline_enabled.set(False)
|
||||
return web.json_response({"error": "Invalid MBTiles file or unsupported format (vector maps not supported)"}, status=400)
|
||||
return web.json_response(
|
||||
{
|
||||
"error": "Invalid MBTiles file or unsupported format (vector maps not supported)"
|
||||
},
|
||||
status=400,
|
||||
)
|
||||
|
||||
return web.json_response({
|
||||
"message": "Map uploaded successfully",
|
||||
"metadata": metadata,
|
||||
})
|
||||
return web.json_response(
|
||||
{
|
||||
"message": "Map uploaded successfully",
|
||||
"metadata": metadata,
|
||||
}
|
||||
)
|
||||
except Exception as e:
|
||||
RNS.log(f"Error uploading map: {e}", RNS.LOG_ERROR)
|
||||
return web.json_response({"error": str(e)}, status=500)
|
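The MapManager internals that reject a bad upload are not part of this hunk. As a rough illustration only: the standard MBTiles layout keeps its descriptors in a metadata table of name/value pairs, and vector tilesets advertise format "pbf", so a raster-only check could be sketched as below. This is an assumption about the kind of validation performed, not the actual implementation:

import sqlite3

def looks_like_raster_mbtiles(path):
    # illustrative only, not the real MapManager code
    try:
        con = sqlite3.connect(path)
        rows = dict(con.execute("SELECT name, value FROM metadata"))
        con.close()
    except sqlite3.Error:
        return False
    # vector tilesets advertise format "pbf"; raster ones use png/jpg/webp
    return rows.get("format", "").lower() not in ("", "pbf")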
||||
@@ -4322,7 +4526,7 @@ class ReticulumMeshChat:
|
||||
async def start_map_export(request):
|
||||
try:
|
||||
data = await request.json()
|
||||
bbox = data.get("bbox") # [min_lon, min_lat, max_lon, max_lat]
|
||||
bbox = data.get("bbox") # [min_lon, min_lat, max_lon, max_lat]
|
||||
min_zoom = int(data.get("min_zoom", 0))
|
||||
max_zoom = int(data.get("max_zoom", 10))
|
||||
name = data.get("name", "Exported Map")
|
||||
@@ -4360,7 +4564,9 @@ class ReticulumMeshChat:
|
||||
"Content-Disposition": f'attachment; filename="map_export_{export_id}.mbtiles"',
|
||||
},
|
||||
)
|
||||
return web.json_response({"error": "File not ready or not found"}, status=404)
|
||||
return web.json_response(
|
||||
{"error": "File not ready or not found"}, status=404
|
||||
)
|
||||
|
||||
# MIME type fix middleware - ensures JavaScript files have correct Content-Type
|
||||
@web.middleware
|
||||
@@ -4433,7 +4639,9 @@ class ReticulumMeshChat:
|
||||
)
|
||||
|
||||
# add other middlewares
|
||||
app.middlewares.extend([auth_middleware, mime_type_middleware, security_middleware])
|
||||
app.middlewares.extend(
|
||||
[auth_middleware, mime_type_middleware, security_middleware]
|
||||
)
|
||||
|
||||
app.add_routes(routes)
|
||||
app.add_routes(
|
||||
@@ -4606,10 +4814,14 @@ class ReticulumMeshChat:
|
||||
self.config.page_archiver_enabled.set(bool(data["page_archiver_enabled"]))
|
||||
|
||||
if "page_archiver_max_versions" in data:
|
||||
self.config.page_archiver_max_versions.set(int(data["page_archiver_max_versions"]))
|
||||
self.config.page_archiver_max_versions.set(
|
||||
int(data["page_archiver_max_versions"])
|
||||
)
|
||||
|
||||
if "archives_max_storage_gb" in data:
|
||||
self.config.archives_max_storage_gb.set(int(data["archives_max_storage_gb"]))
|
||||
self.config.archives_max_storage_gb.set(
|
||||
int(data["archives_max_storage_gb"])
|
||||
)
|
||||
|
||||
# update crawler settings
|
||||
if "crawler_enabled" in data:
|
||||
@@ -4619,7 +4831,9 @@ class ReticulumMeshChat:
|
||||
self.config.crawler_max_retries.set(int(data["crawler_max_retries"]))
|
||||
|
||||
if "crawler_retry_delay_seconds" in data:
|
||||
self.config.crawler_retry_delay_seconds.set(int(data["crawler_retry_delay_seconds"]))
|
||||
self.config.crawler_retry_delay_seconds.set(
|
||||
int(data["crawler_retry_delay_seconds"])
|
||||
)
|
||||
|
||||
if "crawler_max_concurrent" in data:
|
||||
self.config.crawler_max_concurrent.set(int(data["crawler_max_concurrent"]))
|
||||
@@ -4695,7 +4909,13 @@ class ReticulumMeshChat:
|
||||
return data
|
||||
|
||||
# archives a page version
|
||||
def archive_page(self, destination_hash: str, page_path: str, content: str, is_manual: bool = False):
|
||||
def archive_page(
|
||||
self,
|
||||
destination_hash: str,
|
||||
page_path: str,
|
||||
content: str,
|
||||
is_manual: bool = False,
|
||||
):
|
||||
if not is_manual and not self.config.page_archiver_enabled.get():
|
||||
return
|
||||
|
||||
@@ -4709,7 +4929,9 @@ class ReticulumMeshChat:
|
||||
|
||||
# returns archived page versions for a given destination and path
|
||||
def get_archived_page_versions(self, destination_hash: str, page_path: str):
|
||||
return self.database.misc.get_archived_page_versions(destination_hash, page_path)
|
||||
return self.database.misc.get_archived_page_versions(
|
||||
destination_hash, page_path
|
||||
)
|
||||
|
||||
# flushes all archived pages
|
||||
def flush_all_archived_pages(self):
|
||||
@@ -4780,7 +5002,9 @@ class ReticulumMeshChat:
|
||||
{
|
||||
"id": archive.id,
|
||||
"hash": archive.hash,
|
||||
"created_at": archive.created_at.isoformat() if hasattr(archive.created_at, "isoformat") else str(archive.created_at),
|
||||
"created_at": archive.created_at.isoformat()
|
||||
if hasattr(archive.created_at, "isoformat")
|
||||
else str(archive.created_at),
|
||||
}
|
||||
for archive in archives
|
||||
],
|
||||
@@ -5030,7 +5254,8 @@ class ReticulumMeshChat:
|
||||
has_archives = (
|
||||
len(
|
||||
self.get_archived_page_versions(
|
||||
destination_hash.hex(), page_path,
|
||||
destination_hash.hex(),
|
||||
page_path,
|
||||
),
|
||||
)
|
||||
> 0
|
||||
@@ -5243,7 +5468,9 @@ class ReticulumMeshChat:
|
||||
identity = self.recall_identity(identity_hash)
|
||||
if identity is not None:
|
||||
# get lxmf.delivery destination hash
|
||||
lxmf_destination_hash = RNS.Destination.hash(identity, "lxmf", "delivery").hex()
|
||||
lxmf_destination_hash = RNS.Destination.hash(
|
||||
identity, "lxmf", "delivery"
|
||||
).hex()
|
||||
|
||||
# use custom name if available
|
||||
custom_name = self.database.announces.get_custom_display_name(
|
||||
@@ -5510,7 +5737,9 @@ class ReticulumMeshChat:
|
||||
|
||||
# find lxmf user icon from database
|
||||
lxmf_user_icon = None
|
||||
db_lxmf_user_icon = self.database.misc.get_user_icon(announce["destination_hash"])
|
||||
db_lxmf_user_icon = self.database.misc.get_user_icon(
|
||||
announce["destination_hash"]
|
||||
)
|
||||
if db_lxmf_user_icon:
|
||||
lxmf_user_icon = {
|
||||
"icon_name": db_lxmf_user_icon["icon_name"],
|
||||
@@ -5634,7 +5863,7 @@ class ReticulumMeshChat:
|
||||
created_at = str(db_lxmf_message["created_at"])
|
||||
if created_at and "+" not in created_at and "Z" not in created_at:
|
||||
created_at += "Z"
|
||||
|
||||
|
||||
updated_at = str(db_lxmf_message["updated_at"])
|
||||
if updated_at and "+" not in updated_at and "Z" not in updated_at:
|
||||
updated_at += "Z"
|
||||
@@ -5790,7 +6019,9 @@ class ReticulumMeshChat:
|
||||
print(e)
|
||||
|
||||
# find message from database
|
||||
db_lxmf_message = self.database.messages.get_lxmf_message_by_hash(lxmf_message.hash.hex())
|
||||
db_lxmf_message = self.database.messages.get_lxmf_message_by_hash(
|
||||
lxmf_message.hash.hex()
|
||||
)
|
||||
if not db_lxmf_message:
|
||||
return
|
||||
|
||||
@@ -5820,7 +6051,9 @@ class ReticulumMeshChat:
|
||||
destination_hash = lxmf_message.destination_hash.hex()
|
||||
|
||||
# check if this message is for an alias identity (REPLY PATH)
|
||||
mapping = self.database.messages.get_forwarding_mapping(alias_hash=destination_hash)
|
||||
mapping = self.database.messages.get_forwarding_mapping(
|
||||
alias_hash=destination_hash
|
||||
)
|
||||
|
||||
if mapping:
|
||||
# this is a reply from User C to User B (alias). Forward to User A.
|
||||
@@ -5840,11 +6073,16 @@ class ReticulumMeshChat:
|
||||
|
||||
# check if this message matches a forwarding rule (FORWARD PATH)
|
||||
# we check for rules that apply to the destination of this message
|
||||
rules = self.database.misc.get_forwarding_rules(identity_hash=destination_hash, active_only=True)
|
||||
rules = self.database.misc.get_forwarding_rules(
|
||||
identity_hash=destination_hash, active_only=True
|
||||
)
|
||||
|
||||
for rule in rules:
|
||||
# check source filter if set
|
||||
if rule["source_filter_hash"] and rule["source_filter_hash"] != source_hash:
|
||||
if (
|
||||
rule["source_filter_hash"]
|
||||
and rule["source_filter_hash"] != source_hash
|
||||
):
|
||||
continue
|
||||
|
||||
# find or create mapping for this (Source, Final Recipient) pair
|
||||
@@ -6303,7 +6541,9 @@ class ReticulumMeshChat:
|
||||
# resends all messages that previously failed to send to the provided destination hash
|
||||
async def resend_failed_messages_for_destination(self, destination_hash: str):
|
||||
# get messages that failed to send to this destination
|
||||
failed_messages = self.database.messages.get_failed_messages_for_destination(destination_hash)
|
||||
failed_messages = self.database.messages.get_failed_messages_for_destination(
|
||||
destination_hash
|
||||
)
|
||||
|
||||
# resend failed messages
|
||||
for failed_message in failed_messages:
|
||||
@@ -6361,7 +6601,9 @@ class ReticulumMeshChat:
|
||||
)
|
||||
|
||||
# remove original failed message from database
|
||||
self.database.messages.delete_lxmf_message_by_hash(failed_message["hash"])
|
||||
self.database.messages.delete_lxmf_message_by_hash(
|
||||
failed_message["hash"]
|
||||
)
|
||||
|
||||
# tell all websocket clients that old failed message was deleted so it can remove from ui
|
||||
await self.websocket_broadcast(
|
||||
@@ -6439,7 +6681,9 @@ class ReticulumMeshChat:
|
||||
|
||||
# gets the custom display name a user has set for the provided destination hash
|
||||
def get_custom_destination_display_name(self, destination_hash: str):
|
||||
db_destination_display_name = self.database.announces.get_custom_display_name(destination_hash)
|
||||
db_destination_display_name = self.database.announces.get_custom_display_name(
|
||||
destination_hash
|
||||
)
|
||||
if db_destination_display_name is not None:
|
||||
return db_destination_display_name.display_name
|
||||
|
||||
@@ -6448,10 +6692,14 @@ class ReticulumMeshChat:
|
||||
# get name to show for an lxmf conversation
|
||||
# currently, this will use the app data from the most recent announce
|
||||
# TODO: we should fetch this from our contacts database, when it gets implemented, and if not found, fallback to app data
|
||||
def get_lxmf_conversation_name(self, destination_hash, default_name: str | None = "Anonymous Peer"):
|
||||
def get_lxmf_conversation_name(
|
||||
self, destination_hash, default_name: str | None = "Anonymous Peer"
|
||||
):
|
||||
# get lxmf.delivery announce from database for the provided destination hash
|
||||
results = self.database.announces.get_announces(aspect="lxmf.delivery")
|
||||
lxmf_announce = next((a for a in results if a["destination_hash"] == destination_hash), None)
|
||||
lxmf_announce = next(
|
||||
(a for a in results if a["destination_hash"] == destination_hash), None
|
||||
)
|
||||
|
||||
# if app data is available in database, it should be base64 encoded text that was announced
|
||||
# we will return the parsed lxmf display name as the conversation name
|
||||
@@ -7018,7 +7266,12 @@ def main():
|
||||
return
|
||||
|
||||
enable_https = not args.no_https
|
||||
reticulum_meshchat.run(args.host, args.port, launch_browser=args.headless is False, enable_https=enable_https)
|
||||
reticulum_meshchat.run(
|
||||
args.host,
|
||||
args.port,
|
||||
launch_browser=args.headless is False,
|
||||
enable_https=enable_https,
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
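Translated out of argparse, the headless HTTP-only case of the run() call above reduces to the following sketch; the construction of the instance is not shown in this hunk, so it is assumed to already exist:

# equivalent of a headless launch with HTTPS disabled, mirroring main() above (sketch)
reticulum_meshchat.run("0.0.0.0", 9337, launch_browser=False, enable_https=False)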
||||
@@ -7,7 +7,15 @@ class AnnounceManager:
|
||||
def __init__(self, db: Database):
|
||||
self.db = db
|
||||
|
||||
def upsert_announce(self, reticulum, identity, destination_hash, aspect, app_data, announce_packet_hash):
|
||||
def upsert_announce(
|
||||
self,
|
||||
reticulum,
|
||||
identity,
|
||||
destination_hash,
|
||||
aspect,
|
||||
app_data,
|
||||
announce_packet_hash,
|
||||
):
|
||||
# get rssi, snr and signal quality if available
|
||||
rssi = reticulum.get_packet_rssi(announce_packet_hash)
|
||||
snr = reticulum.get_packet_snr(announce_packet_hash)
|
||||
@@ -15,7 +23,9 @@ class AnnounceManager:
|
||||
|
||||
# prepare data to insert or update
|
||||
data = {
|
||||
"destination_hash": destination_hash.hex() if isinstance(destination_hash, bytes) else destination_hash,
|
||||
"destination_hash": destination_hash.hex()
|
||||
if isinstance(destination_hash, bytes)
|
||||
else destination_hash,
|
||||
"aspect": aspect,
|
||||
"identity_hash": identity.hash.hex(),
|
||||
"identity_public_key": base64.b64encode(identity.get_public_key()).decode(
|
||||
@@ -32,7 +42,14 @@ class AnnounceManager:
|
||||
|
||||
self.db.announces.upsert_announce(data)
|
||||
|
||||
def get_filtered_announces(self, aspect=None, identity_hash=None, destination_hash=None, query=None, blocked_identity_hashes=None):
|
||||
def get_filtered_announces(
|
||||
self,
|
||||
aspect=None,
|
||||
identity_hash=None,
|
||||
destination_hash=None,
|
||||
query=None,
|
||||
blocked_identity_hashes=None,
|
||||
):
|
||||
sql = "SELECT * FROM announces WHERE 1=1"
|
||||
params = []
|
||||
|
||||
@@ -56,4 +73,3 @@ class AnnounceManager:
|
||||
|
||||
sql += " ORDER BY updated_at DESC"
|
||||
return self.db.provider.fetchall(sql, params)
|
||||
|
||||
|
||||
@@ -7,7 +7,9 @@ class ArchiverManager:
def __init__(self, db: Database):
self.db = db

def archive_page(self, destination_hash, page_path, content, max_versions=5, max_storage_gb=1):
def archive_page(
self, destination_hash, page_path, content, max_versions=5, max_storage_gb=1
):
content_hash = hashlib.sha256(content.encode("utf-8")).hexdigest()

# Check if already exists
@@ -27,18 +29,25 @@ class ArchiverManager:
# Delete older versions
to_delete = versions[max_versions:]
for version in to_delete:
self.db.provider.execute("DELETE FROM archived_pages WHERE id = ?", (version["id"],))
self.db.provider.execute(
"DELETE FROM archived_pages WHERE id = ?", (version["id"],)
)

# Enforce total storage limit (approximate)
total_size_row = self.db.provider.fetchone("SELECT SUM(LENGTH(content)) as total_size FROM archived_pages")
total_size_row = self.db.provider.fetchone(
"SELECT SUM(LENGTH(content)) as total_size FROM archived_pages"
)
total_size = total_size_row["total_size"] or 0
max_bytes = max_storage_gb * 1024 * 1024 * 1024

while total_size > max_bytes:
oldest = self.db.provider.fetchone("SELECT id, LENGTH(content) as size FROM archived_pages ORDER BY created_at ASC LIMIT 1")
oldest = self.db.provider.fetchone(
"SELECT id, LENGTH(content) as size FROM archived_pages ORDER BY created_at ASC LIMIT 1"
)
if oldest:
self.db.provider.execute("DELETE FROM archived_pages WHERE id = ?", (oldest["id"],))
self.db.provider.execute(
"DELETE FROM archived_pages WHERE id = ?", (oldest["id"],)
)
total_size -= oldest["size"]
else:
break
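The cap is applied in whole bytes: the default max_storage_gb=1 means 1 * 1024 ** 3 = 1,073,741,824 bytes, and the oldest rows are evicted one at a time until the SUM(LENGTH(content)) total fits. A self-contained sketch of that eviction loop against an in-memory SQLite table, with the table shape assumed from the queries above:

import sqlite3

con = sqlite3.connect(":memory:")
con.row_factory = sqlite3.Row
con.execute("CREATE TABLE archived_pages (id INTEGER PRIMARY KEY, content TEXT, created_at TEXT)")
con.executemany(
    "INSERT INTO archived_pages (content, created_at) VALUES (?, ?)",
    [("a" * 400, "2024-01-01"), ("b" * 400, "2024-01-02"), ("c" * 400, "2024-01-03")],
)

max_bytes = 1000  # stand-in for max_storage_gb * 1024 * 1024 * 1024
total_size = con.execute(
    "SELECT SUM(LENGTH(content)) AS total_size FROM archived_pages"
).fetchone()["total_size"] or 0

while total_size > max_bytes:
    oldest = con.execute(
        "SELECT id, LENGTH(content) AS size FROM archived_pages ORDER BY created_at ASC LIMIT 1"
    ).fetchone()
    if not oldest:
        break
    con.execute("DELETE FROM archived_pages WHERE id = ?", (oldest["id"],))
    total_size -= oldest["size"]

print(total_size)  # 800: the oldest 400-byte page was evicted to get under the 1000-byte cap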
|
||||
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
|
||||
class ConfigManager:
|
||||
def __init__(self, db):
|
||||
self.db = db
|
||||
@@ -6,75 +5,139 @@ class ConfigManager:
|
||||
# all possible config items
|
||||
self.database_version = self.IntConfig(self, "database_version", None)
|
||||
self.display_name = self.StringConfig(self, "display_name", "Anonymous Peer")
|
||||
self.auto_announce_enabled = self.BoolConfig(self, "auto_announce_enabled", False)
|
||||
self.auto_announce_interval_seconds = self.IntConfig(self, "auto_announce_interval_seconds", 0)
|
||||
self.auto_announce_enabled = self.BoolConfig(
|
||||
self, "auto_announce_enabled", False
|
||||
)
|
||||
self.auto_announce_interval_seconds = self.IntConfig(
|
||||
self, "auto_announce_interval_seconds", 0
|
||||
)
|
||||
self.last_announced_at = self.IntConfig(self, "last_announced_at", None)
|
||||
self.theme = self.StringConfig(self, "theme", "light")
|
||||
self.language = self.StringConfig(self, "language", "en")
|
||||
self.auto_resend_failed_messages_when_announce_received = self.BoolConfig(
|
||||
self, "auto_resend_failed_messages_when_announce_received", True,
|
||||
self,
|
||||
"auto_resend_failed_messages_when_announce_received",
|
||||
True,
|
||||
)
|
||||
self.allow_auto_resending_failed_messages_with_attachments = self.BoolConfig(
|
||||
self, "allow_auto_resending_failed_messages_with_attachments", False,
|
||||
self,
|
||||
"allow_auto_resending_failed_messages_with_attachments",
|
||||
False,
|
||||
)
|
||||
self.auto_send_failed_messages_to_propagation_node = self.BoolConfig(
|
||||
self, "auto_send_failed_messages_to_propagation_node", False,
|
||||
self,
|
||||
"auto_send_failed_messages_to_propagation_node",
|
||||
False,
|
||||
)
|
||||
self.show_suggested_community_interfaces = self.BoolConfig(
|
||||
self, "show_suggested_community_interfaces", True,
|
||||
self,
|
||||
"show_suggested_community_interfaces",
|
||||
True,
|
||||
)
|
||||
self.lxmf_delivery_transfer_limit_in_bytes = self.IntConfig(
|
||||
self, "lxmf_delivery_transfer_limit_in_bytes", 1000 * 1000 * 10,
|
||||
self,
|
||||
"lxmf_delivery_transfer_limit_in_bytes",
|
||||
1000 * 1000 * 10,
|
||||
) # 10MB
|
||||
self.lxmf_preferred_propagation_node_destination_hash = self.StringConfig(
|
||||
self, "lxmf_preferred_propagation_node_destination_hash", None,
|
||||
self,
|
||||
"lxmf_preferred_propagation_node_destination_hash",
|
||||
None,
|
||||
)
|
||||
self.lxmf_preferred_propagation_node_auto_sync_interval_seconds = self.IntConfig(
|
||||
self, "lxmf_preferred_propagation_node_auto_sync_interval_seconds", 0,
|
||||
self.lxmf_preferred_propagation_node_auto_sync_interval_seconds = (
|
||||
self.IntConfig(
|
||||
self,
|
||||
"lxmf_preferred_propagation_node_auto_sync_interval_seconds",
|
||||
0,
|
||||
)
|
||||
)
|
||||
self.lxmf_preferred_propagation_node_last_synced_at = self.IntConfig(
|
||||
self, "lxmf_preferred_propagation_node_last_synced_at", None,
|
||||
self,
|
||||
"lxmf_preferred_propagation_node_last_synced_at",
|
||||
None,
|
||||
)
|
||||
self.lxmf_local_propagation_node_enabled = self.BoolConfig(
|
||||
self, "lxmf_local_propagation_node_enabled", False,
|
||||
self,
|
||||
"lxmf_local_propagation_node_enabled",
|
||||
False,
|
||||
)
|
||||
self.lxmf_user_icon_name = self.StringConfig(self, "lxmf_user_icon_name", None)
|
||||
self.lxmf_user_icon_foreground_colour = self.StringConfig(
|
||||
self, "lxmf_user_icon_foreground_colour", None,
|
||||
self,
|
||||
"lxmf_user_icon_foreground_colour",
|
||||
None,
|
||||
)
|
||||
self.lxmf_user_icon_background_colour = self.StringConfig(
|
||||
self, "lxmf_user_icon_background_colour", None,
|
||||
self,
|
||||
"lxmf_user_icon_background_colour",
|
||||
None,
|
||||
)
|
||||
self.lxmf_inbound_stamp_cost = self.IntConfig(
|
||||
self, "lxmf_inbound_stamp_cost", 8,
|
||||
self,
|
||||
"lxmf_inbound_stamp_cost",
|
||||
8,
|
||||
) # for direct delivery messages
|
||||
self.lxmf_propagation_node_stamp_cost = self.IntConfig(
|
||||
self, "lxmf_propagation_node_stamp_cost", 16,
|
||||
self,
|
||||
"lxmf_propagation_node_stamp_cost",
|
||||
16,
|
||||
) # for propagation node messages
|
||||
self.page_archiver_enabled = self.BoolConfig(self, "page_archiver_enabled", True)
|
||||
self.page_archiver_max_versions = self.IntConfig(self, "page_archiver_max_versions", 5)
|
||||
self.archives_max_storage_gb = self.IntConfig(self, "archives_max_storage_gb", 1)
|
||||
self.page_archiver_enabled = self.BoolConfig(
|
||||
self, "page_archiver_enabled", True
|
||||
)
|
||||
self.page_archiver_max_versions = self.IntConfig(
|
||||
self, "page_archiver_max_versions", 5
|
||||
)
|
||||
self.archives_max_storage_gb = self.IntConfig(
|
||||
self, "archives_max_storage_gb", 1
|
||||
)
|
||||
self.crawler_enabled = self.BoolConfig(self, "crawler_enabled", False)
|
||||
self.crawler_max_retries = self.IntConfig(self, "crawler_max_retries", 3)
|
||||
self.crawler_retry_delay_seconds = self.IntConfig(self, "crawler_retry_delay_seconds", 3600)
|
||||
self.crawler_retry_delay_seconds = self.IntConfig(
|
||||
self, "crawler_retry_delay_seconds", 3600
|
||||
)
|
||||
self.crawler_max_concurrent = self.IntConfig(self, "crawler_max_concurrent", 1)
|
||||
self.auth_enabled = self.BoolConfig(self, "auth_enabled", False)
|
||||
self.auth_password_hash = self.StringConfig(self, "auth_password_hash", None)
|
||||
self.auth_session_secret = self.StringConfig(self, "auth_session_secret", None)
|
||||
|
||||
# voicemail config
|
||||
self.voicemail_enabled = self.BoolConfig(self, "voicemail_enabled", False)
|
||||
self.voicemail_greeting = self.StringConfig(
|
||||
self,
|
||||
"voicemail_greeting",
|
||||
"Hello, I am not available right now. Please leave a message after the beep.",
|
||||
)
|
||||
self.voicemail_auto_answer_delay_seconds = self.IntConfig(
|
||||
self,
|
||||
"voicemail_auto_answer_delay_seconds",
|
||||
20,
|
||||
)
|
||||
self.voicemail_max_recording_seconds = self.IntConfig(
|
||||
self,
|
||||
"voicemail_max_recording_seconds",
|
||||
60,
|
||||
)
|
||||
|
||||
# map config
|
||||
self.map_offline_enabled = self.BoolConfig(self, "map_offline_enabled", False)
|
||||
self.map_offline_path = self.StringConfig(self, "map_offline_path", None)
|
||||
self.map_mbtiles_dir = self.StringConfig(self, "map_mbtiles_dir", None)
|
||||
self.map_tile_cache_enabled = self.BoolConfig(self, "map_tile_cache_enabled", True)
|
||||
self.map_tile_cache_enabled = self.BoolConfig(
|
||||
self, "map_tile_cache_enabled", True
|
||||
)
|
||||
self.map_default_lat = self.StringConfig(self, "map_default_lat", "0.0")
|
||||
self.map_default_lon = self.StringConfig(self, "map_default_lon", "0.0")
|
||||
self.map_default_zoom = self.IntConfig(self, "map_default_zoom", 2)
|
||||
self.map_tile_server_url = self.StringConfig(
|
||||
self, "map_tile_server_url", "https://tile.openstreetmap.org/{z}/{x}/{y}.png",
|
||||
self,
|
||||
"map_tile_server_url",
|
||||
"https://tile.openstreetmap.org/{z}/{x}/{y}.png",
|
||||
)
|
||||
self.map_nominatim_api_url = self.StringConfig(
|
||||
self, "map_nominatim_api_url", "https://nominatim.openstreetmap.org",
|
||||
self,
|
||||
"map_nominatim_api_url",
|
||||
"https://nominatim.openstreetmap.org",
|
||||
)
|
||||
|
||||
def get(self, key: str, default_value=None) -> str | None:
|
||||
@@ -128,4 +191,3 @@ class ConfigManager:
|
||||
|
||||
def set(self, value: int):
|
||||
self.manager.set(self.key, str(value))
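All of the typed wrappers above go through the same string-backed get/set on the config table; calls elsewhere in this changeset suggest usage like the following. How .get() coerces the stored string back to int/bool is not shown in this hunk, so treat that part as an assumption:

# usage sketch, mirroring calls seen elsewhere in this diff
config = ConfigManager(db)

config.display_name.set("Anonymous Peer")   # StringConfig
config.page_archiver_max_versions.set(5)    # IntConfig stores str(value), as shown above
config.voicemail_enabled.set(True)          # BoolConfig

if config.crawler_enabled.get():
    delay = config.crawler_retry_delay_seconds.get()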
|
||||
|
||||
|
||||
@@ -6,6 +6,7 @@ from .misc import MiscDAO
|
||||
from .provider import DatabaseProvider
|
||||
from .schema import DatabaseSchema
|
||||
from .telephone import TelephoneDAO
|
||||
from .voicemails import VoicemailDAO
|
||||
|
||||
|
||||
class Database:
|
||||
@@ -17,12 +18,15 @@ class Database:
|
||||
self.announces = AnnounceDAO(self.provider)
|
||||
self.misc = MiscDAO(self.provider)
|
||||
self.telephone = TelephoneDAO(self.provider)
|
||||
self.voicemails = VoicemailDAO(self.provider)
|
||||
|
||||
def initialize(self):
|
||||
self.schema.initialize()
|
||||
|
||||
def migrate_from_legacy(self, reticulum_config_dir, identity_hash_hex):
|
||||
migrator = LegacyMigrator(self.provider, reticulum_config_dir, identity_hash_hex)
|
||||
migrator = LegacyMigrator(
|
||||
self.provider, reticulum_config_dir, identity_hash_hex
|
||||
)
|
||||
if migrator.should_migrate():
|
||||
return migrator.migrate()
|
||||
return False
|
||||
@@ -32,4 +36,3 @@ class Database:
|
||||
|
||||
def close(self):
|
||||
self.provider.close()
|
||||
|
||||
|
||||
@@ -13,16 +13,26 @@ class AnnounceDAO:
|
||||
data = dict(data)
|
||||
|
||||
fields = [
|
||||
"destination_hash", "aspect", "identity_hash", "identity_public_key",
|
||||
"app_data", "rssi", "snr", "quality",
|
||||
"destination_hash",
|
||||
"aspect",
|
||||
"identity_hash",
|
||||
"identity_public_key",
|
||||
"app_data",
|
||||
"rssi",
|
||||
"snr",
|
||||
"quality",
|
||||
]
|
||||
# These are safe as they are from a hardcoded list
|
||||
columns = ", ".join(fields)
|
||||
placeholders = ", ".join(["?"] * len(fields))
|
||||
update_set = ", ".join([f"{f} = EXCLUDED.{f}" for f in fields if f != "destination_hash"])
|
||||
update_set = ", ".join(
|
||||
[f"{f} = EXCLUDED.{f}" for f in fields if f != "destination_hash"]
|
||||
)
|
||||
|
||||
query = f"INSERT INTO announces ({columns}, updated_at) VALUES ({placeholders}, ?) " \
|
||||
f"ON CONFLICT(destination_hash) DO UPDATE SET {update_set}, updated_at = EXCLUDED.updated_at" # noqa: S608
|
||||
query = (
|
||||
f"INSERT INTO announces ({columns}, updated_at) VALUES ({placeholders}, ?) "
|
||||
f"ON CONFLICT(destination_hash) DO UPDATE SET {update_set}, updated_at = EXCLUDED.updated_at"
|
||||
) # noqa: S608
|
||||
|
||||
params = [data.get(f) for f in fields]
|
||||
params.append(datetime.now(UTC))
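Because fields is a fixed list, the statement built above always renders to the same SQL; rebuilding the string outside the class makes the ON CONFLICT upsert easier to read:

fields = [
    "destination_hash", "aspect", "identity_hash", "identity_public_key",
    "app_data", "rssi", "snr", "quality",
]
columns = ", ".join(fields)
placeholders = ", ".join(["?"] * len(fields))
update_set = ", ".join(f"{f} = EXCLUDED.{f}" for f in fields if f != "destination_hash")

print(
    f"INSERT INTO announces ({columns}, updated_at) VALUES ({placeholders}, ?) "
    f"ON CONFLICT(destination_hash) DO UPDATE SET {update_set}, updated_at = EXCLUDED.updated_at"
)
# SQLite resolves each EXCLUDED.<column> to the value the failed INSERT tried to write,
# so an existing row for the same destination_hash is refreshed in place.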
|
||||
@@ -30,13 +40,19 @@ class AnnounceDAO:
|
||||
|
||||
def get_announces(self, aspect=None):
|
||||
if aspect:
|
||||
return self.provider.fetchall("SELECT * FROM announces WHERE aspect = ?", (aspect,))
|
||||
return self.provider.fetchall(
|
||||
"SELECT * FROM announces WHERE aspect = ?", (aspect,)
|
||||
)
|
||||
return self.provider.fetchall("SELECT * FROM announces")
|
||||
|
||||
def get_announce_by_hash(self, destination_hash):
|
||||
return self.provider.fetchone("SELECT * FROM announces WHERE destination_hash = ?", (destination_hash,))
|
||||
return self.provider.fetchone(
|
||||
"SELECT * FROM announces WHERE destination_hash = ?", (destination_hash,)
|
||||
)
|
||||
|
||||
def get_filtered_announces(self, aspect=None, search_term=None, limit=None, offset=0):
|
||||
def get_filtered_announces(
|
||||
self, aspect=None, search_term=None, limit=None, offset=0
|
||||
):
|
||||
query = "SELECT * FROM announces WHERE 1=1"
|
||||
params = []
|
||||
if aspect:
|
||||
@@ -58,33 +74,49 @@ class AnnounceDAO:
|
||||
# Custom Display Names
|
||||
def upsert_custom_display_name(self, destination_hash, display_name):
|
||||
now = datetime.now(UTC)
|
||||
self.provider.execute("""
|
||||
self.provider.execute(
|
||||
"""
|
||||
INSERT INTO custom_destination_display_names (destination_hash, display_name, updated_at)
|
||||
VALUES (?, ?, ?)
|
||||
ON CONFLICT(destination_hash) DO UPDATE SET display_name = EXCLUDED.display_name, updated_at = EXCLUDED.updated_at
|
||||
""", (destination_hash, display_name, now))
|
||||
""",
|
||||
(destination_hash, display_name, now),
|
||||
)
|
||||
|
||||
def get_custom_display_name(self, destination_hash):
|
||||
row = self.provider.fetchone("SELECT display_name FROM custom_destination_display_names WHERE destination_hash = ?", (destination_hash,))
|
||||
row = self.provider.fetchone(
|
||||
"SELECT display_name FROM custom_destination_display_names WHERE destination_hash = ?",
|
||||
(destination_hash,),
|
||||
)
|
||||
return row["display_name"] if row else None
|
||||
|
||||
def delete_custom_display_name(self, destination_hash):
|
||||
self.provider.execute("DELETE FROM custom_destination_display_names WHERE destination_hash = ?", (destination_hash,))
|
||||
self.provider.execute(
|
||||
"DELETE FROM custom_destination_display_names WHERE destination_hash = ?",
|
||||
(destination_hash,),
|
||||
)
|
||||
|
||||
# Favourites
|
||||
def upsert_favourite(self, destination_hash, display_name, aspect):
|
||||
now = datetime.now(UTC)
|
||||
self.provider.execute("""
|
||||
self.provider.execute(
|
||||
"""
|
||||
INSERT INTO favourite_destinations (destination_hash, display_name, aspect, updated_at)
|
||||
VALUES (?, ?, ?, ?)
|
||||
ON CONFLICT(destination_hash) DO UPDATE SET display_name = EXCLUDED.display_name, aspect = EXCLUDED.aspect, updated_at = EXCLUDED.updated_at
|
||||
""", (destination_hash, display_name, aspect, now))
|
||||
""",
|
||||
(destination_hash, display_name, aspect, now),
|
||||
)
|
||||
|
||||
def get_favourites(self, aspect=None):
|
||||
if aspect:
|
||||
return self.provider.fetchall("SELECT * FROM favourite_destinations WHERE aspect = ?", (aspect,))
|
||||
return self.provider.fetchall(
|
||||
"SELECT * FROM favourite_destinations WHERE aspect = ?", (aspect,)
|
||||
)
|
||||
return self.provider.fetchall("SELECT * FROM favourite_destinations")
|
||||
|
||||
def delete_favourite(self, destination_hash):
|
||||
self.provider.execute("DELETE FROM favourite_destinations WHERE destination_hash = ?", (destination_hash,))
|
||||
|
||||
self.provider.execute(
|
||||
"DELETE FROM favourite_destinations WHERE destination_hash = ?",
|
||||
(destination_hash,),
|
||||
)
|
||||
|
||||
@@ -24,4 +24,3 @@ class ConfigDAO:
|
||||
|
||||
def delete(self, key):
|
||||
self.provider.execute("DELETE FROM config WHERE key = ?", (key,))
|
||||
|
||||
|
||||
@@ -8,8 +8,7 @@ class LegacyMigrator:
|
||||
self.identity_hash_hex = identity_hash_hex
|
||||
|
||||
def get_legacy_db_path(self):
|
||||
"""Detect the path to the legacy database based on the Reticulum config directory.
|
||||
"""
|
||||
"""Detect the path to the legacy database based on the Reticulum config directory."""
|
||||
possible_dirs = []
|
||||
if self.reticulum_config_dir:
|
||||
possible_dirs.append(self.reticulum_config_dir)
|
||||
@@ -21,7 +20,9 @@ class LegacyMigrator:
|
||||
|
||||
# Check each directory
|
||||
for config_dir in possible_dirs:
|
||||
legacy_path = os.path.join(config_dir, "identities", self.identity_hash_hex, "database.db")
|
||||
legacy_path = os.path.join(
|
||||
config_dir, "identities", self.identity_hash_hex, "database.db"
|
||||
)
|
||||
if os.path.exists(legacy_path):
|
||||
# Ensure it's not the same as our current DB path
|
||||
# (though this is unlikely given the different base directories)
|
||||
@@ -58,8 +59,7 @@ class LegacyMigrator:
|
||||
return True
|
||||
|
||||
def migrate(self):
|
||||
"""Perform the migration from the legacy database.
|
||||
"""
|
||||
"""Perform the migration from the legacy database."""
|
||||
legacy_path = self.get_legacy_db_path()
|
||||
if not legacy_path:
|
||||
return False
|
||||
@@ -100,11 +100,23 @@ class LegacyMigrator:
|
||||
if res:
|
||||
# Get columns from both databases to ensure compatibility
|
||||
# These PRAGMA calls are safe as they use controlled table/alias names
|
||||
legacy_columns = [row["name"] for row in self.provider.fetchall(f"PRAGMA {alias}.table_info({table})")]
|
||||
current_columns = [row["name"] for row in self.provider.fetchall(f"PRAGMA table_info({table})")]
|
||||
legacy_columns = [
|
||||
row["name"]
|
||||
for row in self.provider.fetchall(
|
||||
f"PRAGMA {alias}.table_info({table})"
|
||||
)
|
||||
]
|
||||
current_columns = [
|
||||
row["name"]
|
||||
for row in self.provider.fetchall(
|
||||
f"PRAGMA table_info({table})"
|
||||
)
|
||||
]
|
||||
|
||||
# Find common columns
|
||||
common_columns = [col for col in legacy_columns if col in current_columns]
|
||||
common_columns = [
|
||||
col for col in legacy_columns if col in current_columns
|
||||
]
|
||||
|
||||
if common_columns:
|
||||
cols_str = ", ".join(common_columns)
|
||||
@@ -112,9 +124,13 @@ class LegacyMigrator:
|
||||
# The table and columns are controlled by us
|
||||
migrate_query = f"INSERT OR IGNORE INTO {table} ({cols_str}) SELECT {cols_str} FROM {alias}.{table}" # noqa: S608
|
||||
self.provider.execute(migrate_query)
|
||||
print(f" - Migrated table: {table} ({len(common_columns)} columns)")
|
||||
print(
|
||||
f" - Migrated table: {table} ({len(common_columns)} columns)"
|
||||
)
|
||||
else:
|
||||
print(f" - Skipping table {table}: No common columns found")
|
||||
print(
|
||||
f" - Skipping table {table}: No common columns found"
|
||||
)
|
||||
except Exception as e:
|
||||
print(f" - Failed to migrate table {table}: {e}")
|
||||
|
||||
|
||||
@@ -15,17 +15,33 @@ class MessageDAO:
|
||||
|
||||
# Ensure all required fields are present and handle defaults
|
||||
fields = [
|
||||
"hash", "source_hash", "destination_hash", "state", "progress",
|
||||
"is_incoming", "method", "delivery_attempts", "next_delivery_attempt_at",
|
||||
"title", "content", "fields", "timestamp", "rssi", "snr", "quality", "is_spam",
|
||||
"hash",
|
||||
"source_hash",
|
||||
"destination_hash",
|
||||
"state",
|
||||
"progress",
|
||||
"is_incoming",
|
||||
"method",
|
||||
"delivery_attempts",
|
||||
"next_delivery_attempt_at",
|
||||
"title",
|
||||
"content",
|
||||
"fields",
|
||||
"timestamp",
|
||||
"rssi",
|
||||
"snr",
|
||||
"quality",
|
||||
"is_spam",
|
||||
]
|
||||
|
||||
columns = ", ".join(fields)
|
||||
placeholders = ", ".join(["?"] * len(fields))
|
||||
update_set = ", ".join([f"{f} = EXCLUDED.{f}" for f in fields if f != "hash"])
|
||||
|
||||
query = f"INSERT INTO lxmf_messages ({columns}, updated_at) VALUES ({placeholders}, ?) " \
|
||||
f"ON CONFLICT(hash) DO UPDATE SET {update_set}, updated_at = EXCLUDED.updated_at" # noqa: S608
|
||||
query = (
|
||||
f"INSERT INTO lxmf_messages ({columns}, updated_at) VALUES ({placeholders}, ?) "
|
||||
f"ON CONFLICT(hash) DO UPDATE SET {update_set}, updated_at = EXCLUDED.updated_at"
|
||||
) # noqa: S608
|
||||
|
||||
params = []
|
||||
for f in fields:
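
For reference, the field-list/placeholder construction above just expands into an ordinary INSERT ... ON CONFLICT statement. A minimal sketch of the same pattern with a shortened, hypothetical field list (not the full lxmf_messages column set):

fields = ["hash", "title", "content"]
columns = ", ".join(fields)
placeholders = ", ".join(["?"] * len(fields))
update_set = ", ".join(f"{f} = EXCLUDED.{f}" for f in fields if f != "hash")
query = (
    f"INSERT INTO lxmf_messages ({columns}, updated_at) VALUES ({placeholders}, ?) "
    f"ON CONFLICT(hash) DO UPDATE SET {update_set}, updated_at = EXCLUDED.updated_at"
)
# query is now:
# INSERT INTO lxmf_messages (hash, title, content, updated_at) VALUES (?, ?, ?, ?)
# ON CONFLICT(hash) DO UPDATE SET title = EXCLUDED.title, content = EXCLUDED.content, updated_at = EXCLUDED.updated_at
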
@@ -38,10 +54,14 @@ class MessageDAO:
|
||||
self.provider.execute(query, params)
|
||||
|
||||
def get_lxmf_message_by_hash(self, message_hash):
|
||||
return self.provider.fetchone("SELECT * FROM lxmf_messages WHERE hash = ?", (message_hash,))
|
||||
return self.provider.fetchone(
|
||||
"SELECT * FROM lxmf_messages WHERE hash = ?", (message_hash,)
|
||||
)
|
||||
|
||||
def delete_lxmf_message_by_hash(self, message_hash):
|
||||
self.provider.execute("DELETE FROM lxmf_messages WHERE hash = ?", (message_hash,))
|
||||
self.provider.execute(
|
||||
"DELETE FROM lxmf_messages WHERE hash = ?", (message_hash,)
|
||||
)
|
||||
|
||||
def get_conversation_messages(self, destination_hash, limit=100, offset=0):
|
||||
return self.provider.fetchall(
|
||||
@@ -73,13 +93,16 @@ class MessageDAO:
|
||||
)
|
||||
|
||||
def is_conversation_unread(self, destination_hash):
|
||||
row = self.provider.fetchone("""
|
||||
row = self.provider.fetchone(
|
||||
"""
|
||||
SELECT m.timestamp, r.last_read_at
|
||||
FROM lxmf_messages m
|
||||
LEFT JOIN lxmf_conversation_read_state r ON r.destination_hash = ?
|
||||
WHERE (m.destination_hash = ? OR m.source_hash = ?)
|
||||
ORDER BY m.timestamp DESC LIMIT 1
|
||||
""", (destination_hash, destination_hash, destination_hash))
|
||||
""",
|
||||
(destination_hash, destination_hash, destination_hash),
|
||||
)
|
||||
|
||||
if not row:
|
||||
return False
|
||||
@@ -93,13 +116,16 @@ class MessageDAO:
|
||||
return row["timestamp"] > last_read_at.timestamp()
|
||||
|
||||
def mark_stuck_messages_as_failed(self):
|
||||
self.provider.execute("""
|
||||
self.provider.execute(
|
||||
"""
|
||||
UPDATE lxmf_messages
|
||||
SET state = 'failed', updated_at = ?
|
||||
WHERE state = 'outbound'
|
||||
OR (state = 'sent' AND method = 'opportunistic')
|
||||
OR state = 'sending'
|
||||
""", (datetime.now(UTC).isoformat(),))
|
||||
""",
|
||||
(datetime.now(UTC).isoformat(),),
|
||||
)
|
||||
|
||||
def get_failed_messages_for_destination(self, destination_hash):
|
||||
return self.provider.fetchall(
|
||||
@@ -115,9 +141,14 @@ class MessageDAO:
|
||||
return row["count"] if row else 0
|
||||
|
||||
# Forwarding Mappings
|
||||
def get_forwarding_mapping(self, alias_hash=None, original_sender_hash=None, final_recipient_hash=None):
|
||||
def get_forwarding_mapping(
|
||||
self, alias_hash=None, original_sender_hash=None, final_recipient_hash=None
|
||||
):
|
||||
if alias_hash:
|
||||
return self.provider.fetchone("SELECT * FROM lxmf_forwarding_mappings WHERE alias_hash = ?", (alias_hash,))
|
||||
return self.provider.fetchone(
|
||||
"SELECT * FROM lxmf_forwarding_mappings WHERE alias_hash = ?",
|
||||
(alias_hash,),
|
||||
)
|
||||
if original_sender_hash and final_recipient_hash:
|
||||
return self.provider.fetchone(
|
||||
"SELECT * FROM lxmf_forwarding_mappings WHERE original_sender_hash = ? AND final_recipient_hash = ?",
|
||||
@@ -131,8 +162,11 @@ class MessageDAO:
|
||||
data = dict(data)
|
||||
|
||||
fields = [
|
||||
"alias_identity_private_key", "alias_hash", "original_sender_hash",
|
||||
"final_recipient_hash", "original_destination_hash",
|
||||
"alias_identity_private_key",
|
||||
"alias_hash",
|
||||
"original_sender_hash",
|
||||
"final_recipient_hash",
|
||||
"original_destination_hash",
|
||||
]
|
||||
columns = ", ".join(fields)
|
||||
placeholders = ", ".join(["?"] * len(fields))
|
||||
@@ -143,4 +177,3 @@ class MessageDAO:
|
||||
|
||||
def get_all_forwarding_mappings(self):
|
||||
return self.provider.fetchall("SELECT * FROM lxmf_forwarding_mappings")
|
||||
|
||||
|
||||
@@ -15,13 +15,22 @@ class MiscDAO:
|
||||
)
|
||||
|
||||
def is_destination_blocked(self, destination_hash):
|
||||
return self.provider.fetchone("SELECT 1 FROM blocked_destinations WHERE destination_hash = ?", (destination_hash,)) is not None
|
||||
return (
|
||||
self.provider.fetchone(
|
||||
"SELECT 1 FROM blocked_destinations WHERE destination_hash = ?",
|
||||
(destination_hash,),
|
||||
)
|
||||
is not None
|
||||
)
|
||||
|
||||
def get_blocked_destinations(self):
|
||||
return self.provider.fetchall("SELECT * FROM blocked_destinations")
|
||||
|
||||
def delete_blocked_destination(self, destination_hash):
|
||||
self.provider.execute("DELETE FROM blocked_destinations WHERE destination_hash = ?", (destination_hash,))
|
||||
self.provider.execute(
|
||||
"DELETE FROM blocked_destinations WHERE destination_hash = ?",
|
||||
(destination_hash,),
|
||||
)
|
||||
|
||||
# Spam Keywords
|
||||
def add_spam_keyword(self, keyword):
|
||||
@@ -45,9 +54,12 @@ class MiscDAO:
|
||||
return False
|
||||
|
||||
# User Icons
|
||||
def update_lxmf_user_icon(self, destination_hash, icon_name, foreground_colour, background_colour):
|
||||
def update_lxmf_user_icon(
|
||||
self, destination_hash, icon_name, foreground_colour, background_colour
|
||||
):
|
||||
now = datetime.now(UTC)
|
||||
self.provider.execute("""
|
||||
self.provider.execute(
|
||||
"""
|
||||
INSERT INTO lxmf_user_icons (destination_hash, icon_name, foreground_colour, background_colour, updated_at)
|
||||
VALUES (?, ?, ?, ?, ?)
|
||||
ON CONFLICT(destination_hash) DO UPDATE SET
|
||||
@@ -55,10 +67,15 @@ class MiscDAO:
|
||||
foreground_colour = EXCLUDED.foreground_colour,
|
||||
background_colour = EXCLUDED.background_colour,
|
||||
updated_at = EXCLUDED.updated_at
|
||||
""", (destination_hash, icon_name, foreground_colour, background_colour, now))
|
||||
""",
|
||||
(destination_hash, icon_name, foreground_colour, background_colour, now),
|
||||
)
|
||||
|
||||
def get_user_icon(self, destination_hash):
|
||||
return self.provider.fetchone("SELECT * FROM lxmf_user_icons WHERE destination_hash = ?", (destination_hash,))
|
||||
return self.provider.fetchone(
|
||||
"SELECT * FROM lxmf_user_icons WHERE destination_hash = ?",
|
||||
(destination_hash,),
|
||||
)
|
||||
|
||||
# Forwarding Rules
|
||||
def get_forwarding_rules(self, identity_hash=None, active_only=False):
|
||||
@@ -71,18 +88,31 @@ class MiscDAO:
|
||||
query += " AND is_active = 1"
|
||||
return self.provider.fetchall(query, params)
|
||||
|
||||
def create_forwarding_rule(self, identity_hash, forward_to_hash, source_filter_hash, is_active=True):
|
||||
def create_forwarding_rule(
|
||||
self, identity_hash, forward_to_hash, source_filter_hash, is_active=True
|
||||
):
|
||||
now = datetime.now(UTC)
|
||||
self.provider.execute(
|
||||
"INSERT INTO lxmf_forwarding_rules (identity_hash, forward_to_hash, source_filter_hash, is_active, updated_at) VALUES (?, ?, ?, ?, ?)",
|
||||
(identity_hash, forward_to_hash, source_filter_hash, 1 if is_active else 0, now),
|
||||
(
|
||||
identity_hash,
|
||||
forward_to_hash,
|
||||
source_filter_hash,
|
||||
1 if is_active else 0,
|
||||
now,
|
||||
),
|
||||
)
|
||||
|
||||
def delete_forwarding_rule(self, rule_id):
|
||||
self.provider.execute("DELETE FROM lxmf_forwarding_rules WHERE id = ?", (rule_id,))
|
||||
self.provider.execute(
|
||||
"DELETE FROM lxmf_forwarding_rules WHERE id = ?", (rule_id,)
|
||||
)
|
||||
|
||||
def toggle_forwarding_rule(self, rule_id):
|
||||
self.provider.execute("UPDATE lxmf_forwarding_rules SET is_active = NOT is_active WHERE id = ?", (rule_id,))
|
||||
self.provider.execute(
|
||||
"UPDATE lxmf_forwarding_rules SET is_active = NOT is_active WHERE id = ?",
|
||||
(rule_id,),
|
||||
)
|
||||
|
||||
# Archived Pages
|
||||
def archive_page(self, destination_hash, page_path, content, page_hash):
|
||||
@@ -105,7 +135,9 @@ class MiscDAO:
|
||||
params.append(destination_hash)
|
||||
if query:
|
||||
like_term = f"%{query}%"
|
||||
sql += " AND (destination_hash LIKE ? OR page_path LIKE ? OR content LIKE ?)"
|
||||
sql += (
|
||||
" AND (destination_hash LIKE ? OR page_path LIKE ? OR content LIKE ?)"
|
||||
)
|
||||
params.extend([like_term, like_term, like_term])
|
||||
|
||||
sql += " ORDER BY created_at DESC"
|
||||
@@ -113,25 +145,41 @@ class MiscDAO:
|
||||
|
||||
def delete_archived_pages(self, destination_hash=None, page_path=None):
|
||||
if destination_hash and page_path:
|
||||
self.provider.execute("DELETE FROM archived_pages WHERE destination_hash = ? AND page_path = ?", (destination_hash, page_path))
|
||||
self.provider.execute(
|
||||
"DELETE FROM archived_pages WHERE destination_hash = ? AND page_path = ?",
|
||||
(destination_hash, page_path),
|
||||
)
|
||||
else:
|
||||
self.provider.execute("DELETE FROM archived_pages")
|
||||
|
||||
# Crawl Tasks
|
||||
def upsert_crawl_task(self, destination_hash, page_path, status="pending", retry_count=0):
|
||||
self.provider.execute("""
|
||||
def upsert_crawl_task(
|
||||
self, destination_hash, page_path, status="pending", retry_count=0
|
||||
):
|
||||
self.provider.execute(
|
||||
"""
|
||||
INSERT INTO crawl_tasks (destination_hash, page_path, status, retry_count)
|
||||
VALUES (?, ?, ?, ?)
|
||||
ON CONFLICT(destination_hash, page_path) DO UPDATE SET
|
||||
status = EXCLUDED.status,
|
||||
retry_count = EXCLUDED.retry_count
|
||||
""", (destination_hash, page_path, status, retry_count))
|
||||
""",
|
||||
(destination_hash, page_path, status, retry_count),
|
||||
)
|
||||
|
||||
def get_pending_crawl_tasks(self):
|
||||
return self.provider.fetchall("SELECT * FROM crawl_tasks WHERE status = 'pending'")
|
||||
return self.provider.fetchall(
|
||||
"SELECT * FROM crawl_tasks WHERE status = 'pending'"
|
||||
)
|
||||
|
||||
def update_crawl_task(self, task_id, **kwargs):
|
||||
allowed_keys = {"destination_hash", "page_path", "status", "retry_count", "updated_at"}
|
||||
allowed_keys = {
|
||||
"destination_hash",
|
||||
"page_path",
|
||||
"status",
|
||||
"retry_count",
|
||||
"updated_at",
|
||||
}
|
||||
filtered_kwargs = {k: v for k, v in kwargs.items() if k in allowed_keys}
|
||||
|
||||
if not filtered_kwargs:
|
||||
@@ -150,5 +198,6 @@ class MiscDAO:
|
||||
)
|
||||
|
||||
def get_archived_page_by_id(self, archive_id):
|
||||
return self.provider.fetchone("SELECT * FROM archived_pages WHERE id = ?", (archive_id,))
|
||||
|
||||
return self.provider.fetchone(
|
||||
"SELECT * FROM archived_pages WHERE id = ?", (archive_id,)
|
||||
)
|
||||
|
||||
@@ -23,7 +23,9 @@ class DatabaseProvider:
|
||||
@property
|
||||
def connection(self):
|
||||
if not hasattr(self._local, "connection"):
|
||||
self._local.connection = sqlite3.connect(self.db_path, check_same_thread=False)
|
||||
self._local.connection = sqlite3.connect(
|
||||
self.db_path, check_same_thread=False
|
||||
)
|
||||
self._local.connection.row_factory = sqlite3.Row
|
||||
# Enable WAL mode for better concurrency
|
||||
self._local.connection.execute("PRAGMA journal_mode=WAL")
|
||||
@@ -62,4 +64,3 @@ class DatabaseProvider:
|
||||
|
||||
def checkpoint(self):
|
||||
return self.fetchall("PRAGMA wal_checkpoint(TRUNCATE)")
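
The connection property above creates one SQLite connection per thread on first use and enables WAL journaling so concurrent readers and a writer interfere less. A minimal standalone sketch of that pattern (class and variable names here are illustrative, not part of MeshChat):

import sqlite3
import threading

class ThreadLocalDB:
    def __init__(self, db_path):
        self.db_path = db_path
        self._local = threading.local()

    @property
    def connection(self):
        # Each thread gets its own connection the first time it asks for one.
        if not hasattr(self._local, "connection"):
            conn = sqlite3.connect(self.db_path, check_same_thread=False)
            conn.row_factory = sqlite3.Row
            conn.execute("PRAGMA journal_mode=WAL")  # better read/write concurrency
            self._local.connection = conn
        return self._local.connection
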
@@ -2,7 +2,7 @@ from .provider import DatabaseProvider


class DatabaseSchema:
    LATEST_VERSION = 12
    LATEST_VERSION = 13

    def __init__(self, provider: DatabaseProvider):
        self.provider = provider

@@ -16,7 +16,9 @@ class DatabaseSchema:
        self.migrate(current_version)

    def _get_current_version(self):
        row = self.provider.fetchone("SELECT value FROM config WHERE key = ?", ("database_version",))
        row = self.provider.fetchone(
            "SELECT value FROM config WHERE key = ?", ("database_version",)
        )
        if row:
            return int(row["value"])
        return 0

@@ -189,21 +191,45 @@ class DatabaseSchema:
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
)
|
||||
""",
|
||||
"voicemails": """
|
||||
CREATE TABLE IF NOT EXISTS voicemails (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
remote_identity_hash TEXT,
|
||||
remote_identity_name TEXT,
|
||||
filename TEXT,
|
||||
duration_seconds INTEGER,
|
||||
is_read INTEGER DEFAULT 0,
|
||||
timestamp REAL,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
)
|
||||
""",
|
||||
}
|
||||
|
||||
for table_name, create_sql in tables.items():
|
||||
self.provider.execute(create_sql)
|
||||
# Create indexes that were present
|
||||
if table_name == "announces":
|
||||
self.provider.execute("CREATE INDEX IF NOT EXISTS idx_announces_aspect ON announces(aspect)")
|
||||
self.provider.execute("CREATE INDEX IF NOT EXISTS idx_announces_identity_hash ON announces(identity_hash)")
|
||||
self.provider.execute(
|
||||
"CREATE INDEX IF NOT EXISTS idx_announces_aspect ON announces(aspect)"
|
||||
)
|
||||
self.provider.execute(
|
||||
"CREATE INDEX IF NOT EXISTS idx_announces_identity_hash ON announces(identity_hash)"
|
||||
)
|
||||
elif table_name == "lxmf_messages":
|
||||
self.provider.execute("CREATE INDEX IF NOT EXISTS idx_lxmf_messages_source_hash ON lxmf_messages(source_hash)")
|
||||
self.provider.execute("CREATE INDEX IF NOT EXISTS idx_lxmf_messages_destination_hash ON lxmf_messages(destination_hash)")
|
||||
self.provider.execute(
|
||||
"CREATE INDEX IF NOT EXISTS idx_lxmf_messages_source_hash ON lxmf_messages(source_hash)"
|
||||
)
|
||||
self.provider.execute(
|
||||
"CREATE INDEX IF NOT EXISTS idx_lxmf_messages_destination_hash ON lxmf_messages(destination_hash)"
|
||||
)
|
||||
elif table_name == "blocked_destinations":
|
||||
self.provider.execute("CREATE INDEX IF NOT EXISTS idx_blocked_destinations_hash ON blocked_destinations(destination_hash)")
|
||||
self.provider.execute(
|
||||
"CREATE INDEX IF NOT EXISTS idx_blocked_destinations_hash ON blocked_destinations(destination_hash)"
|
||||
)
|
||||
elif table_name == "spam_keywords":
|
||||
self.provider.execute("CREATE INDEX IF NOT EXISTS idx_spam_keywords_keyword ON spam_keywords(keyword)")
|
||||
self.provider.execute(
|
||||
"CREATE INDEX IF NOT EXISTS idx_spam_keywords_keyword ON spam_keywords(keyword)"
|
||||
)
|
||||
|
||||
def migrate(self, current_version):
|
||||
if current_version < 7:
|
||||
@@ -217,9 +243,15 @@ class DatabaseSchema:
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
)
|
||||
""")
|
||||
self.provider.execute("CREATE INDEX IF NOT EXISTS idx_archived_pages_destination_hash ON archived_pages(destination_hash)")
|
||||
self.provider.execute("CREATE INDEX IF NOT EXISTS idx_archived_pages_page_path ON archived_pages(page_path)")
|
||||
self.provider.execute("CREATE INDEX IF NOT EXISTS idx_archived_pages_hash ON archived_pages(hash)")
|
||||
self.provider.execute(
|
||||
"CREATE INDEX IF NOT EXISTS idx_archived_pages_destination_hash ON archived_pages(destination_hash)"
|
||||
)
|
||||
self.provider.execute(
|
||||
"CREATE INDEX IF NOT EXISTS idx_archived_pages_page_path ON archived_pages(page_path)"
|
||||
)
|
||||
self.provider.execute(
|
||||
"CREATE INDEX IF NOT EXISTS idx_archived_pages_hash ON archived_pages(hash)"
|
||||
)
|
||||
|
||||
if current_version < 8:
|
||||
self.provider.execute("""
|
||||
@@ -234,8 +266,12 @@ class DatabaseSchema:
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
)
|
||||
""")
|
||||
self.provider.execute("CREATE INDEX IF NOT EXISTS idx_crawl_tasks_destination_hash ON crawl_tasks(destination_hash)")
|
||||
self.provider.execute("CREATE INDEX IF NOT EXISTS idx_crawl_tasks_page_path ON crawl_tasks(page_path)")
|
||||
self.provider.execute(
|
||||
"CREATE INDEX IF NOT EXISTS idx_crawl_tasks_destination_hash ON crawl_tasks(destination_hash)"
|
||||
)
|
||||
self.provider.execute(
|
||||
"CREATE INDEX IF NOT EXISTS idx_crawl_tasks_page_path ON crawl_tasks(page_path)"
|
||||
)
|
||||
|
||||
if current_version < 9:
|
||||
self.provider.execute("""
|
||||
@@ -249,7 +285,9 @@ class DatabaseSchema:
|
||||
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
)
|
||||
""")
|
||||
self.provider.execute("CREATE INDEX IF NOT EXISTS idx_lxmf_forwarding_rules_identity_hash ON lxmf_forwarding_rules(identity_hash)")
|
||||
self.provider.execute(
|
||||
"CREATE INDEX IF NOT EXISTS idx_lxmf_forwarding_rules_identity_hash ON lxmf_forwarding_rules(identity_hash)"
|
||||
)
|
||||
|
||||
self.provider.execute("""
|
||||
CREATE TABLE IF NOT EXISTS lxmf_forwarding_mappings (
|
||||
@@ -262,9 +300,15 @@ class DatabaseSchema:
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
)
|
||||
""")
|
||||
self.provider.execute("CREATE INDEX IF NOT EXISTS idx_lxmf_forwarding_mappings_alias_hash ON lxmf_forwarding_mappings(alias_hash)")
|
||||
self.provider.execute("CREATE INDEX IF NOT EXISTS idx_lxmf_forwarding_mappings_sender_hash ON lxmf_forwarding_mappings(original_sender_hash)")
|
||||
self.provider.execute("CREATE INDEX IF NOT EXISTS idx_lxmf_forwarding_mappings_recipient_hash ON lxmf_forwarding_mappings(final_recipient_hash)")
|
||||
self.provider.execute(
|
||||
"CREATE INDEX IF NOT EXISTS idx_lxmf_forwarding_mappings_alias_hash ON lxmf_forwarding_mappings(alias_hash)"
|
||||
)
|
||||
self.provider.execute(
|
||||
"CREATE INDEX IF NOT EXISTS idx_lxmf_forwarding_mappings_sender_hash ON lxmf_forwarding_mappings(original_sender_hash)"
|
||||
)
|
||||
self.provider.execute(
|
||||
"CREATE INDEX IF NOT EXISTS idx_lxmf_forwarding_mappings_recipient_hash ON lxmf_forwarding_mappings(final_recipient_hash)"
|
||||
)
|
||||
|
||||
if current_version < 10:
|
||||
# Ensure unique constraints exist for ON CONFLICT clauses
|
||||
@@ -272,26 +316,56 @@ class DatabaseSchema:
|
||||
# but a UNIQUE index works for ON CONFLICT.
|
||||
|
||||
# Clean up duplicates before adding unique indexes
|
||||
self.provider.execute("DELETE FROM announces WHERE id NOT IN (SELECT MAX(id) FROM announces GROUP BY destination_hash)")
|
||||
self.provider.execute("DELETE FROM crawl_tasks WHERE id NOT IN (SELECT MAX(id) FROM crawl_tasks GROUP BY destination_hash, page_path)")
|
||||
self.provider.execute("DELETE FROM custom_destination_display_names WHERE id NOT IN (SELECT MAX(id) FROM custom_destination_display_names GROUP BY destination_hash)")
|
||||
self.provider.execute("DELETE FROM favourite_destinations WHERE id NOT IN (SELECT MAX(id) FROM favourite_destinations GROUP BY destination_hash)")
|
||||
self.provider.execute("DELETE FROM lxmf_user_icons WHERE id NOT IN (SELECT MAX(id) FROM lxmf_user_icons GROUP BY destination_hash)")
|
||||
self.provider.execute("DELETE FROM lxmf_conversation_read_state WHERE id NOT IN (SELECT MAX(id) FROM lxmf_conversation_read_state GROUP BY destination_hash)")
|
||||
self.provider.execute("DELETE FROM lxmf_messages WHERE id NOT IN (SELECT MAX(id) FROM lxmf_messages GROUP BY hash)")
|
||||
self.provider.execute(
|
||||
"DELETE FROM announces WHERE id NOT IN (SELECT MAX(id) FROM announces GROUP BY destination_hash)"
|
||||
)
|
||||
self.provider.execute(
|
||||
"DELETE FROM crawl_tasks WHERE id NOT IN (SELECT MAX(id) FROM crawl_tasks GROUP BY destination_hash, page_path)"
|
||||
)
|
||||
self.provider.execute(
|
||||
"DELETE FROM custom_destination_display_names WHERE id NOT IN (SELECT MAX(id) FROM custom_destination_display_names GROUP BY destination_hash)"
|
||||
)
|
||||
self.provider.execute(
|
||||
"DELETE FROM favourite_destinations WHERE id NOT IN (SELECT MAX(id) FROM favourite_destinations GROUP BY destination_hash)"
|
||||
)
|
||||
self.provider.execute(
|
||||
"DELETE FROM lxmf_user_icons WHERE id NOT IN (SELECT MAX(id) FROM lxmf_user_icons GROUP BY destination_hash)"
|
||||
)
|
||||
self.provider.execute(
|
||||
"DELETE FROM lxmf_conversation_read_state WHERE id NOT IN (SELECT MAX(id) FROM lxmf_conversation_read_state GROUP BY destination_hash)"
|
||||
)
|
||||
self.provider.execute(
|
||||
"DELETE FROM lxmf_messages WHERE id NOT IN (SELECT MAX(id) FROM lxmf_messages GROUP BY hash)"
|
||||
)
|
||||
|
||||
self.provider.execute("CREATE UNIQUE INDEX IF NOT EXISTS idx_announces_destination_hash_unique ON announces(destination_hash)")
|
||||
self.provider.execute("CREATE UNIQUE INDEX IF NOT EXISTS idx_crawl_tasks_destination_path_unique ON crawl_tasks(destination_hash, page_path)")
|
||||
self.provider.execute("CREATE UNIQUE INDEX IF NOT EXISTS idx_custom_display_names_dest_hash_unique ON custom_destination_display_names(destination_hash)")
|
||||
self.provider.execute("CREATE UNIQUE INDEX IF NOT EXISTS idx_favourite_destinations_dest_hash_unique ON favourite_destinations(destination_hash)")
|
||||
self.provider.execute("CREATE UNIQUE INDEX IF NOT EXISTS idx_lxmf_messages_hash_unique ON lxmf_messages(hash)")
|
||||
self.provider.execute("CREATE UNIQUE INDEX IF NOT EXISTS idx_lxmf_user_icons_dest_hash_unique ON lxmf_user_icons(destination_hash)")
|
||||
self.provider.execute("CREATE UNIQUE INDEX IF NOT EXISTS idx_lxmf_conversation_read_state_dest_hash_unique ON lxmf_conversation_read_state(destination_hash)")
|
||||
self.provider.execute(
|
||||
"CREATE UNIQUE INDEX IF NOT EXISTS idx_announces_destination_hash_unique ON announces(destination_hash)"
|
||||
)
|
||||
self.provider.execute(
|
||||
"CREATE UNIQUE INDEX IF NOT EXISTS idx_crawl_tasks_destination_path_unique ON crawl_tasks(destination_hash, page_path)"
|
||||
)
|
||||
self.provider.execute(
|
||||
"CREATE UNIQUE INDEX IF NOT EXISTS idx_custom_display_names_dest_hash_unique ON custom_destination_display_names(destination_hash)"
|
||||
)
|
||||
self.provider.execute(
|
||||
"CREATE UNIQUE INDEX IF NOT EXISTS idx_favourite_destinations_dest_hash_unique ON favourite_destinations(destination_hash)"
|
||||
)
|
||||
self.provider.execute(
|
||||
"CREATE UNIQUE INDEX IF NOT EXISTS idx_lxmf_messages_hash_unique ON lxmf_messages(hash)"
|
||||
)
|
||||
self.provider.execute(
|
||||
"CREATE UNIQUE INDEX IF NOT EXISTS idx_lxmf_user_icons_dest_hash_unique ON lxmf_user_icons(destination_hash)"
|
||||
)
|
||||
self.provider.execute(
|
||||
"CREATE UNIQUE INDEX IF NOT EXISTS idx_lxmf_conversation_read_state_dest_hash_unique ON lxmf_conversation_read_state(destination_hash)"
|
||||
)
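
The version-10 migration above uses a common SQLite recipe: delete all but the newest row (highest id) per key, then add a UNIQUE index, which is enough for the ON CONFLICT clauses used by the upserts. A self-contained sketch of the same recipe against a hypothetical contacts table:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE contacts (id INTEGER PRIMARY KEY AUTOINCREMENT, destination_hash TEXT)")
conn.executemany("INSERT INTO contacts (destination_hash) VALUES (?)", [("aa",), ("aa",), ("bb",)])

# keep only the newest row (highest id) per destination_hash
conn.execute(
    "DELETE FROM contacts WHERE id NOT IN (SELECT MAX(id) FROM contacts GROUP BY destination_hash)"
)
# a UNIQUE index (rather than a table constraint) is sufficient for ON CONFLICT(destination_hash)
conn.execute(
    "CREATE UNIQUE INDEX IF NOT EXISTS idx_contacts_destination_hash_unique ON contacts(destination_hash)"
)
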
if current_version < 11:
|
||||
# Add is_spam column to lxmf_messages if it doesn't exist
|
||||
try:
|
||||
self.provider.execute("ALTER TABLE lxmf_messages ADD COLUMN is_spam INTEGER DEFAULT 0")
|
||||
self.provider.execute(
|
||||
"ALTER TABLE lxmf_messages ADD COLUMN is_spam INTEGER DEFAULT 0"
|
||||
)
|
||||
except Exception:
|
||||
# Column might already exist if table was created with newest schema
|
||||
pass
|
||||
@@ -309,9 +383,35 @@ class DatabaseSchema:
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
)
|
||||
""")
|
||||
self.provider.execute("CREATE INDEX IF NOT EXISTS idx_call_history_remote_hash ON call_history(remote_identity_hash)")
|
||||
self.provider.execute("CREATE INDEX IF NOT EXISTS idx_call_history_timestamp ON call_history(timestamp)")
|
||||
self.provider.execute(
|
||||
"CREATE INDEX IF NOT EXISTS idx_call_history_remote_hash ON call_history(remote_identity_hash)"
|
||||
)
|
||||
self.provider.execute(
|
||||
"CREATE INDEX IF NOT EXISTS idx_call_history_timestamp ON call_history(timestamp)"
|
||||
)
|
||||
|
||||
if current_version < 13:
|
||||
self.provider.execute("""
|
||||
CREATE TABLE IF NOT EXISTS voicemails (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
remote_identity_hash TEXT,
|
||||
remote_identity_name TEXT,
|
||||
filename TEXT,
|
||||
duration_seconds INTEGER,
|
||||
is_read INTEGER DEFAULT 0,
|
||||
timestamp REAL,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
)
|
||||
""")
|
||||
self.provider.execute(
|
||||
"CREATE INDEX IF NOT EXISTS idx_voicemails_remote_hash ON voicemails(remote_identity_hash)"
|
||||
)
|
||||
self.provider.execute(
|
||||
"CREATE INDEX IF NOT EXISTS idx_voicemails_timestamp ON voicemails(timestamp)"
|
||||
)
|
||||
|
||||
# Update version in config
|
||||
self.provider.execute("INSERT OR REPLACE INTO config (key, value, updated_at) VALUES (?, ?, CURRENT_TIMESTAMP)", ("database_version", str(self.LATEST_VERSION)))
|
||||
|
||||
self.provider.execute(
|
||||
"INSERT OR REPLACE INTO config (key, value, updated_at) VALUES (?, ?, CURRENT_TIMESTAMP)",
|
||||
("database_version", str(self.LATEST_VERSION)),
|
||||
)
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
|
||||
from .provider import DatabaseProvider
|
||||
|
||||
|
||||
@@ -41,4 +40,3 @@ class TelephoneDAO:
|
||||
"SELECT * FROM call_history ORDER BY timestamp DESC LIMIT ?",
|
||||
(limit,),
|
||||
)
|
||||
|
||||
|
||||
63
meshchatx/src/backend/database/voicemails.py
Normal file
@@ -0,0 +1,63 @@
from .provider import DatabaseProvider


class VoicemailDAO:
    def __init__(self, provider: DatabaseProvider):
        self.provider = provider

    def add_voicemail(
        self,
        remote_identity_hash,
        remote_identity_name,
        filename,
        duration_seconds,
        timestamp,
    ):
        self.provider.execute(
            """
            INSERT INTO voicemails (
                remote_identity_hash,
                remote_identity_name,
                filename,
                duration_seconds,
                timestamp
            ) VALUES (?, ?, ?, ?, ?)
            """,
            (
                remote_identity_hash,
                remote_identity_name,
                filename,
                duration_seconds,
                timestamp,
            ),
        )

    def get_voicemails(self, limit=50, offset=0):
        return self.provider.fetchall(
            "SELECT * FROM voicemails ORDER BY timestamp DESC LIMIT ? OFFSET ?",
            (limit, offset),
        )

    def get_voicemail(self, voicemail_id):
        return self.provider.fetchone(
            "SELECT * FROM voicemails WHERE id = ?",
            (voicemail_id,),
        )

    def mark_as_read(self, voicemail_id):
        self.provider.execute(
            "UPDATE voicemails SET is_read = 1 WHERE id = ?",
            (voicemail_id,),
        )

    def delete_voicemail(self, voicemail_id):
        self.provider.execute(
            "DELETE FROM voicemails WHERE id = ?",
            (voicemail_id,),
        )

    def get_unread_count(self):
        row = self.provider.fetchone(
            "SELECT COUNT(*) as count FROM voicemails WHERE is_read = 0"
        )
        return row["count"] if row else 0

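Assuming a DatabaseProvider already wired to the application database, the new DAO would be used roughly like this (the hash, filename, and timestamp values below are placeholders, not real data):

dao = VoicemailDAO(provider)
dao.add_voicemail(
    remote_identity_hash="3f2a9c",                  # caller identity hash (placeholder)
    remote_identity_name="Alice",
    filename="voicemail_3f2a9c_1700000000.opus",
    duration_seconds=42,
    timestamp=1700000000.0,
)
print(dao.get_unread_count())                       # 1 unread voicemail
for vm in dao.get_voicemails(limit=10):
    dao.mark_as_read(vm["id"])
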
@@ -15,14 +15,20 @@ class ForwardingManager:
        mappings = self.db.messages.get_all_forwarding_mappings()
        for mapping in mappings:
            try:
                private_key_bytes = base64.b64decode(mapping["alias_identity_private_key"])
                private_key_bytes = base64.b64decode(
                    mapping["alias_identity_private_key"]
                )
                alias_identity = RNS.Identity.from_bytes(private_key_bytes)
                alias_destination = self.message_router.register_delivery_identity(identity=alias_identity)
                alias_destination = self.message_router.register_delivery_identity(
                    identity=alias_identity
                )
                self.forwarding_destinations[mapping["alias_hash"]] = alias_destination
            except Exception as e:
                print(f"Failed to load forwarding alias {mapping['alias_hash']}: {e}")

    def get_or_create_mapping(self, source_hash, final_recipient_hash, original_destination_hash):
    def get_or_create_mapping(
        self, source_hash, final_recipient_hash, original_destination_hash
    ):
        mapping = self.db.messages.get_forwarding_mapping(
            original_sender_hash=source_hash,
            final_recipient_hash=final_recipient_hash,
@@ -32,11 +38,15 @@ class ForwardingManager:
            alias_identity = RNS.Identity()
            alias_hash = alias_identity.hash.hex()

            alias_destination = self.message_router.register_delivery_identity(alias_identity)
            alias_destination = self.message_router.register_delivery_identity(
                alias_identity
            )
            self.forwarding_destinations[alias_hash] = alias_destination

            data = {
                "alias_identity_private_key": base64.b64encode(alias_identity.get_private_key()).decode(),
                "alias_identity_private_key": base64.b64encode(
                    alias_identity.get_private_key()
                ).decode(),
                "alias_hash": alias_hash,
                "original_sender_hash": source_hash,
                "final_recipient_hash": final_recipient_hash,
@@ -45,4 +55,3 @@ class ForwardingManager:
        self.db.messages.create_forwarding_mapping(data)
        return data
        return mapping

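Each forwarding alias is persisted as the base64 of its identity's private key and re-created with RNS.Identity.from_bytes() at startup. A minimal sketch of just that round trip, outside the manager (the assert is an assumption that the identity hash is derived purely from the key material):

import base64
import RNS

alias_identity = RNS.Identity()                     # fresh alias keypair
encoded = base64.b64encode(alias_identity.get_private_key()).decode()

# ...later, after reading the mapping back from the database
restored = RNS.Identity.from_bytes(base64.b64decode(encoded))
assert restored.hash == alias_identity.hash
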
@@ -55,13 +55,15 @@ class MapManager:
|
||||
if f.endswith(".mbtiles"):
|
||||
full_path = os.path.join(mbtiles_dir, f)
|
||||
stats = os.stat(full_path)
|
||||
files.append({
|
||||
"name": f,
|
||||
"path": full_path,
|
||||
"size": stats.st_size,
|
||||
"mtime": stats.st_mtime,
|
||||
"is_active": full_path == self.get_offline_path(),
|
||||
})
|
||||
files.append(
|
||||
{
|
||||
"name": f,
|
||||
"path": full_path,
|
||||
"size": stats.st_size,
|
||||
"mtime": stats.st_mtime,
|
||||
"is_active": full_path == self.get_offline_path(),
|
||||
}
|
||||
)
|
||||
return sorted(files, key=lambda x: x["mtime"], reverse=True)
|
||||
|
||||
def delete_mbtiles(self, filename):
|
||||
@@ -97,7 +99,10 @@ class MapManager:
|
||||
|
||||
# Basic validation: ensure it's raster (format is not pbf)
|
||||
if metadata.get("format") == "pbf":
|
||||
RNS.log("MBTiles file is in vector (PBF) format, which is not supported.", RNS.LOG_ERROR)
|
||||
RNS.log(
|
||||
"MBTiles file is in vector (PBF) format, which is not supported.",
|
||||
RNS.LOG_ERROR,
|
||||
)
|
||||
return None
|
||||
|
||||
self._metadata_cache = metadata
|
||||
@@ -176,8 +181,12 @@ class MapManager:
|
||||
|
||||
# create schema
|
||||
cursor.execute("CREATE TABLE metadata (name text, value text)")
|
||||
cursor.execute("CREATE TABLE tiles (zoom_level integer, tile_column integer, tile_row integer, tile_data blob)")
|
||||
cursor.execute("CREATE UNIQUE INDEX tile_index on tiles (zoom_level, tile_column, tile_row)")
|
||||
cursor.execute(
|
||||
"CREATE TABLE tiles (zoom_level integer, tile_column integer, tile_row integer, tile_data blob)"
|
||||
)
|
||||
cursor.execute(
|
||||
"CREATE UNIQUE INDEX tile_index on tiles (zoom_level, tile_column, tile_row)"
|
||||
)
|
||||
|
||||
# insert metadata
|
||||
metadata = [
|
||||
@@ -205,7 +214,11 @@ class MapManager:
|
||||
# wait a bit to be nice to OSM
|
||||
time.sleep(0.1)
|
||||
|
||||
response = requests.get(tile_url, headers={"User-Agent": "MeshChatX/1.0 MapExporter"}, timeout=10)
|
||||
response = requests.get(
|
||||
tile_url,
|
||||
headers={"User-Agent": "MeshChatX/1.0 MapExporter"},
|
||||
timeout=10,
|
||||
)
|
||||
if response.status_code == 200:
|
||||
# MBTiles uses TMS (y flipped)
|
||||
tms_y = (1 << z) - 1 - y
|
||||
@@ -214,11 +227,16 @@ class MapManager:
|
||||
(z, x, tms_y, response.content),
|
||||
)
|
||||
except Exception as e:
|
||||
RNS.log(f"Export failed to download tile {z}/{x}/{y}: {e}", RNS.LOG_ERROR)
|
||||
RNS.log(
|
||||
f"Export failed to download tile {z}/{x}/{y}: {e}",
|
||||
RNS.LOG_ERROR,
|
||||
)
|
||||
|
||||
current_count += 1
|
||||
self._export_progress[export_id]["current"] = current_count
|
||||
self._export_progress[export_id]["progress"] = int((current_count / total_tiles) * 100)
|
||||
self._export_progress[export_id]["progress"] = int(
|
||||
(current_count / total_tiles) * 100
|
||||
)
|
||||
|
||||
# commit after each zoom level
|
||||
conn.commit()
|
||||
@@ -236,9 +254,13 @@ class MapManager:
|
||||
|
||||
def _lonlat_to_tile(self, lon, lat, zoom):
|
||||
lat_rad = math.radians(lat)
|
||||
n = 2.0 ** zoom
|
||||
n = 2.0**zoom
|
||||
x = int((lon + 180.0) / 360.0 * n)
|
||||
y = int((1.0 - math.log(math.tan(lat_rad) + (1 / math.cos(lat_rad))) / math.pi) / 2.0 * n)
|
||||
y = int(
|
||||
(1.0 - math.log(math.tan(lat_rad) + (1 / math.cos(lat_rad))) / math.pi)
|
||||
/ 2.0
|
||||
* n
|
||||
)
|
||||
return x, y
|
||||
|
||||
def close(self):
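
The _lonlat_to_tile() math above is the standard Web-Mercator "slippy map" conversion, and the exporter then flips y into TMS row order before writing to MBTiles. A small standalone check of the same formulas (Berlin city centre at zoom 10):

import math

def lonlat_to_tile(lon, lat, zoom):
    lat_rad = math.radians(lat)
    n = 2.0**zoom
    x = int((lon + 180.0) / 360.0 * n)
    y = int((1.0 - math.log(math.tan(lat_rad) + (1 / math.cos(lat_rad))) / math.pi) / 2.0 * n)
    return x, y

x, y = lonlat_to_tile(13.4050, 52.5200, 10)   # Berlin
tms_y = (1 << 10) - 1 - y                     # MBTiles stores rows bottom-up (TMS)
print(x, y, tms_y)                            # 550 335 688
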
@@ -5,7 +5,15 @@ class MessageHandler:
|
||||
def __init__(self, db: Database):
|
||||
self.db = db
|
||||
|
||||
def get_conversation_messages(self, local_hash, destination_hash, limit=100, offset=0, after_id=None, before_id=None):
|
||||
def get_conversation_messages(
|
||||
self,
|
||||
local_hash,
|
||||
destination_hash,
|
||||
limit=100,
|
||||
offset=0,
|
||||
after_id=None,
|
||||
before_id=None,
|
||||
):
|
||||
query = """
|
||||
SELECT * FROM lxmf_messages
|
||||
WHERE ((source_hash = ? AND destination_hash = ?)
|
||||
@@ -31,7 +39,9 @@ class MessageHandler:
|
||||
WHERE ((source_hash = ? AND destination_hash = ?)
|
||||
OR (destination_hash = ? AND source_hash = ?))
|
||||
"""
|
||||
self.db.provider.execute(query, [local_hash, destination_hash, local_hash, destination_hash])
|
||||
self.db.provider.execute(
|
||||
query, [local_hash, destination_hash, local_hash, destination_hash]
|
||||
)
|
||||
|
||||
def search_messages(self, local_hash, search_term):
|
||||
like_term = f"%{search_term}%"
|
||||
@@ -61,6 +71,12 @@ class MessageHandler:
|
||||
WHERE m1.source_hash = ? OR m1.destination_hash = ?
|
||||
ORDER BY m1.timestamp DESC
|
||||
"""
|
||||
params = [local_hash, local_hash, local_hash, local_hash, local_hash, local_hash]
|
||||
params = [
|
||||
local_hash,
|
||||
local_hash,
|
||||
local_hash,
|
||||
local_hash,
|
||||
local_hash,
|
||||
local_hash,
|
||||
]
|
||||
return self.db.provider.fetchall(query, params)
|
||||
|
||||
|
||||
@@ -22,9 +22,17 @@ class RNCPHandler:
|
||||
self.allow_overwrite_on_receive = False
|
||||
self.allowed_identity_hashes = []
|
||||
|
||||
def setup_receive_destination(self, allowed_hashes=None, fetch_allowed=False, fetch_jail=None, allow_overwrite=False):
|
||||
def setup_receive_destination(
|
||||
self,
|
||||
allowed_hashes=None,
|
||||
fetch_allowed=False,
|
||||
fetch_jail=None,
|
||||
allow_overwrite=False,
|
||||
):
|
||||
if allowed_hashes:
|
||||
self.allowed_identity_hashes = [bytes.fromhex(h) if isinstance(h, str) else h for h in allowed_hashes]
|
||||
self.allowed_identity_hashes = [
|
||||
bytes.fromhex(h) if isinstance(h, str) else h for h in allowed_hashes
|
||||
]
|
||||
|
||||
self.fetch_jail = fetch_jail
|
||||
self.allow_overwrite_on_receive = allow_overwrite
|
||||
@@ -44,7 +52,9 @@ class RNCPHandler:
|
||||
"receive",
|
||||
)
|
||||
|
||||
self.receive_destination.set_link_established_callback(self._client_link_established)
|
||||
self.receive_destination.set_link_established_callback(
|
||||
self._client_link_established
|
||||
)
|
||||
|
||||
if fetch_allowed:
|
||||
self.receive_destination.register_request_handler(
|
||||
@@ -86,7 +96,9 @@ class RNCPHandler:
|
||||
if resource.status == RNS.Resource.COMPLETE:
|
||||
if resource.metadata:
|
||||
try:
|
||||
filename = os.path.basename(resource.metadata["name"].decode("utf-8"))
|
||||
filename = os.path.basename(
|
||||
resource.metadata["name"].decode("utf-8")
|
||||
)
|
||||
save_dir = os.path.join(self.storage_dir, "rncp_received")
|
||||
os.makedirs(save_dir, exist_ok=True)
|
||||
|
||||
@@ -105,13 +117,17 @@ class RNCPHandler:
|
||||
while os.path.isfile(saved_filename):
|
||||
counter += 1
|
||||
base, ext = os.path.splitext(filename)
|
||||
saved_filename = os.path.join(save_dir, f"{base}.{counter}{ext}")
|
||||
saved_filename = os.path.join(
|
||||
save_dir, f"{base}.{counter}{ext}"
|
||||
)
|
||||
|
||||
shutil.move(resource.data.name, saved_filename)
|
||||
|
||||
if transfer_id in self.active_transfers:
|
||||
self.active_transfers[transfer_id]["status"] = "completed"
|
||||
self.active_transfers[transfer_id]["saved_path"] = saved_filename
|
||||
self.active_transfers[transfer_id]["saved_path"] = (
|
||||
saved_filename
|
||||
)
|
||||
self.active_transfers[transfer_id]["filename"] = filename
|
||||
except Exception as e:
|
||||
if transfer_id in self.active_transfers:
|
||||
@@ -120,7 +136,9 @@ class RNCPHandler:
|
||||
elif transfer_id in self.active_transfers:
|
||||
self.active_transfers[transfer_id]["status"] = "failed"
|
||||
|
||||
def _fetch_request(self, path, data, request_id, link_id, remote_identity, requested_at):
|
||||
def _fetch_request(
|
||||
self, path, data, request_id, link_id, remote_identity, requested_at
|
||||
):
|
||||
if self.fetch_jail:
|
||||
if data.startswith(self.fetch_jail + "/"):
|
||||
data = data.replace(self.fetch_jail + "/", "")
|
||||
@@ -171,7 +189,9 @@ class RNCPHandler:
|
||||
RNS.Transport.request_path(destination_hash)
|
||||
|
||||
timeout_after = time.time() + timeout
|
||||
while not RNS.Transport.has_path(destination_hash) and time.time() < timeout_after:
|
||||
while (
|
||||
not RNS.Transport.has_path(destination_hash) and time.time() < timeout_after
|
||||
):
|
||||
await asyncio.sleep(0.1)
|
||||
|
||||
if not RNS.Transport.has_path(destination_hash):
|
||||
@@ -257,7 +277,9 @@ class RNCPHandler:
|
||||
RNS.Transport.request_path(destination_hash)
|
||||
|
||||
timeout_after = time.time() + timeout
|
||||
while not RNS.Transport.has_path(destination_hash) and time.time() < timeout_after:
|
||||
while (
|
||||
not RNS.Transport.has_path(destination_hash) and time.time() < timeout_after
|
||||
):
|
||||
await asyncio.sleep(0.1)
|
||||
|
||||
if not RNS.Transport.has_path(destination_hash):
|
||||
@@ -326,7 +348,9 @@ class RNCPHandler:
|
||||
if resource.status == RNS.Resource.COMPLETE:
|
||||
if resource.metadata:
|
||||
try:
|
||||
filename = os.path.basename(resource.metadata["name"].decode("utf-8"))
|
||||
filename = os.path.basename(
|
||||
resource.metadata["name"].decode("utf-8")
|
||||
)
|
||||
if save_path:
|
||||
save_dir = os.path.abspath(os.path.expanduser(save_path))
|
||||
os.makedirs(save_dir, exist_ok=True)
|
||||
@@ -367,7 +391,12 @@ class RNCPHandler:
|
||||
link.set_resource_strategy(RNS.Link.ACCEPT_ALL)
|
||||
link.set_resource_started_callback(fetch_resource_started)
|
||||
link.set_resource_concluded_callback(fetch_resource_concluded)
|
||||
link.request("fetch_file", data=file_path, response_callback=request_response, failed_callback=request_failed)
|
||||
link.request(
|
||||
"fetch_file",
|
||||
data=file_path,
|
||||
response_callback=request_response,
|
||||
failed_callback=request_failed,
|
||||
)
|
||||
|
||||
while not request_resolved:
|
||||
await asyncio.sleep(0.1)
|
||||
@@ -418,4 +447,3 @@ class RNCPHandler:
|
||||
"error": transfer.get("error"),
|
||||
}
|
||||
return None
|
||||
|
||||
|
||||
@@ -31,8 +31,14 @@ class RNProbeHandler:
|
||||
if not RNS.Transport.has_path(destination_hash):
|
||||
RNS.Transport.request_path(destination_hash)
|
||||
|
||||
timeout_after = time.time() + (timeout or self.DEFAULT_TIMEOUT + self.reticulum.get_first_hop_timeout(destination_hash))
|
||||
while not RNS.Transport.has_path(destination_hash) and time.time() < timeout_after:
|
||||
timeout_after = time.time() + (
|
||||
timeout
|
||||
or self.DEFAULT_TIMEOUT
|
||||
+ self.reticulum.get_first_hop_timeout(destination_hash)
|
||||
)
|
||||
while (
|
||||
not RNS.Transport.has_path(destination_hash) and time.time() < timeout_after
|
||||
):
|
||||
await asyncio.sleep(0.1)
|
||||
|
||||
if not RNS.Transport.has_path(destination_hash):
|
||||
@@ -70,8 +76,14 @@ class RNProbeHandler:
|
||||
if_name = self.reticulum.get_next_hop_if_name(destination_hash)
|
||||
if_str = f" on {if_name}" if if_name and if_name != "None" else ""
|
||||
|
||||
timeout_after = time.time() + (timeout or self.DEFAULT_TIMEOUT + self.reticulum.get_first_hop_timeout(destination_hash))
|
||||
while receipt.status == RNS.PacketReceipt.SENT and time.time() < timeout_after:
|
||||
timeout_after = time.time() + (
|
||||
timeout
|
||||
or self.DEFAULT_TIMEOUT
|
||||
+ self.reticulum.get_first_hop_timeout(destination_hash)
|
||||
)
|
||||
while (
|
||||
receipt.status == RNS.PacketReceipt.SENT and time.time() < timeout_after
|
||||
):
|
||||
await asyncio.sleep(0.1)
|
||||
|
||||
result: dict = {
|
||||
@@ -96,9 +108,15 @@ class RNProbeHandler:
|
||||
|
||||
reception_stats = {}
|
||||
if self.reticulum.is_connected_to_shared_instance:
|
||||
reception_rssi = self.reticulum.get_packet_rssi(receipt.proof_packet.packet_hash)
|
||||
reception_snr = self.reticulum.get_packet_snr(receipt.proof_packet.packet_hash)
|
||||
reception_q = self.reticulum.get_packet_q(receipt.proof_packet.packet_hash)
|
||||
reception_rssi = self.reticulum.get_packet_rssi(
|
||||
receipt.proof_packet.packet_hash
|
||||
)
|
||||
reception_snr = self.reticulum.get_packet_snr(
|
||||
receipt.proof_packet.packet_hash
|
||||
)
|
||||
reception_q = self.reticulum.get_packet_q(
|
||||
receipt.proof_packet.packet_hash
|
||||
)
|
||||
|
||||
if reception_rssi is not None:
|
||||
reception_stats["rssi"] = reception_rssi
|
||||
@@ -134,4 +152,3 @@ class RNProbeHandler:
|
||||
"timeouts": sum(1 for r in results if r["status"] == "timeout"),
|
||||
"failed": sum(1 for r in results if r["status"] == "failed"),
|
||||
}
|
||||
|
||||
|
||||
@@ -25,7 +25,12 @@ class RNStatusHandler:
|
||||
def __init__(self, reticulum_instance):
|
||||
self.reticulum = reticulum_instance
|
||||
|
||||
def get_status(self, include_link_stats: bool = False, sorting: str | None = None, sort_reverse: bool = False):
|
||||
def get_status(
|
||||
self,
|
||||
include_link_stats: bool = False,
|
||||
sorting: str | None = None,
|
||||
sort_reverse: bool = False,
|
||||
):
|
||||
stats = None
|
||||
link_count = None
|
||||
|
||||
@@ -53,15 +58,25 @@ class RNStatusHandler:
|
||||
if sorting and isinstance(sorting, str):
|
||||
sorting = sorting.lower()
|
||||
if sorting in ("rate", "bitrate"):
|
||||
interfaces.sort(key=lambda i: i.get("bitrate", 0) or 0, reverse=sort_reverse)
|
||||
interfaces.sort(
|
||||
key=lambda i: i.get("bitrate", 0) or 0, reverse=sort_reverse
|
||||
)
|
||||
elif sorting == "rx":
|
||||
interfaces.sort(key=lambda i: i.get("rxb", 0) or 0, reverse=sort_reverse)
|
||||
interfaces.sort(
|
||||
key=lambda i: i.get("rxb", 0) or 0, reverse=sort_reverse
|
||||
)
|
||||
elif sorting == "tx":
|
||||
interfaces.sort(key=lambda i: i.get("txb", 0) or 0, reverse=sort_reverse)
|
||||
interfaces.sort(
|
||||
key=lambda i: i.get("txb", 0) or 0, reverse=sort_reverse
|
||||
)
|
||||
elif sorting == "rxs":
|
||||
interfaces.sort(key=lambda i: i.get("rxs", 0) or 0, reverse=sort_reverse)
|
||||
interfaces.sort(
|
||||
key=lambda i: i.get("rxs", 0) or 0, reverse=sort_reverse
|
||||
)
|
||||
elif sorting == "txs":
|
||||
interfaces.sort(key=lambda i: i.get("txs", 0) or 0, reverse=sort_reverse)
|
||||
interfaces.sort(
|
||||
key=lambda i: i.get("txs", 0) or 0, reverse=sort_reverse
|
||||
)
|
||||
elif sorting == "traffic":
|
||||
interfaces.sort(
|
||||
key=lambda i: (i.get("rxb", 0) or 0) + (i.get("txb", 0) or 0),
|
||||
@@ -84,13 +99,19 @@ class RNStatusHandler:
|
||||
reverse=sort_reverse,
|
||||
)
|
||||
elif sorting == "held":
|
||||
interfaces.sort(key=lambda i: i.get("held_announces", 0) or 0, reverse=sort_reverse)
|
||||
interfaces.sort(
|
||||
key=lambda i: i.get("held_announces", 0) or 0, reverse=sort_reverse
|
||||
)
|
||||
|
||||
formatted_interfaces = []
|
||||
for ifstat in interfaces:
|
||||
name = ifstat.get("name", "")
|
||||
|
||||
if name.startswith("LocalInterface[") or name.startswith("TCPInterface[Client") or name.startswith("BackboneInterface[Client on"):
|
||||
if (
|
||||
name.startswith("LocalInterface[")
|
||||
or name.startswith("TCPInterface[Client")
|
||||
or name.startswith("BackboneInterface[Client on")
|
||||
):
|
||||
continue
|
||||
|
||||
formatted_if: dict[str, Any] = {
|
||||
@@ -165,9 +186,13 @@ class RNStatusHandler:
|
||||
formatted_if["peers"] = ifstat["peers"]
|
||||
|
||||
if "incoming_announce_frequency" in ifstat:
|
||||
formatted_if["incoming_announce_frequency"] = ifstat["incoming_announce_frequency"]
|
||||
formatted_if["incoming_announce_frequency"] = ifstat[
|
||||
"incoming_announce_frequency"
|
||||
]
|
||||
if "outgoing_announce_frequency" in ifstat:
|
||||
formatted_if["outgoing_announce_frequency"] = ifstat["outgoing_announce_frequency"]
|
||||
formatted_if["outgoing_announce_frequency"] = ifstat[
|
||||
"outgoing_announce_frequency"
|
||||
]
|
||||
if "held_announces" in ifstat:
|
||||
formatted_if["held_announces"] = ifstat["held_announces"]
|
||||
|
||||
@@ -181,4 +206,3 @@ class RNStatusHandler:
|
||||
"link_count": link_count,
|
||||
"timestamp": time.time(),
|
||||
}
|
||||
|
||||
|
||||
@@ -76,7 +76,9 @@ class TelephoneManager:
|
||||
destination_identity = RNS.Identity.recall(destination_hash)
|
||||
if destination_identity is None:
|
||||
# If not found by identity hash, try as destination hash
|
||||
destination_identity = RNS.Identity.recall(destination_hash) # Identity.recall takes identity hash
|
||||
destination_identity = RNS.Identity.recall(
|
||||
destination_hash
|
||||
) # Identity.recall takes identity hash
|
||||
|
||||
if destination_identity is None:
|
||||
msg = "Destination identity not found"
|
||||
@@ -92,4 +94,3 @@ class TelephoneManager:
|
||||
self.call_is_incoming = False
|
||||
await asyncio.to_thread(self.telephone.call, destination_identity)
|
||||
return self.telephone.active_call
|
||||
|
||||
|
||||
@@ -6,12 +6,14 @@ from typing import Any
|
||||
|
||||
try:
|
||||
import requests
|
||||
|
||||
HAS_REQUESTS = True
|
||||
except ImportError:
|
||||
HAS_REQUESTS = False
|
||||
|
||||
try:
|
||||
from argostranslate import package, translate
|
||||
|
||||
HAS_ARGOS_LIB = True
|
||||
except ImportError:
|
||||
HAS_ARGOS_LIB = False
|
||||
@@ -63,7 +65,9 @@ LANGUAGE_CODE_TO_NAME = {
|
||||
|
||||
class TranslatorHandler:
|
||||
def __init__(self, libretranslate_url: str | None = None):
|
||||
self.libretranslate_url = libretranslate_url or os.getenv("LIBRETRANSLATE_URL", "http://localhost:5000")
|
||||
self.libretranslate_url = libretranslate_url or os.getenv(
|
||||
"LIBRETRANSLATE_URL", "http://localhost:5000"
|
||||
)
|
||||
self.has_argos = HAS_ARGOS
|
||||
self.has_argos_lib = HAS_ARGOS_LIB
|
||||
self.has_argos_cli = HAS_ARGOS_CLI
|
||||
@@ -136,7 +140,12 @@ class TranslatorHandler:
|
||||
if self.has_requests:
|
||||
try:
|
||||
url = libretranslate_url or self.libretranslate_url
|
||||
return self._translate_libretranslate(text, source_lang=source_lang, target_lang=target_lang, libretranslate_url=url)
|
||||
return self._translate_libretranslate(
|
||||
text,
|
||||
source_lang=source_lang,
|
||||
target_lang=target_lang,
|
||||
libretranslate_url=url,
|
||||
)
|
||||
except Exception as e:
|
||||
if self.has_argos:
|
||||
return self._translate_argos(text, source_lang, target_lang)
|
||||
@@ -148,7 +157,13 @@ class TranslatorHandler:
|
||||
msg = "No translation backend available. Install requests for LibreTranslate or argostranslate for local translation."
|
||||
raise RuntimeError(msg)
|
||||
|
||||
def _translate_libretranslate(self, text: str, source_lang: str, target_lang: str, libretranslate_url: str | None = None) -> dict[str, Any]:
|
||||
def _translate_libretranslate(
|
||||
self,
|
||||
text: str,
|
||||
source_lang: str,
|
||||
target_lang: str,
|
||||
libretranslate_url: str | None = None,
|
||||
) -> dict[str, Any]:
|
||||
if not self.has_requests:
|
||||
msg = "requests library not available"
|
||||
raise RuntimeError(msg)
|
||||
@@ -172,12 +187,16 @@ class TranslatorHandler:
|
||||
result = response.json()
|
||||
return {
|
||||
"translated_text": result.get("translatedText", ""),
|
||||
"source_lang": result.get("detectedLanguage", {}).get("language", source_lang),
|
||||
"source_lang": result.get("detectedLanguage", {}).get(
|
||||
"language", source_lang
|
||||
),
|
||||
"target_lang": target_lang,
|
||||
"source": "libretranslate",
|
||||
}
|
||||
|
||||
def _translate_argos(self, text: str, source_lang: str, target_lang: str) -> dict[str, Any]:
|
||||
def _translate_argos(
|
||||
self, text: str, source_lang: str, target_lang: str
|
||||
) -> dict[str, Any]:
|
||||
if source_lang == "auto":
|
||||
if self.has_argos_lib:
|
||||
detected_lang = self._detect_language(text)
|
||||
@@ -200,7 +219,9 @@ class TranslatorHandler:
|
||||
msg = "Argos Translate not available (neither library nor CLI)"
|
||||
raise RuntimeError(msg)
|
||||
|
||||
def _translate_argos_lib(self, text: str, source_lang: str, target_lang: str) -> dict[str, Any]:
|
||||
def _translate_argos_lib(
|
||||
self, text: str, source_lang: str, target_lang: str
|
||||
) -> dict[str, Any]:
|
||||
try:
|
||||
installed_packages = package.get_installed_packages()
|
||||
translation_package = None
|
||||
@@ -228,7 +249,9 @@ class TranslatorHandler:
|
||||
msg = f"Argos Translate error: {e}"
|
||||
raise RuntimeError(msg)
|
||||
|
||||
def _translate_argos_cli(self, text: str, source_lang: str, target_lang: str) -> dict[str, Any]:
|
||||
def _translate_argos_cli(
|
||||
self, text: str, source_lang: str, target_lang: str
|
||||
) -> dict[str, Any]:
|
||||
if source_lang == "auto" or not source_lang:
|
||||
msg = "Auto-detection is not supported with CLI. Please select a source language manually."
|
||||
raise ValueError(msg)
|
||||
@@ -251,7 +274,14 @@ class TranslatorHandler:
|
||||
raise RuntimeError(msg)
|
||||
|
||||
try:
|
||||
args = [executable, "--from-lang", source_lang, "--to-lang", target_lang, text]
|
||||
args = [
|
||||
executable,
|
||||
"--from-lang",
|
||||
source_lang,
|
||||
"--to-lang",
|
||||
target_lang,
|
||||
text,
|
||||
]
|
||||
result = subprocess.run(args, capture_output=True, text=True, check=True) # noqa: S603
|
||||
translated_text = result.stdout.strip()
|
||||
if not translated_text:
|
||||
@@ -264,7 +294,11 @@ class TranslatorHandler:
|
||||
"source": "argos",
|
||||
}
|
||||
except subprocess.CalledProcessError as e:
|
||||
error_msg = e.stderr.decode() if isinstance(e.stderr, bytes) else (e.stderr or str(e))
|
||||
error_msg = (
|
||||
e.stderr.decode()
|
||||
if isinstance(e.stderr, bytes)
|
||||
else (e.stderr or str(e))
|
||||
)
|
||||
msg = f"Argos Translate CLI error: {error_msg}"
|
||||
raise RuntimeError(msg)
|
||||
except Exception as e:
|
||||
@@ -333,7 +367,9 @@ class TranslatorHandler:
|
||||
|
||||
return languages
|
||||
|
||||
def install_language_package(self, package_name: str = "translate") -> dict[str, Any]:
|
||||
def install_language_package(
|
||||
self, package_name: str = "translate"
|
||||
) -> dict[str, Any]:
|
||||
argospm = shutil.which("argospm")
|
||||
if not argospm:
|
||||
msg = "argospm not found in PATH. Install argostranslate first."
|
||||
|
||||
301
meshchatx/src/backend/voicemail_manager.py
Normal file
@@ -0,0 +1,301 @@
|
||||
import os
|
||||
import platform
|
||||
import shutil
|
||||
import subprocess
|
||||
import threading
|
||||
import time
|
||||
|
||||
import LXST
|
||||
import RNS
|
||||
from LXST.Codecs import Null
|
||||
from LXST.Pipeline import Pipeline
|
||||
from LXST.Sinks import OpusFileSink
|
||||
from LXST.Sources import OpusFileSource
|
||||
|
||||
|
||||
class VoicemailManager:
|
||||
def __init__(self, db, telephone_manager, storage_dir):
|
||||
self.db = db
|
||||
self.telephone_manager = telephone_manager
|
||||
self.storage_dir = os.path.join(storage_dir, "voicemails")
|
||||
self.greetings_dir = os.path.join(self.storage_dir, "greetings")
|
||||
self.recordings_dir = os.path.join(self.storage_dir, "recordings")
|
||||
|
||||
# Ensure directories exist
|
||||
os.makedirs(self.greetings_dir, exist_ok=True)
|
||||
os.makedirs(self.recordings_dir, exist_ok=True)
|
||||
|
||||
self.is_recording = False
|
||||
self.recording_pipeline = None
|
||||
self.recording_sink = None
|
||||
self.recording_start_time = None
|
||||
self.recording_remote_identity = None
|
||||
self.recording_filename = None
|
||||
|
||||
# Paths to executables
|
||||
self.espeak_path = self._find_espeak()
|
||||
self.ffmpeg_path = self._find_ffmpeg()
|
||||
|
||||
# Check for presence
|
||||
self.has_espeak = self.espeak_path is not None
|
||||
self.has_ffmpeg = self.ffmpeg_path is not None
|
||||
|
||||
if self.has_espeak:
|
||||
RNS.log(f"Voicemail: Found eSpeak at {self.espeak_path}", RNS.LOG_DEBUG)
|
||||
else:
|
||||
RNS.log("Voicemail: eSpeak not found", RNS.LOG_ERROR)
|
||||
|
||||
if self.has_ffmpeg:
|
||||
RNS.log(f"Voicemail: Found ffmpeg at {self.ffmpeg_path}", RNS.LOG_DEBUG)
|
||||
else:
|
||||
RNS.log("Voicemail: ffmpeg not found", RNS.LOG_ERROR)
|
||||
|
||||
def _find_espeak(self):
|
||||
# Try standard name first
|
||||
path = shutil.which("espeak-ng")
|
||||
if path:
|
||||
return path
|
||||
|
||||
# Try without -ng suffix
|
||||
path = shutil.which("espeak")
|
||||
if path:
|
||||
return path
|
||||
|
||||
# Windows common install locations if not in PATH
|
||||
if platform.system() == "Windows":
|
||||
common_paths = [
|
||||
os.path.expandvars(r"%ProgramFiles%\eSpeak NG\espeak-ng.exe"),
|
||||
os.path.expandvars(r"%ProgramFiles(x86)%\eSpeak NG\espeak-ng.exe"),
|
||||
os.path.expandvars(r"%ProgramFiles%\eSpeak\espeak.exe"),
|
||||
]
|
||||
for p in common_paths:
|
||||
if os.path.exists(p):
|
||||
return p
|
||||
|
||||
return None
|
||||
|
||||
def _find_ffmpeg(self):
|
||||
path = shutil.which("ffmpeg")
|
||||
if path:
|
||||
return path
|
||||
|
||||
# Windows common install locations
|
||||
if platform.system() == "Windows":
|
||||
common_paths = [
|
||||
os.path.expandvars(r"%ProgramFiles%\ffmpeg\bin\ffmpeg.exe"),
|
||||
os.path.expandvars(r"%ProgramFiles(x86)%\ffmpeg\bin\ffmpeg.exe"),
|
||||
]
|
||||
for p in common_paths:
|
||||
if os.path.exists(p):
|
||||
return p
|
||||
|
||||
return None
|
||||
|
||||
def generate_greeting(self, text):
|
||||
if not self.has_espeak or not self.has_ffmpeg:
|
||||
msg = "espeak-ng and ffmpeg are required for greeting generation"
|
||||
raise RuntimeError(msg)
|
||||
|
||||
wav_path = os.path.join(self.greetings_dir, "greeting.wav")
|
||||
opus_path = os.path.join(self.greetings_dir, "greeting.opus")
|
||||
|
||||
try:
|
||||
# espeak-ng to WAV
|
||||
subprocess.run([self.espeak_path, "-w", wav_path, text], check=True)
|
||||
|
||||
# ffmpeg to Opus
|
||||
if os.path.exists(opus_path):
|
||||
os.remove(opus_path)
|
||||
|
||||
subprocess.run(
|
||||
[
|
||||
self.ffmpeg_path,
|
||||
"-i",
|
||||
wav_path,
|
||||
"-c:a",
|
||||
"libopus",
|
||||
"-b:a",
|
||||
"16k",
|
||||
"-vbr",
|
||||
"on",
|
||||
opus_path,
|
||||
],
|
||||
check=True,
|
||||
)
|
||||
|
||||
return opus_path
|
||||
finally:
|
||||
if os.path.exists(wav_path):
|
||||
os.remove(wav_path)
|
||||
|
||||
def handle_incoming_call(self, caller_identity):
|
||||
if not self.db.config.voicemail_enabled.get():
|
||||
return
|
||||
|
||||
delay = self.db.config.voicemail_auto_answer_delay_seconds.get()
|
||||
|
||||
def voicemail_job():
|
||||
time.sleep(delay)
|
||||
|
||||
# Check if still ringing and no other active call
|
||||
telephone = self.telephone_manager.telephone
|
||||
if (
|
||||
telephone
|
||||
and telephone.active_call
|
||||
and telephone.active_call.get_remote_identity() == caller_identity
|
||||
and telephone.call_status == LXST.Signalling.STATUS_RINGING
|
||||
):
|
||||
RNS.log(
|
||||
f"Auto-answering call from {RNS.prettyhexrep(caller_identity.hash)} for voicemail",
|
||||
RNS.LOG_DEBUG,
|
||||
)
|
||||
self.start_voicemail_session(caller_identity)
|
||||
|
||||
threading.Thread(target=voicemail_job, daemon=True).start()
|
||||
|
||||
def start_voicemail_session(self, caller_identity):
|
||||
telephone = self.telephone_manager.telephone
|
||||
if not telephone:
|
||||
return
|
||||
|
||||
# Answer the call
|
||||
if not telephone.answer(caller_identity):
|
||||
return
|
||||
|
||||
# Stop microphone if it's active to prevent local noise being sent or recorded
|
||||
if telephone.audio_input:
|
||||
telephone.audio_input.stop()
|
||||
|
||||
# Play greeting
|
||||
greeting_path = os.path.join(self.greetings_dir, "greeting.opus")
|
||||
if not os.path.exists(greeting_path):
|
||||
# Fallback if no greeting generated yet
|
||||
self.generate_greeting(self.db.config.voicemail_greeting.get())
|
||||
|
||||
def session_job():
|
||||
try:
|
||||
# 1. Play greeting
|
||||
greeting_source = OpusFileSource(greeting_path, target_frame_ms=60)
|
||||
# Attach to transmit mixer
|
||||
greeting_pipeline = Pipeline(
|
||||
source=greeting_source, codec=Null(), sink=telephone.transmit_mixer
|
||||
)
|
||||
greeting_pipeline.start()
|
||||
|
||||
# Wait for greeting to finish
|
||||
while greeting_source.running:
|
||||
time.sleep(0.1)
|
||||
if not telephone.active_call:
|
||||
return
|
||||
|
||||
greeting_pipeline.stop()
|
||||
|
||||
# 2. Play beep
|
||||
beep_source = LXST.ToneSource(
|
||||
frequency=800,
|
||||
gain=0.1,
|
||||
target_frame_ms=60,
|
||||
codec=Null(),
|
||||
sink=telephone.transmit_mixer,
|
||||
)
|
||||
beep_source.start()
|
||||
time.sleep(0.5)
|
||||
beep_source.stop()
|
||||
|
||||
# 3. Start recording
|
||||
self.start_recording(caller_identity)
|
||||
|
||||
# 4. Wait for max recording time or hangup
|
||||
max_time = self.db.config.voicemail_max_recording_seconds.get()
|
||||
start_wait = time.time()
|
||||
while self.is_recording and (time.time() - start_wait < max_time):
|
||||
time.sleep(0.5)
|
||||
if not telephone.active_call:
|
||||
break
|
||||
|
||||
# 5. End session
|
||||
if telephone.active_call:
|
||||
telephone.hangup()
|
||||
|
||||
self.stop_recording()
|
||||
|
||||
except Exception as e:
|
||||
RNS.log(f"Error during voicemail session: {e}", RNS.LOG_ERROR)
|
||||
if self.is_recording:
|
||||
self.stop_recording()
|
||||
|
||||
threading.Thread(target=session_job, daemon=True).start()
|
||||
|
||||
def start_recording(self, caller_identity):
|
||||
telephone = self.telephone_manager.telephone
|
||||
if not telephone or not telephone.active_call:
|
||||
return
|
||||
|
||||
timestamp = time.time()
|
||||
filename = f"voicemail_{caller_identity.hash.hex()}_{int(timestamp)}.opus"
|
||||
filepath = os.path.join(self.recordings_dir, filename)
|
||||
|
||||
try:
|
||||
self.recording_sink = OpusFileSink(filepath)
|
||||
# Connect the caller's audio source to our sink
|
||||
# active_call.audio_source is a LinkSource that feeds into receive_mixer
|
||||
# We want to record what we receive.
|
||||
self.recording_pipeline = Pipeline(
|
||||
source=telephone.active_call.audio_source,
|
||||
codec=Null(),
|
||||
sink=self.recording_sink,
|
||||
)
|
||||
self.recording_pipeline.start()
|
||||
|
||||
self.is_recording = True
|
||||
self.recording_start_time = timestamp
|
||||
self.recording_remote_identity = caller_identity
|
||||
self.recording_filename = filename
|
||||
|
||||
RNS.log(
|
||||
f"Started recording voicemail from {RNS.prettyhexrep(caller_identity.hash)}",
|
||||
RNS.LOG_DEBUG,
|
||||
)
|
||||
except Exception as e:
|
||||
RNS.log(f"Failed to start recording: {e}", RNS.LOG_ERROR)
|
||||
|
||||
def stop_recording(self):
|
||||
if not self.is_recording:
|
||||
return
|
||||
|
||||
try:
|
||||
duration = int(time.time() - self.recording_start_time)
|
||||
self.recording_pipeline.stop()
|
||||
self.recording_sink = None
|
||||
self.recording_pipeline = None
|
||||
|
||||
# Save to database if long enough
|
||||
if duration >= 1:
|
||||
remote_name = self.telephone_manager.get_name_for_identity_hash(
|
||||
self.recording_remote_identity.hash.hex()
|
||||
)
|
||||
self.db.voicemails.add_voicemail(
|
||||
remote_identity_hash=self.recording_remote_identity.hash.hex(),
|
||||
remote_identity_name=remote_name,
|
||||
filename=self.recording_filename,
|
||||
duration_seconds=duration,
|
||||
timestamp=self.recording_start_time,
|
||||
)
|
||||
RNS.log(
|
||||
f"Saved voicemail from {RNS.prettyhexrep(self.recording_remote_identity.hash)} ({duration}s)",
|
||||
RNS.LOG_DEBUG,
|
||||
)
|
||||
else:
|
||||
# Delete short/empty recording
|
||||
filepath = os.path.join(self.recordings_dir, self.recording_filename)
|
||||
if os.path.exists(filepath):
|
||||
os.remove(filepath)
|
||||
|
||||
self.is_recording = False
|
||||
self.recording_start_time = None
|
||||
self.recording_remote_identity = None
|
||||
self.recording_filename = None
|
||||
|
||||
except Exception as e:
|
||||
RNS.log(f"Error stopping recording: {e}", RNS.LOG_ERROR)
|
||||
self.is_recording = False
|
||||
@@ -26,7 +26,7 @@
|
||||
<div
|
||||
class="hidden sm:flex my-auto w-12 h-12 mr-2 rounded-xl overflow-hidden bg-white/70 dark:bg-zinc-800/80 border border-gray-200 dark:border-zinc-700 shadow-inner"
|
||||
>
|
||||
<img class="w-12 h-12 object-contain p-1.5" src="/assets/images/logo-chat-bubble.png" />
|
||||
<img class="w-12 h-12 object-contain p-1.5" src="/assets/images/logo.png" />
|
||||
</div>
|
||||
<div class="my-auto">
|
||||
<div
|
||||
@@ -387,11 +387,7 @@
|
||||
</div>
|
||||
</template>
|
||||
</template>
|
||||
<CallOverlay
|
||||
v-if="activeCall || isCallEnded"
|
||||
:active-call="activeCall || lastCall"
|
||||
:is-ended="isCallEnded"
|
||||
/>
|
||||
<CallOverlay v-if="activeCall || isCallEnded" :active-call="activeCall || lastCall" :is-ended="isCallEnded" />
|
||||
<Toast />
|
||||
</div>
|
||||
</template>
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
class="bg-white dark:bg-zinc-900 rounded-2xl shadow-lg border border-gray-200 dark:border-zinc-800 p-8"
|
||||
>
|
||||
<div class="text-center mb-8">
|
||||
<img class="w-16 h-16 mx-auto mb-4" src="/assets/images/logo-chat-bubble.png" />
|
||||
<img class="w-16 h-16 mx-auto mb-4" src="/assets/images/logo.png" />
|
||||
<h1 class="text-2xl font-bold text-gray-900 dark:text-zinc-100 mb-2">
|
||||
{{ isSetup ? "Initial Setup" : "Authentication Required" }}
|
||||
</h1>
|
||||
|
||||
@@ -7,12 +7,17 @@
|
||||
<!-- Header -->
|
||||
<div class="p-3 flex items-center bg-gray-50 dark:bg-zinc-800/50 border-b border-gray-100 dark:border-zinc-800">
|
||||
<div class="flex-1 flex items-center space-x-2">
|
||||
<div
|
||||
class="size-2 rounded-full"
|
||||
:class="isEnded ? 'bg-red-500' : 'bg-green-500 animate-pulse'"
|
||||
></div>
|
||||
<div class="size-2 rounded-full" :class="isEnded ? 'bg-red-500' : 'bg-green-500 animate-pulse'"></div>
|
||||
<span class="text-[10px] font-bold text-gray-500 dark:text-zinc-400 uppercase tracking-wider">
|
||||
{{ isEnded ? "Call Ended" : (activeCall.status === 6 ? "Active Call" : "Call Status") }}
|
||||
{{
|
||||
isEnded
|
||||
? "Call Ended"
|
||||
: activeCall.is_voicemail
|
||||
? "Recording Voicemail"
|
||||
: activeCall.status === 6
|
||||
? "Active Call"
|
||||
: "Call Status"
|
||||
}}
|
||||
</span>
|
||||
</div>
|
||||
<button
|
||||
@@ -31,12 +36,12 @@
|
||||
<div v-show="!isMinimized" class="p-4">
|
||||
<!-- icon and name -->
|
||||
<div class="flex flex-col items-center mb-4">
|
||||
<div
|
||||
<div
|
||||
class="p-4 rounded-full mb-3"
|
||||
:class="isEnded ? 'bg-red-100 dark:bg-red-900/30' : 'bg-blue-100 dark:bg-blue-900/30'"
|
||||
>
|
||||
<MaterialDesignIcon
|
||||
icon-name="account"
|
||||
<MaterialDesignIcon
|
||||
icon-name="account"
|
||||
class="size-8"
|
||||
:class="isEnded ? 'text-red-600 dark:text-red-400' : 'text-blue-600 dark:text-blue-400'"
|
||||
/>
|
||||
@@ -60,10 +65,11 @@
|
||||
<div
|
||||
class="text-sm font-medium"
|
||||
:class="[
|
||||
isEnded ? 'text-red-600 dark:text-red-400 animate-pulse' :
|
||||
(activeCall.status === 6
|
||||
? 'text-green-600 dark:text-green-400'
|
||||
: 'text-gray-600 dark:text-zinc-400')
|
||||
isEnded
|
||||
? 'text-red-600 dark:text-red-400 animate-pulse'
|
||||
: activeCall.status === 6
|
||||
? 'text-green-600 dark:text-green-400'
|
||||
: 'text-gray-600 dark:text-zinc-400',
|
||||
]"
|
||||
>
|
||||
<span v-if="isEnded">Call Ended</span>
|
||||
@@ -150,7 +156,10 @@
|
||||
</div>
|
||||
|
||||
<!-- Minimized State -->
|
||||
<div v-show="isMinimized && !isEnded" class="px-4 py-2 flex items-center justify-between bg-white dark:bg-zinc-900">
|
||||
<div
|
||||
v-show="isMinimized && !isEnded"
|
||||
class="px-4 py-2 flex items-center justify-between bg-white dark:bg-zinc-900"
|
||||
>
|
||||
<div class="flex items-center space-x-2 overflow-hidden mr-2">
|
||||
<MaterialDesignIcon icon-name="account" class="size-5 text-blue-500" />
|
||||
<span class="text-sm font-medium text-gray-700 dark:text-zinc-200 truncate">
|
||||
|
||||
@@ -1,183 +1,230 @@
|
||||
<template>
|
||||
<div class="flex w-full h-full bg-gray-100 dark:bg-zinc-950" :class="{ dark: config?.theme === 'dark' }">
|
||||
<div class="mx-auto my-auto w-full max-w-xl p-4">
|
||||
<div v-if="activeCall || isCallEnded" class="flex">
|
||||
<div class="mx-auto my-auto min-w-64">
|
||||
<div class="text-center">
|
||||
<div>
|
||||
<!-- icon -->
|
||||
<div class="flex mb-4">
|
||||
<div
|
||||
class="mx-auto bg-gray-300 dark:bg-zinc-700 text-gray-500 dark:text-gray-400 p-4 rounded-full"
|
||||
:class="{ 'animate-pulse': activeCall && activeCall.status === 4 }"
|
||||
>
|
||||
<MaterialDesignIcon icon-name="account" class="size-12" />
|
||||
</div>
|
||||
</div>
|
||||
<div class="flex flex-col w-full h-full bg-gray-100 dark:bg-zinc-950" :class="{ dark: config?.theme === 'dark' }">
|
||||
<div class="mx-auto w-full max-w-xl p-4 flex-1 flex flex-col">
|
||||
<!-- Tabs -->
|
||||
<div class="flex border-b border-gray-200 dark:border-zinc-800 mb-6 shrink-0">
|
||||
<button
|
||||
:class="[
|
||||
activeTab === 'phone'
|
||||
? 'border-blue-500 text-blue-600 dark:text-blue-400'
|
||||
: 'border-transparent text-gray-500 hover:text-gray-700 dark:text-zinc-400 dark:hover:text-zinc-200 hover:border-gray-300',
|
||||
]"
|
||||
class="py-2 px-4 border-b-2 font-medium text-sm transition-all"
|
||||
@click="activeTab = 'phone'"
|
||||
>
|
||||
Phone
|
||||
</button>
|
||||
<button
|
||||
:class="[
|
||||
activeTab === 'voicemail'
|
||||
? 'border-blue-500 text-blue-600 dark:text-blue-400'
|
||||
: 'border-transparent text-gray-500 hover:text-gray-700 dark:text-zinc-400 dark:hover:text-zinc-200 hover:border-gray-300',
|
||||
]"
|
||||
class="py-2 px-4 border-b-2 font-medium text-sm flex items-center gap-2 transition-all"
|
||||
@click="activeTab = 'voicemail'"
|
||||
>
|
||||
Voicemail
|
||||
<span
|
||||
v-if="unreadVoicemailsCount > 0"
|
||||
class="bg-red-500 text-white text-[10px] px-1.5 py-0.5 rounded-full animate-pulse"
|
||||
>{{ unreadVoicemailsCount }}</span
|
||||
>
|
||||
</button>
|
||||
<button
|
||||
:class="[
|
||||
activeTab === 'settings'
|
||||
? 'border-blue-500 text-blue-600 dark:text-blue-400'
|
||||
: 'border-transparent text-gray-500 hover:text-gray-700 dark:text-zinc-400 dark:hover:text-zinc-200 hover:border-gray-300',
|
||||
]"
|
||||
class="py-2 px-4 border-b-2 font-medium text-sm ml-auto transition-all"
|
||||
@click="activeTab = 'settings'"
|
||||
>
|
||||
<MaterialDesignIcon icon-name="cog" class="size-4" />
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<!-- name -->
|
||||
<div class="text-xl font-semibold text-gray-500 dark:text-zinc-100">
|
||||
<span v-if="(activeCall || lastCall)?.remote_identity_name != null">{{
|
||||
(activeCall || lastCall).remote_identity_name
|
||||
}}</span>
|
||||
<span v-else>Unknown</span>
|
||||
</div>
|
||||
|
||||
<!-- identity hash -->
|
||||
<div
|
||||
v-if="(activeCall || lastCall)?.remote_identity_hash != null"
|
||||
class="text-gray-500 dark:text-zinc-100 opacity-60 text-sm"
|
||||
>
|
||||
{{
|
||||
(activeCall || lastCall).remote_identity_hash
|
||||
? formatDestinationHash((activeCall || lastCall).remote_identity_hash)
|
||||
: ""
|
||||
}}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- call status -->
|
||||
<div class="text-gray-500 dark:text-zinc-100 mb-4 mt-2">
|
||||
<template v-if="isCallEnded">
|
||||
<span class="text-red-500 font-bold animate-pulse">Call Ended</span>
|
||||
</template>
|
||||
<template v-else-if="activeCall">
|
||||
<span v-if="activeCall.is_incoming && activeCall.status === 4" class="animate-bounce inline-block">Incoming Call...</span>
|
||||
<span v-else>
|
||||
<span v-if="activeCall.status === 0">Busy...</span>
|
||||
<span v-else-if="activeCall.status === 1">Rejected...</span>
|
||||
<span v-else-if="activeCall.status === 2">Calling...</span>
|
||||
<span v-else-if="activeCall.status === 3">Available...</span>
|
||||
<span v-else-if="activeCall.status === 4">Ringing...</span>
|
||||
<span v-else-if="activeCall.status === 5">Connecting...</span>
|
||||
<span v-else-if="activeCall.status === 6" class="text-green-500 font-medium">Connected</span>
|
||||
<span v-else>Status: {{ activeCall.status }}</span>
|
||||
</span>
|
||||
</template>
|
||||
</div>
|
||||
|
||||
<!-- settings during connected call -->
|
||||
<div v-if="activeCall && activeCall.status === 6" class="mb-4">
|
||||
<div class="w-full">
|
||||
<select
|
||||
v-model="selectedAudioProfileId"
|
||||
class="bg-gray-50 border border-gray-300 text-gray-900 text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5 dark:bg-zinc-900 dark:border-zinc-600 dark:text-white dark:focus:ring-blue-600 dark:focus:border-blue-600"
|
||||
@change="switchAudioProfile(selectedAudioProfileId)"
|
||||
>
|
||||
<option
|
||||
v-for="audioProfile in audioProfiles"
|
||||
:key="audioProfile.id"
|
||||
:value="audioProfile.id"
|
||||
<!-- Phone Tab -->
|
||||
<div v-if="activeTab === 'phone'" class="flex-1 flex flex-col">
|
||||
<div v-if="activeCall || isCallEnded" class="flex my-auto">
|
||||
<div class="mx-auto my-auto min-w-64">
|
||||
<div class="text-center">
|
||||
<div>
|
||||
<!-- icon -->
|
||||
<div class="flex mb-4">
|
||||
<div
|
||||
class="mx-auto bg-gray-300 dark:bg-zinc-700 text-gray-500 dark:text-gray-400 p-4 rounded-full"
|
||||
:class="{ 'animate-pulse': activeCall && activeCall.status === 4 }"
|
||||
>
|
||||
{{ audioProfile.name }}
|
||||
</option>
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- controls during connected call -->
|
||||
<div v-if="activeCall && activeCall.status === 6" class="mx-auto space-x-4 mb-8">
|
||||
<!-- mute/unmute mic -->
|
||||
<button
|
||||
type="button"
|
||||
:title="isMicMuted ? 'Unmute Mic' : 'Mute Mic'"
|
||||
:class="[
|
||||
isMicMuted
|
||||
? 'bg-red-500 hover:bg-red-400'
|
||||
: 'bg-gray-200 dark:bg-zinc-800 text-gray-700 dark:text-zinc-200 hover:bg-gray-300 dark:hover:bg-zinc-700',
|
||||
]"
|
||||
class="inline-flex items-center gap-x-1 rounded-full p-4 text-sm font-semibold shadow-sm transition-all duration-200"
|
||||
@click="toggleMicrophone"
|
||||
>
|
||||
<MaterialDesignIcon
|
||||
:icon-name="isMicMuted ? 'microphone-off' : 'microphone'"
|
||||
class="size-8"
|
||||
/>
|
||||
</button>
|
||||
|
||||
<!-- mute/unmute speaker -->
|
||||
<button
|
||||
type="button"
|
||||
:title="isSpeakerMuted ? 'Unmute Speaker' : 'Mute Speaker'"
|
||||
:class="[
|
||||
isSpeakerMuted
|
||||
? 'bg-red-500 hover:bg-red-400'
|
||||
: 'bg-gray-200 dark:bg-zinc-800 text-gray-700 dark:text-zinc-200 hover:bg-gray-300 dark:hover:bg-zinc-700',
|
||||
]"
|
||||
class="inline-flex items-center gap-x-1 rounded-full p-4 text-sm font-semibold shadow-sm transition-all duration-200"
|
||||
@click="toggleSpeaker"
|
||||
>
|
||||
<MaterialDesignIcon
|
||||
:icon-name="isSpeakerMuted ? 'volume-off' : 'volume-high'"
|
||||
class="size-8"
|
||||
/>
|
||||
</button>
|
||||
|
||||
<!-- toggle stats -->
|
||||
<button
|
||||
type="button"
|
||||
:class="[
|
||||
isShowingStats
|
||||
? 'bg-blue-500 text-white'
|
||||
: 'bg-gray-200 dark:bg-zinc-800 text-gray-700 dark:text-zinc-200 hover:bg-gray-300 dark:hover:bg-zinc-700',
|
||||
]"
|
||||
class="inline-flex items-center gap-x-1 rounded-full p-4 text-sm font-semibold shadow-sm transition-all duration-200"
|
||||
@click="isShowingStats = !isShowingStats"
|
||||
>
|
||||
<MaterialDesignIcon icon-name="chart-bar" class="size-8" />
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<!-- actions -->
|
||||
<div v-if="activeCall" class="mx-auto space-x-4">
|
||||
<!-- answer call -->
|
||||
<button
|
||||
v-if="activeCall.is_incoming && activeCall.status === 4"
|
||||
title="Answer Call"
|
||||
type="button"
|
||||
class="inline-flex items-center gap-x-2 rounded-2xl bg-green-600 px-6 py-4 text-lg font-bold text-white shadow-xl hover:bg-green-500 transition-all duration-200 animate-bounce"
|
||||
@click="answerCall"
|
||||
>
|
||||
<MaterialDesignIcon icon-name="phone" class="size-6" />
|
||||
<span>Accept</span>
|
||||
</button>
|
||||
|
||||
<!-- hangup/decline call -->
|
||||
<button
|
||||
:title="
|
||||
activeCall.is_incoming && activeCall.status === 4 ? 'Decline Call' : 'Hangup Call'
|
||||
"
|
||||
type="button"
|
||||
class="inline-flex items-center gap-x-2 rounded-2xl bg-red-600 px-6 py-4 text-lg font-bold text-white shadow-xl hover:bg-red-500 transition-all duration-200"
|
||||
@click="hangupCall"
|
||||
>
|
||||
<MaterialDesignIcon icon-name="phone-hangup" class="size-6 rotate-[135deg]" />
|
||||
<span>{{
|
||||
activeCall.is_incoming && activeCall.status === 4 ? "Decline" : "Hangup"
|
||||
}}</span>
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<!-- stats -->
|
||||
<div
|
||||
v-if="isShowingStats"
|
||||
class="mt-4 p-4 text-left bg-gray-200 dark:bg-zinc-800 rounded-lg text-sm text-gray-600 dark:text-zinc-300"
|
||||
>
|
||||
<div class="grid grid-cols-2 gap-2">
|
||||
<div>
|
||||
TX: {{ activeCall.tx_packets }} ({{ formatBytes(activeCall.tx_bytes) }})
|
||||
<MaterialDesignIcon icon-name="account" class="size-12" />
|
||||
</div>
|
||||
</div>
|
||||
<div>
|
||||
RX: {{ activeCall.rx_packets }} ({{ formatBytes(activeCall.rx_bytes) }})
|
||||
|
||||
<!-- name -->
|
||||
<div class="text-xl font-semibold text-gray-500 dark:text-zinc-100">
|
||||
<span v-if="(activeCall || lastCall)?.remote_identity_name != null">{{
|
||||
(activeCall || lastCall).remote_identity_name
|
||||
}}</span>
|
||||
<span v-else>Unknown</span>
|
||||
</div>
|
||||
|
||||
<!-- identity hash -->
|
||||
<div
|
||||
v-if="(activeCall || lastCall)?.remote_identity_hash != null"
|
||||
class="text-gray-500 dark:text-zinc-100 opacity-60 text-sm"
|
||||
>
|
||||
{{
|
||||
(activeCall || lastCall).remote_identity_hash
|
||||
? formatDestinationHash((activeCall || lastCall).remote_identity_hash)
|
||||
: ""
|
||||
}}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- call status -->
|
||||
<div class="text-gray-500 dark:text-zinc-100 mb-4 mt-2">
|
||||
<template v-if="isCallEnded">
|
||||
<span class="text-red-500 font-bold animate-pulse">Call Ended</span>
|
||||
</template>
|
||||
<template v-else-if="activeCall">
|
||||
<span
|
||||
v-if="activeCall.is_incoming && activeCall.status === 4"
|
||||
class="animate-bounce inline-block"
|
||||
>Incoming Call...</span
|
||||
>
|
||||
<span v-else>
|
||||
<span v-if="activeCall.status === 0">Busy...</span>
|
||||
<span v-else-if="activeCall.status === 1">Rejected...</span>
|
||||
<span v-else-if="activeCall.status === 2">Calling...</span>
|
||||
<span v-else-if="activeCall.status === 3">Available...</span>
|
||||
<span v-else-if="activeCall.status === 4">Ringing...</span>
|
||||
<span v-else-if="activeCall.status === 5">Connecting...</span>
|
||||
<span v-else-if="activeCall.status === 6" class="text-green-500 font-medium"
|
||||
>Connected</span
|
||||
>
|
||||
<span v-else>Status: {{ activeCall.status }}</span>
|
||||
</span>
|
||||
</template>
|
||||
</div>
|
||||
|
||||
<!-- settings during connected call -->
|
||||
<div v-if="activeCall && activeCall.status === 6" class="mb-4">
|
||||
<div class="w-full">
|
||||
<select
|
||||
v-model="selectedAudioProfileId"
|
||||
class="bg-gray-50 border border-gray-300 text-gray-900 text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5 dark:bg-zinc-900 dark:border-zinc-600 dark:text-white dark:focus:ring-blue-600 dark:focus:border-blue-600"
|
||||
@change="switchAudioProfile(selectedAudioProfileId)"
|
||||
>
|
||||
<option
|
||||
v-for="audioProfile in audioProfiles"
|
||||
:key="audioProfile.id"
|
||||
:value="audioProfile.id"
|
||||
>
|
||||
{{ audioProfile.name }}
|
||||
</option>
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- controls during connected call -->
|
||||
<div v-if="activeCall && activeCall.status === 6" class="mx-auto space-x-4 mb-8">
|
||||
<!-- mute/unmute mic -->
|
||||
<button
|
||||
type="button"
|
||||
:title="isMicMuted ? 'Unmute Mic' : 'Mute Mic'"
|
||||
:class="[
|
||||
isMicMuted
|
||||
? 'bg-red-500 hover:bg-red-400'
|
||||
: 'bg-gray-200 dark:bg-zinc-800 text-gray-700 dark:text-zinc-200 hover:bg-gray-300 dark:hover:bg-zinc-700',
|
||||
]"
|
||||
class="inline-flex items-center gap-x-1 rounded-full p-4 text-sm font-semibold shadow-sm transition-all duration-200"
|
||||
@click="toggleMicrophone"
|
||||
>
|
||||
<MaterialDesignIcon
|
||||
:icon-name="isMicMuted ? 'microphone-off' : 'microphone'"
|
||||
class="size-8"
|
||||
/>
|
||||
</button>
|
||||
|
||||
<!-- mute/unmute speaker -->
|
||||
<button
|
||||
type="button"
|
||||
:title="isSpeakerMuted ? 'Unmute Speaker' : 'Mute Speaker'"
|
||||
:class="[
|
||||
isSpeakerMuted
|
||||
? 'bg-red-500 hover:bg-red-400'
|
||||
: 'bg-gray-200 dark:bg-zinc-800 text-gray-700 dark:text-zinc-200 hover:bg-gray-300 dark:hover:bg-zinc-700',
|
||||
]"
|
||||
class="inline-flex items-center gap-x-1 rounded-full p-4 text-sm font-semibold shadow-sm transition-all duration-200"
|
||||
@click="toggleSpeaker"
|
||||
>
|
||||
<MaterialDesignIcon
|
||||
:icon-name="isSpeakerMuted ? 'volume-off' : 'volume-high'"
|
||||
class="size-8"
|
||||
/>
|
||||
</button>
|
||||
|
||||
<!-- toggle stats -->
|
||||
<button
|
||||
type="button"
|
||||
:class="[
|
||||
isShowingStats
|
||||
? 'bg-blue-500 text-white'
|
||||
: 'bg-gray-200 dark:bg-zinc-800 text-gray-700 dark:text-zinc-200 hover:bg-gray-300 dark:hover:bg-zinc-700',
|
||||
]"
|
||||
class="inline-flex items-center gap-x-1 rounded-full p-4 text-sm font-semibold shadow-sm transition-all duration-200"
|
||||
@click="isShowingStats = !isShowingStats"
|
||||
>
|
||||
<MaterialDesignIcon icon-name="chart-bar" class="size-8" />
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<!-- actions -->
|
||||
<div v-if="activeCall" class="mx-auto space-x-4">
|
||||
<!-- answer call -->
|
||||
<button
|
||||
v-if="activeCall.is_incoming && activeCall.status === 4"
|
||||
title="Answer Call"
|
||||
type="button"
|
||||
class="inline-flex items-center gap-x-2 rounded-2xl bg-green-600 px-6 py-4 text-lg font-bold text-white shadow-xl hover:bg-green-500 transition-all duration-200 animate-bounce"
|
||||
@click="answerCall"
|
||||
>
|
||||
<MaterialDesignIcon icon-name="phone" class="size-6" />
|
||||
<span>Accept</span>
|
||||
</button>
|
||||
|
||||
<!-- hangup/decline call -->
|
||||
<button
|
||||
:title="
|
||||
activeCall.is_incoming && activeCall.status === 4
|
||||
? 'Decline Call'
|
||||
: 'Hangup Call'
|
||||
"
|
||||
type="button"
|
||||
class="inline-flex items-center gap-x-2 rounded-2xl bg-red-600 px-6 py-4 text-lg font-bold text-white shadow-xl hover:bg-red-500 transition-all duration-200"
|
||||
@click="hangupCall"
|
||||
>
|
||||
<MaterialDesignIcon icon-name="phone-hangup" class="size-6 rotate-[135deg]" />
|
||||
<span>{{
|
||||
activeCall.is_incoming && activeCall.status === 4 ? "Decline" : "Hangup"
|
||||
}}</span>
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<!-- stats -->
|
||||
<div
|
||||
v-if="isShowingStats"
|
||||
class="mt-4 p-4 text-left bg-gray-200 dark:bg-zinc-800 rounded-lg text-sm text-gray-600 dark:text-zinc-300"
|
||||
>
|
||||
<div class="grid grid-cols-2 gap-2">
|
||||
<div>TX: {{ activeCall.tx_packets }} ({{ formatBytes(activeCall.tx_bytes) }})</div>
|
||||
<div>RX: {{ activeCall.rx_packets }} ({{ formatBytes(activeCall.rx_bytes) }})</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div v-else class="flex">
|
||||
<div class="mx-auto my-auto w-full">
|
||||
<div v-else class="my-auto">
|
||||
<div class="text-center mb-4">
|
||||
<div class="text-xl font-semibold text-gray-500 dark:text-zinc-100">Telephone</div>
|
||||
<div class="text-gray-500 dark:text-zinc-400">Enter an identity hash to call.</div>
|
||||
@@ -200,72 +247,304 @@
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Call History -->
|
||||
<div v-if="callHistory.length > 0 && !activeCall && !isCallEnded" class="mt-8">
|
||||
<div
|
||||
class="bg-white dark:bg-zinc-900 rounded-xl shadow-sm border border-gray-200 dark:border-zinc-800 overflow-hidden"
|
||||
>
|
||||
<div
|
||||
class="px-4 py-3 border-b border-gray-200 dark:border-zinc-800 flex justify-between items-center"
|
||||
>
|
||||
<h3 class="text-sm font-bold text-gray-900 dark:text-white uppercase tracking-wider">
|
||||
Call History
|
||||
</h3>
|
||||
<MaterialDesignIcon icon-name="history" class="size-4 text-gray-400" />
|
||||
</div>
|
||||
<ul class="divide-y divide-gray-100 dark:divide-zinc-800">
|
||||
<li
|
||||
v-for="entry in callHistory"
|
||||
:key="entry.id"
|
||||
class="px-4 py-3 hover:bg-gray-50 dark:hover:bg-zinc-800/50 transition-colors"
|
||||
>
|
||||
<div class="flex items-center space-x-3">
|
||||
<div :class="entry.is_incoming ? 'text-blue-500' : 'text-green-500'">
|
||||
<MaterialDesignIcon
|
||||
:icon-name="entry.is_incoming ? 'phone-incoming' : 'phone-outgoing'"
|
||||
class="size-5"
|
||||
/>
|
||||
</div>
|
||||
<div class="flex-1 min-w-0">
|
||||
<div class="flex items-center justify-between">
|
||||
<p class="text-sm font-semibold text-gray-900 dark:text-white truncate">
|
||||
{{ entry.remote_identity_name || "Unknown" }}
|
||||
</p>
|
||||
<span class="text-[10px] text-gray-500 dark:text-zinc-500 font-mono ml-2">
|
||||
{{ entry.timestamp ? formatDateTime(entry.timestamp * 1000) : "" }}
|
||||
</span>
|
||||
</div>
|
||||
<div class="flex items-center justify-between mt-0.5">
|
||||
<div
|
||||
class="flex items-center text-xs text-gray-500 dark:text-zinc-400 space-x-2"
|
||||
>
|
||||
<span>{{ entry.status }}</span>
|
||||
<span v-if="entry.duration_seconds > 0"
|
||||
>• {{ formatDuration(entry.duration_seconds) }}</span
|
||||
>
|
||||
</div>
|
||||
<button
|
||||
type="button"
|
||||
class="text-[10px] text-blue-500 hover:text-blue-600 font-bold uppercase tracking-tighter"
|
||||
@click="
|
||||
destinationHash = entry.remote_identity_hash;
|
||||
call(destinationHash);
|
||||
"
|
||||
>
|
||||
Call Back
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div v-if="callHistory.length > 0 && !activeCall" class="mt-8">
|
||||
<div
|
||||
class="bg-white dark:bg-zinc-900 rounded-xl shadow-sm border border-gray-200 dark:border-zinc-800 overflow-hidden"
|
||||
>
|
||||
<div
|
||||
class="px-4 py-3 border-b border-gray-200 dark:border-zinc-800 flex justify-between items-center"
|
||||
>
|
||||
<h3 class="text-sm font-bold text-gray-900 dark:text-white uppercase tracking-wider">
|
||||
Call History
|
||||
</h3>
|
||||
<MaterialDesignIcon icon-name="history" class="size-4 text-gray-400" />
|
||||
<!-- Voicemail Tab -->
|
||||
<div v-if="activeTab === 'voicemail'" class="flex-1 flex flex-col">
|
||||
<div v-if="voicemails.length === 0" class="my-auto text-center">
|
||||
<div class="bg-gray-200 dark:bg-zinc-800 p-6 rounded-full inline-block mb-4">
|
||||
<MaterialDesignIcon icon-name="voicemail" class="size-12 text-gray-400" />
|
||||
</div>
|
||||
<ul class="divide-y divide-gray-100 dark:divide-zinc-800">
|
||||
<li
|
||||
v-for="entry in callHistory"
|
||||
:key="entry.id"
|
||||
class="px-4 py-3 hover:bg-gray-50 dark:hover:bg-zinc-800/50 transition-colors"
|
||||
<h3 class="text-lg font-medium text-gray-900 dark:text-white">No Voicemails</h3>
|
||||
<p class="text-gray-500 dark:text-zinc-400">
|
||||
When people leave you messages, they'll show up here.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div v-else class="space-y-4">
|
||||
<div
|
||||
class="bg-white dark:bg-zinc-900 rounded-xl shadow-sm border border-gray-200 dark:border-zinc-800 overflow-hidden"
|
||||
>
|
||||
<div
|
||||
class="px-4 py-3 border-b border-gray-200 dark:border-zinc-800 flex justify-between items-center"
|
||||
>
|
||||
<div class="flex items-center space-x-3">
|
||||
<div :class="entry.is_incoming ? 'text-blue-500' : 'text-green-500'">
|
||||
<MaterialDesignIcon
|
||||
:icon-name="entry.is_incoming ? 'phone-incoming' : 'phone-outgoing'"
|
||||
class="size-5"
|
||||
/>
|
||||
</div>
|
||||
<div class="flex-1 min-w-0">
|
||||
<div class="flex items-center justify-between">
|
||||
<p class="text-sm font-semibold text-gray-900 dark:text-white truncate">
|
||||
{{ entry.remote_identity_name || "Unknown" }}
|
||||
</p>
|
||||
<span class="text-[10px] text-gray-500 dark:text-zinc-500 font-mono ml-2">
|
||||
{{
|
||||
entry.timestamp
|
||||
? formatDateTime(
|
||||
entry.timestamp * 1000
|
||||
)
|
||||
: ""
|
||||
}}
|
||||
</span>
|
||||
</div>
|
||||
<div class="flex items-center justify-between mt-0.5">
|
||||
<div
|
||||
class="flex items-center text-xs text-gray-500 dark:text-zinc-400 space-x-2"
|
||||
>
|
||||
<span>{{ entry.status }}</span>
|
||||
<span v-if="entry.duration_seconds > 0"
|
||||
>• {{ formatDuration(entry.duration_seconds) }}</span
|
||||
>
|
||||
<h3 class="text-sm font-bold text-gray-900 dark:text-white uppercase tracking-wider">
|
||||
Voicemail Inbox
|
||||
</h3>
|
||||
<span
|
||||
class="text-[10px] bg-blue-100 text-blue-600 dark:bg-blue-900/30 dark:text-blue-400 px-2 py-0.5 rounded-full font-bold uppercase"
|
||||
>
|
||||
{{ voicemails.length }} Messages
|
||||
</span>
|
||||
</div>
|
||||
<ul class="divide-y divide-gray-100 dark:divide-zinc-800">
|
||||
<li
|
||||
v-for="voicemail in voicemails"
|
||||
:key="voicemail.id"
|
||||
class="px-4 py-4 hover:bg-gray-50 dark:hover:bg-zinc-800/50 transition-colors"
|
||||
:class="{ 'bg-blue-50/50 dark:bg-blue-900/10': !voicemail.is_read }"
|
||||
>
|
||||
<div class="flex items-start space-x-4">
|
||||
<!-- Play/Pause Button -->
|
||||
<button
|
||||
class="shrink-0 size-10 rounded-full flex items-center justify-center transition-all"
|
||||
:class="
|
||||
playingVoicemailId === voicemail.id
|
||||
? 'bg-red-500 text-white animate-pulse'
|
||||
: 'bg-blue-500 text-white hover:bg-blue-600'
|
||||
"
|
||||
@click="playVoicemail(voicemail)"
|
||||
>
|
||||
<MaterialDesignIcon
|
||||
:icon-name="playingVoicemailId === voicemail.id ? 'stop' : 'play'"
|
||||
class="size-6"
|
||||
/>
|
||||
</button>
|
||||
|
||||
<div class="flex-1 min-w-0">
|
||||
<div class="flex items-center justify-between mb-1">
|
||||
<p class="text-sm font-bold text-gray-900 dark:text-white truncate">
|
||||
{{ voicemail.remote_identity_name || "Unknown" }}
|
||||
<span
|
||||
v-if="!voicemail.is_read"
|
||||
class="ml-2 size-2 inline-block rounded-full bg-blue-500"
|
||||
></span>
|
||||
</p>
|
||||
<span class="text-[10px] text-gray-500 dark:text-zinc-500 font-mono">
|
||||
{{ formatDateTime(voicemail.timestamp * 1000) }}
|
||||
</span>
|
||||
</div>
|
||||
<button
|
||||
type="button"
|
||||
class="text-[10px] text-blue-500 hover:text-blue-600 font-bold uppercase tracking-tighter"
|
||||
@click="
|
||||
destinationHash = entry.remote_identity_hash;
|
||||
call(destinationHash);
|
||||
"
|
||||
|
||||
<div
|
||||
class="flex items-center text-xs text-gray-500 dark:text-zinc-400 space-x-3 mb-3"
|
||||
>
|
||||
Call Back
|
||||
</button>
|
||||
<span class="flex items-center gap-1">
|
||||
<MaterialDesignIcon icon-name="clock-outline" class="size-3" />
|
||||
{{ formatDuration(voicemail.duration_seconds) }}
|
||||
</span>
|
||||
<span class="opacity-60 font-mono text-[10px]">{{
|
||||
formatDestinationHash(voicemail.remote_identity_hash)
|
||||
}}</span>
|
||||
</div>
|
||||
|
||||
<div class="flex items-center gap-4">
|
||||
<button
|
||||
type="button"
|
||||
class="text-[10px] flex items-center gap-1 text-blue-500 hover:text-blue-600 font-bold uppercase tracking-wider transition-colors"
|
||||
@click="
|
||||
destinationHash = voicemail.remote_identity_hash;
|
||||
activeTab = 'phone';
|
||||
call(destinationHash);
|
||||
"
|
||||
>
|
||||
<MaterialDesignIcon icon-name="phone" class="size-3" />
|
||||
Call Back
|
||||
</button>
|
||||
<button
|
||||
type="button"
|
||||
class="text-[10px] flex items-center gap-1 text-red-500 hover:text-red-600 font-bold uppercase tracking-wider transition-colors"
|
||||
@click="deleteVoicemail(voicemail.id)"
|
||||
>
|
||||
<MaterialDesignIcon icon-name="delete" class="size-3" />
|
||||
Delete
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Settings Tab -->
|
||||
<div v-if="activeTab === 'settings' && config" class="flex-1 space-y-6">
|
||||
<div
|
||||
class="bg-white dark:bg-zinc-900 rounded-xl p-6 shadow-sm border border-gray-200 dark:border-zinc-800"
|
||||
>
|
||||
<h3
|
||||
class="text-sm font-bold text-gray-900 dark:text-white uppercase tracking-wider mb-6 flex items-center gap-2"
|
||||
>
|
||||
<MaterialDesignIcon icon-name="voicemail" class="size-5 text-blue-500" />
|
||||
Voicemail Settings
|
||||
</h3>
|
||||
|
||||
<!-- Status Banner -->
|
||||
<div
|
||||
v-if="!voicemailStatus.has_espeak || !voicemailStatus.has_ffmpeg"
|
||||
class="mb-6 p-4 bg-amber-50 dark:bg-amber-900/20 border border-amber-200 dark:border-amber-800 rounded-lg flex gap-3 items-start"
|
||||
>
|
||||
<MaterialDesignIcon
|
||||
icon-name="alert"
|
||||
class="size-5 text-amber-600 dark:text-amber-400 shrink-0"
|
||||
/>
|
||||
<div class="text-xs text-amber-800 dark:text-amber-200">
|
||||
<p class="font-bold mb-1">Dependencies Missing</p>
|
||||
<p v-if="!voicemailStatus.has_espeak">
|
||||
Voicemail requires `espeak-ng` to generate greetings. Please install it on your system.
|
||||
</p>
|
||||
<p v-if="!voicemailStatus.has_ffmpeg" :class="{ 'mt-1': !voicemailStatus.has_espeak }">
|
||||
Voicemail requires `ffmpeg` to process audio files. Please install it on your system.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="space-y-6">
|
||||
<!-- Enabled Toggle -->
|
||||
<div class="flex items-center justify-between">
|
||||
<div>
|
||||
<div class="text-sm font-semibold text-gray-900 dark:text-white">Enable Voicemail</div>
|
||||
<div class="text-xs text-gray-500 dark:text-zinc-400">
|
||||
Accept calls automatically and record messages
|
||||
</div>
|
||||
</div>
|
||||
</li>
|
||||
</ul>
|
||||
<button
|
||||
:disabled="!voicemailStatus.has_espeak || !voicemailStatus.has_ffmpeg"
|
||||
class="relative inline-flex h-6 w-11 shrink-0 cursor-pointer rounded-full border-2 border-transparent transition-colors duration-200 ease-in-out focus:outline-none disabled:opacity-50 disabled:cursor-not-allowed"
|
||||
:class="config.voicemail_enabled ? 'bg-blue-600' : 'bg-gray-200 dark:bg-zinc-700'"
|
||||
@click="
|
||||
config.voicemail_enabled = !config.voicemail_enabled;
|
||||
updateConfig({ voicemail_enabled: config.voicemail_enabled });
|
||||
"
|
||||
>
|
||||
<span
|
||||
class="pointer-events-none inline-block h-5 w-5 transform rounded-full bg-white shadow ring-0 transition duration-200 ease-in-out"
|
||||
:class="config.voicemail_enabled ? 'translate-x-5' : 'translate-x-0'"
|
||||
></span>
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<!-- Greeting Text -->
|
||||
<div class="space-y-2">
|
||||
<label class="text-xs font-bold text-gray-500 dark:text-zinc-400 uppercase tracking-tighter"
|
||||
>Greeting Message</label
|
||||
>
|
||||
<textarea
|
||||
v-model="config.voicemail_greeting"
|
||||
rows="3"
|
||||
class="block w-full rounded-lg border-0 py-2 text-gray-900 dark:text-white shadow-sm ring-1 ring-inset ring-gray-300 dark:ring-zinc-800 placeholder:text-gray-400 focus:ring-2 focus:ring-inset focus:ring-blue-600 sm:text-sm sm:leading-6 dark:bg-zinc-900"
|
||||
placeholder="Enter greeting text..."
|
||||
></textarea>
|
||||
<div class="flex justify-between items-center">
|
||||
<p class="text-[10px] text-gray-500 dark:text-zinc-500">
|
||||
This text will be converted to speech using eSpeak NG.
|
||||
</p>
|
||||
<button
|
||||
:disabled="!voicemailStatus.has_espeak || isGeneratingGreeting"
|
||||
class="text-[10px] bg-gray-100 dark:bg-zinc-800 text-gray-700 dark:text-zinc-300 px-3 py-1 rounded-full font-bold hover:bg-gray-200 dark:hover:bg-zinc-700 transition-colors disabled:opacity-50"
|
||||
@click="
|
||||
updateConfig({ voicemail_greeting: config.voicemail_greeting });
|
||||
generateGreeting();
|
||||
"
|
||||
>
|
||||
{{ isGeneratingGreeting ? "Generating..." : "Save & Generate" }}
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Delays -->
|
||||
<div class="grid grid-cols-2 gap-4">
|
||||
<div class="space-y-2">
|
||||
<label
|
||||
class="text-xs font-bold text-gray-500 dark:text-zinc-400 uppercase tracking-tighter"
|
||||
>Answer Delay (s)</label
|
||||
>
|
||||
<input
|
||||
v-model.number="config.voicemail_auto_answer_delay_seconds"
|
||||
type="number"
|
||||
min="1"
|
||||
max="120"
|
||||
class="block w-full rounded-lg border-0 py-1.5 text-gray-900 dark:text-white shadow-sm ring-1 ring-inset ring-gray-300 dark:ring-zinc-800 focus:ring-2 focus:ring-inset focus:ring-blue-600 sm:text-sm dark:bg-zinc-900"
|
||||
@change="
|
||||
updateConfig({
|
||||
voicemail_auto_answer_delay_seconds:
|
||||
config.voicemail_auto_answer_delay_seconds,
|
||||
})
|
||||
"
|
||||
/>
|
||||
</div>
|
||||
<div class="space-y-2">
|
||||
<label
|
||||
class="text-xs font-bold text-gray-500 dark:text-zinc-400 uppercase tracking-tighter"
|
||||
>Max Recording (s)</label
|
||||
>
|
||||
<input
|
||||
v-model.number="config.voicemail_max_recording_seconds"
|
||||
type="number"
|
||||
min="5"
|
||||
max="600"
|
||||
class="block w-full rounded-lg border-0 py-1.5 text-gray-900 dark:text-white shadow-sm ring-1 ring-inset ring-gray-300 dark:ring-zinc-800 focus:ring-2 focus:ring-inset focus:ring-blue-600 sm:text-sm dark:bg-zinc-900"
|
||||
@change="
|
||||
updateConfig({
|
||||
voicemail_max_recording_seconds: config.voicemail_max_recording_seconds,
|
||||
})
|
||||
"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
@@ -292,6 +571,17 @@ export default {
|
||||
isCallEnded: false,
|
||||
lastCall: null,
|
||||
endedTimeout: null,
|
||||
activeTab: "phone",
|
||||
voicemails: [],
|
||||
unreadVoicemailsCount: 0,
|
||||
voicemailStatus: {
|
||||
has_espeak: false,
|
||||
has_ffmpeg: false,
|
||||
is_recording: false,
|
||||
},
|
||||
isGeneratingGreeting: false,
|
||||
playingVoicemailId: null,
|
||||
audioPlayer: null,
|
||||
};
|
||||
},
|
||||
computed: {
|
||||
@@ -307,15 +597,19 @@ export default {
|
||||
this.getAudioProfiles();
|
||||
this.getStatus();
|
||||
this.getHistory();
|
||||
this.getVoicemails();
|
||||
this.getVoicemailStatus();
|
||||
|
||||
// poll for status
|
||||
this.statusInterval = setInterval(() => {
|
||||
this.getStatus();
|
||||
this.getVoicemailStatus();
|
||||
}, 1000);
|
||||
|
||||
// poll for history less frequently
|
||||
// poll for history/voicemails less frequently
|
||||
this.historyInterval = setInterval(() => {
|
||||
this.getHistory();
|
||||
this.getVoicemails();
|
||||
}, 10000);
|
||||
|
||||
// autofill destination hash from query string
|
||||
@@ -329,6 +623,10 @@ export default {
|
||||
if (this.statusInterval) clearInterval(this.statusInterval);
|
||||
if (this.historyInterval) clearInterval(this.historyInterval);
|
||||
if (this.endedTimeout) clearTimeout(this.endedTimeout);
|
||||
if (this.audioPlayer) {
|
||||
this.audioPlayer.pause();
|
||||
this.audioPlayer = null;
|
||||
}
|
||||
},
|
||||
methods: {
|
||||
formatDestinationHash(hash) {
|
||||
@@ -351,6 +649,15 @@ export default {
|
||||
console.log(e);
|
||||
}
|
||||
},
|
||||
async updateConfig(config) {
|
||||
try {
|
||||
await window.axios.patch("/api/v1/config", config);
|
||||
await this.getConfig();
|
||||
ToastUtils.success("Settings saved");
|
||||
} catch {
|
||||
ToastUtils.error("Failed to save settings");
|
||||
}
|
||||
},
|
||||
async getAudioProfiles() {
|
||||
try {
|
||||
const response = await window.axios.get("/api/v1/telephone/audio-profiles");
|
||||
@@ -366,12 +673,17 @@ export default {
|
||||
const oldCall = this.activeCall;
|
||||
this.activeCall = response.data.active_call;
|
||||
|
||||
if (response.data.voicemail) {
|
||||
this.unreadVoicemailsCount = response.data.voicemail.unread_count;
|
||||
}
|
||||
|
||||
// If call just ended, refresh history and show ended state
|
||||
if (oldCall != null && this.activeCall == null) {
|
||||
this.getHistory();
|
||||
this.getVoicemails();
|
||||
this.lastCall = oldCall;
|
||||
this.isCallEnded = true;
|
||||
|
||||
|
||||
if (this.endedTimeout) clearTimeout(this.endedTimeout);
|
||||
this.endedTimeout = setTimeout(() => {
|
||||
this.isCallEnded = false;
|
||||
@@ -395,6 +707,72 @@ export default {
|
||||
console.log(e);
|
||||
}
|
||||
},
|
||||
async getVoicemailStatus() {
|
||||
try {
|
||||
const response = await window.axios.get("/api/v1/telephone/voicemail/status");
|
||||
this.voicemailStatus = response.data;
|
||||
} catch (e) {
|
||||
console.log(e);
|
||||
}
|
||||
},
|
||||
async getVoicemails() {
|
||||
try {
|
||||
const response = await window.axios.get("/api/v1/telephone/voicemails");
|
||||
this.voicemails = response.data.voicemails;
|
||||
this.unreadVoicemailsCount = response.data.unread_count;
|
||||
} catch (e) {
|
||||
console.log(e);
|
||||
}
|
||||
},
|
||||
async generateGreeting() {
|
||||
this.isGeneratingGreeting = true;
|
||||
try {
|
||||
await window.axios.post("/api/v1/telephone/voicemail/generate-greeting");
|
||||
ToastUtils.success("Greeting generated successfully");
|
||||
} catch (e) {
|
||||
ToastUtils.error(e.response?.data?.message || "Failed to generate greeting");
|
||||
} finally {
|
||||
this.isGeneratingGreeting = false;
|
||||
}
|
||||
},
|
||||
async playVoicemail(voicemail) {
|
||||
if (this.playingVoicemailId === voicemail.id) {
|
||||
this.audioPlayer.pause();
|
||||
this.playingVoicemailId = null;
|
||||
return;
|
||||
}
|
||||
|
||||
if (this.audioPlayer) {
|
||||
this.audioPlayer.pause();
|
||||
}
|
||||
|
||||
this.playingVoicemailId = voicemail.id;
|
||||
this.audioPlayer = new Audio(`/api/v1/telephone/voicemails/${voicemail.id}/audio`);
|
||||
this.audioPlayer.play();
|
||||
this.audioPlayer.onended = () => {
|
||||
this.playingVoicemailId = null;
|
||||
};
|
||||
|
||||
// Mark as read
|
||||
if (!voicemail.is_read) {
|
||||
try {
|
||||
await window.axios.post(`/api/v1/telephone/voicemails/${voicemail.id}/read`);
|
||||
voicemail.is_read = 1;
|
||||
this.unreadVoicemailsCount = Math.max(0, this.unreadVoicemailsCount - 1);
|
||||
} catch (e) {
|
||||
console.error(e);
|
||||
}
|
||||
}
|
||||
},
|
||||
async deleteVoicemail(voicemailId) {
|
||||
try {
|
||||
await window.axios.delete(`/api/v1/telephone/voicemails/${voicemailId}`);
|
||||
this.getVoicemails();
|
||||
ToastUtils.success("Voicemail deleted");
|
||||
} catch {
|
||||
ToastUtils.error("Failed to delete voicemail");
|
||||
}
|
||||
},
|
||||
async call(identityHash) {
|
||||
if (!identityHash) {
|
||||
ToastUtils.error("Enter an identity hash to call");
|
||||
|
||||
@@ -388,7 +388,9 @@
|
||||
|
||||
<div class="border-t border-gray-100 dark:border-zinc-800 pt-4 space-y-4">
|
||||
<div>
|
||||
<label class="block text-xs font-bold text-gray-500 uppercase mb-1">MBTiles Storage Directory</label>
|
||||
<label class="block text-xs font-bold text-gray-500 uppercase mb-1"
|
||||
>MBTiles Storage Directory</label
|
||||
>
|
||||
<input
|
||||
v-model="mbtilesDir"
|
||||
type="text"
|
||||
@@ -407,8 +409,14 @@
|
||||
class="flex items-center justify-between p-2 rounded-lg bg-gray-50 dark:bg-zinc-800/50 border border-gray-200 dark:border-zinc-800"
|
||||
>
|
||||
<div class="flex flex-col min-w-0 flex-1 mr-2">
|
||||
<span class="text-xs font-medium text-gray-900 dark:text-zinc-100 truncate" :title="file.name">{{ file.name }}</span>
|
||||
<span class="text-[10px] text-gray-500">{{ (file.size / 1024 / 1024).toFixed(1) }} MB</span>
|
||||
<span
|
||||
class="text-xs font-medium text-gray-900 dark:text-zinc-100 truncate"
|
||||
:title="file.name"
|
||||
>{{ file.name }}</span
|
||||
>
|
||||
<span class="text-[10px] text-gray-500"
|
||||
>{{ (file.size / 1024 / 1024).toFixed(1) }} MB</span
|
||||
>
|
||||
</div>
|
||||
<div class="flex items-center space-x-1">
|
||||
<button
|
||||
@@ -648,7 +656,8 @@ export default {
|
||||
const response = await window.axios.get("/api/v1/config");
|
||||
this.config = response.data.config;
|
||||
this.offlineEnabled = this.config.map_offline_enabled;
|
||||
this.cachingEnabled = this.config.map_tile_cache_enabled !== undefined ? this.config.map_tile_cache_enabled : true;
|
||||
this.cachingEnabled =
|
||||
this.config.map_tile_cache_enabled !== undefined ? this.config.map_tile_cache_enabled : true;
|
||||
this.mbtilesDir = this.config.map_mbtiles_dir || "";
|
||||
if (this.config.map_tile_server_url) {
|
||||
this.tileServerUrl = this.config.map_tile_server_url;
|
||||
@@ -674,7 +683,7 @@ export default {
|
||||
await this.checkOfflineMap();
|
||||
await this.loadMBTilesList();
|
||||
ToastUtils.success("Map source updated");
|
||||
} catch (e) {
|
||||
} catch {
|
||||
ToastUtils.error("Failed to set active map");
|
||||
}
|
||||
},
|
||||
@@ -687,7 +696,7 @@ export default {
|
||||
await this.checkOfflineMap();
|
||||
}
|
||||
ToastUtils.success("File deleted");
|
||||
} catch (e) {
|
||||
} catch {
|
||||
ToastUtils.error("Failed to delete file");
|
||||
}
|
||||
},
|
||||
@@ -698,7 +707,7 @@ export default {
|
||||
});
|
||||
ToastUtils.success("Storage directory saved");
|
||||
this.loadMBTilesList();
|
||||
} catch (e) {
|
||||
} catch {
|
||||
ToastUtils.error("Failed to save directory");
|
||||
}
|
||||
},
|
||||
@@ -800,7 +809,7 @@ export default {
|
||||
const customTileUrl = this.tileServerUrl || defaultTileUrl;
|
||||
const isCustomLocal = this.isLocalUrl(customTileUrl);
|
||||
const isDefaultOnline = this.isDefaultOnlineUrl(customTileUrl, "tile");
|
||||
|
||||
|
||||
let tileUrl;
|
||||
if (isOffline) {
|
||||
if (isCustomLocal || (!isDefaultOnline && customTileUrl !== defaultTileUrl)) {
|
||||
@@ -811,14 +820,14 @@ export default {
|
||||
} else {
|
||||
tileUrl = customTileUrl;
|
||||
}
|
||||
|
||||
|
||||
const source = new XYZ({
|
||||
url: tileUrl,
|
||||
crossOrigin: "anonymous",
|
||||
});
|
||||
|
||||
const originalTileLoadFunction = source.getTileLoadFunction();
|
||||
|
||||
|
||||
if (isOffline) {
|
||||
source.setTileLoadFunction(async (tile, src) => {
|
||||
try {
|
||||
@@ -832,7 +841,7 @@ export default {
|
||||
}
|
||||
const blob = await response.blob();
|
||||
tile.getImage().src = URL.createObjectURL(blob);
|
||||
} catch (error) {
|
||||
} catch {
|
||||
tile.setState(3);
|
||||
}
|
||||
});
|
||||
@@ -902,15 +911,15 @@ export default {
|
||||
if (enabled) {
|
||||
const defaultTileUrl = "https://tile.openstreetmap.org/{z}/{x}/{y}.png";
|
||||
const defaultNominatimUrl = "https://nominatim.openstreetmap.org";
|
||||
|
||||
|
||||
const isCustomTileLocal = this.isLocalUrl(this.tileServerUrl);
|
||||
const isDefaultTileOnline = this.isDefaultOnlineUrl(this.tileServerUrl, "tile");
|
||||
const hasCustomTile = this.tileServerUrl && this.tileServerUrl !== defaultTileUrl;
|
||||
|
||||
|
||||
const isCustomNominatimLocal = this.isLocalUrl(this.nominatimApiUrl);
|
||||
const isDefaultNominatimOnline = this.isDefaultOnlineUrl(this.nominatimApiUrl, "nominatim");
|
||||
const hasCustomNominatim = this.nominatimApiUrl && this.nominatimApiUrl !== defaultNominatimUrl;
|
||||
|
||||
|
||||
if (hasCustomTile && !isCustomTileLocal && !isDefaultTileOnline) {
|
||||
const isAccessible = await this.checkApiConnection(this.tileServerUrl);
|
||||
if (!isAccessible) {
|
||||
@@ -918,7 +927,7 @@ export default {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (hasCustomNominatim && !isCustomNominatimLocal && !isDefaultNominatimOnline) {
|
||||
const isAccessible = await this.checkApiConnection(this.nominatimApiUrl);
|
||||
if (!isAccessible) {
|
||||
@@ -1199,7 +1208,7 @@ export default {
|
||||
const defaultNominatimUrl = "https://nominatim.openstreetmap.org";
|
||||
const isCustomLocal = this.isLocalUrl(this.nominatimApiUrl);
|
||||
const isDefaultOnline = this.isDefaultOnlineUrl(this.nominatimApiUrl, "nominatim");
|
||||
|
||||
|
||||
if (this.offlineEnabled) {
|
||||
if (isCustomLocal || (!isDefaultOnline && this.nominatimApiUrl !== defaultNominatimUrl)) {
|
||||
const isAccessible = await this.checkApiConnection(this.nominatimApiUrl);
|
||||
|
||||
@@ -1,6 +1,6 @@
@font-face {
    font-family: 'Roboto Mono Nerd Font';
    src: url('./RobotoMonoNerdFont-Regular.ttf') format('truetype');
    font-family: "Roboto Mono Nerd Font";
    src: url("./RobotoMonoNerdFont-Regular.ttf") format("truetype");
    font-weight: 400;
    font-style: normal;
}

@@ -1,33 +1,35 @@
|
||||
<!DOCTYPE html>
|
||||
<!doctype html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
|
||||
<meta charset="UTF-8">
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1">
|
||||
<link rel="manifest" href="/manifest.json">
|
||||
<link rel="icon" type="image/png" href="favicons/favicon-512x512.png"/>
|
||||
<title>Reticulum MeshChat</title>
|
||||
|
||||
</head>
|
||||
<body class="bg-gray-100">
|
||||
<div id="app"></div>
|
||||
<script type="module" src="main.js"></script>
|
||||
<script>
|
||||
// install service worker
|
||||
if('serviceWorker' in navigator){
|
||||
navigator.serviceWorker.register('/service-worker.js').catch((error) => {
|
||||
// Silently handle SSL certificate errors and other registration failures
|
||||
// This is common in development with self-signed certificates
|
||||
const errorMessage = error.message || '';
|
||||
const errorName = error.name || '';
|
||||
if (errorName === 'SecurityError' || errorMessage.includes('SSL certificate') || errorMessage.includes('certificate')) {
|
||||
return;
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1" />
|
||||
<link rel="manifest" href="/manifest.json" />
|
||||
<link rel="icon" type="image/png" href="favicons/favicon-512x512.png" />
|
||||
<title>Reticulum MeshChat</title>
|
||||
</head>
|
||||
<body class="bg-gray-100">
|
||||
<div id="app"></div>
|
||||
<script type="module" src="main.js"></script>
|
||||
<script>
|
||||
// install service worker
|
||||
if ("serviceWorker" in navigator) {
|
||||
navigator.serviceWorker.register("/service-worker.js").catch((error) => {
|
||||
// Silently handle SSL certificate errors and other registration failures
|
||||
// This is common in development with self-signed certificates
|
||||
const errorMessage = error.message || "";
|
||||
const errorName = error.name || "";
|
||||
if (
|
||||
errorName === "SecurityError" ||
|
||||
errorMessage.includes("SSL certificate") ||
|
||||
errorMessage.includes("certificate")
|
||||
) {
|
||||
return;
|
||||
}
|
||||
// Log other errors for debugging but don't throw
|
||||
console.debug("Service worker registration failed:", error);
|
||||
});
|
||||
}
|
||||
// Log other errors for debugging but don't throw
|
||||
console.debug('Service worker registration failed:', error);
|
||||
});
|
||||
}
|
||||
</script>
|
||||
</body>
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
@@ -79,7 +79,7 @@ class Utils {

    static formatTimeAgo(datetimeString) {
        if (!datetimeString) return "unknown";

        // ensure UTC if no timezone is provided
        let dateString = datetimeString;
        if (typeof dateString === "string" && !dateString.includes("Z") && !dateString.includes("+")) {
@@ -87,7 +87,7 @@ class Utils {
            // Replace space with T and append Z for ISO format
            dateString = dateString.replace(" ", "T") + "Z";
        }

        const millisecondsAgo = Date.now() - new Date(dateString).getTime();
        const secondsAgo = Math.round(millisecondsAgo / 1000);
        return this.formatSeconds(secondsAgo);

File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,6 +1,5 @@
"""
Auto-generated helper so Python tooling and the Electron build
"""Auto-generated helper so Python tooling and the Electron build
share the same version string.
"""

__version__ = '2.50.0'
__version__ = "2.50.0"

package.json
@@ -1,155 +1,155 @@
|
||||
{
|
||||
"name": "reticulum-meshchatx",
|
||||
"version": "2.50.0",
|
||||
"description": "A simple mesh network communications app powered by the Reticulum Network Stack",
|
||||
"author": "Sudo-Ivan",
|
||||
"main": "electron/main.js",
|
||||
"scripts": {
|
||||
"dev": "vite dev",
|
||||
"watch": "pnpm run build-frontend -- --watch",
|
||||
"build-frontend": "vite build",
|
||||
"build-backend": "node scripts/build-backend.js",
|
||||
"build": "pnpm run build-frontend && pnpm run build-backend",
|
||||
"lint": "eslint .",
|
||||
"lint:fix": "eslint . --fix",
|
||||
"format": "prettier --write .",
|
||||
"electron-postinstall": "electron-builder install-app-deps",
|
||||
"electron": "pnpm run electron-postinstall && pnpm run build && electron .",
|
||||
"dist": "pnpm run electron-postinstall && pnpm run build && electron-builder --publish=never",
|
||||
"dist-prebuilt": "pnpm run electron-postinstall && pnpm run build-backend && electron-builder --publish=never",
|
||||
"dist:mac-arm64": "pnpm run electron-postinstall && pnpm run build && electron-builder --mac --arm64 --publish=never",
|
||||
"dist:mac-universal": "pnpm run electron-postinstall && pnpm run build && electron-builder --mac --universal --publish=never"
|
||||
},
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=18"
|
||||
},
|
||||
"packageManager": "pnpm@10.0.0",
|
||||
"devDependencies": {
|
||||
"@eslint/js": "^9.39.2",
|
||||
"@rushstack/eslint-patch": "^1.15.0",
|
||||
"@vue/eslint-config-prettier": "^10.2.0",
|
||||
"electron": "^39.2.7",
|
||||
"electron-builder": "^24.13.3",
|
||||
"eslint": "^9.39.2",
|
||||
"eslint-config-prettier": "^10.1.8",
|
||||
"eslint-plugin-prettier": "^5.5.4",
|
||||
"eslint-plugin-security": "^3.0.1",
|
||||
"eslint-plugin-vue": "^10.6.2",
|
||||
"globals": "^16.5.0",
|
||||
"prettier": "^3.7.4",
|
||||
"terser": "^5.44.1"
|
||||
},
|
||||
"build": {
|
||||
"appId": "com.sudoivan.reticulummeshchat",
|
||||
"productName": "Reticulum MeshChatX",
|
||||
"asar": true,
|
||||
"asarUnpack": [
|
||||
"build/exe/**/*"
|
||||
],
|
||||
"files": [
|
||||
"electron/**/*"
|
||||
],
|
||||
"directories": {
|
||||
"buildResources": "electron/build"
|
||||
"name": "reticulum-meshchatx",
|
||||
"version": "2.50.0",
|
||||
"description": "A simple mesh network communications app powered by the Reticulum Network Stack",
|
||||
"author": "Sudo-Ivan",
|
||||
"main": "electron/main.js",
|
||||
"scripts": {
|
||||
"dev": "vite dev",
|
||||
"watch": "pnpm run build-frontend -- --watch",
|
||||
"build-frontend": "vite build",
|
||||
"build-backend": "node scripts/build-backend.js",
|
||||
"build": "pnpm run build-frontend && pnpm run build-backend",
|
||||
"lint": "eslint .",
|
||||
"lint:fix": "eslint . --fix",
|
||||
"format": "prettier --write .",
|
||||
"electron-postinstall": "electron-builder install-app-deps",
|
||||
"electron": "pnpm run electron-postinstall && pnpm run build && electron .",
|
||||
"dist": "pnpm run electron-postinstall && pnpm run build && electron-builder --publish=never",
|
||||
"dist-prebuilt": "pnpm run electron-postinstall && pnpm run build-backend && electron-builder --publish=never",
|
||||
"dist:mac-arm64": "pnpm run electron-postinstall && pnpm run build && electron-builder --mac --arm64 --publish=never",
|
||||
"dist:mac-universal": "pnpm run electron-postinstall && pnpm run build && electron-builder --mac --universal --publish=never"
|
||||
},
|
||||
"mac": {
|
||||
"target": {
|
||||
"target": "dmg",
|
||||
"arch": [
|
||||
"universal"
|
||||
]
|
||||
},
|
||||
"identity": null,
|
||||
"artifactName": "ReticulumMeshChat-v${version}-mac-${arch}.${ext}",
|
||||
"x64ArchFiles": "Contents/Resources/app/electron/build/exe/**",
|
||||
"extendInfo": {
|
||||
"NSMicrophoneUsageDescription": "Microphone access is only needed for Audio Calls",
|
||||
"com.apple.security.device.audio-input": true
|
||||
},
|
||||
"extraFiles": [
|
||||
{
|
||||
"from": "build/exe",
|
||||
"to": "Resources/app/electron/build/exe",
|
||||
"filter": [
|
||||
"**/*"
|
||||
]
|
||||
}
|
||||
]
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=18"
|
||||
},
|
||||
"win": {
|
||||
"artifactName": "ReticulumMeshChat-v${version}-${os}.${ext}",
|
||||
"target": [
|
||||
{
|
||||
"target": "portable"
|
||||
"packageManager": "pnpm@10.0.0",
|
||||
"devDependencies": {
|
||||
"@eslint/js": "^9.39.2",
|
||||
"@rushstack/eslint-patch": "^1.15.0",
|
||||
"@vue/eslint-config-prettier": "^10.2.0",
|
||||
"electron": "^39.2.7",
|
||||
"electron-builder": "^24.13.3",
|
||||
"eslint": "^9.39.2",
|
||||
"eslint-config-prettier": "^10.1.8",
|
||||
"eslint-plugin-prettier": "^5.5.4",
|
||||
"eslint-plugin-security": "^3.0.1",
|
||||
"eslint-plugin-vue": "^10.6.2",
|
||||
"globals": "^16.5.0",
|
||||
"prettier": "^3.7.4",
|
||||
"terser": "^5.44.1"
|
||||
},
|
||||
"build": {
|
||||
"appId": "com.sudoivan.reticulummeshchat",
|
||||
"productName": "Reticulum MeshChatX",
|
||||
"asar": true,
|
||||
"asarUnpack": [
|
||||
"build/exe/**/*"
|
||||
],
|
||||
"files": [
|
||||
"electron/**/*"
|
||||
],
|
||||
"directories": {
|
||||
"buildResources": "electron/build"
|
||||
},
|
||||
{
|
||||
"target": "nsis"
|
||||
"mac": {
|
||||
"target": {
|
||||
"target": "dmg",
|
||||
"arch": [
|
||||
"universal"
|
||||
]
|
||||
},
|
||||
"identity": null,
|
||||
"artifactName": "ReticulumMeshChat-v${version}-mac-${arch}.${ext}",
|
||||
"x64ArchFiles": "Contents/Resources/app/electron/build/exe/**",
|
||||
"extendInfo": {
|
||||
"NSMicrophoneUsageDescription": "Microphone access is only needed for Audio Calls",
|
||||
"com.apple.security.device.audio-input": true
|
||||
},
|
||||
"extraFiles": [
|
||||
{
|
||||
"from": "build/exe",
|
||||
"to": "Resources/app/electron/build/exe",
|
||||
"filter": [
|
||||
"**/*"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"win": {
|
||||
"artifactName": "ReticulumMeshChat-v${version}-${os}.${ext}",
|
||||
"target": [
|
||||
{
|
||||
"target": "portable"
|
||||
},
|
||||
{
|
||||
"target": "nsis"
|
||||
}
|
||||
],
|
||||
"extraFiles": [
|
||||
{
|
||||
"from": "build/exe",
|
||||
"to": "Resources/app/electron/build/exe",
|
||||
"filter": [
|
||||
"**/*"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"linux": {
|
||||
"artifactName": "ReticulumMeshChat-v${version}-${os}.${ext}",
|
||||
"target": [
|
||||
"AppImage",
|
||||
"deb"
|
||||
],
|
||||
"maintainer": "Sudo-Ivan",
|
||||
"category": "Network",
|
||||
"extraFiles": [
|
||||
{
|
||||
"from": "build/exe",
|
||||
"to": "resources/app/electron/build/exe",
|
||||
"filter": [
|
||||
"**/*"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"dmg": {
|
||||
"writeUpdateInfo": false
|
||||
},
|
||||
"portable": {
|
||||
"artifactName": "ReticulumMeshChat-v${version}-${os}-portable.${ext}"
|
||||
},
|
||||
"nsis": {
|
||||
"artifactName": "ReticulumMeshChat-v${version}-${os}-installer.${ext}",
|
||||
"oneClick": false,
|
||||
"allowToChangeInstallationDirectory": true
|
||||
}
|
||||
],
|
||||
"extraFiles": [
|
||||
{
|
||||
"from": "build/exe",
|
||||
"to": "Resources/app/electron/build/exe",
|
||||
"filter": [
|
||||
"**/*"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"linux": {
|
||||
"artifactName": "ReticulumMeshChat-v${version}-${os}.${ext}",
|
||||
"target": [
|
||||
"AppImage",
|
||||
"deb"
|
||||
],
|
||||
"maintainer": "Sudo-Ivan",
|
||||
"category": "Network",
|
||||
"extraFiles": [
|
||||
{
|
||||
"from": "build/exe",
|
||||
"to": "resources/app/electron/build/exe",
|
||||
"filter": [
|
||||
"**/*"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"dmg": {
|
||||
"writeUpdateInfo": false
|
||||
},
|
||||
"portable": {
|
||||
"artifactName": "ReticulumMeshChat-v${version}-${os}-portable.${ext}"
|
||||
},
|
||||
"nsis": {
|
||||
"artifactName": "ReticulumMeshChat-v${version}-${os}-installer.${ext}",
|
||||
"oneClick": false,
|
||||
"allowToChangeInstallationDirectory": true
|
||||
"dependencies": {
|
||||
"@mdi/js": "^7.4.47",
|
||||
"@tailwindcss/forms": "^0.5.11",
|
||||
"@vitejs/plugin-vue": "^5.2.4",
|
||||
"autoprefixer": "^10.4.23",
|
||||
"axios": "^1.13.2",
|
||||
"click-outside-vue3": "^4.0.1",
|
||||
"compressorjs": "^1.2.1",
|
||||
"dayjs": "^1.11.19",
|
||||
"electron-prompt": "^1.7.0",
|
||||
"micron-parser": "^1.0.2",
|
||||
"mitt": "^3.0.1",
|
||||
"ol": "^10.7.0",
|
||||
"postcss": "^8.5.6",
|
||||
"protobufjs": "^7.5.4",
|
||||
"tailwindcss": "^3.4.19",
|
||||
"vis-data": "^7.1.10",
|
||||
"vis-network": "^9.1.13",
|
||||
"vite": "^6.4.1",
|
||||
"vite-plugin-vuetify": "^2.1.2",
|
||||
"vue": "^3.5.26",
|
||||
"vue-i18n": "^11.2.8",
|
||||
"vue-router": "^4.6.4",
|
||||
"vuetify": "^3.11.6"
|
||||
}
|
||||
},
|
||||
"dependencies": {
|
||||
"@mdi/js": "^7.4.47",
|
||||
"@tailwindcss/forms": "^0.5.11",
|
||||
"@vitejs/plugin-vue": "^5.2.4",
|
||||
"autoprefixer": "^10.4.23",
|
||||
"axios": "^1.13.2",
|
||||
"click-outside-vue3": "^4.0.1",
|
||||
"compressorjs": "^1.2.1",
|
||||
"dayjs": "^1.11.19",
|
||||
"electron-prompt": "^1.7.0",
|
||||
"micron-parser": "^1.0.2",
|
||||
"mitt": "^3.0.1",
|
||||
"ol": "^10.7.0",
|
||||
"postcss": "^8.5.6",
|
||||
"protobufjs": "^7.5.4",
|
||||
"tailwindcss": "^3.4.19",
|
||||
"vis-data": "^7.1.10",
|
||||
"vis-network": "^9.1.13",
|
||||
"vite": "^6.4.1",
|
||||
"vite-plugin-vuetify": "^2.1.2",
|
||||
"vue": "^3.5.26",
|
||||
"vue-i18n": "^11.2.8",
|
||||
"vue-router": "^4.6.4",
|
||||
"vuetify": "^3.11.6"
|
||||
}
|
||||
}
|
||||
|
||||
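The manifest above and the Python helper earlier in the diff both carry the version string 2.50.0. As a small sketch, a cross-check like the one below could guard against the two drifting apart; the module path is hypothetical and mirrors the assumption made earlier.

import json
import re
from pathlib import Path

# Read the version electron-builder and the frontend see.
package_version = json.loads(Path("package.json").read_text(encoding="utf-8"))["version"]

# Hypothetical path to the auto-generated module shown earlier in this diff.
module_text = Path("meshchat/_version.py").read_text(encoding="utf-8")
match = re.search(r'__version__\s*=\s*"([^"]+)"', module_text)

if match is None or match.group(1) != package_version:
    found = match.group(1) if match else None
    raise SystemExit(f"version mismatch: package.json has {package_version}, module has {found}")
print(f"versions agree: {package_version}")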
14 poetry.lock (generated)
@@ -1375,6 +1375,18 @@ files = [
[package.extras]
test = ["importlib_metadata (>=2.0)", "pytest (>=6.0)"]

[[package]]
name = "ply"
version = "3.11"
description = "Python Lex & Yacc"
optional = false
python-versions = "*"
groups = ["main"]
files = [
    {file = "ply-3.11-py2.py3-none-any.whl", hash = "sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce"},
    {file = "ply-3.11.tar.gz", hash = "sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3"},
]

[[package]]
name = "propcache"
version = "0.4.1"

@@ -1975,4 +1987,4 @@ propcache = ">=0.2.1"
[metadata]
lock-version = "2.1"
python-versions = ">=3.11"
content-hash = "fc66bbe16d88af079264f801bc18fd10385c0e6af437fdf0e5ab960349971b21"
content-hash = "6dae87a310bad0bec81b8eea974fd5cb4b0c40dd2873d09ff7f4659a13f63d7e"
@@ -35,6 +35,7 @@ dependencies = [
    "requests (>=2.32.5,<3.0.0)",
    "lxst (>=0.4.5,<0.5.0)",
    "audioop-lts (>=0.2.2); python_version>='3.13'",
    "ply (>=3.11,<4.0)",
]

[project.scripts]
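The dependency added above, and pinned in the poetry.lock hunk, is ply, a pure-Python implementation of Lex and Yacc. The diff does not show where the project uses it, so the following is only a generic, minimal tokenizer sketch of the API ply provides; the token names and sample input are invented for illustration.

import ply.lex as lex

# ply builds the lexer from these module-level names.
tokens = ("NUMBER", "PLUS", "MINUS")

t_PLUS = r"\+"
t_MINUS = r"-"
t_ignore = " \t"  # whitespace that is silently skipped

def t_NUMBER(t):
    r"\d+"
    t.value = int(t.value)
    return t

def t_error(t):
    print(f"illegal character {t.value[0]!r}")
    t.lexer.skip(1)

lexer = lex.lex()
lexer.input("12 + 7 - 3")
for tok in lexer:
    print(tok.type, tok.value)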