diff --git a/.gitea/workflows/bearer-pr.yml b/.gitea/workflows/bearer-pr.yml index f8fe964..5456998 100644 --- a/.gitea/workflows/bearer-pr.yml +++ b/.gitea/workflows/bearer-pr.yml @@ -1,20 +1,20 @@ name: Bearer PR Check on: - pull_request: - types: [opened, synchronize, reopened] + pull_request: + types: [opened, synchronize, reopened] permissions: - security-events: write + security-events: write jobs: - rule_check: - runs-on: ubuntu-latest + rule_check: + runs-on: ubuntu-latest - steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + steps: + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - - name: Bearer - uses: bearer/bearer-action@828eeb928ce2f4a7ca5ed57fb8b59508cb8c79bc # v2 - with: - diff: true + - name: Bearer + uses: bearer/bearer-action@828eeb928ce2f4a7ca5ed57fb8b59508cb8c79bc # v2 + with: + diff: true diff --git a/.gitea/workflows/build.yml b/.gitea/workflows/build.yml index 6cdbc4a..af825b5 100644 --- a/.gitea/workflows/build.yml +++ b/.gitea/workflows/build.yml @@ -1,343 +1,343 @@ name: Build and Release on: - push: - tags: - - "*" - workflow_dispatch: - inputs: - build_windows: - description: 'Build Windows' - required: false - default: 'true' - type: boolean - build_mac: - description: 'Build macOS' - required: false - default: 'true' - type: boolean - build_linux: - description: 'Build Linux' - required: false - default: 'true' - type: boolean - build_docker: - description: 'Build Docker' - required: false - default: 'true' - type: boolean + push: + tags: + - "*" + workflow_dispatch: + inputs: + build_windows: + description: "Build Windows" + required: false + default: "true" + type: boolean + build_mac: + description: "Build macOS" + required: false + default: "true" + type: boolean + build_linux: + description: "Build Linux" + required: false + default: "true" + type: boolean + build_docker: + description: "Build Docker" + required: false + default: "true" + type: boolean permissions: - contents: read + contents: read jobs: - build_frontend: - runs-on: ubuntu-latest - permissions: - contents: read - steps: - - name: Clone Repo - uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e # v1 + build_frontend: + runs-on: ubuntu-latest + permissions: + contents: read + steps: + - name: Clone Repo + uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e # v1 - - name: Install NodeJS - uses: actions/setup-node@f1f314fca9dfce2769ece7d933488f076716723e # v1 - with: - node-version: 22 + - name: Install NodeJS + uses: actions/setup-node@f1f314fca9dfce2769ece7d933488f076716723e # v1 + with: + node-version: 22 - - name: Install Python - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5 - with: - python-version: "3.12" + - name: Install Python + uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5 + with: + python-version: "3.12" - - name: Sync versions - run: python scripts/sync_version.py + - name: Sync versions + run: python scripts/sync_version.py - - name: Install pnpm - uses: pnpm/action-setup@v4 - with: - version: 9 + - name: Install pnpm + uses: pnpm/action-setup@v4 + with: + version: 9 - - name: Install NodeJS Deps - run: pnpm install + - name: Install NodeJS Deps + run: pnpm install - - name: Build Frontend - run: pnpm run build-frontend + - name: Build Frontend + run: pnpm run build-frontend - - name: Upload frontend artifact - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4 - with: - name: frontend-build - path: meshchatx/public 
- if-no-files-found: error + - name: Upload frontend artifact + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4 + with: + name: frontend-build + path: meshchatx/public + if-no-files-found: error - build_desktop: - name: Build Desktop (${{ matrix.name }}) - needs: build_frontend - runs-on: ${{ matrix.os }} - strategy: - fail-fast: false - matrix: - include: - - name: windows - os: windows-latest - node: 22 - python: "3.13" - release_artifacts: "dist/*-win-installer.exe,dist/*-win-portable.exe" - build_input: build_windows - dist_script: dist-prebuilt - variant: standard - electron_version: "39.2.4" - - name: mac - os: macos-14 - node: 22 - python: "3.13" - release_artifacts: "dist/*-mac-*.dmg" - build_input: build_mac - dist_script: dist:mac-universal - variant: standard - electron_version: "39.2.4" - - name: linux - os: ubuntu-latest - node: 22 - python: "3.13" - release_artifacts: "dist/*-linux.AppImage,dist/*-linux.deb,python-dist/*.whl" - build_input: build_linux - dist_script: dist-prebuilt - variant: standard - electron_version: "39.2.4" - - name: windows-legacy - os: windows-latest - node: 18 - python: "3.11" - release_artifacts: "dist/*-win-installer*.exe,dist/*-win-portable*.exe" - build_input: build_windows - dist_script: dist-prebuilt - variant: legacy - electron_version: "30.0.8" - - name: linux-legacy - os: ubuntu-latest - node: 18 - python: "3.11" - release_artifacts: "dist/*-linux*.AppImage,dist/*-linux*.deb,python-dist/*.whl" - build_input: build_linux - dist_script: dist-prebuilt - variant: legacy - electron_version: "30.0.8" - permissions: - contents: write - steps: - - name: Clone Repo - if: | - github.event_name == 'push' || - (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true) - uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e # v1 + build_desktop: + name: Build Desktop (${{ matrix.name }}) + needs: build_frontend + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + include: + - name: windows + os: windows-latest + node: 22 + python: "3.13" + release_artifacts: "dist/*-win-installer.exe,dist/*-win-portable.exe" + build_input: build_windows + dist_script: dist-prebuilt + variant: standard + electron_version: "39.2.4" + - name: mac + os: macos-14 + node: 22 + python: "3.13" + release_artifacts: "dist/*-mac-*.dmg" + build_input: build_mac + dist_script: dist:mac-universal + variant: standard + electron_version: "39.2.4" + - name: linux + os: ubuntu-latest + node: 22 + python: "3.13" + release_artifacts: "dist/*-linux.AppImage,dist/*-linux.deb,python-dist/*.whl" + build_input: build_linux + dist_script: dist-prebuilt + variant: standard + electron_version: "39.2.4" + - name: windows-legacy + os: windows-latest + node: 18 + python: "3.11" + release_artifacts: "dist/*-win-installer*.exe,dist/*-win-portable*.exe" + build_input: build_windows + dist_script: dist-prebuilt + variant: legacy + electron_version: "30.0.8" + - name: linux-legacy + os: ubuntu-latest + node: 18 + python: "3.11" + release_artifacts: "dist/*-linux*.AppImage,dist/*-linux*.deb,python-dist/*.whl" + build_input: build_linux + dist_script: dist-prebuilt + variant: legacy + electron_version: "30.0.8" + permissions: + contents: write + steps: + - name: Clone Repo + if: | + github.event_name == 'push' || + (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true) + uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e # v1 - - name: Set legacy Electron version - if: | - matrix.variant == 
'legacy' && - (github.event_name == 'push' || - (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)) - shell: bash - run: | - node -e "const fs=require('fs');const pkg=require('./package.json');pkg.devDependencies.electron='${{ matrix.electron_version }}';fs.writeFileSync('package.json', JSON.stringify(pkg,null,2));" - if [ -f pnpm-lock.yaml ]; then rm pnpm-lock.yaml; fi + - name: Set legacy Electron version + if: | + matrix.variant == 'legacy' && + (github.event_name == 'push' || + (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)) + shell: bash + run: | + node -e "const fs=require('fs');const pkg=require('./package.json');pkg.devDependencies.electron='${{ matrix.electron_version }}';fs.writeFileSync('package.json', JSON.stringify(pkg,null,2));" + if [ -f pnpm-lock.yaml ]; then rm pnpm-lock.yaml; fi - - name: Install NodeJS - if: | - github.event_name == 'push' || - (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true) - uses: actions/setup-node@f1f314fca9dfce2769ece7d933488f076716723e # v1 - with: - node-version: ${{ matrix.node }} + - name: Install NodeJS + if: | + github.event_name == 'push' || + (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true) + uses: actions/setup-node@f1f314fca9dfce2769ece7d933488f076716723e # v1 + with: + node-version: ${{ matrix.node }} - - name: Install Python - if: | - github.event_name == 'push' || - (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true) - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5 - with: - python-version: ${{ matrix.python }} + - name: Install Python + if: | + github.event_name == 'push' || + (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true) + uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5 + with: + python-version: ${{ matrix.python }} - - name: Install Poetry - if: | - github.event_name == 'push' || - (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true) - run: python -m pip install --upgrade pip poetry + - name: Install Poetry + if: | + github.event_name == 'push' || + (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true) + run: python -m pip install --upgrade pip poetry - - name: Sync versions - if: | - github.event_name == 'push' || - (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true) - run: python scripts/sync_version.py + - name: Sync versions + if: | + github.event_name == 'push' || + (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true) + run: python scripts/sync_version.py - - name: Install Python Deps - if: | - github.event_name == 'push' || - (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true) - run: python -m poetry install + - name: Install Python Deps + if: | + github.event_name == 'push' || + (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true) + run: python -m poetry install - - name: Install pnpm - if: | - github.event_name == 'push' || - (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true) - uses: pnpm/action-setup@v4 - with: - version: 9 + - name: Install pnpm + if: | + github.event_name == 'push' || + (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true) + uses: pnpm/action-setup@v4 + with: + version: 9 - - name: Install NodeJS Deps - if: | - github.event_name == 'push' || - 
(github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true) - run: pnpm install + - name: Install NodeJS Deps + if: | + github.event_name == 'push' || + (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true) + run: pnpm install - - name: Prepare frontend directory - if: | - github.event_name == 'push' || - (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true) - run: python scripts/prepare_frontend_dir.py + - name: Prepare frontend directory + if: | + github.event_name == 'push' || + (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true) + run: python scripts/prepare_frontend_dir.py - - name: Download frontend artifact - if: | - github.event_name == 'push' || - (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true) - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4 - with: - name: frontend-build - path: meshchatx/public + - name: Download frontend artifact + if: | + github.event_name == 'push' || + (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true) + uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4 + with: + name: frontend-build + path: meshchatx/public - - name: Install patchelf - if: | - startsWith(matrix.name, 'linux') && - (github.event_name == 'push' || - (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)) - run: sudo apt-get update && sudo apt-get install -y patchelf + - name: Install patchelf + if: | + startsWith(matrix.name, 'linux') && + (github.event_name == 'push' || + (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)) + run: sudo apt-get update && sudo apt-get install -y patchelf - - name: Build Python wheel - if: | - startsWith(matrix.name, 'linux') && - (github.event_name == 'push' || - (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)) - run: | - python -m poetry build -f wheel - mkdir -p python-dist - mv dist/*.whl python-dist/ - rm -rf dist + - name: Build Python wheel + if: | + startsWith(matrix.name, 'linux') && + (github.event_name == 'push' || + (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)) + run: | + python -m poetry build -f wheel + mkdir -p python-dist + mv dist/*.whl python-dist/ + rm -rf dist - - name: Build Electron App (Universal) - if: | - github.event_name == 'push' || - (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true) - run: pnpm run ${{ matrix.dist_script }} + - name: Build Electron App (Universal) + if: | + github.event_name == 'push' || + (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true) + run: pnpm run ${{ matrix.dist_script }} - - name: Rename artifacts for legacy build - if: | - matrix.variant == 'legacy' && - (github.event_name == 'push' || - (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)) - run: ./scripts/rename_legacy_artifacts.sh + - name: Rename artifacts for legacy build + if: | + matrix.variant == 'legacy' && + (github.event_name == 'push' || + (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true)) + run: ./scripts/rename_legacy_artifacts.sh - - name: Upload build artifacts - if: | - github.event_name == 'push' || - (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true) - uses: actions/upload-artifact@v4 - with: - name: build-${{ matrix.name }} - path: | - 
dist/*-win-installer*.exe - dist/*-win-portable*.exe - dist/*-mac-*.dmg - dist/*-linux*.AppImage - dist/*-linux*.deb - python-dist/*.whl - if-no-files-found: ignore + - name: Upload build artifacts + if: | + github.event_name == 'push' || + (github.event_name == 'workflow_dispatch' && inputs[matrix.build_input] == true) + uses: actions/upload-artifact@v4 + with: + name: build-${{ matrix.name }} + path: | + dist/*-win-installer*.exe + dist/*-win-portable*.exe + dist/*-mac-*.dmg + dist/*-linux*.AppImage + dist/*-linux*.deb + python-dist/*.whl + if-no-files-found: ignore - create_release: - name: Create Release - needs: build_desktop - runs-on: ubuntu-latest - if: github.event_name == 'push' - permissions: - contents: write - steps: - - name: Download all artifacts - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4 - with: - path: artifacts + create_release: + name: Create Release + needs: build_desktop + runs-on: ubuntu-latest + if: github.event_name == 'push' + permissions: + contents: write + steps: + - name: Download all artifacts + uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4 + with: + path: artifacts - - name: Display structure of downloaded files - run: ls -R artifacts + - name: Display structure of downloaded files + run: ls -R artifacts - - name: Prepare release assets - run: | - mkdir -p release-assets - find artifacts -type f \( -name "*.exe" -o -name "*.dmg" -o -name "*.AppImage" -o -name "*.deb" -o -name "*.whl" \) -exec cp {} release-assets/ \; - ls -lh release-assets/ + - name: Prepare release assets + run: | + mkdir -p release-assets + find artifacts -type f \( -name "*.exe" -o -name "*.dmg" -o -name "*.AppImage" -o -name "*.deb" -o -name "*.whl" \) -exec cp {} release-assets/ \; + ls -lh release-assets/ - - name: Generate SHA256 checksums - run: | - cd release-assets - echo "## SHA256 Checksums" > release-body.md - echo "" >> release-body.md + - name: Generate SHA256 checksums + run: | + cd release-assets + echo "## SHA256 Checksums" > release-body.md + echo "" >> release-body.md - for file in *.exe *.dmg *.AppImage *.deb *.whl; do - if [ -f "$file" ]; then - sha256sum "$file" | tee "${file}.sha256" - echo "\`$(cat "${file}.sha256")\`" >> release-body.md - fi - done - - echo "" >> release-body.md - echo "Individual \`.sha256\` files are included for each artifact." >> release-body.md - - cat release-body.md - echo "" - echo "Generated .sha256 files:" - ls -1 *.sha256 2>/dev/null || echo "No .sha256 files found" + for file in *.exe *.dmg *.AppImage *.deb *.whl; do + if [ -f "$file" ]; then + sha256sum "$file" | tee "${file}.sha256" + echo "\`$(cat "${file}.sha256")\`" >> release-body.md + fi + done - - name: Create Release - uses: ncipollo/release-action@b7eabc95ff50cbeeedec83973935c8f306dfcd0b # v1 - with: - draft: true - artifacts: "release-assets/*" - bodyFile: "release-assets/release-body.md" + echo "" >> release-body.md + echo "Individual \`.sha256\` files are included for each artifact." 
>> release-body.md - build_docker: - runs-on: ubuntu-latest - if: github.event_name == 'push' || (github.event_name == 'workflow_dispatch' && github.event.inputs.build_docker == 'true') - permissions: - packages: write - contents: read - steps: - - name: Clone Repo - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4 + cat release-body.md + echo "" + echo "Generated .sha256 files:" + ls -1 *.sha256 2>/dev/null || echo "No .sha256 files found" - - name: Set lowercase repository owner - run: echo "REPO_OWNER_LC=${GITHUB_REPOSITORY_OWNER,,}" >> $GITHUB_ENV + - name: Create Release + uses: ncipollo/release-action@b7eabc95ff50cbeeedec83973935c8f306dfcd0b # v1 + with: + draft: true + artifacts: "release-assets/*" + bodyFile: "release-assets/release-body.md" - - name: Set up QEMU - uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3 + build_docker: + runs-on: ubuntu-latest + if: github.event_name == 'push' || (github.event_name == 'workflow_dispatch' && github.event.inputs.build_docker == 'true') + permissions: + packages: write + contents: read + steps: + - name: Clone Repo + uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4 - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3 + - name: Set lowercase repository owner + run: echo "REPO_OWNER_LC=${GITHUB_REPOSITORY_OWNER,,}" >> $GITHUB_ENV - - name: Log in to the GitHub Container registry - uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3 - with: - registry: ghcr.io - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} + - name: Set up QEMU + uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3 - - name: Build and push Docker images - uses: docker/build-push-action@ca052bb54ab0790a636c9b5f226502c73d547a25 # v5 - with: - context: . - platforms: linux/amd64,linux/arm64 - push: true - tags: >- - ghcr.io/${{ env.REPO_OWNER_LC }}/reticulum-meshchatx:latest, - ghcr.io/${{ env.REPO_OWNER_LC }}/reticulum-meshchatx:${{ github.ref_name }} - labels: >- - org.opencontainers.image.title=Reticulum MeshChatX, - org.opencontainers.image.description=Docker image for Reticulum MeshChatX, - org.opencontainers.image.url=https://github.com/${{ github.repository }}/pkgs/container/reticulum-meshchatx/ + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3 + + - name: Log in to the GitHub Container registry + uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build and push Docker images + uses: docker/build-push-action@ca052bb54ab0790a636c9b5f226502c73d547a25 # v5 + with: + context: . 
+ platforms: linux/amd64,linux/arm64 + push: true + tags: >- + ghcr.io/${{ env.REPO_OWNER_LC }}/reticulum-meshchatx:latest, + ghcr.io/${{ env.REPO_OWNER_LC }}/reticulum-meshchatx:${{ github.ref_name }} + labels: >- + org.opencontainers.image.title=Reticulum MeshChatX, + org.opencontainers.image.description=Docker image for Reticulum MeshChatX, + org.opencontainers.image.url=https://github.com/${{ github.repository }}/pkgs/container/reticulum-meshchatx/ diff --git a/.gitea/workflows/dependency-review.yml b/.gitea/workflows/dependency-review.yml index af9b511..f94e9ab 100644 --- a/.gitea/workflows/dependency-review.yml +++ b/.gitea/workflows/dependency-review.yml @@ -1,22 +1,22 @@ -name: 'Dependency review' +name: "Dependency review" on: - pull_request: - branches: [ "master" ] + pull_request: + branches: ["master"] permissions: - contents: read - pull-requests: write + contents: read + pull-requests: write jobs: - dependency-review: - runs-on: ubuntu-latest + dependency-review: + runs-on: ubuntu-latest - steps: - - name: 'Checkout repository' - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + steps: + - name: "Checkout repository" + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 - - name: 'Dependency Review' - uses: actions/dependency-review-action@3c4e3dcb1aa7874d2c16be7d79418e9b7efd6261 # v4 - with: - comment-summary-in-pr: always \ No newline at end of file + - name: "Dependency Review" + uses: actions/dependency-review-action@3c4e3dcb1aa7874d2c16be7d79418e9b7efd6261 # v4 + with: + comment-summary-in-pr: always diff --git a/.prettierrc b/.prettierrc index d6974cc..36426ec 100644 --- a/.prettierrc +++ b/.prettierrc @@ -1,9 +1,8 @@ { - "semi": true, - "tabWidth": 4, - "singleQuote": false, - "printWidth": 120, - "trailingComma": "es5", - "endOfLine": "auto" + "semi": true, + "tabWidth": 4, + "singleQuote": false, + "printWidth": 120, + "trailingComma": "es5", + "endOfLine": "auto" } - diff --git a/README.md b/README.md index 4e7707c..3608477 100644 --- a/README.md +++ b/README.md @@ -2,11 +2,13 @@ A heavily customized and updated fork of [Reticulum MeshChat](https://github.com/liamcottle/reticulum-meshchat). +This project is separate from the original Reticulum MeshChat project and is not affiliated with it. + ## Features of this Fork ### Major -- Full LXST support. +- Full LXST support w/ custom voicemail. - Map (w/ MBTiles support for offline) - Security improvements - Custom UI/UX @@ -27,12 +29,12 @@ A heavily customized and updated fork of [Reticulum MeshChat](https://github.com - [ ] Spam filter (based on keywords) - [ ] Multi-identity support. - [ ] TAK tool/integration -- [ ] RNS Tunnel - tunnel your regular services over RNS to another MeshchatX user. +- [ ] RNS Tunnel - tunnel your regular services over RNS to another MeshchatX user. - [ ] RNS Filesync - P2P file sync ## Usage -Check [releases](https://git.quad4.io/RNS-Things/reticulum-meshchatX/releases) for pre-built binaries or appimages. +Check [releases](https://git.quad4.io/Ivan/MeshChatX/releases) for pre-built binaries or AppImages.
## Building @@ -47,28 +49,29 @@ You can run `task run` or `task develop` (a thin alias) to start the backend + f ### Available Tasks -| Task | Description | -|------|-------------| -| `task install` | Install all dependencies (syncs version, installs node modules and python deps) | -| `task node_modules` | Install Node.js dependencies only | -| `task python` | Install Python dependencies using Poetry only | -| `task sync-version` | Sync version numbers across project files | -| `task run` | Run the application | -| `task develop` | Run the application in development mode (alias for `run`) | -| `task build` | Build the application (frontend and backend) | -| `task build-frontend` | Build only the frontend | -| `task clean` | Clean build artifacts and dependencies | -| `task wheel` | Build Python wheel package (outputs to `python-dist/`) | -| `task build-appimage` | Build Linux AppImage | -| `task build-exe` | Build Windows portable executable | -| `task dist` | Build distribution (defaults to AppImage) | -| `task electron-legacy` | Install legacy Electron version | -| `task build-appimage-legacy` | Build Linux AppImage with legacy Electron version | -| `task build-exe-legacy` | Build Windows portable executable with legacy Electron version | -| `task build-docker` | Build Docker image using buildx | -| `task run-docker` | Run Docker container using docker-compose | +| Task | Description | +| ---------------------------- | ------------------------------------------------------------------------------- | +| `task install` | Install all dependencies (syncs version, installs node modules and python deps) | +| `task node_modules` | Install Node.js dependencies only | +| `task python` | Install Python dependencies using Poetry only | +| `task sync-version` | Sync version numbers across project files | +| `task run` | Run the application | +| `task develop` | Run the application in development mode (alias for `run`) | +| `task build` | Build the application (frontend and backend) | +| `task build-frontend` | Build only the frontend | +| `task clean` | Clean build artifacts and dependencies | +| `task wheel` | Build Python wheel package (outputs to `python-dist/`) | +| `task build-appimage` | Build Linux AppImage | +| `task build-exe` | Build Windows portable executable | +| `task dist` | Build distribution (defaults to AppImage) | +| `task electron-legacy` | Install legacy Electron version | +| `task build-appimage-legacy` | Build Linux AppImage with legacy Electron version | +| `task build-exe-legacy` | Build Windows portable executable with legacy Electron version | +| `task build-docker` | Build Docker image using buildx | +| `task run-docker` | Run Docker container using docker-compose | All tasks support environment variable overrides. For example: + - `PYTHON=python3.12 task install` - `DOCKER_PLATFORMS=linux/amd64,linux/arm64 task build-docker` @@ -130,11 +133,12 @@ The `cx_setup.py` script uses cx_Freeze for creating standalone executables (App ## Internationalization (i18n) -Multi-language support is in progress. We use `vue-i18n` for the frontend. +Multi-language support is in progress. We use `vue-i18n` for the frontend. Translation files are located in `meshchatx/src/frontend/locales/`. 
Currently supported languages: + - English (Primary) - Russian -- German \ No newline at end of file +- German diff --git a/Taskfile.yml b/Taskfile.yml index f4fc95a..02f99c6 100644 --- a/Taskfile.yml +++ b/Taskfile.yml @@ -1,153 +1,153 @@ -version: '3' +version: "3" vars: - PYTHON: - sh: echo "${PYTHON:-python}" - NPM: - sh: echo "${NPM:-pnpm}" - LEGACY_ELECTRON_VERSION: - sh: echo "${LEGACY_ELECTRON_VERSION:-30.0.8}" - DOCKER_COMPOSE_CMD: - sh: echo "${DOCKER_COMPOSE_CMD:-docker compose}" - DOCKER_COMPOSE_FILE: - sh: echo "${DOCKER_COMPOSE_FILE:-docker-compose.yml}" - DOCKER_IMAGE: - sh: echo "${DOCKER_IMAGE:-reticulum-meshchatx:local}" - DOCKER_BUILDER: - sh: echo "${DOCKER_BUILDER:-meshchatx-builder}" - DOCKER_PLATFORMS: - sh: echo "${DOCKER_PLATFORMS:-linux/amd64}" - DOCKER_BUILD_FLAGS: - sh: echo "${DOCKER_BUILD_FLAGS:---load}" - DOCKER_BUILD_ARGS: - sh: echo "${DOCKER_BUILD_ARGS:-}" - DOCKER_CONTEXT: - sh: echo "${DOCKER_CONTEXT:-.}" - DOCKERFILE: - sh: echo "${DOCKERFILE:-Dockerfile}" + PYTHON: + sh: echo "${PYTHON:-python}" + NPM: + sh: echo "${NPM:-pnpm}" + LEGACY_ELECTRON_VERSION: + sh: echo "${LEGACY_ELECTRON_VERSION:-30.0.8}" + DOCKER_COMPOSE_CMD: + sh: echo "${DOCKER_COMPOSE_CMD:-docker compose}" + DOCKER_COMPOSE_FILE: + sh: echo "${DOCKER_COMPOSE_FILE:-docker-compose.yml}" + DOCKER_IMAGE: + sh: echo "${DOCKER_IMAGE:-reticulum-meshchatx:local}" + DOCKER_BUILDER: + sh: echo "${DOCKER_BUILDER:-meshchatx-builder}" + DOCKER_PLATFORMS: + sh: echo "${DOCKER_PLATFORMS:-linux/amd64}" + DOCKER_BUILD_FLAGS: + sh: echo "${DOCKER_BUILD_FLAGS:---load}" + DOCKER_BUILD_ARGS: + sh: echo "${DOCKER_BUILD_ARGS:-}" + DOCKER_CONTEXT: + sh: echo "${DOCKER_CONTEXT:-.}" + DOCKERFILE: + sh: echo "${DOCKERFILE:-Dockerfile}" tasks: - default: - desc: Show available tasks - cmds: - - task --list + default: + desc: Show available tasks + cmds: + - task --list - install: - desc: Install all dependencies (syncs version, installs node modules and python deps) - deps: [sync-version, node_modules, python] + install: + desc: Install all dependencies (syncs version, installs node modules and python deps) + deps: [sync-version, node_modules, python] - node_modules: - desc: Install Node.js dependencies - cmds: - - '{{.NPM}} install' + node_modules: + desc: Install Node.js dependencies + cmds: + - "{{.NPM}} install" - python: - desc: Install Python dependencies using Poetry - cmds: - - '{{.PYTHON}} -m poetry install' + python: + desc: Install Python dependencies using Poetry + cmds: + - "{{.PYTHON}} -m poetry install" - run: - desc: Run the application - deps: [install] - cmds: - - '{{.PYTHON}} -m poetry run meshchat' + run: + desc: Run the application + deps: [install] + cmds: + - "{{.PYTHON}} -m poetry run meshchat" - develop: - desc: Run the application in development mode - cmds: - - task: run + develop: + desc: Run the application in development mode + cmds: + - task: run - build: - desc: Build the application (frontend and backend) - deps: [install] - cmds: - - '{{.NPM}} run build' + build: + desc: Build the application (frontend and backend) + deps: [install] + cmds: + - "{{.NPM}} run build" - build-frontend: - desc: Build only the frontend - deps: [node_modules] - cmds: - - '{{.NPM}} run build-frontend' + build-frontend: + desc: Build only the frontend + deps: [node_modules] + cmds: + - "{{.NPM}} run build-frontend" - wheel: - desc: Build Python wheel package - deps: [install] - cmds: - - '{{.PYTHON}} -m poetry build -f wheel' - - '{{.PYTHON}} scripts/move_wheels.py' + wheel: + desc: Build Python wheel 
package + deps: [install] + cmds: + - "{{.PYTHON}} -m poetry build -f wheel" + - "{{.PYTHON}} scripts/move_wheels.py" - build-appimage: - desc: Build Linux AppImage - deps: [build] - cmds: - - '{{.NPM}} run electron-postinstall' - - '{{.NPM}} run dist -- --linux AppImage' + build-appimage: + desc: Build Linux AppImage + deps: [build] + cmds: + - "{{.NPM}} run electron-postinstall" + - "{{.NPM}} run dist -- --linux AppImage" - build-exe: - desc: Build Windows portable executable - deps: [build] - cmds: - - '{{.NPM}} run electron-postinstall' - - '{{.NPM}} run dist -- --win portable' + build-exe: + desc: Build Windows portable executable + deps: [build] + cmds: + - "{{.NPM}} run electron-postinstall" + - "{{.NPM}} run dist -- --win portable" - dist: - desc: Build distribution (defaults to AppImage) - cmds: - - task: build-appimage + dist: + desc: Build distribution (defaults to AppImage) + cmds: + - task: build-appimage - electron-legacy: - desc: Install legacy Electron version - cmds: - - '{{.NPM}} install --no-save electron@{{.LEGACY_ELECTRON_VERSION}}' + electron-legacy: + desc: Install legacy Electron version + cmds: + - "{{.NPM}} install --no-save electron@{{.LEGACY_ELECTRON_VERSION}}" - build-appimage-legacy: - desc: Build Linux AppImage with legacy Electron version - deps: [build, electron-legacy] - cmds: - - '{{.NPM}} run electron-postinstall' - - '{{.NPM}} run dist -- --linux AppImage' - - './scripts/rename_legacy_artifacts.sh' + build-appimage-legacy: + desc: Build Linux AppImage with legacy Electron version + deps: [build, electron-legacy] + cmds: + - "{{.NPM}} run electron-postinstall" + - "{{.NPM}} run dist -- --linux AppImage" + - "./scripts/rename_legacy_artifacts.sh" - build-exe-legacy: - desc: Build Windows portable executable with legacy Electron version - deps: [build, electron-legacy] - cmds: - - '{{.NPM}} run electron-postinstall' - - '{{.NPM}} run dist -- --win portable' - - './scripts/rename_legacy_artifacts.sh' + build-exe-legacy: + desc: Build Windows portable executable with legacy Electron version + deps: [build, electron-legacy] + cmds: + - "{{.NPM}} run electron-postinstall" + - "{{.NPM}} run dist -- --win portable" + - "./scripts/rename_legacy_artifacts.sh" - clean: - desc: Clean build artifacts and dependencies - cmds: - - rm -rf node_modules - - rm -rf build - - rm -rf dist - - rm -rf python-dist - - rm -rf meshchatx/public + clean: + desc: Clean build artifacts and dependencies + cmds: + - rm -rf node_modules + - rm -rf build + - rm -rf dist + - rm -rf python-dist + - rm -rf meshchatx/public - sync-version: - desc: Sync version numbers across project files - cmds: - - '{{.PYTHON}} scripts/sync_version.py' + sync-version: + desc: Sync version numbers across project files + cmds: + - "{{.PYTHON}} scripts/sync_version.py" - build-docker: - desc: Build Docker image using buildx - cmds: - - | - if ! docker buildx inspect {{.DOCKER_BUILDER}} >/dev/null 2>&1; then - docker buildx create --name {{.DOCKER_BUILDER}} --use >/dev/null - else - docker buildx use {{.DOCKER_BUILDER}} - fi - - | - docker buildx build --builder {{.DOCKER_BUILDER}} --platform {{.DOCKER_PLATFORMS}} \ - {{.DOCKER_BUILD_FLAGS}} \ - -t {{.DOCKER_IMAGE}} \ - {{.DOCKER_BUILD_ARGS}} \ - -f {{.DOCKERFILE}} \ - {{.DOCKER_CONTEXT}} + build-docker: + desc: Build Docker image using buildx + cmds: + - | + if ! 
docker buildx inspect {{.DOCKER_BUILDER}} >/dev/null 2>&1; then + docker buildx create --name {{.DOCKER_BUILDER}} --use >/dev/null + else + docker buildx use {{.DOCKER_BUILDER}} + fi + - | + docker buildx build --builder {{.DOCKER_BUILDER}} --platform {{.DOCKER_PLATFORMS}} \ + {{.DOCKER_BUILD_FLAGS}} \ + -t {{.DOCKER_IMAGE}} \ + {{.DOCKER_BUILD_ARGS}} \ + -f {{.DOCKERFILE}} \ + {{.DOCKER_CONTEXT}} - run-docker: - desc: Run Docker container using docker-compose - cmds: - - 'MESHCHAT_IMAGE="{{.DOCKER_IMAGE}}" {{.DOCKER_COMPOSE_CMD}} -f {{.DOCKER_COMPOSE_FILE}} up --remove-orphans --pull never reticulum-meshchatx' + run-docker: + desc: Run Docker container using docker-compose + cmds: + - 'MESHCHAT_IMAGE="{{.DOCKER_IMAGE}}" {{.DOCKER_COMPOSE_CMD}} -f {{.DOCKER_COMPOSE_FILE}} up --remove-orphans --pull never reticulum-meshchatx' diff --git a/docker-compose.yml b/docker-compose.yml index d513770..4f8959f 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,17 +1,17 @@ services: - reticulum-meshchatx: - container_name: reticulum-meshchatx - image: ${MESHCHAT_IMAGE:-ghcr.io/sudo-ivan/reticulum-meshchatx:latest} - pull_policy: always - restart: unless-stopped - # Make the meshchat web interface accessible from the host on port 8000 - ports: - - 127.0.0.1:8000:8000 - volumes: - - meshchat-config:/config - # Uncomment if you have a USB device connected, such as an RNode - # devices: - # - /dev/ttyUSB0:/dev/ttyUSB0 + reticulum-meshchatx: + container_name: reticulum-meshchatx + image: ${MESHCHAT_IMAGE:-ghcr.io/sudo-ivan/reticulum-meshchatx:latest} + pull_policy: always + restart: unless-stopped + # Make the meshchat web interface accessible from the host on port 8000 + ports: + - 127.0.0.1:8000:8000 + volumes: + - meshchat-config:/config + # Uncomment if you have a USB device connected, such as an RNode + # devices: + # - /dev/ttyUSB0:/dev/ttyUSB0 volumes: meshchat-config: diff --git a/docs/meshchat_on_raspberry_pi.md b/docs/meshchat_on_raspberry_pi.md index 767b74b..a1385ef 100644 --- a/docs/meshchat_on_raspberry_pi.md +++ b/docs/meshchat_on_raspberry_pi.md @@ -96,4 +96,4 @@ sudo systemctl status reticulum-meshchat.service You should now be able to access MeshChat via your Pi's IP address. -> Note: Don't forget to include the default port `8000` \ No newline at end of file +> Note: Don't forget to include the default port `8000` diff --git a/electron/loading.html b/electron/loading.html index e49f966..693df9b 100644 --- a/electron/loading.html +++ b/electron/loading.html @@ -1,161 +1,205 @@
[The electron/loading.html hunk was garbled in extraction: the HTML markup was stripped, leaving only bare text fragments and diff markers. The recoverable text shows the old loading screen's content — the MeshChatX logo, the "MeshChatX" title, the caption "Custom fork by Sudo-Ivan", the boot steps "Preparing your node" and "Starting services", the status panel "Loading services / Waiting for the MeshChatX API to come online.", and "Version: v0.0.0" / "Status: Booting" fields — while the replacement 205-line markup is not recoverable.]
diff --git a/logo/icon.ico b/logo/icon.ico index f00e77e..56b2854 100644 Binary files a/logo/icon.ico and b/logo/icon.ico differ diff --git a/logo/logo-chat-bubble.png b/logo/logo-chat-bubble.png deleted file mode 100644 index aea1c67..0000000 Binary files a/logo/logo-chat-bubble.png and /dev/null differ diff --git a/logo/logo.afdesign b/logo/logo.afdesign deleted file mode 100644 index c2a6696..0000000 Binary files a/logo/logo.afdesign and /dev/null differ diff --git a/logo/logo.png b/logo/logo.png index ab9ae1f..05ae152 100644 Binary files a/logo/logo.png and b/logo/logo.png differ diff --git a/meshchatx.png/meshchatx.png b/meshchatx.png/meshchatx.png deleted file mode 100644 index 05ae152..0000000 Binary files a/meshchatx.png/meshchatx.png and /dev/null differ diff --git a/meshchatx/meshchat.py b/meshchatx/meshchat.py index 32d8ba4..a5a040b 100644 --- a/meshchatx/meshchat.py +++ b/meshchatx/meshchat.py @@ -63,6 +63,7 @@ from meshchatx.src.backend.rnstatus_handler import RNStatusHandler from meshchatx.src.backend.sideband_commands import SidebandCommands from meshchatx.src.backend.telephone_manager import TelephoneManager from meshchatx.src.backend.translator_handler import TranslatorHandler +from meshchatx.src.backend.voicemail_manager import VoicemailManager from meshchatx.src.version import __version__ as app_version @@ -193,7 +194,9 @@ class ReticulumMeshChat: # init database self.database = Database(self.database_path) - self.db = self.database # keep for compatibility with parts I haven't changed yet + self.db = ( + self.database + ) # keep for compatibility with parts I haven't changed yet try: self.database.initialize() @@ -218,7 +221,7 @@ class ReticulumMeshChat: self.announce_manager = AnnounceManager(self.database) self.archiver_manager = ArchiverManager(self.database) self.map_manager = MapManager(self.config, self.storage_dir) - self.forwarding_manager = None # will init after lxmf router + self.forwarding_manager = None # will init after lxmf router # remember if authentication is enabled self.auth_enabled = auth_enabled or self.config.auth_enabled.get() @@ -327,6 +330,17 @@ class ReticulumMeshChat: ) self.telephone_manager.init_telephone() + # init Voicemail Manager + self.voicemail_manager = VoicemailManager( + db=self.database, + telephone_manager=self.telephone_manager, + storage_dir=self.storage_path, + ) + # Monkey patch VoicemailManager to use our get_name_for_identity_hash + self.voicemail_manager.get_name_for_identity_hash = ( + self.get_name_for_identity_hash + ) + # init RNCP handler self.rncp_handler = RNCPHandler( reticulum_instance=self.reticulum, @@ -345,7 +359,9 @@ class ReticulumMeshChat: # init Translator handler libretranslate_url = self.config.get("libretranslate_url", None) - self.translator_handler = TranslatorHandler(libretranslate_url=libretranslate_url) + self.translator_handler = TranslatorHandler( + libretranslate_url=libretranslate_url + ) # start background thread for auto announce loop thread = threading.Thread(target=asyncio.run, args=(self.announce_loop(),)) @@ -552,7 +568,8 @@ class ReticulumMeshChat: def backup_identity(self): identity_bytes = self._get_identity_bytes() target_path = self.identity_file_path or os.path.join( - self.storage_dir, "identity", + self.storage_dir, + "identity", ) os.makedirs(os.path.dirname(target_path), exist_ok=True) with open(target_path, "wb") as f: @@ -567,7 +584,8 @@ class ReticulumMeshChat: def restore_identity_from_bytes(self, identity_bytes: bytes): target_path =
self.identity_file_path or os.path.join( - self.storage_dir, "identity", + self.storage_dir, + "identity", ) os.makedirs(os.path.dirname(target_path), exist_ok=True) with open(target_path, "wb") as f: @@ -690,9 +708,13 @@ class ReticulumMeshChat: if self.config.crawler_enabled.get(): # Proactively queue any known nodes from the database that haven't been queued yet # get known propagation nodes from database - known_nodes = self.database.announces.get_announces(aspect="nomadnetwork.node") + known_nodes = self.database.announces.get_announces( + aspect="nomadnetwork.node" + ) for node in known_nodes: - self.queue_crawler_task(node["destination_hash"], "/page/index.mu") + self.queue_crawler_task( + node["destination_hash"], "/page/index.mu" + ) # process pending or failed tasks # ensure we handle potential string comparison issues in SQLite @@ -702,7 +724,9 @@ class ReticulumMeshChat: ) # process tasks concurrently up to the limit - await asyncio.gather(*[self.process_crawler_task(task) for task in tasks]) + await asyncio.gather( + *[self.process_crawler_task(task) for task in tasks] + ) except Exception as e: print(f"Error in crawler loop: {e}") @@ -713,12 +737,16 @@ class ReticulumMeshChat: async def process_crawler_task(self, task): # mark as crawling task_id = task["id"] - self.database.misc.update_crawl_task(task_id, status="crawling", last_retry_at=datetime.now(UTC)) + self.database.misc.update_crawl_task( + task_id, status="crawling", last_retry_at=datetime.now(UTC) + ) destination_hash = task["destination_hash"] page_path = task["page_path"] - print(f"Crawler: Archiving {destination_hash}:{page_path} (Attempt {task['retry_count'] + 1})") + print( + f"Crawler: Archiving {destination_hash}:{page_path} (Attempt {task['retry_count'] + 1})" + ) # completion event done_event = asyncio.Event() @@ -762,17 +790,23 @@ class ReticulumMeshChat: await download_task except Exception as e: - print(f"Crawler: Error during download for {destination_hash}:{page_path}: {e}") + print( + f"Crawler: Error during download for {destination_hash}:{page_path}: {e}" + ) failure_reason[0] = str(e) done_event.set() if success[0]: print(f"Crawler: Successfully archived {destination_hash}:{page_path}") - self.archive_page(destination_hash, page_path, content_received[0], is_manual=False) + self.archive_page( + destination_hash, page_path, content_received[0], is_manual=False + ) task.status = "completed" task.save() else: - print(f"Crawler: Failed to archive {destination_hash}:{page_path} - {failure_reason[0]}") + print( + f"Crawler: Failed to archive {destination_hash}:{page_path} - {failure_reason[0]}" + ) task.retry_count += 1 task.status = "failed" @@ -911,13 +945,17 @@ class ReticulumMeshChat: # returns the latest message for the provided destination hash def get_conversation_latest_message(self, destination_hash: str): local_hash = self.identity.hexhash - messages = self.message_handler.get_conversation_messages(local_hash, destination_hash, limit=1) + messages = self.message_handler.get_conversation_messages( + local_hash, destination_hash, limit=1 + ) return messages[0] if messages else None # returns true if the conversation with the provided destination hash has any attachments def conversation_has_attachments(self, destination_hash: str): local_hash = self.identity.hexhash - messages = self.message_handler.get_conversation_messages(local_hash, destination_hash) + messages = self.message_handler.get_conversation_messages( + local_hash, destination_hash + ) for message in messages: if 
self.message_fields_have_attachments(message["fields"]): return True @@ -957,9 +995,13 @@ class ReticulumMeshChat: matches.add(message["source_hash"]) # also check custom display names - custom_names = self.database.announces.get_announces() # Or more specific if needed + custom_names = ( + self.database.announces.get_announces() + ) # Or more specific if needed for announce in custom_names: - custom_name = self.database.announces.get_custom_display_name(announce["destination_hash"]) + custom_name = self.database.announces.get_custom_display_name( + announce["destination_hash"] + ) if custom_name and search_term.lower() in custom_name.lower(): matches.add(announce["destination_hash"]) @@ -974,6 +1016,9 @@ class ReticulumMeshChat: # handle receiving a new audio call def on_incoming_telephone_call(self, caller_identity: RNS.Identity): + # Trigger voicemail handling + self.voicemail_manager.handle_incoming_call(caller_identity) + print(f"on_incoming_telephone_call: {caller_identity.hash.hex()}") AsyncUtils.run_async( self.websocket_broadcast( @@ -998,7 +1043,12 @@ class ReticulumMeshChat: ) def on_telephone_call_ended(self, caller_identity: RNS.Identity): - print(f"on_telephone_call_ended: {caller_identity.hash.hex() if caller_identity else 'Unknown'}") + # Stop voicemail recording if active + self.voicemail_manager.stop_recording() + + print( + f"on_telephone_call_ended: {caller_identity.hash.hex() if caller_identity else 'Unknown'}" + ) # Record call history if caller_identity: @@ -2474,7 +2524,9 @@ class ReticulumMeshChat: "remote_identity_hash": remote_identity_hash, "remote_identity_name": remote_identity_name, "audio_profile_id": self.telephone_manager.telephone.transmit_codec.profile - if hasattr(self.telephone_manager.telephone.transmit_codec, "profile") + if hasattr( + self.telephone_manager.telephone.transmit_codec, "profile" + ) else None, "tx_packets": getattr(telephone_active_call, "tx", 0), "rx_packets": getattr(telephone_active_call, "rx", 0), @@ -2482,6 +2534,7 @@ class ReticulumMeshChat: "rx_bytes": getattr(telephone_active_call, "rxbytes", 0), "is_mic_muted": self.telephone_manager.telephone.transmit_muted, "is_speaker_muted": self.telephone_manager.telephone.receive_muted, + "is_voicemail": self.voicemail_manager.is_recording, } return web.json_response( @@ -2492,6 +2545,10 @@ class ReticulumMeshChat: "active_call": active_call, "is_mic_muted": self.telephone_manager.telephone.transmit_muted, "is_speaker_muted": self.telephone_manager.telephone.receive_muted, + "voicemail": { + "is_recording": self.voicemail_manager.is_recording, + "unread_count": self.database.voicemails.get_unread_count(), + }, }, ) @@ -2506,7 +2563,9 @@ class ReticulumMeshChat: caller_identity = active_call.get_remote_identity() # answer call - await asyncio.to_thread(self.telephone_manager.telephone.answer, caller_identity) + await asyncio.to_thread( + self.telephone_manager.telephone.answer, caller_identity + ) return web.json_response( { @@ -2563,9 +2622,12 @@ class ReticulumMeshChat: profile_id = request.match_info.get("profile_id") try: await asyncio.to_thread( - self.telephone_manager.telephone.switch_profile, int(profile_id) + self.telephone_manager.telephone.switch_profile, + int(profile_id), + ) + return web.json_response( + {"message": f"Switched to profile {profile_id}"} ) - return web.json_response({"message": f"Switched to profile {profile_id}"}) except Exception as e: return web.json_response({"message": str(e)}, status=500) @@ -2602,9 +2664,11 @@ class ReticulumMeshChat: 
identity_hash_bytes = bytes.fromhex(announce["identity_hash"]) # calculate telephony destination hash - telephony_destination_hash = RNS.Destination.hash_from_name_and_identity( - f"{LXST.APP_NAME}.telephony", - identity_hash_bytes, + telephony_destination_hash = ( + RNS.Destination.hash_from_name_and_identity( + f"{LXST.APP_NAME}.telephony", + identity_hash_bytes, + ) ) # request path to telephony destination @@ -2673,6 +2737,83 @@ class ReticulumMeshChat: }, ) + # voicemail status + @routes.get("/api/v1/telephone/voicemail/status") + async def telephone_voicemail_status(request): + return web.json_response( + { + "has_espeak": self.voicemail_manager.has_espeak, + "has_ffmpeg": self.voicemail_manager.has_ffmpeg, + "is_recording": self.voicemail_manager.is_recording, + }, + ) + + # list voicemails + @routes.get("/api/v1/telephone/voicemails") + async def telephone_voicemails(request): + limit = int(request.query.get("limit", 50)) + offset = int(request.query.get("offset", 0)) + voicemails = self.database.voicemails.get_voicemails( + limit=limit, offset=offset + ) + return web.json_response( + { + "voicemails": [dict(row) for row in voicemails], + "unread_count": self.database.voicemails.get_unread_count(), + }, + ) + + # mark voicemail as read + @routes.post("/api/v1/telephone/voicemails/{id}/read") + async def telephone_voicemail_mark_read(request): + voicemail_id = request.match_info.get("id") + self.database.voicemails.mark_as_read(voicemail_id) + return web.json_response({"message": "Voicemail marked as read"}) + + # delete voicemail + @routes.delete("/api/v1/telephone/voicemails/{id}") + async def telephone_voicemail_delete(request): + voicemail_id = request.match_info.get("id") + voicemail = self.database.voicemails.get_voicemail(voicemail_id) + if voicemail: + filepath = os.path.join( + self.voicemail_manager.recordings_dir, voicemail["filename"] + ) + if os.path.exists(filepath): + os.remove(filepath) + self.database.voicemails.delete_voicemail(voicemail_id) + return web.json_response({"message": "Voicemail deleted"}) + return web.json_response({"message": "Voicemail not found"}, status=404) + + # serve voicemail audio + @routes.get("/api/v1/telephone/voicemails/{id}/audio") + async def telephone_voicemail_audio(request): + voicemail_id = request.match_info.get("id") + voicemail = self.database.voicemails.get_voicemail(voicemail_id) + if voicemail: + filepath = os.path.join( + self.voicemail_manager.recordings_dir, voicemail["filename"] + ) + if os.path.exists(filepath): + return web.FileResponse(filepath) + return web.json_response( + {"message": "Voicemail audio not found"}, status=404 + ) + + # generate greeting + @routes.post("/api/v1/telephone/voicemail/generate-greeting") + async def telephone_voicemail_generate_greeting(request): + try: + text = self.config.voicemail_greeting.get() + path = await asyncio.to_thread( + self.voicemail_manager.generate_greeting, text + ) + return web.json_response( + {"message": "Greeting generated", "path": path} + ) + except Exception as e: + return web.json_response({"message": str(e)}, status=500) + # announce @routes.get("/api/v1/announce") async def announce_trigger(request): @@ -2694,7 +2835,9 @@ class ReticulumMeshChat: search_query = request.query.get("search", None) limit = request.query.get("limit", None) offset = request.query.get("offset", None) - include_blocked = request.query.get("include_blocked", "false").lower() == "true" + include_blocked = ( + request.query.get("include_blocked", "false").lower() == "true" + ) 
blocked_identity_hashes = None if not include_blocked: @@ -2721,7 +2864,8 @@ class ReticulumMeshChat: # process announces announces = [ - self.convert_db_announce_to_dict(announce) for announce in paginated_results + self.convert_db_announce_to_dict(announce) + for announce in paginated_results ] return web.json_response( @@ -2742,8 +2886,7 @@ class ReticulumMeshChat: # process favourites favourites = [ - self.convert_db_favourite_to_dict(favourite) - for favourite in results + self.convert_db_favourite_to_dict(favourite) for favourite in results ] return web.json_response( @@ -2789,7 +2932,9 @@ class ReticulumMeshChat: ) # upsert favourite - self.database.announces.upsert_favourite(destination_hash, display_name, aspect) + self.database.announces.upsert_favourite( + destination_hash, display_name, aspect + ) return web.json_response( { "message": "Favourite has been added!", @@ -2808,7 +2953,9 @@ class ReticulumMeshChat: # update display name if provided if len(display_name) > 0: - self.database.announces.upsert_custom_display_name(destination_hash, display_name) + self.database.announces.upsert_custom_display_name( + destination_hash, display_name + ) return web.json_response( { @@ -2853,31 +3000,43 @@ class ReticulumMeshChat: archives = [] for archive in archives_results: # find node name from announces or custom display names - node_name = self.get_custom_destination_display_name(archive["destination_hash"]) + node_name = self.get_custom_destination_display_name( + archive["destination_hash"] + ) if not node_name: - db_announce = self.database.announces.get_announce_by_hash(archive["destination_hash"]) + db_announce = self.database.announces.get_announce_by_hash( + archive["destination_hash"] + ) if db_announce and db_announce["aspect"] == "nomadnetwork.node": - node_name = ReticulumMeshChat.parse_nomadnetwork_node_display_name(db_announce["app_data"]) + node_name = ( + ReticulumMeshChat.parse_nomadnetwork_node_display_name( + db_announce["app_data"] + ) + ) - archives.append({ - "id": archive["id"], - "destination_hash": archive["destination_hash"], - "node_name": node_name or "Unknown Node", - "page_path": archive["page_path"], - "content": archive["content"], - "hash": archive["hash"], - "created_at": archive["created_at"], - }) + archives.append( + { + "id": archive["id"], + "destination_hash": archive["destination_hash"], + "node_name": node_name or "Unknown Node", + "page_path": archive["page_path"], + "content": archive["content"], + "hash": archive["hash"], + "created_at": archive["created_at"], + } + ) - return web.json_response({ - "archives": archives, - "pagination": { - "page": page, - "limit": limit, - "total_count": total_count, - "total_pages": total_pages, - }, - }) + return web.json_response( + { + "archives": archives, + "pagination": { + "page": page, + "limit": limit, + "total_count": total_count, + "total_pages": total_pages, + }, + } + ) @routes.get("/api/v1/lxmf/propagation-node/status") async def propagation_node_status(request): @@ -2937,7 +3096,7 @@ class ReticulumMeshChat: # limit results if limit is not None: - results = results[:int(limit)] + results = results[: int(limit)] # process announces lxmf_propagation_nodes = [] @@ -2947,14 +3106,20 @@ class ReticulumMeshChat: aspect="lxmf.delivery", identity_hash=announce["identity_hash"], ) - lxmf_delivery_announce = lxmf_delivery_results[0] if lxmf_delivery_results else None + lxmf_delivery_announce = ( + lxmf_delivery_results[0] if lxmf_delivery_results else None + ) # find a nomadnetwork.node announce for the 
same identity hash, so we can use that as an "operated by" name - nomadnetwork_node_results = self.database.announces.get_filtered_announces( - aspect="nomadnetwork.node", - identity_hash=announce["identity_hash"], + nomadnetwork_node_results = ( + self.database.announces.get_filtered_announces( + aspect="nomadnetwork.node", + identity_hash=announce["identity_hash"], + ) + ) + nomadnetwork_node_announce = ( + nomadnetwork_node_results[0] if nomadnetwork_node_results else None ) - nomadnetwork_node_announce = nomadnetwork_node_results[0] if nomadnetwork_node_results else None # get a display name from other announces belonging to the propagation nodes identity operator_display_name = None @@ -2970,9 +3135,11 @@ class ReticulumMeshChat: nomadnetwork_node_announce is not None and nomadnetwork_node_announce["app_data"] is not None ): - operator_display_name = ReticulumMeshChat.parse_nomadnetwork_node_display_name( - nomadnetwork_node_announce["app_data"], - None, + operator_display_name = ( + ReticulumMeshChat.parse_nomadnetwork_node_display_name( + nomadnetwork_node_announce["app_data"], + None, + ) ) # parse app_data so we can see if propagation is enabled or disabled for this node @@ -3097,18 +3264,26 @@ class ReticulumMeshChat: updated_at = None # get latest announce from database for the provided destination hash - latest_announce = self.database.announces.get_announce_by_hash(destination_hash) + latest_announce = self.database.announces.get_announce_by_hash( + destination_hash + ) # get latest lxmf message from database sent to us from the provided destination hash local_hash = self.local_lxmf_destination.hexhash - messages = self.message_handler.get_conversation_messages(local_hash, destination_hash, limit=1) + messages = self.message_handler.get_conversation_messages( + local_hash, destination_hash, limit=1 + ) # Filter for incoming messages only - latest_lxmf_message = next((m for m in messages if m["source_hash"] == destination_hash), None) + latest_lxmf_message = next( + (m for m in messages if m["source_hash"] == destination_hash), None + ) # determine when latest announce was received latest_announce_at = None if latest_announce is not None: - latest_announce_at = datetime.fromisoformat(latest_announce["updated_at"]) + latest_announce_at = datetime.fromisoformat( + latest_announce["updated_at"] + ) if latest_announce_at.tzinfo is not None: latest_announce_at = latest_announce_at.replace(tzinfo=None) @@ -3392,7 +3567,10 @@ class ReticulumMeshChat: @routes.get("/api/v1/rnstatus") async def rnstatus(request): - include_link_stats = request.query.get("include_link_stats", "false") in ("true", "1") + include_link_stats = request.query.get("include_link_stats", "false") in ( + "true", + "1", + ) sorting = request.query.get("sorting") sort_reverse = request.query.get("sort_reverse", "false") in ("true", "1") @@ -3453,13 +3631,17 @@ class ReticulumMeshChat: async def translator_languages(request): try: libretranslate_url = request.query.get("libretranslate_url") - languages = self.translator_handler.get_supported_languages(libretranslate_url=libretranslate_url) - return web.json_response({ - "languages": languages, - "has_argos": self.translator_handler.has_argos, - "has_argos_lib": self.translator_handler.has_argos_lib, - "has_argos_cli": self.translator_handler.has_argos_cli, - }) + languages = self.translator_handler.get_supported_languages( + libretranslate_url=libretranslate_url + ) + return web.json_response( + { + "languages": languages, + "has_argos": 
self.translator_handler.has_argos, + "has_argos_lib": self.translator_handler.has_argos_lib, + "has_argos_cli": self.translator_handler.has_argos_cli, + } + ) except Exception as e: return web.json_response( {"message": str(e)}, @@ -3575,7 +3757,9 @@ class ReticulumMeshChat: lxmf_stamp_cost = None announce = self.database.announces.get_announce_by_hash(destination_hash) if announce is not None: - lxmf_stamp_cost = ReticulumMeshChat.parse_lxmf_stamp_cost(announce["app_data"]) + lxmf_stamp_cost = ReticulumMeshChat.parse_lxmf_stamp_cost( + announce["app_data"] + ) # get outbound ticket expiry for this lxmf destination lxmf_outbound_ticket_expiry = ( @@ -3760,7 +3944,9 @@ class ReticulumMeshChat: # get lxmf message from database lxmf_message = None - db_lxmf_message = self.database.messages.get_lxmf_message_by_hash(message_hash) + db_lxmf_message = self.database.messages.get_lxmf_message_by_hash( + message_hash + ) if db_lxmf_message is not None: lxmf_message = self.convert_db_lxmf_message_to_dict(db_lxmf_message) @@ -3864,7 +4050,9 @@ class ReticulumMeshChat: file_index = request.query.get("file_index") # find message from database - db_lxmf_message = self.database.messages.get_lxmf_message_by_hash(message_hash) + db_lxmf_message = self.database.messages.get_lxmf_message_by_hash( + message_hash + ) if db_lxmf_message is None: return web.json_response({"message": "Message not found"}, status=404) @@ -3959,14 +4147,16 @@ class ReticulumMeshChat: latest_message_title = db_message["title"] latest_message_preview = db_message["content"] latest_message_timestamp = db_message["timestamp"] - latest_message_has_attachments = ( - self.message_fields_have_attachments(db_message["fields"]) + latest_message_has_attachments = self.message_fields_have_attachments( + db_message["fields"] ) # using timestamp (sent time) for updated_at as it is more reliable across restarts # and represents the actual time the message was created by the sender. # we convert it to ISO format for the frontend. 
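# editor's note: a minimal, self-contained sketch of the conversion in the
# change just below, using a hypothetical epoch value that is not taken from
# this diff. LXMF timestamps are unix epoch floats set by the sender, so
# formatting with an explicit UTC timezone keeps the ISO string identical
# regardless of the host's local timezone.
from datetime import UTC, datetime

example_timestamp = 1700000000.0  # hypothetical sender epoch seconds
example_updated_at = datetime.fromtimestamp(example_timestamp, UTC).isoformat()
# example_updated_at == "2023-11-14T22:13:20+00:00"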
- updated_at = datetime.fromtimestamp(latest_message_timestamp, UTC).isoformat() + updated_at = datetime.fromtimestamp( + latest_message_timestamp, UTC + ).isoformat() # check if conversation has attachments has_attachments = self.conversation_has_attachments(other_user_hash) @@ -4260,7 +4450,12 @@ class ReticulumMeshChat: self.map_manager.close() self.config.map_offline_path.set(file_path) self.config.map_offline_enabled.set(True) - return web.json_response({"message": "Active map updated", "metadata": self.map_manager.get_metadata()}) + return web.json_response( + { + "message": "Active map updated", + "metadata": self.map_manager.get_metadata(), + } + ) return web.json_response({"error": "File not found"}, status=404) # upload offline map @@ -4274,7 +4469,9 @@ class ReticulumMeshChat: filename = field.filename if not filename.endswith(".mbtiles"): - return web.json_response({"error": "Invalid file format, must be .mbtiles"}, status=400) + return web.json_response( + {"error": "Invalid file format, must be .mbtiles"}, status=400 + ) # save to mbtiles dir mbtiles_dir = self.map_manager.get_mbtiles_dir() @@ -4307,12 +4504,19 @@ class ReticulumMeshChat: os.remove(dest_path) self.config.map_offline_path.set(None) self.config.map_offline_enabled.set(False) - return web.json_response({"error": "Invalid MBTiles file or unsupported format (vector maps not supported)"}, status=400) + return web.json_response( + { + "error": "Invalid MBTiles file or unsupported format (vector maps not supported)" + }, + status=400, + ) - return web.json_response({ - "message": "Map uploaded successfully", - "metadata": metadata, - }) + return web.json_response( + { + "message": "Map uploaded successfully", + "metadata": metadata, + } + ) except Exception as e: RNS.log(f"Error uploading map: {e}", RNS.LOG_ERROR) return web.json_response({"error": str(e)}, status=500) @@ -4322,7 +4526,7 @@ class ReticulumMeshChat: async def start_map_export(request): try: data = await request.json() - bbox = data.get("bbox") # [min_lon, min_lat, max_lon, max_lat] + bbox = data.get("bbox") # [min_lon, min_lat, max_lon, max_lat] min_zoom = int(data.get("min_zoom", 0)) max_zoom = int(data.get("max_zoom", 10)) name = data.get("name", "Exported Map") @@ -4360,7 +4564,9 @@ class ReticulumMeshChat: "Content-Disposition": f'attachment; filename="map_export_{export_id}.mbtiles"', }, ) - return web.json_response({"error": "File not ready or not found"}, status=404) + return web.json_response( + {"error": "File not ready or not found"}, status=404 + ) # MIME type fix middleware - ensures JavaScript files have correct Content-Type @web.middleware @@ -4433,7 +4639,9 @@ class ReticulumMeshChat: ) # add other middlewares - app.middlewares.extend([auth_middleware, mime_type_middleware, security_middleware]) + app.middlewares.extend( + [auth_middleware, mime_type_middleware, security_middleware] + ) app.add_routes(routes) app.add_routes( @@ -4606,10 +4814,14 @@ class ReticulumMeshChat: self.config.page_archiver_enabled.set(bool(data["page_archiver_enabled"])) if "page_archiver_max_versions" in data: - self.config.page_archiver_max_versions.set(int(data["page_archiver_max_versions"])) + self.config.page_archiver_max_versions.set( + int(data["page_archiver_max_versions"]) + ) if "archives_max_storage_gb" in data: - self.config.archives_max_storage_gb.set(int(data["archives_max_storage_gb"])) + self.config.archives_max_storage_gb.set( + int(data["archives_max_storage_gb"]) + ) # update crawler settings if "crawler_enabled" in data: @@ -4619,7 +4831,9 
@@ class ReticulumMeshChat: self.config.crawler_max_retries.set(int(data["crawler_max_retries"])) if "crawler_retry_delay_seconds" in data: - self.config.crawler_retry_delay_seconds.set(int(data["crawler_retry_delay_seconds"])) + self.config.crawler_retry_delay_seconds.set( + int(data["crawler_retry_delay_seconds"]) + ) if "crawler_max_concurrent" in data: self.config.crawler_max_concurrent.set(int(data["crawler_max_concurrent"])) @@ -4695,7 +4909,13 @@ class ReticulumMeshChat: return data # archives a page version - def archive_page(self, destination_hash: str, page_path: str, content: str, is_manual: bool = False): + def archive_page( + self, + destination_hash: str, + page_path: str, + content: str, + is_manual: bool = False, + ): if not is_manual and not self.config.page_archiver_enabled.get(): return @@ -4709,7 +4929,9 @@ class ReticulumMeshChat: # returns archived page versions for a given destination and path def get_archived_page_versions(self, destination_hash: str, page_path: str): - return self.database.misc.get_archived_page_versions(destination_hash, page_path) + return self.database.misc.get_archived_page_versions( + destination_hash, page_path + ) # flushes all archived pages def flush_all_archived_pages(self): @@ -4780,7 +5002,9 @@ class ReticulumMeshChat: { "id": archive.id, "hash": archive.hash, - "created_at": archive.created_at.isoformat() if hasattr(archive.created_at, "isoformat") else str(archive.created_at), + "created_at": archive.created_at.isoformat() + if hasattr(archive.created_at, "isoformat") + else str(archive.created_at), } for archive in archives ], @@ -5030,7 +5254,8 @@ class ReticulumMeshChat: has_archives = ( len( self.get_archived_page_versions( - destination_hash.hex(), page_path, + destination_hash.hex(), + page_path, ), ) > 0 @@ -5243,7 +5468,9 @@ class ReticulumMeshChat: identity = self.recall_identity(identity_hash) if identity is not None: # get lxmf.delivery destination hash - lxmf_destination_hash = RNS.Destination.hash(identity, "lxmf", "delivery").hex() + lxmf_destination_hash = RNS.Destination.hash( + identity, "lxmf", "delivery" + ).hex() # use custom name if available custom_name = self.database.announces.get_custom_display_name( @@ -5510,7 +5737,9 @@ class ReticulumMeshChat: # find lxmf user icon from database lxmf_user_icon = None - db_lxmf_user_icon = self.database.misc.get_user_icon(announce["destination_hash"]) + db_lxmf_user_icon = self.database.misc.get_user_icon( + announce["destination_hash"] + ) if db_lxmf_user_icon: lxmf_user_icon = { "icon_name": db_lxmf_user_icon["icon_name"], @@ -5634,7 +5863,7 @@ class ReticulumMeshChat: created_at = str(db_lxmf_message["created_at"]) if created_at and "+" not in created_at and "Z" not in created_at: created_at += "Z" - + updated_at = str(db_lxmf_message["updated_at"]) if updated_at and "+" not in updated_at and "Z" not in updated_at: updated_at += "Z" @@ -5790,7 +6019,9 @@ class ReticulumMeshChat: print(e) # find message from database - db_lxmf_message = self.database.messages.get_lxmf_message_by_hash(lxmf_message.hash.hex()) + db_lxmf_message = self.database.messages.get_lxmf_message_by_hash( + lxmf_message.hash.hex() + ) if not db_lxmf_message: return @@ -5820,7 +6051,9 @@ class ReticulumMeshChat: destination_hash = lxmf_message.destination_hash.hex() # check if this message is for an alias identity (REPLY PATH) - mapping = self.database.messages.get_forwarding_mapping(alias_hash=destination_hash) + mapping = self.database.messages.get_forwarding_mapping( + alias_hash=destination_hash + ) 
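# editor's note: the reply/forward branching handled in this hunk, reduced to
# a standalone sketch. It assumes only the row shapes visible elsewhere in
# this diff (mappings carry original_sender_hash, rules carry
# source_filter_hash and forward_to_hash); the helper name is illustrative,
# not part of the codebase.
def pick_forward_target(mapping, rules, source_hash):
    # a message addressed to an alias destination is a reply: route it back
    # to the original sender recorded in the mapping (REPLY PATH)
    if mapping is not None:
        return mapping["original_sender_hash"]
    # otherwise apply the first active forwarding rule whose optional source
    # filter matches this sender (FORWARD PATH)
    for rule in rules:
        if rule["source_filter_hash"] and rule["source_filter_hash"] != source_hash:
            continue
        return rule["forward_to_hash"]
    return None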
if mapping: # this is a reply from User C to User B (alias). Forward to User A. @@ -5840,11 +6073,16 @@ class ReticulumMeshChat: # check if this message matches a forwarding rule (FORWARD PATH) # we check for rules that apply to the destination of this message - rules = self.database.misc.get_forwarding_rules(identity_hash=destination_hash, active_only=True) + rules = self.database.misc.get_forwarding_rules( + identity_hash=destination_hash, active_only=True + ) for rule in rules: # check source filter if set - if rule["source_filter_hash"] and rule["source_filter_hash"] != source_hash: + if ( + rule["source_filter_hash"] + and rule["source_filter_hash"] != source_hash + ): continue # find or create mapping for this (Source, Final Recipient) pair @@ -6303,7 +6541,9 @@ class ReticulumMeshChat: # resends all messages that previously failed to send to the provided destination hash async def resend_failed_messages_for_destination(self, destination_hash: str): # get messages that failed to send to this destination - failed_messages = self.database.messages.get_failed_messages_for_destination(destination_hash) + failed_messages = self.database.messages.get_failed_messages_for_destination( + destination_hash + ) # resend failed messages for failed_message in failed_messages: @@ -6361,7 +6601,9 @@ class ReticulumMeshChat: ) # remove original failed message from database - self.database.messages.delete_lxmf_message_by_hash(failed_message["hash"]) + self.database.messages.delete_lxmf_message_by_hash( + failed_message["hash"] + ) # tell all websocket clients that old failed message was deleted so it can remove from ui await self.websocket_broadcast( @@ -6439,7 +6681,9 @@ class ReticulumMeshChat: # gets the custom display name a user has set for the provided destination hash def get_custom_destination_display_name(self, destination_hash: str): - db_destination_display_name = self.database.announces.get_custom_display_name(destination_hash) + db_destination_display_name = self.database.announces.get_custom_display_name( + destination_hash + ) if db_destination_display_name is not None: return db_destination_display_name.display_name @@ -6448,10 +6692,14 @@ class ReticulumMeshChat: # get name to show for an lxmf conversation # currently, this will use the app data from the most recent announce # TODO: we should fetch this from our contacts database, when it gets implemented, and if not found, fallback to app data - def get_lxmf_conversation_name(self, destination_hash, default_name: str | None = "Anonymous Peer"): + def get_lxmf_conversation_name( + self, destination_hash, default_name: str | None = "Anonymous Peer" + ): # get lxmf.delivery announce from database for the provided destination hash results = self.database.announces.get_announces(aspect="lxmf.delivery") - lxmf_announce = next((a for a in results if a["destination_hash"] == destination_hash), None) + lxmf_announce = next( + (a for a in results if a["destination_hash"] == destination_hash), None + ) # if app data is available in database, it should be base64 encoded text that was announced # we will return the parsed lxmf display name as the conversation name @@ -7018,7 +7266,12 @@ def main(): return enable_https = not args.no_https - reticulum_meshchat.run(args.host, args.port, launch_browser=args.headless is False, enable_https=enable_https) + reticulum_meshchat.run( + args.host, + args.port, + launch_browser=args.headless is False, + enable_https=enable_https, + ) if __name__ == "__main__": diff --git 
a/meshchatx/src/backend/announce_manager.py b/meshchatx/src/backend/announce_manager.py index 6a9b4a7..e51d5ff 100644 --- a/meshchatx/src/backend/announce_manager.py +++ b/meshchatx/src/backend/announce_manager.py @@ -7,7 +7,15 @@ class AnnounceManager: def __init__(self, db: Database): self.db = db - def upsert_announce(self, reticulum, identity, destination_hash, aspect, app_data, announce_packet_hash): + def upsert_announce( + self, + reticulum, + identity, + destination_hash, + aspect, + app_data, + announce_packet_hash, + ): # get rssi, snr and signal quality if available rssi = reticulum.get_packet_rssi(announce_packet_hash) snr = reticulum.get_packet_snr(announce_packet_hash) @@ -15,7 +23,9 @@ class AnnounceManager: # prepare data to insert or update data = { - "destination_hash": destination_hash.hex() if isinstance(destination_hash, bytes) else destination_hash, + "destination_hash": destination_hash.hex() + if isinstance(destination_hash, bytes) + else destination_hash, "aspect": aspect, "identity_hash": identity.hash.hex(), "identity_public_key": base64.b64encode(identity.get_public_key()).decode( @@ -32,7 +42,14 @@ class AnnounceManager: self.db.announces.upsert_announce(data) - def get_filtered_announces(self, aspect=None, identity_hash=None, destination_hash=None, query=None, blocked_identity_hashes=None): + def get_filtered_announces( + self, + aspect=None, + identity_hash=None, + destination_hash=None, + query=None, + blocked_identity_hashes=None, + ): sql = "SELECT * FROM announces WHERE 1=1" params = [] @@ -56,4 +73,3 @@ class AnnounceManager: sql += " ORDER BY updated_at DESC" return self.db.provider.fetchall(sql, params) - diff --git a/meshchatx/src/backend/archiver_manager.py b/meshchatx/src/backend/archiver_manager.py index 94dadfa..0160fad 100644 --- a/meshchatx/src/backend/archiver_manager.py +++ b/meshchatx/src/backend/archiver_manager.py @@ -7,7 +7,9 @@ class ArchiverManager: def __init__(self, db: Database): self.db = db - def archive_page(self, destination_hash, page_path, content, max_versions=5, max_storage_gb=1): + def archive_page( + self, destination_hash, page_path, content, max_versions=5, max_storage_gb=1 + ): content_hash = hashlib.sha256(content.encode("utf-8")).hexdigest() # Check if already exists @@ -27,18 +29,25 @@ class ArchiverManager: # Delete older versions to_delete = versions[max_versions:] for version in to_delete: - self.db.provider.execute("DELETE FROM archived_pages WHERE id = ?", (version["id"],)) + self.db.provider.execute( + "DELETE FROM archived_pages WHERE id = ?", (version["id"],) + ) # Enforce total storage limit (approximate) - total_size_row = self.db.provider.fetchone("SELECT SUM(LENGTH(content)) as total_size FROM archived_pages") + total_size_row = self.db.provider.fetchone( + "SELECT SUM(LENGTH(content)) as total_size FROM archived_pages" + ) total_size = total_size_row["total_size"] or 0 max_bytes = max_storage_gb * 1024 * 1024 * 1024 while total_size > max_bytes: - oldest = self.db.provider.fetchone("SELECT id, LENGTH(content) as size FROM archived_pages ORDER BY created_at ASC LIMIT 1") + oldest = self.db.provider.fetchone( + "SELECT id, LENGTH(content) as size FROM archived_pages ORDER BY created_at ASC LIMIT 1" + ) if oldest: - self.db.provider.execute("DELETE FROM archived_pages WHERE id = ?", (oldest["id"],)) + self.db.provider.execute( + "DELETE FROM archived_pages WHERE id = ?", (oldest["id"],) + ) total_size -= oldest["size"] else: break - diff --git a/meshchatx/src/backend/config_manager.py 
b/meshchatx/src/backend/config_manager.py index e7bc911..413084a 100644 --- a/meshchatx/src/backend/config_manager.py +++ b/meshchatx/src/backend/config_manager.py @@ -1,4 +1,3 @@ - class ConfigManager: def __init__(self, db): self.db = db @@ -6,75 +5,139 @@ class ConfigManager: # all possible config items self.database_version = self.IntConfig(self, "database_version", None) self.display_name = self.StringConfig(self, "display_name", "Anonymous Peer") - self.auto_announce_enabled = self.BoolConfig(self, "auto_announce_enabled", False) - self.auto_announce_interval_seconds = self.IntConfig(self, "auto_announce_interval_seconds", 0) + self.auto_announce_enabled = self.BoolConfig( + self, "auto_announce_enabled", False + ) + self.auto_announce_interval_seconds = self.IntConfig( + self, "auto_announce_interval_seconds", 0 + ) self.last_announced_at = self.IntConfig(self, "last_announced_at", None) self.theme = self.StringConfig(self, "theme", "light") self.language = self.StringConfig(self, "language", "en") self.auto_resend_failed_messages_when_announce_received = self.BoolConfig( - self, "auto_resend_failed_messages_when_announce_received", True, + self, + "auto_resend_failed_messages_when_announce_received", + True, ) self.allow_auto_resending_failed_messages_with_attachments = self.BoolConfig( - self, "allow_auto_resending_failed_messages_with_attachments", False, + self, + "allow_auto_resending_failed_messages_with_attachments", + False, ) self.auto_send_failed_messages_to_propagation_node = self.BoolConfig( - self, "auto_send_failed_messages_to_propagation_node", False, + self, + "auto_send_failed_messages_to_propagation_node", + False, ) self.show_suggested_community_interfaces = self.BoolConfig( - self, "show_suggested_community_interfaces", True, + self, + "show_suggested_community_interfaces", + True, ) self.lxmf_delivery_transfer_limit_in_bytes = self.IntConfig( - self, "lxmf_delivery_transfer_limit_in_bytes", 1000 * 1000 * 10, + self, + "lxmf_delivery_transfer_limit_in_bytes", + 1000 * 1000 * 10, ) # 10MB self.lxmf_preferred_propagation_node_destination_hash = self.StringConfig( - self, "lxmf_preferred_propagation_node_destination_hash", None, + self, + "lxmf_preferred_propagation_node_destination_hash", + None, ) - self.lxmf_preferred_propagation_node_auto_sync_interval_seconds = self.IntConfig( - self, "lxmf_preferred_propagation_node_auto_sync_interval_seconds", 0, + self.lxmf_preferred_propagation_node_auto_sync_interval_seconds = ( + self.IntConfig( + self, + "lxmf_preferred_propagation_node_auto_sync_interval_seconds", + 0, + ) ) self.lxmf_preferred_propagation_node_last_synced_at = self.IntConfig( - self, "lxmf_preferred_propagation_node_last_synced_at", None, + self, + "lxmf_preferred_propagation_node_last_synced_at", + None, ) self.lxmf_local_propagation_node_enabled = self.BoolConfig( - self, "lxmf_local_propagation_node_enabled", False, + self, + "lxmf_local_propagation_node_enabled", + False, ) self.lxmf_user_icon_name = self.StringConfig(self, "lxmf_user_icon_name", None) self.lxmf_user_icon_foreground_colour = self.StringConfig( - self, "lxmf_user_icon_foreground_colour", None, + self, + "lxmf_user_icon_foreground_colour", + None, ) self.lxmf_user_icon_background_colour = self.StringConfig( - self, "lxmf_user_icon_background_colour", None, + self, + "lxmf_user_icon_background_colour", + None, ) self.lxmf_inbound_stamp_cost = self.IntConfig( - self, "lxmf_inbound_stamp_cost", 8, + self, + "lxmf_inbound_stamp_cost", + 8, ) # for direct delivery messages 
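# editor's note: a standalone miniature of the typed-wrapper pattern used for
# every config item in this class, assuming only what this diff shows: values
# are persisted as strings under a key, and IntConfig/BoolConfig/StringConfig
# convert on get/set. The dict below stands in for the config table, and the
# class name is illustrative.
class MiniConfig:
    def __init__(self):
        self._store: dict[str, str] = {}
        self.crawler_max_retries = self.IntConfig(self, "crawler_max_retries", 3)

    def get(self, key, default_value=None):
        return self._store.get(key, default_value)

    def set(self, key, value: str):
        self._store[key] = value

    class IntConfig:
        def __init__(self, manager, key, default_value):
            self.manager = manager
            self.key = key
            self.default_value = default_value

        def get(self) -> int | None:
            raw = self.manager.get(self.key)
            return int(raw) if raw is not None else self.default_value

        def set(self, value: int):
            self.manager.set(self.key, str(value))

# usage: MiniConfig().crawler_max_retries.get() returns 3 until a value is set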
self.lxmf_propagation_node_stamp_cost = self.IntConfig( - self, "lxmf_propagation_node_stamp_cost", 16, + self, + "lxmf_propagation_node_stamp_cost", + 16, ) # for propagation node messages - self.page_archiver_enabled = self.BoolConfig(self, "page_archiver_enabled", True) - self.page_archiver_max_versions = self.IntConfig(self, "page_archiver_max_versions", 5) - self.archives_max_storage_gb = self.IntConfig(self, "archives_max_storage_gb", 1) + self.page_archiver_enabled = self.BoolConfig( + self, "page_archiver_enabled", True + ) + self.page_archiver_max_versions = self.IntConfig( + self, "page_archiver_max_versions", 5 + ) + self.archives_max_storage_gb = self.IntConfig( + self, "archives_max_storage_gb", 1 + ) self.crawler_enabled = self.BoolConfig(self, "crawler_enabled", False) self.crawler_max_retries = self.IntConfig(self, "crawler_max_retries", 3) - self.crawler_retry_delay_seconds = self.IntConfig(self, "crawler_retry_delay_seconds", 3600) + self.crawler_retry_delay_seconds = self.IntConfig( + self, "crawler_retry_delay_seconds", 3600 + ) self.crawler_max_concurrent = self.IntConfig(self, "crawler_max_concurrent", 1) self.auth_enabled = self.BoolConfig(self, "auth_enabled", False) self.auth_password_hash = self.StringConfig(self, "auth_password_hash", None) self.auth_session_secret = self.StringConfig(self, "auth_session_secret", None) + # voicemail config + self.voicemail_enabled = self.BoolConfig(self, "voicemail_enabled", False) + self.voicemail_greeting = self.StringConfig( + self, + "voicemail_greeting", + "Hello, I am not available right now. Please leave a message after the beep.", + ) + self.voicemail_auto_answer_delay_seconds = self.IntConfig( + self, + "voicemail_auto_answer_delay_seconds", + 20, + ) + self.voicemail_max_recording_seconds = self.IntConfig( + self, + "voicemail_max_recording_seconds", + 60, + ) + # map config self.map_offline_enabled = self.BoolConfig(self, "map_offline_enabled", False) self.map_offline_path = self.StringConfig(self, "map_offline_path", None) self.map_mbtiles_dir = self.StringConfig(self, "map_mbtiles_dir", None) - self.map_tile_cache_enabled = self.BoolConfig(self, "map_tile_cache_enabled", True) + self.map_tile_cache_enabled = self.BoolConfig( + self, "map_tile_cache_enabled", True + ) self.map_default_lat = self.StringConfig(self, "map_default_lat", "0.0") self.map_default_lon = self.StringConfig(self, "map_default_lon", "0.0") self.map_default_zoom = self.IntConfig(self, "map_default_zoom", 2) self.map_tile_server_url = self.StringConfig( - self, "map_tile_server_url", "https://tile.openstreetmap.org/{z}/{x}/{y}.png", + self, + "map_tile_server_url", + "https://tile.openstreetmap.org/{z}/{x}/{y}.png", ) self.map_nominatim_api_url = self.StringConfig( - self, "map_nominatim_api_url", "https://nominatim.openstreetmap.org", + self, + "map_nominatim_api_url", + "https://nominatim.openstreetmap.org", ) def get(self, key: str, default_value=None) -> str | None: @@ -128,4 +191,3 @@ class ConfigManager: def set(self, value: int): self.manager.set(self.key, str(value)) - diff --git a/meshchatx/src/backend/database/__init__.py b/meshchatx/src/backend/database/__init__.py index afc943d..d462b3e 100644 --- a/meshchatx/src/backend/database/__init__.py +++ b/meshchatx/src/backend/database/__init__.py @@ -6,6 +6,7 @@ from .misc import MiscDAO from .provider import DatabaseProvider from .schema import DatabaseSchema from .telephone import TelephoneDAO +from .voicemails import VoicemailDAO class Database: @@ -17,12 +18,15 @@ class Database: 
self.announces = AnnounceDAO(self.provider) self.misc = MiscDAO(self.provider) self.telephone = TelephoneDAO(self.provider) + self.voicemails = VoicemailDAO(self.provider) def initialize(self): self.schema.initialize() def migrate_from_legacy(self, reticulum_config_dir, identity_hash_hex): - migrator = LegacyMigrator(self.provider, reticulum_config_dir, identity_hash_hex) + migrator = LegacyMigrator( + self.provider, reticulum_config_dir, identity_hash_hex + ) if migrator.should_migrate(): return migrator.migrate() return False @@ -32,4 +36,3 @@ class Database: def close(self): self.provider.close() - diff --git a/meshchatx/src/backend/database/announces.py b/meshchatx/src/backend/database/announces.py index 93bfd8e..70e13bb 100644 --- a/meshchatx/src/backend/database/announces.py +++ b/meshchatx/src/backend/database/announces.py @@ -13,16 +13,26 @@ class AnnounceDAO: data = dict(data) fields = [ - "destination_hash", "aspect", "identity_hash", "identity_public_key", - "app_data", "rssi", "snr", "quality", + "destination_hash", + "aspect", + "identity_hash", + "identity_public_key", + "app_data", + "rssi", + "snr", + "quality", ] # These are safe as they are from a hardcoded list columns = ", ".join(fields) placeholders = ", ".join(["?"] * len(fields)) - update_set = ", ".join([f"{f} = EXCLUDED.{f}" for f in fields if f != "destination_hash"]) + update_set = ", ".join( + [f"{f} = EXCLUDED.{f}" for f in fields if f != "destination_hash"] + ) - query = f"INSERT INTO announces ({columns}, updated_at) VALUES ({placeholders}, ?) " \ - f"ON CONFLICT(destination_hash) DO UPDATE SET {update_set}, updated_at = EXCLUDED.updated_at" # noqa: S608 + query = ( + f"INSERT INTO announces ({columns}, updated_at) VALUES ({placeholders}, ?) " + f"ON CONFLICT(destination_hash) DO UPDATE SET {update_set}, updated_at = EXCLUDED.updated_at" + ) # noqa: S608 params = [data.get(f) for f in fields] params.append(datetime.now(UTC)) @@ -30,13 +40,19 @@ class AnnounceDAO: def get_announces(self, aspect=None): if aspect: - return self.provider.fetchall("SELECT * FROM announces WHERE aspect = ?", (aspect,)) + return self.provider.fetchall( + "SELECT * FROM announces WHERE aspect = ?", (aspect,) + ) return self.provider.fetchall("SELECT * FROM announces") def get_announce_by_hash(self, destination_hash): - return self.provider.fetchone("SELECT * FROM announces WHERE destination_hash = ?", (destination_hash,)) + return self.provider.fetchone( + "SELECT * FROM announces WHERE destination_hash = ?", (destination_hash,) + ) - def get_filtered_announces(self, aspect=None, search_term=None, limit=None, offset=0): + def get_filtered_announces( + self, aspect=None, search_term=None, limit=None, offset=0 + ): query = "SELECT * FROM announces WHERE 1=1" params = [] if aspect: @@ -58,33 +74,49 @@ class AnnounceDAO: # Custom Display Names def upsert_custom_display_name(self, destination_hash, display_name): now = datetime.now(UTC) - self.provider.execute(""" + self.provider.execute( + """ INSERT INTO custom_destination_display_names (destination_hash, display_name, updated_at) VALUES (?, ?, ?) 
ON CONFLICT(destination_hash) DO UPDATE SET display_name = EXCLUDED.display_name, updated_at = EXCLUDED.updated_at - """, (destination_hash, display_name, now)) + """, + (destination_hash, display_name, now), + ) def get_custom_display_name(self, destination_hash): - row = self.provider.fetchone("SELECT display_name FROM custom_destination_display_names WHERE destination_hash = ?", (destination_hash,)) + row = self.provider.fetchone( + "SELECT display_name FROM custom_destination_display_names WHERE destination_hash = ?", + (destination_hash,), + ) return row["display_name"] if row else None def delete_custom_display_name(self, destination_hash): - self.provider.execute("DELETE FROM custom_destination_display_names WHERE destination_hash = ?", (destination_hash,)) + self.provider.execute( + "DELETE FROM custom_destination_display_names WHERE destination_hash = ?", + (destination_hash,), + ) # Favourites def upsert_favourite(self, destination_hash, display_name, aspect): now = datetime.now(UTC) - self.provider.execute(""" + self.provider.execute( + """ INSERT INTO favourite_destinations (destination_hash, display_name, aspect, updated_at) VALUES (?, ?, ?, ?) ON CONFLICT(destination_hash) DO UPDATE SET display_name = EXCLUDED.display_name, aspect = EXCLUDED.aspect, updated_at = EXCLUDED.updated_at - """, (destination_hash, display_name, aspect, now)) + """, + (destination_hash, display_name, aspect, now), + ) def get_favourites(self, aspect=None): if aspect: - return self.provider.fetchall("SELECT * FROM favourite_destinations WHERE aspect = ?", (aspect,)) + return self.provider.fetchall( + "SELECT * FROM favourite_destinations WHERE aspect = ?", (aspect,) + ) return self.provider.fetchall("SELECT * FROM favourite_destinations") def delete_favourite(self, destination_hash): - self.provider.execute("DELETE FROM favourite_destinations WHERE destination_hash = ?", (destination_hash,)) - + self.provider.execute( + "DELETE FROM favourite_destinations WHERE destination_hash = ?", + (destination_hash,), + ) diff --git a/meshchatx/src/backend/database/config.py b/meshchatx/src/backend/database/config.py index 7c7925d..384f11b 100644 --- a/meshchatx/src/backend/database/config.py +++ b/meshchatx/src/backend/database/config.py @@ -24,4 +24,3 @@ class ConfigDAO: def delete(self, key): self.provider.execute("DELETE FROM config WHERE key = ?", (key,)) - diff --git a/meshchatx/src/backend/database/legacy_migrator.py b/meshchatx/src/backend/database/legacy_migrator.py index db23a6c..5c49329 100644 --- a/meshchatx/src/backend/database/legacy_migrator.py +++ b/meshchatx/src/backend/database/legacy_migrator.py @@ -8,8 +8,7 @@ class LegacyMigrator: self.identity_hash_hex = identity_hash_hex def get_legacy_db_path(self): - """Detect the path to the legacy database based on the Reticulum config directory. 
- """ + """Detect the path to the legacy database based on the Reticulum config directory.""" possible_dirs = [] if self.reticulum_config_dir: possible_dirs.append(self.reticulum_config_dir) @@ -21,7 +20,9 @@ class LegacyMigrator: # Check each directory for config_dir in possible_dirs: - legacy_path = os.path.join(config_dir, "identities", self.identity_hash_hex, "database.db") + legacy_path = os.path.join( + config_dir, "identities", self.identity_hash_hex, "database.db" + ) if os.path.exists(legacy_path): # Ensure it's not the same as our current DB path # (though this is unlikely given the different base directories) @@ -58,8 +59,7 @@ class LegacyMigrator: return True def migrate(self): - """Perform the migration from the legacy database. - """ + """Perform the migration from the legacy database.""" legacy_path = self.get_legacy_db_path() if not legacy_path: return False @@ -100,11 +100,23 @@ class LegacyMigrator: if res: # Get columns from both databases to ensure compatibility # These PRAGMA calls are safe as they use controlled table/alias names - legacy_columns = [row["name"] for row in self.provider.fetchall(f"PRAGMA {alias}.table_info({table})")] - current_columns = [row["name"] for row in self.provider.fetchall(f"PRAGMA table_info({table})")] + legacy_columns = [ + row["name"] + for row in self.provider.fetchall( + f"PRAGMA {alias}.table_info({table})" + ) + ] + current_columns = [ + row["name"] + for row in self.provider.fetchall( + f"PRAGMA table_info({table})" + ) + ] # Find common columns - common_columns = [col for col in legacy_columns if col in current_columns] + common_columns = [ + col for col in legacy_columns if col in current_columns + ] if common_columns: cols_str = ", ".join(common_columns) @@ -112,9 +124,13 @@ class LegacyMigrator: # The table and columns are controlled by us migrate_query = f"INSERT OR IGNORE INTO {table} ({cols_str}) SELECT {cols_str} FROM {alias}.{table}" # noqa: S608 self.provider.execute(migrate_query) - print(f" - Migrated table: {table} ({len(common_columns)} columns)") + print( + f" - Migrated table: {table} ({len(common_columns)} columns)" + ) else: - print(f" - Skipping table {table}: No common columns found") + print( + f" - Skipping table {table}: No common columns found" + ) except Exception as e: print(f" - Failed to migrate table {table}: {e}") diff --git a/meshchatx/src/backend/database/messages.py b/meshchatx/src/backend/database/messages.py index cdd2819..6aa41b2 100644 --- a/meshchatx/src/backend/database/messages.py +++ b/meshchatx/src/backend/database/messages.py @@ -15,17 +15,33 @@ class MessageDAO: # Ensure all required fields are present and handle defaults fields = [ - "hash", "source_hash", "destination_hash", "state", "progress", - "is_incoming", "method", "delivery_attempts", "next_delivery_attempt_at", - "title", "content", "fields", "timestamp", "rssi", "snr", "quality", "is_spam", + "hash", + "source_hash", + "destination_hash", + "state", + "progress", + "is_incoming", + "method", + "delivery_attempts", + "next_delivery_attempt_at", + "title", + "content", + "fields", + "timestamp", + "rssi", + "snr", + "quality", + "is_spam", ] columns = ", ".join(fields) placeholders = ", ".join(["?"] * len(fields)) update_set = ", ".join([f"{f} = EXCLUDED.{f}" for f in fields if f != "hash"]) - query = f"INSERT INTO lxmf_messages ({columns}, updated_at) VALUES ({placeholders}, ?) 
" \ - f"ON CONFLICT(hash) DO UPDATE SET {update_set}, updated_at = EXCLUDED.updated_at" # noqa: S608 + query = ( + f"INSERT INTO lxmf_messages ({columns}, updated_at) VALUES ({placeholders}, ?) " + f"ON CONFLICT(hash) DO UPDATE SET {update_set}, updated_at = EXCLUDED.updated_at" + ) # noqa: S608 params = [] for f in fields: @@ -38,10 +54,14 @@ class MessageDAO: self.provider.execute(query, params) def get_lxmf_message_by_hash(self, message_hash): - return self.provider.fetchone("SELECT * FROM lxmf_messages WHERE hash = ?", (message_hash,)) + return self.provider.fetchone( + "SELECT * FROM lxmf_messages WHERE hash = ?", (message_hash,) + ) def delete_lxmf_message_by_hash(self, message_hash): - self.provider.execute("DELETE FROM lxmf_messages WHERE hash = ?", (message_hash,)) + self.provider.execute( + "DELETE FROM lxmf_messages WHERE hash = ?", (message_hash,) + ) def get_conversation_messages(self, destination_hash, limit=100, offset=0): return self.provider.fetchall( @@ -73,13 +93,16 @@ class MessageDAO: ) def is_conversation_unread(self, destination_hash): - row = self.provider.fetchone(""" + row = self.provider.fetchone( + """ SELECT m.timestamp, r.last_read_at FROM lxmf_messages m LEFT JOIN lxmf_conversation_read_state r ON r.destination_hash = ? WHERE (m.destination_hash = ? OR m.source_hash = ?) ORDER BY m.timestamp DESC LIMIT 1 - """, (destination_hash, destination_hash, destination_hash)) + """, + (destination_hash, destination_hash, destination_hash), + ) if not row: return False @@ -93,13 +116,16 @@ class MessageDAO: return row["timestamp"] > last_read_at.timestamp() def mark_stuck_messages_as_failed(self): - self.provider.execute(""" + self.provider.execute( + """ UPDATE lxmf_messages SET state = 'failed', updated_at = ? WHERE state = 'outbound' OR (state = 'sent' AND method = 'opportunistic') OR state = 'sending' - """, (datetime.now(UTC).isoformat(),)) + """, + (datetime.now(UTC).isoformat(),), + ) def get_failed_messages_for_destination(self, destination_hash): return self.provider.fetchall( @@ -115,9 +141,14 @@ class MessageDAO: return row["count"] if row else 0 # Forwarding Mappings - def get_forwarding_mapping(self, alias_hash=None, original_sender_hash=None, final_recipient_hash=None): + def get_forwarding_mapping( + self, alias_hash=None, original_sender_hash=None, final_recipient_hash=None + ): if alias_hash: - return self.provider.fetchone("SELECT * FROM lxmf_forwarding_mappings WHERE alias_hash = ?", (alias_hash,)) + return self.provider.fetchone( + "SELECT * FROM lxmf_forwarding_mappings WHERE alias_hash = ?", + (alias_hash,), + ) if original_sender_hash and final_recipient_hash: return self.provider.fetchone( "SELECT * FROM lxmf_forwarding_mappings WHERE original_sender_hash = ? 
AND final_recipient_hash = ?", @@ -131,8 +162,11 @@ class MessageDAO: data = dict(data) fields = [ - "alias_identity_private_key", "alias_hash", "original_sender_hash", - "final_recipient_hash", "original_destination_hash", + "alias_identity_private_key", + "alias_hash", + "original_sender_hash", + "final_recipient_hash", + "original_destination_hash", ] columns = ", ".join(fields) placeholders = ", ".join(["?"] * len(fields)) @@ -143,4 +177,3 @@ class MessageDAO: def get_all_forwarding_mappings(self): return self.provider.fetchall("SELECT * FROM lxmf_forwarding_mappings") - diff --git a/meshchatx/src/backend/database/misc.py b/meshchatx/src/backend/database/misc.py index 49cf6d1..52d04d7 100644 --- a/meshchatx/src/backend/database/misc.py +++ b/meshchatx/src/backend/database/misc.py @@ -15,13 +15,22 @@ class MiscDAO: ) def is_destination_blocked(self, destination_hash): - return self.provider.fetchone("SELECT 1 FROM blocked_destinations WHERE destination_hash = ?", (destination_hash,)) is not None + return ( + self.provider.fetchone( + "SELECT 1 FROM blocked_destinations WHERE destination_hash = ?", + (destination_hash,), + ) + is not None + ) def get_blocked_destinations(self): return self.provider.fetchall("SELECT * FROM blocked_destinations") def delete_blocked_destination(self, destination_hash): - self.provider.execute("DELETE FROM blocked_destinations WHERE destination_hash = ?", (destination_hash,)) + self.provider.execute( + "DELETE FROM blocked_destinations WHERE destination_hash = ?", + (destination_hash,), + ) # Spam Keywords def add_spam_keyword(self, keyword): @@ -45,9 +54,12 @@ class MiscDAO: return False # User Icons - def update_lxmf_user_icon(self, destination_hash, icon_name, foreground_colour, background_colour): + def update_lxmf_user_icon( + self, destination_hash, icon_name, foreground_colour, background_colour + ): now = datetime.now(UTC) - self.provider.execute(""" + self.provider.execute( + """ INSERT INTO lxmf_user_icons (destination_hash, icon_name, foreground_colour, background_colour, updated_at) VALUES (?, ?, ?, ?, ?) 
ON CONFLICT(destination_hash) DO UPDATE SET @@ -55,10 +67,15 @@ class MiscDAO: foreground_colour = EXCLUDED.foreground_colour, background_colour = EXCLUDED.background_colour, updated_at = EXCLUDED.updated_at - """, (destination_hash, icon_name, foreground_colour, background_colour, now)) + """, + (destination_hash, icon_name, foreground_colour, background_colour, now), + ) def get_user_icon(self, destination_hash): - return self.provider.fetchone("SELECT * FROM lxmf_user_icons WHERE destination_hash = ?", (destination_hash,)) + return self.provider.fetchone( + "SELECT * FROM lxmf_user_icons WHERE destination_hash = ?", + (destination_hash,), + ) # Forwarding Rules def get_forwarding_rules(self, identity_hash=None, active_only=False): @@ -71,18 +88,31 @@ class MiscDAO: query += " AND is_active = 1" return self.provider.fetchall(query, params) - def create_forwarding_rule(self, identity_hash, forward_to_hash, source_filter_hash, is_active=True): + def create_forwarding_rule( + self, identity_hash, forward_to_hash, source_filter_hash, is_active=True + ): now = datetime.now(UTC) self.provider.execute( "INSERT INTO lxmf_forwarding_rules (identity_hash, forward_to_hash, source_filter_hash, is_active, updated_at) VALUES (?, ?, ?, ?, ?)", - (identity_hash, forward_to_hash, source_filter_hash, 1 if is_active else 0, now), + ( + identity_hash, + forward_to_hash, + source_filter_hash, + 1 if is_active else 0, + now, + ), ) def delete_forwarding_rule(self, rule_id): - self.provider.execute("DELETE FROM lxmf_forwarding_rules WHERE id = ?", (rule_id,)) + self.provider.execute( + "DELETE FROM lxmf_forwarding_rules WHERE id = ?", (rule_id,) + ) def toggle_forwarding_rule(self, rule_id): - self.provider.execute("UPDATE lxmf_forwarding_rules SET is_active = NOT is_active WHERE id = ?", (rule_id,)) + self.provider.execute( + "UPDATE lxmf_forwarding_rules SET is_active = NOT is_active WHERE id = ?", + (rule_id,), + ) # Archived Pages def archive_page(self, destination_hash, page_path, content, page_hash): @@ -105,7 +135,9 @@ class MiscDAO: params.append(destination_hash) if query: like_term = f"%{query}%" - sql += " AND (destination_hash LIKE ? OR page_path LIKE ? OR content LIKE ?)" + sql += ( + " AND (destination_hash LIKE ? OR page_path LIKE ? OR content LIKE ?)" + ) params.extend([like_term, like_term, like_term]) sql += " ORDER BY created_at DESC" @@ -113,25 +145,41 @@ class MiscDAO: def delete_archived_pages(self, destination_hash=None, page_path=None): if destination_hash and page_path: - self.provider.execute("DELETE FROM archived_pages WHERE destination_hash = ? AND page_path = ?", (destination_hash, page_path)) + self.provider.execute( + "DELETE FROM archived_pages WHERE destination_hash = ? AND page_path = ?", + (destination_hash, page_path), + ) else: self.provider.execute("DELETE FROM archived_pages") # Crawl Tasks - def upsert_crawl_task(self, destination_hash, page_path, status="pending", retry_count=0): - self.provider.execute(""" + def upsert_crawl_task( + self, destination_hash, page_path, status="pending", retry_count=0 + ): + self.provider.execute( + """ INSERT INTO crawl_tasks (destination_hash, page_path, status, retry_count) VALUES (?, ?, ?, ?) 
ON CONFLICT(destination_hash, page_path) DO UPDATE SET status = EXCLUDED.status, retry_count = EXCLUDED.retry_count - """, (destination_hash, page_path, status, retry_count)) + """, + (destination_hash, page_path, status, retry_count), + ) def get_pending_crawl_tasks(self): - return self.provider.fetchall("SELECT * FROM crawl_tasks WHERE status = 'pending'") + return self.provider.fetchall( + "SELECT * FROM crawl_tasks WHERE status = 'pending'" + ) def update_crawl_task(self, task_id, **kwargs): - allowed_keys = {"destination_hash", "page_path", "status", "retry_count", "updated_at"} + allowed_keys = { + "destination_hash", + "page_path", + "status", + "retry_count", + "updated_at", + } filtered_kwargs = {k: v for k, v in kwargs.items() if k in allowed_keys} if not filtered_kwargs: @@ -150,5 +198,6 @@ class MiscDAO: ) def get_archived_page_by_id(self, archive_id): - return self.provider.fetchone("SELECT * FROM archived_pages WHERE id = ?", (archive_id,)) - + return self.provider.fetchone( + "SELECT * FROM archived_pages WHERE id = ?", (archive_id,) + ) diff --git a/meshchatx/src/backend/database/provider.py b/meshchatx/src/backend/database/provider.py index 4291f60..3fcfcc1 100644 --- a/meshchatx/src/backend/database/provider.py +++ b/meshchatx/src/backend/database/provider.py @@ -23,7 +23,9 @@ class DatabaseProvider: @property def connection(self): if not hasattr(self._local, "connection"): - self._local.connection = sqlite3.connect(self.db_path, check_same_thread=False) + self._local.connection = sqlite3.connect( + self.db_path, check_same_thread=False + ) self._local.connection.row_factory = sqlite3.Row # Enable WAL mode for better concurrency self._local.connection.execute("PRAGMA journal_mode=WAL") @@ -62,4 +64,3 @@ class DatabaseProvider: def checkpoint(self): return self.fetchall("PRAGMA wal_checkpoint(TRUNCATE)") - diff --git a/meshchatx/src/backend/database/schema.py b/meshchatx/src/backend/database/schema.py index a304256..302c0a3 100644 --- a/meshchatx/src/backend/database/schema.py +++ b/meshchatx/src/backend/database/schema.py @@ -2,7 +2,7 @@ from .provider import DatabaseProvider class DatabaseSchema: - LATEST_VERSION = 12 + LATEST_VERSION = 13 def __init__(self, provider: DatabaseProvider): self.provider = provider @@ -16,7 +16,9 @@ class DatabaseSchema: self.migrate(current_version) def _get_current_version(self): - row = self.provider.fetchone("SELECT value FROM config WHERE key = ?", ("database_version",)) + row = self.provider.fetchone( + "SELECT value FROM config WHERE key = ?", ("database_version",) + ) if row: return int(row["value"]) return 0 @@ -189,21 +191,45 @@ class DatabaseSchema: created_at DATETIME DEFAULT CURRENT_TIMESTAMP ) """, + "voicemails": """ + CREATE TABLE IF NOT EXISTS voicemails ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + remote_identity_hash TEXT, + remote_identity_name TEXT, + filename TEXT, + duration_seconds INTEGER, + is_read INTEGER DEFAULT 0, + timestamp REAL, + created_at DATETIME DEFAULT CURRENT_TIMESTAMP + ) + """, } for table_name, create_sql in tables.items(): self.provider.execute(create_sql) # Create indexes that were present if table_name == "announces": - self.provider.execute("CREATE INDEX IF NOT EXISTS idx_announces_aspect ON announces(aspect)") - self.provider.execute("CREATE INDEX IF NOT EXISTS idx_announces_identity_hash ON announces(identity_hash)") + self.provider.execute( + "CREATE INDEX IF NOT EXISTS idx_announces_aspect ON announces(aspect)" + ) + self.provider.execute( + "CREATE INDEX IF NOT EXISTS 
idx_announces_identity_hash ON announces(identity_hash)" + ) elif table_name == "lxmf_messages": - self.provider.execute("CREATE INDEX IF NOT EXISTS idx_lxmf_messages_source_hash ON lxmf_messages(source_hash)") - self.provider.execute("CREATE INDEX IF NOT EXISTS idx_lxmf_messages_destination_hash ON lxmf_messages(destination_hash)") + self.provider.execute( + "CREATE INDEX IF NOT EXISTS idx_lxmf_messages_source_hash ON lxmf_messages(source_hash)" + ) + self.provider.execute( + "CREATE INDEX IF NOT EXISTS idx_lxmf_messages_destination_hash ON lxmf_messages(destination_hash)" + ) elif table_name == "blocked_destinations": - self.provider.execute("CREATE INDEX IF NOT EXISTS idx_blocked_destinations_hash ON blocked_destinations(destination_hash)") + self.provider.execute( + "CREATE INDEX IF NOT EXISTS idx_blocked_destinations_hash ON blocked_destinations(destination_hash)" + ) elif table_name == "spam_keywords": - self.provider.execute("CREATE INDEX IF NOT EXISTS idx_spam_keywords_keyword ON spam_keywords(keyword)") + self.provider.execute( + "CREATE INDEX IF NOT EXISTS idx_spam_keywords_keyword ON spam_keywords(keyword)" + ) def migrate(self, current_version): if current_version < 7: @@ -217,9 +243,15 @@ class DatabaseSchema: created_at DATETIME DEFAULT CURRENT_TIMESTAMP ) """) - self.provider.execute("CREATE INDEX IF NOT EXISTS idx_archived_pages_destination_hash ON archived_pages(destination_hash)") - self.provider.execute("CREATE INDEX IF NOT EXISTS idx_archived_pages_page_path ON archived_pages(page_path)") - self.provider.execute("CREATE INDEX IF NOT EXISTS idx_archived_pages_hash ON archived_pages(hash)") + self.provider.execute( + "CREATE INDEX IF NOT EXISTS idx_archived_pages_destination_hash ON archived_pages(destination_hash)" + ) + self.provider.execute( + "CREATE INDEX IF NOT EXISTS idx_archived_pages_page_path ON archived_pages(page_path)" + ) + self.provider.execute( + "CREATE INDEX IF NOT EXISTS idx_archived_pages_hash ON archived_pages(hash)" + ) if current_version < 8: self.provider.execute(""" @@ -234,8 +266,12 @@ class DatabaseSchema: created_at DATETIME DEFAULT CURRENT_TIMESTAMP ) """) - self.provider.execute("CREATE INDEX IF NOT EXISTS idx_crawl_tasks_destination_hash ON crawl_tasks(destination_hash)") - self.provider.execute("CREATE INDEX IF NOT EXISTS idx_crawl_tasks_page_path ON crawl_tasks(page_path)") + self.provider.execute( + "CREATE INDEX IF NOT EXISTS idx_crawl_tasks_destination_hash ON crawl_tasks(destination_hash)" + ) + self.provider.execute( + "CREATE INDEX IF NOT EXISTS idx_crawl_tasks_page_path ON crawl_tasks(page_path)" + ) if current_version < 9: self.provider.execute(""" @@ -249,7 +285,9 @@ class DatabaseSchema: updated_at DATETIME DEFAULT CURRENT_TIMESTAMP ) """) - self.provider.execute("CREATE INDEX IF NOT EXISTS idx_lxmf_forwarding_rules_identity_hash ON lxmf_forwarding_rules(identity_hash)") + self.provider.execute( + "CREATE INDEX IF NOT EXISTS idx_lxmf_forwarding_rules_identity_hash ON lxmf_forwarding_rules(identity_hash)" + ) self.provider.execute(""" CREATE TABLE IF NOT EXISTS lxmf_forwarding_mappings ( @@ -262,9 +300,15 @@ class DatabaseSchema: created_at DATETIME DEFAULT CURRENT_TIMESTAMP ) """) - self.provider.execute("CREATE INDEX IF NOT EXISTS idx_lxmf_forwarding_mappings_alias_hash ON lxmf_forwarding_mappings(alias_hash)") - self.provider.execute("CREATE INDEX IF NOT EXISTS idx_lxmf_forwarding_mappings_sender_hash ON lxmf_forwarding_mappings(original_sender_hash)") - self.provider.execute("CREATE INDEX IF NOT EXISTS 
idx_lxmf_forwarding_mappings_recipient_hash ON lxmf_forwarding_mappings(final_recipient_hash)") + self.provider.execute( + "CREATE INDEX IF NOT EXISTS idx_lxmf_forwarding_mappings_alias_hash ON lxmf_forwarding_mappings(alias_hash)" + ) + self.provider.execute( + "CREATE INDEX IF NOT EXISTS idx_lxmf_forwarding_mappings_sender_hash ON lxmf_forwarding_mappings(original_sender_hash)" + ) + self.provider.execute( + "CREATE INDEX IF NOT EXISTS idx_lxmf_forwarding_mappings_recipient_hash ON lxmf_forwarding_mappings(final_recipient_hash)" + ) if current_version < 10: # Ensure unique constraints exist for ON CONFLICT clauses @@ -272,26 +316,56 @@ class DatabaseSchema: # but a UNIQUE index works for ON CONFLICT. # Clean up duplicates before adding unique indexes - self.provider.execute("DELETE FROM announces WHERE id NOT IN (SELECT MAX(id) FROM announces GROUP BY destination_hash)") - self.provider.execute("DELETE FROM crawl_tasks WHERE id NOT IN (SELECT MAX(id) FROM crawl_tasks GROUP BY destination_hash, page_path)") - self.provider.execute("DELETE FROM custom_destination_display_names WHERE id NOT IN (SELECT MAX(id) FROM custom_destination_display_names GROUP BY destination_hash)") - self.provider.execute("DELETE FROM favourite_destinations WHERE id NOT IN (SELECT MAX(id) FROM favourite_destinations GROUP BY destination_hash)") - self.provider.execute("DELETE FROM lxmf_user_icons WHERE id NOT IN (SELECT MAX(id) FROM lxmf_user_icons GROUP BY destination_hash)") - self.provider.execute("DELETE FROM lxmf_conversation_read_state WHERE id NOT IN (SELECT MAX(id) FROM lxmf_conversation_read_state GROUP BY destination_hash)") - self.provider.execute("DELETE FROM lxmf_messages WHERE id NOT IN (SELECT MAX(id) FROM lxmf_messages GROUP BY hash)") + self.provider.execute( + "DELETE FROM announces WHERE id NOT IN (SELECT MAX(id) FROM announces GROUP BY destination_hash)" + ) + self.provider.execute( + "DELETE FROM crawl_tasks WHERE id NOT IN (SELECT MAX(id) FROM crawl_tasks GROUP BY destination_hash, page_path)" + ) + self.provider.execute( + "DELETE FROM custom_destination_display_names WHERE id NOT IN (SELECT MAX(id) FROM custom_destination_display_names GROUP BY destination_hash)" + ) + self.provider.execute( + "DELETE FROM favourite_destinations WHERE id NOT IN (SELECT MAX(id) FROM favourite_destinations GROUP BY destination_hash)" + ) + self.provider.execute( + "DELETE FROM lxmf_user_icons WHERE id NOT IN (SELECT MAX(id) FROM lxmf_user_icons GROUP BY destination_hash)" + ) + self.provider.execute( + "DELETE FROM lxmf_conversation_read_state WHERE id NOT IN (SELECT MAX(id) FROM lxmf_conversation_read_state GROUP BY destination_hash)" + ) + self.provider.execute( + "DELETE FROM lxmf_messages WHERE id NOT IN (SELECT MAX(id) FROM lxmf_messages GROUP BY hash)" + ) - self.provider.execute("CREATE UNIQUE INDEX IF NOT EXISTS idx_announces_destination_hash_unique ON announces(destination_hash)") - self.provider.execute("CREATE UNIQUE INDEX IF NOT EXISTS idx_crawl_tasks_destination_path_unique ON crawl_tasks(destination_hash, page_path)") - self.provider.execute("CREATE UNIQUE INDEX IF NOT EXISTS idx_custom_display_names_dest_hash_unique ON custom_destination_display_names(destination_hash)") - self.provider.execute("CREATE UNIQUE INDEX IF NOT EXISTS idx_favourite_destinations_dest_hash_unique ON favourite_destinations(destination_hash)") - self.provider.execute("CREATE UNIQUE INDEX IF NOT EXISTS idx_lxmf_messages_hash_unique ON lxmf_messages(hash)") - self.provider.execute("CREATE UNIQUE INDEX IF NOT EXISTS 
idx_lxmf_user_icons_dest_hash_unique ON lxmf_user_icons(destination_hash)") - self.provider.execute("CREATE UNIQUE INDEX IF NOT EXISTS idx_lxmf_conversation_read_state_dest_hash_unique ON lxmf_conversation_read_state(destination_hash)") + self.provider.execute( + "CREATE UNIQUE INDEX IF NOT EXISTS idx_announces_destination_hash_unique ON announces(destination_hash)" + ) + self.provider.execute( + "CREATE UNIQUE INDEX IF NOT EXISTS idx_crawl_tasks_destination_path_unique ON crawl_tasks(destination_hash, page_path)" + ) + self.provider.execute( + "CREATE UNIQUE INDEX IF NOT EXISTS idx_custom_display_names_dest_hash_unique ON custom_destination_display_names(destination_hash)" + ) + self.provider.execute( + "CREATE UNIQUE INDEX IF NOT EXISTS idx_favourite_destinations_dest_hash_unique ON favourite_destinations(destination_hash)" + ) + self.provider.execute( + "CREATE UNIQUE INDEX IF NOT EXISTS idx_lxmf_messages_hash_unique ON lxmf_messages(hash)" + ) + self.provider.execute( + "CREATE UNIQUE INDEX IF NOT EXISTS idx_lxmf_user_icons_dest_hash_unique ON lxmf_user_icons(destination_hash)" + ) + self.provider.execute( + "CREATE UNIQUE INDEX IF NOT EXISTS idx_lxmf_conversation_read_state_dest_hash_unique ON lxmf_conversation_read_state(destination_hash)" + ) if current_version < 11: # Add is_spam column to lxmf_messages if it doesn't exist try: - self.provider.execute("ALTER TABLE lxmf_messages ADD COLUMN is_spam INTEGER DEFAULT 0") + self.provider.execute( + "ALTER TABLE lxmf_messages ADD COLUMN is_spam INTEGER DEFAULT 0" + ) except Exception: # Column might already exist if table was created with newest schema pass @@ -309,9 +383,35 @@ class DatabaseSchema: created_at DATETIME DEFAULT CURRENT_TIMESTAMP ) """) - self.provider.execute("CREATE INDEX IF NOT EXISTS idx_call_history_remote_hash ON call_history(remote_identity_hash)") - self.provider.execute("CREATE INDEX IF NOT EXISTS idx_call_history_timestamp ON call_history(timestamp)") + self.provider.execute( + "CREATE INDEX IF NOT EXISTS idx_call_history_remote_hash ON call_history(remote_identity_hash)" + ) + self.provider.execute( + "CREATE INDEX IF NOT EXISTS idx_call_history_timestamp ON call_history(timestamp)" + ) + + if current_version < 13: + self.provider.execute(""" + CREATE TABLE IF NOT EXISTS voicemails ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + remote_identity_hash TEXT, + remote_identity_name TEXT, + filename TEXT, + duration_seconds INTEGER, + is_read INTEGER DEFAULT 0, + timestamp REAL, + created_at DATETIME DEFAULT CURRENT_TIMESTAMP + ) + """) + self.provider.execute( + "CREATE INDEX IF NOT EXISTS idx_voicemails_remote_hash ON voicemails(remote_identity_hash)" + ) + self.provider.execute( + "CREATE INDEX IF NOT EXISTS idx_voicemails_timestamp ON voicemails(timestamp)" + ) # Update version in config - self.provider.execute("INSERT OR REPLACE INTO config (key, value, updated_at) VALUES (?, ?, CURRENT_TIMESTAMP)", ("database_version", str(self.LATEST_VERSION))) - + self.provider.execute( + "INSERT OR REPLACE INTO config (key, value, updated_at) VALUES (?, ?, CURRENT_TIMESTAMP)", + ("database_version", str(self.LATEST_VERSION)), + ) diff --git a/meshchatx/src/backend/database/telephone.py b/meshchatx/src/backend/database/telephone.py index c3caa84..823a5eb 100644 --- a/meshchatx/src/backend/database/telephone.py +++ b/meshchatx/src/backend/database/telephone.py @@ -1,4 +1,3 @@ - from .provider import DatabaseProvider @@ -41,4 +40,3 @@ class TelephoneDAO: "SELECT * FROM call_history ORDER BY timestamp DESC LIMIT ?", (limit,), ) - 
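Editor's note: a minimal sketch of the DAO-over-provider pattern these database modules share, assuming only the standard library sqlite3. The table shape and UNIQUE column here are illustrative, not the real call_history schema; the sketch also shows why the schema migration above creates UNIQUE indexes, since SQLite's ON CONFLICT upsert target must be backed by a unique constraint or index.

import sqlite3


class MiniProvider:
    # single-thread miniature of DatabaseProvider: name-based row access and
    # parameterized execution (the real one adds per-thread connections and
    # WAL mode)
    def __init__(self, path=":memory:"):
        self.connection = sqlite3.connect(path)
        self.connection.row_factory = sqlite3.Row

    def execute(self, sql, params=()):
        self.connection.execute(sql, params)
        self.connection.commit()

    def fetchall(self, sql, params=()):
        return self.connection.execute(sql, params).fetchall()


class MiniCallHistoryDAO:
    def __init__(self, provider):
        self.provider = provider
        # ON CONFLICT below requires a UNIQUE constraint on the conflict
        # target column
        self.provider.execute(
            "CREATE TABLE IF NOT EXISTS call_history ("
            "remote_identity_hash TEXT UNIQUE, timestamp REAL)"
        )

    def upsert(self, remote_identity_hash, timestamp):
        self.provider.execute(
            "INSERT INTO call_history (remote_identity_hash, timestamp) "
            "VALUES (?, ?) "
            "ON CONFLICT(remote_identity_hash) DO UPDATE SET "
            "timestamp = EXCLUDED.timestamp",
            (remote_identity_hash, timestamp),
        )

    def recent(self, limit=50):
        return self.provider.fetchall(
            "SELECT * FROM call_history ORDER BY timestamp DESC LIMIT ?",
            (limit,),
        )


# usage:
# dao = MiniCallHistoryDAO(MiniProvider())
# dao.upsert("ab12cd", 1700000000.0)
# dao.upsert("ab12cd", 1700000100.0)  # updates the existing row in place
# print([dict(row) for row in dao.recent()])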
diff --git a/meshchatx/src/backend/database/voicemails.py b/meshchatx/src/backend/database/voicemails.py new file mode 100644 index 0000000..04a1612 --- /dev/null +++ b/meshchatx/src/backend/database/voicemails.py @@ -0,0 +1,63 @@ +from .provider import DatabaseProvider + + +class VoicemailDAO: + def __init__(self, provider: DatabaseProvider): + self.provider = provider + + def add_voicemail( + self, + remote_identity_hash, + remote_identity_name, + filename, + duration_seconds, + timestamp, + ): + self.provider.execute( + """ + INSERT INTO voicemails ( + remote_identity_hash, + remote_identity_name, + filename, + duration_seconds, + timestamp + ) VALUES (?, ?, ?, ?, ?) + """, + ( + remote_identity_hash, + remote_identity_name, + filename, + duration_seconds, + timestamp, + ), + ) + + def get_voicemails(self, limit=50, offset=0): + return self.provider.fetchall( + "SELECT * FROM voicemails ORDER BY timestamp DESC LIMIT ? OFFSET ?", + (limit, offset), + ) + + def get_voicemail(self, voicemail_id): + return self.provider.fetchone( + "SELECT * FROM voicemails WHERE id = ?", + (voicemail_id,), + ) + + def mark_as_read(self, voicemail_id): + self.provider.execute( + "UPDATE voicemails SET is_read = 1 WHERE id = ?", + (voicemail_id,), + ) + + def delete_voicemail(self, voicemail_id): + self.provider.execute( + "DELETE FROM voicemails WHERE id = ?", + (voicemail_id,), + ) + + def get_unread_count(self): + row = self.provider.fetchone( + "SELECT COUNT(*) as count FROM voicemails WHERE is_read = 0" + ) + return row["count"] if row else 0 diff --git a/meshchatx/src/backend/forwarding_manager.py b/meshchatx/src/backend/forwarding_manager.py index 4d112cd..b95676c 100644 --- a/meshchatx/src/backend/forwarding_manager.py +++ b/meshchatx/src/backend/forwarding_manager.py @@ -15,14 +15,20 @@ class ForwardingManager: mappings = self.db.messages.get_all_forwarding_mappings() for mapping in mappings: try: - private_key_bytes = base64.b64decode(mapping["alias_identity_private_key"]) + private_key_bytes = base64.b64decode( + mapping["alias_identity_private_key"] + ) alias_identity = RNS.Identity.from_bytes(private_key_bytes) - alias_destination = self.message_router.register_delivery_identity(identity=alias_identity) + alias_destination = self.message_router.register_delivery_identity( + identity=alias_identity + ) self.forwarding_destinations[mapping["alias_hash"]] = alias_destination except Exception as e: print(f"Failed to load forwarding alias {mapping['alias_hash']}: {e}") - def get_or_create_mapping(self, source_hash, final_recipient_hash, original_destination_hash): + def get_or_create_mapping( + self, source_hash, final_recipient_hash, original_destination_hash + ): mapping = self.db.messages.get_forwarding_mapping( original_sender_hash=source_hash, final_recipient_hash=final_recipient_hash, @@ -32,11 +38,15 @@ class ForwardingManager: alias_identity = RNS.Identity() alias_hash = alias_identity.hash.hex() - alias_destination = self.message_router.register_delivery_identity(alias_identity) + alias_destination = self.message_router.register_delivery_identity( + alias_identity + ) self.forwarding_destinations[alias_hash] = alias_destination data = { - "alias_identity_private_key": base64.b64encode(alias_identity.get_private_key()).decode(), + "alias_identity_private_key": base64.b64encode( + alias_identity.get_private_key() + ).decode(), "alias_hash": alias_hash, "original_sender_hash": source_hash, "final_recipient_hash": final_recipient_hash, @@ -45,4 +55,3 @@ class ForwardingManager: 
self.db.messages.create_forwarding_mapping(data) return data return mapping - diff --git a/meshchatx/src/backend/map_manager.py b/meshchatx/src/backend/map_manager.py index 1e197c3..44f0dc6 100644 --- a/meshchatx/src/backend/map_manager.py +++ b/meshchatx/src/backend/map_manager.py @@ -55,13 +55,15 @@ class MapManager: if f.endswith(".mbtiles"): full_path = os.path.join(mbtiles_dir, f) stats = os.stat(full_path) - files.append({ - "name": f, - "path": full_path, - "size": stats.st_size, - "mtime": stats.st_mtime, - "is_active": full_path == self.get_offline_path(), - }) + files.append( + { + "name": f, + "path": full_path, + "size": stats.st_size, + "mtime": stats.st_mtime, + "is_active": full_path == self.get_offline_path(), + } + ) return sorted(files, key=lambda x: x["mtime"], reverse=True) def delete_mbtiles(self, filename): @@ -97,7 +99,10 @@ class MapManager: # Basic validation: ensure it's raster (format is not pbf) if metadata.get("format") == "pbf": - RNS.log("MBTiles file is in vector (PBF) format, which is not supported.", RNS.LOG_ERROR) + RNS.log( + "MBTiles file is in vector (PBF) format, which is not supported.", + RNS.LOG_ERROR, + ) return None self._metadata_cache = metadata @@ -176,8 +181,12 @@ class MapManager: # create schema cursor.execute("CREATE TABLE metadata (name text, value text)") - cursor.execute("CREATE TABLE tiles (zoom_level integer, tile_column integer, tile_row integer, tile_data blob)") - cursor.execute("CREATE UNIQUE INDEX tile_index on tiles (zoom_level, tile_column, tile_row)") + cursor.execute( + "CREATE TABLE tiles (zoom_level integer, tile_column integer, tile_row integer, tile_data blob)" + ) + cursor.execute( + "CREATE UNIQUE INDEX tile_index on tiles (zoom_level, tile_column, tile_row)" + ) # insert metadata metadata = [ @@ -205,7 +214,11 @@ class MapManager: # wait a bit to be nice to OSM time.sleep(0.1) - response = requests.get(tile_url, headers={"User-Agent": "MeshChatX/1.0 MapExporter"}, timeout=10) + response = requests.get( + tile_url, + headers={"User-Agent": "MeshChatX/1.0 MapExporter"}, + timeout=10, + ) if response.status_code == 200: # MBTiles uses TMS (y flipped) tms_y = (1 << z) - 1 - y @@ -214,11 +227,16 @@ class MapManager: (z, x, tms_y, response.content), ) except Exception as e: - RNS.log(f"Export failed to download tile {z}/{x}/{y}: {e}", RNS.LOG_ERROR) + RNS.log( + f"Export failed to download tile {z}/{x}/{y}: {e}", + RNS.LOG_ERROR, + ) current_count += 1 self._export_progress[export_id]["current"] = current_count - self._export_progress[export_id]["progress"] = int((current_count / total_tiles) * 100) + self._export_progress[export_id]["progress"] = int( + (current_count / total_tiles) * 100 + ) # commit after each zoom level conn.commit() @@ -236,9 +254,13 @@ class MapManager: def _lonlat_to_tile(self, lon, lat, zoom): lat_rad = math.radians(lat) - n = 2.0 ** zoom + n = 2.0**zoom x = int((lon + 180.0) / 360.0 * n) - y = int((1.0 - math.log(math.tan(lat_rad) + (1 / math.cos(lat_rad))) / math.pi) / 2.0 * n) + y = int( + (1.0 - math.log(math.tan(lat_rad) + (1 / math.cos(lat_rad))) / math.pi) + / 2.0 + * n + ) return x, y def close(self): diff --git a/meshchatx/src/backend/message_handler.py b/meshchatx/src/backend/message_handler.py index 076b08b..73afe21 100644 --- a/meshchatx/src/backend/message_handler.py +++ b/meshchatx/src/backend/message_handler.py @@ -5,7 +5,15 @@ class MessageHandler: def __init__(self, db: Database): self.db = db - def get_conversation_messages(self, local_hash, destination_hash, limit=100, offset=0, 
after_id=None, before_id=None): + def get_conversation_messages( + self, + local_hash, + destination_hash, + limit=100, + offset=0, + after_id=None, + before_id=None, + ): query = """ SELECT * FROM lxmf_messages WHERE ((source_hash = ? AND destination_hash = ?) @@ -31,7 +39,9 @@ class MessageHandler: WHERE ((source_hash = ? AND destination_hash = ?) OR (destination_hash = ? AND source_hash = ?)) """ - self.db.provider.execute(query, [local_hash, destination_hash, local_hash, destination_hash]) + self.db.provider.execute( + query, [local_hash, destination_hash, local_hash, destination_hash] + ) def search_messages(self, local_hash, search_term): like_term = f"%{search_term}%" @@ -61,6 +71,12 @@ class MessageHandler: WHERE m1.source_hash = ? OR m1.destination_hash = ? ORDER BY m1.timestamp DESC """ - params = [local_hash, local_hash, local_hash, local_hash, local_hash, local_hash] + params = [ + local_hash, + local_hash, + local_hash, + local_hash, + local_hash, + local_hash, + ] return self.db.provider.fetchall(query, params) - diff --git a/meshchatx/src/backend/rncp_handler.py b/meshchatx/src/backend/rncp_handler.py index 295f0a2..02a56ae 100644 --- a/meshchatx/src/backend/rncp_handler.py +++ b/meshchatx/src/backend/rncp_handler.py @@ -22,9 +22,17 @@ class RNCPHandler: self.allow_overwrite_on_receive = False self.allowed_identity_hashes = [] - def setup_receive_destination(self, allowed_hashes=None, fetch_allowed=False, fetch_jail=None, allow_overwrite=False): + def setup_receive_destination( + self, + allowed_hashes=None, + fetch_allowed=False, + fetch_jail=None, + allow_overwrite=False, + ): if allowed_hashes: - self.allowed_identity_hashes = [bytes.fromhex(h) if isinstance(h, str) else h for h in allowed_hashes] + self.allowed_identity_hashes = [ + bytes.fromhex(h) if isinstance(h, str) else h for h in allowed_hashes + ] self.fetch_jail = fetch_jail self.allow_overwrite_on_receive = allow_overwrite @@ -44,7 +52,9 @@ class RNCPHandler: "receive", ) - self.receive_destination.set_link_established_callback(self._client_link_established) + self.receive_destination.set_link_established_callback( + self._client_link_established + ) if fetch_allowed: self.receive_destination.register_request_handler( @@ -86,7 +96,9 @@ class RNCPHandler: if resource.status == RNS.Resource.COMPLETE: if resource.metadata: try: - filename = os.path.basename(resource.metadata["name"].decode("utf-8")) + filename = os.path.basename( + resource.metadata["name"].decode("utf-8") + ) save_dir = os.path.join(self.storage_dir, "rncp_received") os.makedirs(save_dir, exist_ok=True) @@ -105,13 +117,17 @@ class RNCPHandler: while os.path.isfile(saved_filename): counter += 1 base, ext = os.path.splitext(filename) - saved_filename = os.path.join(save_dir, f"{base}.{counter}{ext}") + saved_filename = os.path.join( + save_dir, f"{base}.{counter}{ext}" + ) shutil.move(resource.data.name, saved_filename) if transfer_id in self.active_transfers: self.active_transfers[transfer_id]["status"] = "completed" - self.active_transfers[transfer_id]["saved_path"] = saved_filename + self.active_transfers[transfer_id]["saved_path"] = ( + saved_filename + ) self.active_transfers[transfer_id]["filename"] = filename except Exception as e: if transfer_id in self.active_transfers: @@ -120,7 +136,9 @@ class RNCPHandler: elif transfer_id in self.active_transfers: self.active_transfers[transfer_id]["status"] = "failed" - def _fetch_request(self, path, data, request_id, link_id, remote_identity, requested_at): + def _fetch_request( + self, path, data, 
request_id, link_id, remote_identity, requested_at + ): if self.fetch_jail: if data.startswith(self.fetch_jail + "/"): data = data.replace(self.fetch_jail + "/", "") @@ -171,7 +189,9 @@ class RNCPHandler: RNS.Transport.request_path(destination_hash) timeout_after = time.time() + timeout - while not RNS.Transport.has_path(destination_hash) and time.time() < timeout_after: + while ( + not RNS.Transport.has_path(destination_hash) and time.time() < timeout_after + ): await asyncio.sleep(0.1) if not RNS.Transport.has_path(destination_hash): @@ -257,7 +277,9 @@ class RNCPHandler: RNS.Transport.request_path(destination_hash) timeout_after = time.time() + timeout - while not RNS.Transport.has_path(destination_hash) and time.time() < timeout_after: + while ( + not RNS.Transport.has_path(destination_hash) and time.time() < timeout_after + ): await asyncio.sleep(0.1) if not RNS.Transport.has_path(destination_hash): @@ -326,7 +348,9 @@ class RNCPHandler: if resource.status == RNS.Resource.COMPLETE: if resource.metadata: try: - filename = os.path.basename(resource.metadata["name"].decode("utf-8")) + filename = os.path.basename( + resource.metadata["name"].decode("utf-8") + ) if save_path: save_dir = os.path.abspath(os.path.expanduser(save_path)) os.makedirs(save_dir, exist_ok=True) @@ -367,7 +391,12 @@ class RNCPHandler: link.set_resource_strategy(RNS.Link.ACCEPT_ALL) link.set_resource_started_callback(fetch_resource_started) link.set_resource_concluded_callback(fetch_resource_concluded) - link.request("fetch_file", data=file_path, response_callback=request_response, failed_callback=request_failed) + link.request( + "fetch_file", + data=file_path, + response_callback=request_response, + failed_callback=request_failed, + ) while not request_resolved: await asyncio.sleep(0.1) @@ -418,4 +447,3 @@ class RNCPHandler: "error": transfer.get("error"), } return None - diff --git a/meshchatx/src/backend/rnprobe_handler.py b/meshchatx/src/backend/rnprobe_handler.py index a71d15b..8fb3e31 100644 --- a/meshchatx/src/backend/rnprobe_handler.py +++ b/meshchatx/src/backend/rnprobe_handler.py @@ -31,8 +31,14 @@ class RNProbeHandler: if not RNS.Transport.has_path(destination_hash): RNS.Transport.request_path(destination_hash) - timeout_after = time.time() + (timeout or self.DEFAULT_TIMEOUT + self.reticulum.get_first_hop_timeout(destination_hash)) - while not RNS.Transport.has_path(destination_hash) and time.time() < timeout_after: + timeout_after = time.time() + ( + timeout + or self.DEFAULT_TIMEOUT + + self.reticulum.get_first_hop_timeout(destination_hash) + ) + while ( + not RNS.Transport.has_path(destination_hash) and time.time() < timeout_after + ): await asyncio.sleep(0.1) if not RNS.Transport.has_path(destination_hash): @@ -70,8 +76,14 @@ class RNProbeHandler: if_name = self.reticulum.get_next_hop_if_name(destination_hash) if_str = f" on {if_name}" if if_name and if_name != "None" else "" - timeout_after = time.time() + (timeout or self.DEFAULT_TIMEOUT + self.reticulum.get_first_hop_timeout(destination_hash)) - while receipt.status == RNS.PacketReceipt.SENT and time.time() < timeout_after: + timeout_after = time.time() + ( + timeout + or self.DEFAULT_TIMEOUT + + self.reticulum.get_first_hop_timeout(destination_hash) + ) + while ( + receipt.status == RNS.PacketReceipt.SENT and time.time() < timeout_after + ): await asyncio.sleep(0.1) result: dict = { @@ -96,9 +108,15 @@ class RNProbeHandler: reception_stats = {} if self.reticulum.is_connected_to_shared_instance: - reception_rssi = 
self.reticulum.get_packet_rssi(receipt.proof_packet.packet_hash) - reception_snr = self.reticulum.get_packet_snr(receipt.proof_packet.packet_hash) - reception_q = self.reticulum.get_packet_q(receipt.proof_packet.packet_hash) + reception_rssi = self.reticulum.get_packet_rssi( + receipt.proof_packet.packet_hash + ) + reception_snr = self.reticulum.get_packet_snr( + receipt.proof_packet.packet_hash + ) + reception_q = self.reticulum.get_packet_q( + receipt.proof_packet.packet_hash + ) if reception_rssi is not None: reception_stats["rssi"] = reception_rssi @@ -134,4 +152,3 @@ class RNProbeHandler: "timeouts": sum(1 for r in results if r["status"] == "timeout"), "failed": sum(1 for r in results if r["status"] == "failed"), } - diff --git a/meshchatx/src/backend/rnstatus_handler.py b/meshchatx/src/backend/rnstatus_handler.py index 40ec894..790bf39 100644 --- a/meshchatx/src/backend/rnstatus_handler.py +++ b/meshchatx/src/backend/rnstatus_handler.py @@ -25,7 +25,12 @@ class RNStatusHandler: def __init__(self, reticulum_instance): self.reticulum = reticulum_instance - def get_status(self, include_link_stats: bool = False, sorting: str | None = None, sort_reverse: bool = False): + def get_status( + self, + include_link_stats: bool = False, + sorting: str | None = None, + sort_reverse: bool = False, + ): stats = None link_count = None @@ -53,15 +58,25 @@ class RNStatusHandler: if sorting and isinstance(sorting, str): sorting = sorting.lower() if sorting in ("rate", "bitrate"): - interfaces.sort(key=lambda i: i.get("bitrate", 0) or 0, reverse=sort_reverse) + interfaces.sort( + key=lambda i: i.get("bitrate", 0) or 0, reverse=sort_reverse + ) elif sorting == "rx": - interfaces.sort(key=lambda i: i.get("rxb", 0) or 0, reverse=sort_reverse) + interfaces.sort( + key=lambda i: i.get("rxb", 0) or 0, reverse=sort_reverse + ) elif sorting == "tx": - interfaces.sort(key=lambda i: i.get("txb", 0) or 0, reverse=sort_reverse) + interfaces.sort( + key=lambda i: i.get("txb", 0) or 0, reverse=sort_reverse + ) elif sorting == "rxs": - interfaces.sort(key=lambda i: i.get("rxs", 0) or 0, reverse=sort_reverse) + interfaces.sort( + key=lambda i: i.get("rxs", 0) or 0, reverse=sort_reverse + ) elif sorting == "txs": - interfaces.sort(key=lambda i: i.get("txs", 0) or 0, reverse=sort_reverse) + interfaces.sort( + key=lambda i: i.get("txs", 0) or 0, reverse=sort_reverse + ) elif sorting == "traffic": interfaces.sort( key=lambda i: (i.get("rxb", 0) or 0) + (i.get("txb", 0) or 0), @@ -84,13 +99,19 @@ class RNStatusHandler: reverse=sort_reverse, ) elif sorting == "held": - interfaces.sort(key=lambda i: i.get("held_announces", 0) or 0, reverse=sort_reverse) + interfaces.sort( + key=lambda i: i.get("held_announces", 0) or 0, reverse=sort_reverse + ) formatted_interfaces = [] for ifstat in interfaces: name = ifstat.get("name", "") - if name.startswith("LocalInterface[") or name.startswith("TCPInterface[Client") or name.startswith("BackboneInterface[Client on"): + if ( + name.startswith("LocalInterface[") + or name.startswith("TCPInterface[Client") + or name.startswith("BackboneInterface[Client on") + ): continue formatted_if: dict[str, Any] = { @@ -165,9 +186,13 @@ class RNStatusHandler: formatted_if["peers"] = ifstat["peers"] if "incoming_announce_frequency" in ifstat: - formatted_if["incoming_announce_frequency"] = ifstat["incoming_announce_frequency"] + formatted_if["incoming_announce_frequency"] = ifstat[ + "incoming_announce_frequency" + ] if "outgoing_announce_frequency" in ifstat: - 
formatted_if["outgoing_announce_frequency"] = ifstat["outgoing_announce_frequency"] + formatted_if["outgoing_announce_frequency"] = ifstat[ + "outgoing_announce_frequency" + ] if "held_announces" in ifstat: formatted_if["held_announces"] = ifstat["held_announces"] @@ -181,4 +206,3 @@ class RNStatusHandler: "link_count": link_count, "timestamp": time.time(), } - diff --git a/meshchatx/src/backend/telephone_manager.py b/meshchatx/src/backend/telephone_manager.py index 19b9651..61129f5 100644 --- a/meshchatx/src/backend/telephone_manager.py +++ b/meshchatx/src/backend/telephone_manager.py @@ -76,7 +76,9 @@ class TelephoneManager: destination_identity = RNS.Identity.recall(destination_hash) if destination_identity is None: # If not found by identity hash, try as destination hash - destination_identity = RNS.Identity.recall(destination_hash) # Identity.recall takes identity hash + destination_identity = RNS.Identity.recall( + destination_hash + ) # Identity.recall takes identity hash if destination_identity is None: msg = "Destination identity not found" @@ -92,4 +94,3 @@ class TelephoneManager: self.call_is_incoming = False await asyncio.to_thread(self.telephone.call, destination_identity) return self.telephone.active_call - diff --git a/meshchatx/src/backend/translator_handler.py b/meshchatx/src/backend/translator_handler.py index e735fab..6a00dcd 100644 --- a/meshchatx/src/backend/translator_handler.py +++ b/meshchatx/src/backend/translator_handler.py @@ -6,12 +6,14 @@ from typing import Any try: import requests + HAS_REQUESTS = True except ImportError: HAS_REQUESTS = False try: from argostranslate import package, translate + HAS_ARGOS_LIB = True except ImportError: HAS_ARGOS_LIB = False @@ -63,7 +65,9 @@ LANGUAGE_CODE_TO_NAME = { class TranslatorHandler: def __init__(self, libretranslate_url: str | None = None): - self.libretranslate_url = libretranslate_url or os.getenv("LIBRETRANSLATE_URL", "http://localhost:5000") + self.libretranslate_url = libretranslate_url or os.getenv( + "LIBRETRANSLATE_URL", "http://localhost:5000" + ) self.has_argos = HAS_ARGOS self.has_argos_lib = HAS_ARGOS_LIB self.has_argos_cli = HAS_ARGOS_CLI @@ -136,7 +140,12 @@ class TranslatorHandler: if self.has_requests: try: url = libretranslate_url or self.libretranslate_url - return self._translate_libretranslate(text, source_lang=source_lang, target_lang=target_lang, libretranslate_url=url) + return self._translate_libretranslate( + text, + source_lang=source_lang, + target_lang=target_lang, + libretranslate_url=url, + ) except Exception as e: if self.has_argos: return self._translate_argos(text, source_lang, target_lang) @@ -148,7 +157,13 @@ class TranslatorHandler: msg = "No translation backend available. Install requests for LibreTranslate or argostranslate for local translation." 
raise RuntimeError(msg) - def _translate_libretranslate(self, text: str, source_lang: str, target_lang: str, libretranslate_url: str | None = None) -> dict[str, Any]: + def _translate_libretranslate( + self, + text: str, + source_lang: str, + target_lang: str, + libretranslate_url: str | None = None, + ) -> dict[str, Any]: if not self.has_requests: msg = "requests library not available" raise RuntimeError(msg) @@ -172,12 +187,16 @@ class TranslatorHandler: result = response.json() return { "translated_text": result.get("translatedText", ""), - "source_lang": result.get("detectedLanguage", {}).get("language", source_lang), + "source_lang": result.get("detectedLanguage", {}).get( + "language", source_lang + ), "target_lang": target_lang, "source": "libretranslate", } - def _translate_argos(self, text: str, source_lang: str, target_lang: str) -> dict[str, Any]: + def _translate_argos( + self, text: str, source_lang: str, target_lang: str + ) -> dict[str, Any]: if source_lang == "auto": if self.has_argos_lib: detected_lang = self._detect_language(text) @@ -200,7 +219,9 @@ class TranslatorHandler: msg = "Argos Translate not available (neither library nor CLI)" raise RuntimeError(msg) - def _translate_argos_lib(self, text: str, source_lang: str, target_lang: str) -> dict[str, Any]: + def _translate_argos_lib( + self, text: str, source_lang: str, target_lang: str + ) -> dict[str, Any]: try: installed_packages = package.get_installed_packages() translation_package = None @@ -228,7 +249,9 @@ class TranslatorHandler: msg = f"Argos Translate error: {e}" raise RuntimeError(msg) - def _translate_argos_cli(self, text: str, source_lang: str, target_lang: str) -> dict[str, Any]: + def _translate_argos_cli( + self, text: str, source_lang: str, target_lang: str + ) -> dict[str, Any]: if source_lang == "auto" or not source_lang: msg = "Auto-detection is not supported with CLI. Please select a source language manually." raise ValueError(msg) @@ -251,7 +274,14 @@ class TranslatorHandler: raise RuntimeError(msg) try: - args = [executable, "--from-lang", source_lang, "--to-lang", target_lang, text] + args = [ + executable, + "--from-lang", + source_lang, + "--to-lang", + target_lang, + text, + ] result = subprocess.run(args, capture_output=True, text=True, check=True) # noqa: S603 translated_text = result.stdout.strip() if not translated_text: @@ -264,7 +294,11 @@ class TranslatorHandler: "source": "argos", } except subprocess.CalledProcessError as e: - error_msg = e.stderr.decode() if isinstance(e.stderr, bytes) else (e.stderr or str(e)) + error_msg = ( + e.stderr.decode() + if isinstance(e.stderr, bytes) + else (e.stderr or str(e)) + ) msg = f"Argos Translate CLI error: {error_msg}" raise RuntimeError(msg) except Exception as e: @@ -333,7 +367,9 @@ class TranslatorHandler: return languages - def install_language_package(self, package_name: str = "translate") -> dict[str, Any]: + def install_language_package( + self, package_name: str = "translate" + ) -> dict[str, Any]: argospm = shutil.which("argospm") if not argospm: msg = "argospm not found in PATH. Install argostranslate first." 
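TranslatorHandler's fallback chain above tries a LibreTranslate server first and drops back to local Argos Translate when the HTTP call fails. A condensed sketch of that chain; the `/translate` endpoint and payload follow the public LibreTranslate API, and the fallback assumes the argostranslate package and the needed source-to-target language pair are installed:

```python
import os

import requests

def translate_text(text: str, source: str = "en", target: str = "es") -> str:
    url = os.getenv("LIBRETRANSLATE_URL", "http://localhost:5000")
    try:
        # Primary backend: a LibreTranslate instance over HTTP.
        response = requests.post(
            f"{url}/translate",
            json={"q": text, "source": source, "target": target},
            timeout=10,
        )
        response.raise_for_status()
        return response.json().get("translatedText", "")
    except Exception:
        # Fallback backend: local Argos Translate; raises if the
        # source->target package is not installed.
        from argostranslate import translate as argos

        return argos.translate(text, source, target)
```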
diff --git a/meshchatx/src/backend/voicemail_manager.py b/meshchatx/src/backend/voicemail_manager.py new file mode 100644 index 0000000..b8911b9 --- /dev/null +++ b/meshchatx/src/backend/voicemail_manager.py @@ -0,0 +1,301 @@ +import os +import platform +import shutil +import subprocess +import threading +import time + +import LXST +import RNS +from LXST.Codecs import Null +from LXST.Pipeline import Pipeline +from LXST.Sinks import OpusFileSink +from LXST.Sources import OpusFileSource + + +class VoicemailManager: + def __init__(self, db, telephone_manager, storage_dir): + self.db = db + self.telephone_manager = telephone_manager + self.storage_dir = os.path.join(storage_dir, "voicemails") + self.greetings_dir = os.path.join(self.storage_dir, "greetings") + self.recordings_dir = os.path.join(self.storage_dir, "recordings") + + # Ensure directories exist + os.makedirs(self.greetings_dir, exist_ok=True) + os.makedirs(self.recordings_dir, exist_ok=True) + + self.is_recording = False + self.recording_pipeline = None + self.recording_sink = None + self.recording_start_time = None + self.recording_remote_identity = None + self.recording_filename = None + + # Paths to executables + self.espeak_path = self._find_espeak() + self.ffmpeg_path = self._find_ffmpeg() + + # Check for presence + self.has_espeak = self.espeak_path is not None + self.has_ffmpeg = self.ffmpeg_path is not None + + if self.has_espeak: + RNS.log(f"Voicemail: Found eSpeak at {self.espeak_path}", RNS.LOG_DEBUG) + else: + RNS.log("Voicemail: eSpeak not found", RNS.LOG_ERROR) + + if self.has_ffmpeg: + RNS.log(f"Voicemail: Found ffmpeg at {self.ffmpeg_path}", RNS.LOG_DEBUG) + else: + RNS.log("Voicemail: ffmpeg not found", RNS.LOG_ERROR) + + def _find_espeak(self): + # Try standard name first + path = shutil.which("espeak-ng") + if path: + return path + + # Try without -ng suffix + path = shutil.which("espeak") + if path: + return path + + # Windows common install locations if not in PATH + if platform.system() == "Windows": + common_paths = [ + os.path.expandvars(r"%ProgramFiles%\eSpeak NG\espeak-ng.exe"), + os.path.expandvars(r"%ProgramFiles(x86)%\eSpeak NG\espeak-ng.exe"), + os.path.expandvars(r"%ProgramFiles%\eSpeak\espeak.exe"), + ] + for p in common_paths: + if os.path.exists(p): + return p + + return None + + def _find_ffmpeg(self): + path = shutil.which("ffmpeg") + if path: + return path + + # Windows common install locations + if platform.system() == "Windows": + common_paths = [ + os.path.expandvars(r"%ProgramFiles%\ffmpeg\bin\ffmpeg.exe"), + os.path.expandvars(r"%ProgramFiles(x86)%\ffmpeg\bin\ffmpeg.exe"), + ] + for p in common_paths: + if os.path.exists(p): + return p + + return None + + def generate_greeting(self, text): + if not self.has_espeak or not self.has_ffmpeg: + msg = "espeak-ng and ffmpeg are required for greeting generation" + raise RuntimeError(msg) + + wav_path = os.path.join(self.greetings_dir, "greeting.wav") + opus_path = os.path.join(self.greetings_dir, "greeting.opus") + + try: + # espeak-ng to WAV + subprocess.run([self.espeak_path, "-w", wav_path, text], check=True) + + # ffmpeg to Opus + if os.path.exists(opus_path): + os.remove(opus_path) + + subprocess.run( + [ + self.ffmpeg_path, + "-i", + wav_path, + "-c:a", + "libopus", + "-b:a", + "16k", + "-vbr", + "on", + opus_path, + ], + check=True, + ) + + return opus_path + finally: + if os.path.exists(wav_path): + os.remove(wav_path) + + def handle_incoming_call(self, caller_identity): + if not self.db.config.voicemail_enabled.get(): + return + + delay = 
self.db.config.voicemail_auto_answer_delay_seconds.get() + + def voicemail_job(): + time.sleep(delay) + + # Check if still ringing and no other active call + telephone = self.telephone_manager.telephone + if ( + telephone + and telephone.active_call + and telephone.active_call.get_remote_identity() == caller_identity + and telephone.call_status == LXST.Signalling.STATUS_RINGING + ): + RNS.log( + f"Auto-answering call from {RNS.prettyhexrep(caller_identity.hash)} for voicemail", + RNS.LOG_DEBUG, + ) + self.start_voicemail_session(caller_identity) + + threading.Thread(target=voicemail_job, daemon=True).start() + + def start_voicemail_session(self, caller_identity): + telephone = self.telephone_manager.telephone + if not telephone: + return + + # Answer the call + if not telephone.answer(caller_identity): + return + + # Stop microphone if it's active to prevent local noise being sent or recorded + if telephone.audio_input: + telephone.audio_input.stop() + + # Play greeting + greeting_path = os.path.join(self.greetings_dir, "greeting.opus") + if not os.path.exists(greeting_path): + # Fallback if no greeting generated yet + self.generate_greeting(self.db.config.voicemail_greeting.get()) + + def session_job(): + try: + # 1. Play greeting + greeting_source = OpusFileSource(greeting_path, target_frame_ms=60) + # Attach to transmit mixer + greeting_pipeline = Pipeline( + source=greeting_source, codec=Null(), sink=telephone.transmit_mixer + ) + greeting_pipeline.start() + + # Wait for greeting to finish + while greeting_source.running: + time.sleep(0.1) + if not telephone.active_call: + return + + greeting_pipeline.stop() + + # 2. Play beep + beep_source = LXST.ToneSource( + frequency=800, + gain=0.1, + target_frame_ms=60, + codec=Null(), + sink=telephone.transmit_mixer, + ) + beep_source.start() + time.sleep(0.5) + beep_source.stop() + + # 3. Start recording + self.start_recording(caller_identity) + + # 4. Wait for max recording time or hangup + max_time = self.db.config.voicemail_max_recording_seconds.get() + start_wait = time.time() + while self.is_recording and (time.time() - start_wait < max_time): + time.sleep(0.5) + if not telephone.active_call: + break + + # 5. End session + if telephone.active_call: + telephone.hangup() + + self.stop_recording() + + except Exception as e: + RNS.log(f"Error during voicemail session: {e}", RNS.LOG_ERROR) + if self.is_recording: + self.stop_recording() + + threading.Thread(target=session_job, daemon=True).start() + + def start_recording(self, caller_identity): + telephone = self.telephone_manager.telephone + if not telephone or not telephone.active_call: + return + + timestamp = time.time() + filename = f"voicemail_{caller_identity.hash.hex()}_{int(timestamp)}.opus" + filepath = os.path.join(self.recordings_dir, filename) + + try: + self.recording_sink = OpusFileSink(filepath) + # Connect the caller's audio source to our sink + # active_call.audio_source is a LinkSource that feeds into receive_mixer + # We want to record what we receive. 
+ self.recording_pipeline = Pipeline( + source=telephone.active_call.audio_source, + codec=Null(), + sink=self.recording_sink, + ) + self.recording_pipeline.start() + + self.is_recording = True + self.recording_start_time = timestamp + self.recording_remote_identity = caller_identity + self.recording_filename = filename + + RNS.log( + f"Started recording voicemail from {RNS.prettyhexrep(caller_identity.hash)}", + RNS.LOG_DEBUG, + ) + except Exception as e: + RNS.log(f"Failed to start recording: {e}", RNS.LOG_ERROR) + + def stop_recording(self): + if not self.is_recording: + return + + try: + duration = int(time.time() - self.recording_start_time) + self.recording_pipeline.stop() + self.recording_sink = None + self.recording_pipeline = None + + # Save to database if long enough + if duration >= 1: + remote_name = self.telephone_manager.get_name_for_identity_hash( + self.recording_remote_identity.hash.hex() + ) + self.db.voicemails.add_voicemail( + remote_identity_hash=self.recording_remote_identity.hash.hex(), + remote_identity_name=remote_name, + filename=self.recording_filename, + duration_seconds=duration, + timestamp=self.recording_start_time, + ) + RNS.log( + f"Saved voicemail from {RNS.prettyhexrep(self.recording_remote_identity.hash)} ({duration}s)", + RNS.LOG_DEBUG, + ) + else: + # Delete short/empty recording + filepath = os.path.join(self.recordings_dir, self.recording_filename) + if os.path.exists(filepath): + os.remove(filepath) + + self.is_recording = False + self.recording_start_time = None + self.recording_remote_identity = None + self.recording_filename = None + + except Exception as e: + RNS.log(f"Error stopping recording: {e}", RNS.LOG_ERROR) + self.is_recording = False diff --git a/meshchatx/src/frontend/components/App.vue b/meshchatx/src/frontend/components/App.vue index 7ddcf9a..1e91833 100644 --- a/meshchatx/src/frontend/components/App.vue +++ b/meshchatx/src/frontend/components/App.vue @@ -26,7 +26,7 @@
-
+
diff --git a/meshchatx/src/frontend/components/auth/AuthPage.vue b/meshchatx/src/frontend/components/auth/AuthPage.vue
index c708aa8..95ce38a 100644
--- a/meshchatx/src/frontend/components/auth/AuthPage.vue
+++ b/meshchatx/src/frontend/components/auth/AuthPage.vue
@@ -5,7 +5,7 @@
             class="bg-white dark:bg-zinc-900 rounded-2xl shadow-lg border border-gray-200 dark:border-zinc-800 p-8"
         >
-
+

{{ isSetup ? "Initial Setup" : "Authentication Required" }}

diff --git a/meshchatx/src/frontend/components/call/CallOverlay.vue b/meshchatx/src/frontend/components/call/CallOverlay.vue
index 40d8a97..3f5107b 100644
--- a/meshchatx/src/frontend/components/call/CallOverlay.vue
+++ b/meshchatx/src/frontend/components/call/CallOverlay.vue
@@ -7,12 +7,17 @@
-
+
-                    {{ isEnded ? "Call Ended" : (activeCall.status === 6 ? "Active Call" : "Call Status") }}
+                    {{
+                        isEnded
+                            ? "Call Ended"
+                            : activeCall.is_voicemail
+                              ? "Recording Voicemail"
+                              : activeCall.status === 6
+                                ? "Active Call"
+                                : "Call Status"
+                    }}
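The overlay's "Recording Voicemail" state is driven by the backend session in `voicemail_manager.py`: answer the call, play the Opus greeting into the transmit mixer, then pipe the caller's audio source into a file sink. A stripped-down sketch of that flow, reusing the LXST names exactly as the diff uses them; it omits the beep, the maximum-duration timeout, and the database bookkeeping, so treat it as orientation rather than the implementation:

```python
import time

from LXST.Codecs import Null
from LXST.Pipeline import Pipeline
from LXST.Sinks import OpusFileSink
from LXST.Sources import OpusFileSource

def run_voicemail_session(telephone, greeting_path, recording_path):
    # 1. Play the pre-rendered Opus greeting into the outgoing mixer.
    greeting = OpusFileSource(greeting_path, target_frame_ms=60)
    playback = Pipeline(source=greeting, codec=Null(), sink=telephone.transmit_mixer)
    playback.start()
    while greeting.running and telephone.active_call:
        time.sleep(0.1)
    playback.stop()

    if not telephone.active_call:
        return  # caller hung up during the greeting

    # 2. Record whatever the caller sends until they hang up.
    recorder = Pipeline(
        source=telephone.active_call.audio_source,
        codec=Null(),
        sink=OpusFileSink(recording_path),
    )
    recorder.start()
    while telephone.active_call:
        time.sleep(0.5)
    recorder.stop()
```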