Compare commits
61 Commits
Commit SHA1s:

3438b271a5, d6228d6d63, ccf954681b, 4ec44900cf, d4099fb9a2, 1571b315b2,
71bd49bd7d, 382413dc08, 0621facc7d, 50cbfed5fa, 36d9a3350b, 515a9d9dbf,
3c27b4f9b8, 851c8c05d4, 8002a75e26, 06e6b55ecc, 48e47bd0bd, 9c074a0333,
f2314f862c, 6e57536650, 5fd7551874, 62d592c4d0, 8af2a9abbb, 64ca8bd4d2,
f1d025bd0e, 087ff563a2, 882dacf2bb, a2efdb136a, 001613b4fa, 74564d0ef2,
81142ad194, fee1d2e2d6, 7c93fdb71d, 9e435eeebc, 5dfcc1f2ce, 2def60b457,
f708ad4ee1, f7568d81aa, 251f9bacef, 07892dbfee, 54e6849968, ea27c380cb,
a338be85e1, e31cb3418b, 798725dca6, 6f393497f0, 14b5aabf2b, fb36907447,
62fde2617b, 9f5ea23eb7, 19fad61706, c900cf38c9, 014ebc25c6, d5e9308fb5,
7d5e891261, c382ed790f, cb72e57da9, aaf5ad23e2, ce1b1dad7d, 67ebc7e556,
b31fb748b8
.github/workflows/docker-test.yml (vendored, 8 changes)

@@ -15,13 +15,13 @@ jobs:
       contents: read
     strategy:
       matrix:
-        python-version: ["3.10", "3.11", "3.12", "3.13"]
+        python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"]

     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v5
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: ${{ matrix.python-version }}
       - name: Build Docker Image
-        run: docker build . --file Dockerfile --build-arg PYTHON_VERSION=${{ matrix.python-version }} --tag lxmfy-test:${{ matrix.python-version }}
+        run: docker build . --file docker/Dockerfile --build-arg PYTHON_VERSION=${{ matrix.python-version }} --tag lxmfy-test:${{ matrix.python-version }}
.github/workflows/docker.yml (vendored, 19 changes)

@@ -20,18 +20,18 @@ jobs:

     steps:
       - name: Checkout repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

       - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3
+        uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392
         with:
           platforms: amd64,arm64

       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
+        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435

       - name: Log in to the Container registry
-        uses: docker/login-action@v3
+        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1
         with:
           registry: ${{ env.REGISTRY }}
           username: ${{ github.actor }}

@@ -39,7 +39,7 @@ jobs:

       - name: Extract metadata (tags, labels) for Docker
         id: meta
-        uses: docker/metadata-action@v5
+        uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f
         with:
           images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
           tags: |

@@ -51,9 +51,10 @@ jobs:
             type=sha,format=short

       - name: Build and push Docker image
-        uses: docker/build-push-action@v5
+        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
         with:
           context: .
+          file: ./docker/Dockerfile
           platforms: linux/amd64,linux/arm64
           push: ${{ github.event_name != 'pull_request' }}
           tags: ${{ steps.meta.outputs.tags }}

@@ -63,7 +64,7 @@ jobs:

       - name: Extract metadata (tags, labels) for Docker (rootless)
         id: meta_rootless
-        uses: docker/metadata-action@v5
+        uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f
         with:
           images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}-rootless
           tags: |

@@ -74,10 +75,10 @@ jobs:
             type=sha,format=short,suffix=-rootless

       - name: Build and push rootless Docker image
-        uses: docker/build-push-action@v5
+        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
         with:
           context: .
-          file: ./Dockerfile.rootless
+          file: ./docker/Dockerfile.rootless
           platforms: linux/amd64,linux/arm64
           push: ${{ github.event_name != 'pull_request' }}
           tags: ${{ steps.meta_rootless.outputs.tags }}
.github/workflows/publish.yml (vendored, 50 changes)

@@ -1,5 +1,14 @@
 name: Publish Python 🐍 distribution 📦 to PyPI

+# This workflow creates immutable releases:
+# 1. Build packages
+# 2. Publish to PyPI (only on tag push)
+# 3. After successful PyPI publish:
+#    - Sign artifacts
+#    - Check if GitHub release exists (idempotent)
+#    - Create release with all artifacts atomically
+# This ensures releases cannot be modified once published.
+
 on:
   push:
     tags:

@@ -23,11 +32,11 @@ jobs:
       contents: read
       id-token: write
     steps:
-      - uses: actions/checkout@v4.2.2
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
        with:
          persist-credentials: false
      - name: Set up Python
-       uses: actions/setup-python@v5.3.0
+       uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: "3.13"
      - name: Install pypa/build

@@ -35,7 +44,7 @@ jobs:
      - name: Build a binary wheel and a source tarball
        run: python3 -m build
      - name: Store the distribution packages
-       uses: actions/upload-artifact@v4.5.0
+       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: python-package-distributions
          path: dist/

@@ -55,12 +64,12 @@ jobs:

     steps:
      - name: Download all the dists
-       uses: actions/download-artifact@v4.1.8
+       uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
        with:
          name: python-package-distributions
          path: dist/
      - name: Publish distribution 📦 to PyPI
-       uses: pypa/gh-action-pypi-publish@v1.12.3
+       uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e

  github-release:
    name: Sign the Python 🐍 distribution 📦 and create GitHub Release

@@ -73,28 +82,37 @@ jobs:

    steps:
      - name: Download all the dists
-       uses: actions/download-artifact@v4.1.8
+       uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
        with:
          name: python-package-distributions
          path: dist/
      - name: Sign the dists with Sigstore
-       uses: sigstore/gh-action-sigstore-python@v3.0.0
+       uses: sigstore/gh-action-sigstore-python@f7ad0af51a5648d09a20d00370f0a91c3bdf8f84 # v3.0.1
        with:
          inputs: >-
            ./dist/*.tar.gz
            ./dist/*.whl
-     - name: Create GitHub Release
+     - name: Check if release exists
+       id: check_release
+       env:
+         GITHUB_TOKEN: ${{ github.token }}
+       run: |
+         if gh release view "$GITHUB_REF_NAME" --repo "$GITHUB_REPOSITORY" >/dev/null 2>&1; then
+           echo "exists=true" >> $GITHUB_OUTPUT
+           echo "Release $GITHUB_REF_NAME already exists, skipping creation"
+         else
+           echo "exists=false" >> $GITHUB_OUTPUT
+           echo "Release $GITHUB_REF_NAME does not exist, will create"
+         fi
+       continue-on-error: true
+     - name: Create GitHub Release with artifacts
+       if: steps.check_release.outputs.exists != 'true'
        env:
          GITHUB_TOKEN: ${{ github.token }}
        run: >-
          gh release create
          "$GITHUB_REF_NAME"
          --repo "$GITHUB_REPOSITORY"
-         --notes ""
-     - name: Upload artifact signatures to GitHub Release
-       env:
-         GITHUB_TOKEN: ${{ github.token }}
-       run: >-
-         gh release upload
-         "$GITHUB_REF_NAME" dist/**
-         --repo "$GITHUB_REPOSITORY"
+         --title "Release $GITHUB_REF_NAME"
+         --notes "PyPI: https://pypi.org/project/rns-page-node/$GITHUB_REF_NAME/"
+         dist/*
.github/workflows/safety.yml (vendored, new file, 17 lines)

@@ -0,0 +1,17 @@
+name: Safety
+on:
+  push:
+    branches: [ main ]
+  schedule:
+    - cron: '0 0 * * 0' # weekly
+jobs:
+  security:
+    runs-on: ubuntu-latest
+    permissions:
+      contents: read
+    steps:
+      - uses: actions/checkout@main
+      - name: Run Safety CLI to check for vulnerabilities
+        uses: pyupio/safety-action@7baf6605473beffc874c1313ddf2db085c0cacf2 # v1
+        with:
+          api-key: ${{ secrets.SAFETY_API_KEY }}
.github/workflows/tests.yml (vendored, new file, 49 lines)

@@ -0,0 +1,49 @@
+name: Run Tests
+
+on:
+  push:
+    branches:
+      - main
+  pull_request:
+    branches:
+      - main
+
+defaults:
+  run:
+    shell: bash
+
+jobs:
+  test:
+    runs-on: ${{ matrix.os }}
+    permissions:
+      contents: read
+    strategy:
+      matrix:
+        os: ["ubuntu-latest", "windows-latest"]
+        python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"]
+
+    steps:
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install -e .
+
+      - name: Run tests
+        run: |
+          cd tests
+          chmod +x run_tests.sh
+          timeout 120 ./run_tests.sh
+
+      - name: Upload test logs on failure
+        if: failure()
+        uses: actions/upload-artifact@v4
+        with:
+          name: test-logs-${{ matrix.os }}-${{ matrix.python-version }}
+          path: tests/node.log
.gitignore (vendored, 9 changes)

@@ -3,3 +3,12 @@ node-config/
 files/
 .ruff_cache/
 __pycache__/
+dist/
+*.egg-info/
+.ruff_cache/
+.venv/
+.env
+.env.local
+.env.development.local
+.env.test.local
+.env.production.local
Makefile (15 changes)

@@ -2,19 +2,20 @@

 # Detect if docker buildx is available
 DOCKER_BUILD := $(shell docker buildx version >/dev/null 2>&1 && echo "docker buildx build" || echo "docker build")
+DOCKER_BUILD_LOAD := $(shell docker buildx version >/dev/null 2>&1 && echo "docker buildx build --load" || echo "docker build")

 .PHONY: all build sdist wheel clean install lint format docker-wheels docker-build docker-run docker-build-rootless docker-run-rootless help test docker-test

 all: build

 build: clean
-	python3 setup.py sdist bdist_wheel
+	python3 -m build

 sdist:
-	python3 setup.py sdist
+	python3 -m build --sdist

 wheel:
-	python3 setup.py bdist_wheel
+	python3 -m build --wheel

 clean:
 	rm -rf build dist *.egg-info

@@ -29,13 +30,13 @@ format:
 	ruff check --fix .

 docker-wheels:
-	$(DOCKER_BUILD) --target builder -f Dockerfile.build -t rns-page-node-builder .
+	$(DOCKER_BUILD) --target builder -f docker/Dockerfile.build -t rns-page-node-builder .
 	docker create --name builder-container rns-page-node-builder true
 	docker cp builder-container:/src/dist ./dist
 	docker rm builder-container

 docker-build:
-	$(DOCKER_BUILD) $(BUILD_ARGS) -f Dockerfile -t rns-page-node:latest .
+	$(DOCKER_BUILD_LOAD) $(BUILD_ARGS) -f docker/Dockerfile -t rns-page-node:latest .

 docker-run:
 	docker run --rm -it \

@@ -50,7 +51,7 @@ docker-run:
 	--announce-interval 360

 docker-build-rootless:
-	$(DOCKER_BUILD) $(BUILD_ARGS) -f Dockerfile.rootless -t rns-page-node-rootless:latest .
+	$(DOCKER_BUILD_LOAD) $(BUILD_ARGS) -f docker/Dockerfile.rootless -t rns-page-node-rootless:latest .

 docker-run-rootless:
 	docker run --rm -it \

@@ -68,7 +69,7 @@ test:
 	bash tests/run_tests.sh

 docker-test:
-	$(DOCKER_BUILD) -f tests/Dockerfile.tests -t rns-page-node-tests .
+	$(DOCKER_BUILD_LOAD) -f docker/Dockerfile.tests -t rns-page-node-tests .
 	docker run --rm rns-page-node-tests

 help:
README.md (85 changes)

@@ -1,31 +1,82 @@
 # RNS Page Node

+[Русская](README.ru.md)
+
 [](https://github.com/Sudo-Ivan/rns-page-node/actions/workflows/docker.yml)
 [](https://github.com/Sudo-Ivan/rns-page-node/actions/workflows/docker-test.yml)
 [](https://app.deepsource.com/gh/Sudo-Ivan/rns-page-node/)

 A simple way to serve pages and files over the [Reticulum network](https://reticulum.network/). Drop-in replacement for [NomadNet](https://github.com/markqvist/NomadNet) nodes that primarily serve pages and files.

+## Features
+
+- Serves pages and files over RNS
+- Dynamic page support with environment variables
+- Form data and request parameter parsing
+
 ## Usage

 ```bash
+# Pip
+# May require --break-system-packages
+
 pip install rns-page-node

+# Pipx
+
+pipx install rns-page-node
+
+# uv
+
+uv venv
+source .venv/bin/activate
+uv pip install rns-page-node
+
+# Pipx via Git
+
+pipx install git+https://github.com/Sudo-Ivan/rns-page-node.git
 ```

+## Usage
+
 ```bash
+# will use current directory for pages and files
 rns-page-node
 ```

-## Usage
+or with command-line options:

 ```bash
 rns-page-node --node-name "Page Node" --pages-dir ./pages --files-dir ./files --identity-dir ./node-config --announce-interval 360
 ```

+or with a config file:
+
+```bash
+rns-page-node /path/to/config.conf
+```
+
+### Configuration File
+
+You can use a configuration file to persist settings. See `config.example` for an example.
+
+Config file format is simple `key=value` pairs:
+
+```
+# Comment lines start with #
+node-name=My Page Node
+pages-dir=./pages
+files-dir=./files
+identity-dir=./node-config
+announce-interval=360
+```
+
+Priority order: Command-line arguments > Config file > Defaults
+
 ### Docker/Podman

 ```bash
-docker run -it --rm -v ./pages:/app/pages -v ./files:/app/files -v ./node-config:/app/node-config -v ./config:/app/config ghcr.io/sudo-ivan/rns-page-node:latest
+docker run -it --rm -v ./pages:/app/pages -v ./files:/app/files -v ./node-config:/app/node-config -v ./config:/root/.reticulum ghcr.io/sudo-ivan/rns-page-node:latest
 ```

 ### Docker/Podman Rootless
@@ -58,20 +109,30 @@ make docker-wheels

 ## Pages

-Supports Micron `.mu` and dynamic pages with `#!` in the micron files.
+Supports dynamic executable pages with full request data parsing. Pages can receive:
+
+- Form fields via `field_*` environment variables
+- Link variables via `var_*` environment variables
+- Remote identity via `remote_identity` environment variable
+- Link ID via `link_id` environment variable
+
+This enables forums, chats, and other interactive applications compatible with NomadNet clients.

 ## Options

 ```
--c, --config: The path to the Reticulum config file.
--n, --node-name: The name of the node.
--p, --pages-dir: The directory to serve pages from.
--f, --files-dir: The directory to serve files from.
--i, --identity-dir: The directory to persist the node's identity.
--a, --announce-interval: The interval to announce the node's presence.
--r, --page-refresh-interval: The interval to refresh pages.
--f, --file-refresh-interval: The interval to refresh files.
--l, --log-level: The logging level.
+Positional arguments:
+  node_config              Path to rns-page-node config file
+
+Optional arguments:
+  -c, --config             Path to the Reticulum config file
+  -n, --node-name          Name of the node
+  -p, --pages-dir          Directory to serve pages from
+  -f, --files-dir          Directory to serve files from
+  -i, --identity-dir       Directory to persist the node's identity
+  -a, --announce-interval  Interval to announce the node's presence (in minutes, default: 360 = 6 hours)
+  --page-refresh-interval  Interval to refresh pages (in seconds, 0 = disabled)
+  --file-refresh-interval  Interval to refresh files (in seconds, 0 = disabled)
+  -l, --log-level          Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)
 ```

 ## License
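The Pages bullets in the README hunk above describe how request data reaches an executable page purely through environment variables. As an illustration only (this page is not part of the diff; it relies solely on the `field_*`, `var_*`, `remote_identity`, and `link_id` variables documented above), a minimal dynamic page could look like this:

```python
#!/usr/bin/env python3
# Minimal dynamic page sketch. rns-page-node executes pages that start with a
# shebang and are marked executable, passing request data in the environment.
import os

print(">Echo Page")
print()

# Form fields and link variables arrive as field_* / var_* environment variables.
for key, value in sorted(os.environ.items()):
    if key.startswith(("field_", "var_")):
        print(f"{key} = {value}")

# Peer identity hash and link id, when the node could provide them.
print(f"remote_identity: {os.environ.get('remote_identity', 'unknown')}")
print(f"link_id: {os.environ.get('link_id', 'unknown')}")
```

As with the test page added in tests/run_tests.sh further down, the file has to be executable (`chmod +x`) for the node to run it instead of serving it verbatim.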
README.ru.md (new file, 124 lines)

@@ -0,0 +1,124 @@
+# RNS Page Node
+
+[English](README.md)
+
+Простой способ для раздачи страниц и файлов через сеть [Reticulum](https://reticulum.network/). Прямая замена для узлов [NomadNet](https://github.com/markqvist/NomadNet), которые в основном служат для раздачи страниц и файлов.
+
+## Особенности
+
+- Раздача страниц и файлов через RNS
+- Поддержка динамических страниц с переменными окружения
+- Разбор данных форм и параметров запросов
+
+## Установка
+
+```bash
+# Pip
+# Может потребоваться --break-system-packages
+pip install rns-page-node
+
+# Pipx
+pipx install rns-page-node
+
+# uv
+uv venv
+source .venv/bin/activate
+uv pip install rns-page-node
+
+# Pipx через Git
+pipx install git+https://github.com/Sudo-Ivan/rns-page-node.git
+```
+
+## Использование
+
+```bash
+# будет использовать текущий каталог для страниц и файлов
+rns-page-node
+```
+
+или с параметрами командной строки:
+
+```bash
+rns-page-node --node-name "Page Node" --pages-dir ./pages --files-dir ./files --identity-dir ./node-config --announce-interval 360
+```
+
+или с файлом конфигурации:
+
+```bash
+rns-page-node /путь/к/config.conf
+```
+
+### Файл Конфигурации
+
+Вы можете использовать файл конфигурации для сохранения настроек. См. `config.example` для примера.
+
+Формат файла конфигурации - простые пары `ключ=значение`:
+
+```
+# Строки комментариев начинаются с #
+node-name=Мой Page Node
+pages-dir=./pages
+files-dir=./files
+identity-dir=./node-config
+announce-interval=360
+```
+
+Порядок приоритета: Аргументы командной строки > Файл конфигурации > Значения по умолчанию
+
+### Docker/Podman
+
+```bash
+docker run -it --rm -v ./pages:/app/pages -v ./files:/app/files -v ./node-config:/app/node-config -v ./config:/root/.reticulum ghcr.io/sudo-ivan/rns-page-node:latest
+```
+
+### Docker/Podman без root-доступа
+
+```bash
+mkdir -p ./pages ./files ./node-config ./config
+chown -R 1000:1000 ./pages ./files ./node-config ./config
+podman run -it --rm -v ./pages:/app/pages -v ./files:/app/files -v ./node-config:/app/node-config -v ./config:/app/config ghcr.io/sudo-ivan/rns-page-node:latest-rootless
+```
+
+Монтирование томов необязательно, вы также можете скопировать страницы и файлы в контейнер с помощью `podman cp` или `docker cp`.
+
+## Сборка
+
+```bash
+make build
+```
+
+Сборка wheels:
+
+```bash
+make wheel
+```
+
+### Сборка Wheels в Docker
+
+```bash
+make docker-wheels
+```
+
+## Страницы
+
+Поддержка динамических исполняемых страниц с полным разбором данных запросов. Страницы могут получать:
+
+- Поля форм через переменные окружения `field_*`
+- Переменные ссылок через переменные окружения `var_*`
+- Удаленную идентификацию через переменную окружения `remote_identity`
+- ID соединения через переменную окружения `link_id`
+
+Это позволяет создавать форумы, чаты и другие интерактивные приложения, совместимые с клиентами NomadNet.
+
+## Параметры
+
+```
+Позиционные аргументы:
+  node_config              Путь к файлу конфигурации rns-page-node
+
+Необязательные аргументы:
+  -c, --config             Путь к файлу конфигурации Reticulum
+  -n, --node-name          Имя узла
+  -p, --pages-dir          Каталог для раздачи страниц
+  -f, --files-dir          Каталог для раздачи файлов
+  -i, --identity-dir       Каталог для сохранения идентификационных данных узла
+  -a, --announce-interval  Интервал анонсирования присутствия узла (в минутах, по умолчанию: 360 = 6 часов)
+  --page-refresh-interval  Интервал обновления страниц (в секундах, 0 = отключено)
+  --file-refresh-interval  Интервал обновления файлов (в секундах, 0 = отключено)
+  -l, --log-level          Уровень логирования (DEBUG, INFO, WARNING, ERROR, CRITICAL)
+```
+
+## Лицензия
+
+Этот проект включает части кодовой базы [NomadNet](https://github.com/markqvist/NomadNet), которая лицензирована под GNU General Public License v3.0 (GPL-3.0). Как производная работа, этот проект также распространяется на условиях GPL-3.0. Полный текст лицензии смотрите в файле [LICENSE](LICENSE).
config.example (new file, 31 lines)

@@ -0,0 +1,31 @@
+# rns-page-node configuration file
+# Lines starting with # are comments
+# Format: key=value
+
+# Reticulum config directory path
+# reticulum-config=/path/to/reticulum/config
+
+# Node display name
+node-name=My Page Node
+
+# Pages directory
+pages-dir=./pages
+
+# Files directory
+files-dir=./files
+
+# Node identity directory
+identity-dir=./node-config
+
+# Announce interval in minutes (default: 360 = 6 hours)
+announce-interval=360
+
+# Page refresh interval in seconds (0 = disabled)
+page-refresh-interval=300
+
+# File refresh interval in seconds (0 = disabled)
+file-refresh-interval=300
+
+# Log level (DEBUG, INFO, WARNING, ERROR, CRITICAL)
+log-level=INFO
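For orientation, the `load_config` helper added to `rns_page_node/main.py` later in this diff treats a file like the one above as plain `key=value` pairs. A standalone sketch of the same parsing rules (illustrative only, not the code from the diff; it assumes a `config.example` in the current directory):

```python
# Sketch of the key=value parsing used for config files like config.example:
# blank lines and lines starting with "#" are skipped, and each remaining
# line is split on the first "=" only.
def parse_simple_config(path):
    config = {}
    with open(path, encoding="utf-8") as handle:
        for raw_line in handle:
            line = raw_line.strip()
            if not line or line.startswith("#"):
                continue
            if "=" not in line:
                continue  # the real loader logs a warning here
            key, value = line.split("=", 1)
            if key.strip() and value.strip():
                config[key.strip()] = value.strip()
    return config


# For the example above this yields entries such as
# {"node-name": "My Page Node", "announce-interval": "360", ...}
print(parse_simple_config("config.example"))
```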
poetry.lock (generated, 1543 changes): file diff suppressed because it is too large.
pyproject.toml

@@ -1,16 +1,25 @@
 [project]
 name = "rns-page-node"
-version = "1.0.0"
+version = "1.3.0"
 license = "GPL-3.0-only"
 description = "A simple way to serve pages and files over the Reticulum network."
 authors = [
     {name = "Sudo-Ivan"}
 ]
 readme = "README.md"
-requires-python = ">=3.10"
+requires-python = ">3.9.0,<3.9.1 || >3.9.1"
 dependencies = [
-    "rns (>=1.0.0,<1.5.0)"
+    "rns (>=1.0.4,<1.5.0)",
+    "cryptography>=46.0.3"
 ]
+classifiers = [
+    "Programming Language :: Python :: 3",
+    "Operating System :: OS Independent",
+]
+
+[project.urls]
+Homepage = "https://github.com/Sudo-Ivan/rns-page-node"
+Repository = "https://github.com/Sudo-Ivan/rns-page-node"

 [project.scripts]
 rns-page-node = "rns_page_node.main:main"

@@ -20,6 +29,4 @@ requires = ["poetry-core>=2.0.0,<3.0.0"]
 build-backend = "poetry.core.masonry.api"

 [tool.poetry.group.dev.dependencies]
-ruff = "^0.12.3"
-safety = "^3.6.0"
+ruff = "^0.13.3"
@@ -1 +1 @@
-rns=1.0.0
+rns=1.0.4
rns_page_node/__init__.py

@@ -1,2 +1,6 @@
-# rns_page_node package
+"""RNS Page Node package.
+
+A minimal Reticulum page node that serves .mu pages and files over RNS.
+"""
+
 __all__ = ["main"]
rns_page_node/main.py

@@ -1,22 +1,21 @@
-#!/usr/bin/env python3
-"""Minimal Reticulum Page Node
+"""Minimal Reticulum Page Node.
+
 Serves .mu pages and files over RNS.
 """

 import argparse
-import logging
 import os
 import subprocess
 import threading
 import time
+from pathlib import Path

 import RNS

-logger = logging.getLogger(__name__)
-
 DEFAULT_INDEX = """>Default Home Page

-This node is serving pages using page node, but the home page file (index.mu) was not found in the pages directory. Please add an index.mu file to customize the home page.
+This node is serving pages using rns-page-node, but index.mu was not found.
+Please add an index.mu file to customize the home page.
 """

 DEFAULT_NOTALLOWED = """>Request Not Allowed
@@ -25,7 +24,49 @@ You are not authorised to carry out the request.
 """


+def load_config(config_file):
+    """Load configuration from a plain text config file.
+
+    Config format is simple key=value pairs, one per line.
+    Lines starting with # are comments and are ignored.
+    Empty lines are ignored.
+
+    Args:
+        config_file: Path to the config file
+
+    Returns:
+        Dictionary of configuration values
+
+    """
+    config = {}
+    try:
+        with open(config_file, encoding="utf-8") as f:
+            for line_num, line in enumerate(f, 1):
+                line = line.strip()
+                if not line or line.startswith("#"):
+                    continue
+                if "=" not in line:
+                    RNS.log(
+                        f"Invalid config line {line_num} in {config_file}: {line}",
+                        RNS.LOG_WARNING,
+                    )
+                    continue
+                key, value = line.split("=", 1)
+                key = key.strip()
+                value = value.strip()
+                if key and value:
+                    config[key] = value
+        RNS.log(f"Loaded configuration from {config_file}", RNS.LOG_INFO)
+    except FileNotFoundError:
+        RNS.log(f"Config file not found: {config_file}", RNS.LOG_ERROR)
+    except Exception as e:
+        RNS.log(f"Error reading config file {config_file}: {e}", RNS.LOG_ERROR)
+    return config
+
+
 class PageNode:
+    """A Reticulum page node that serves .mu pages and files over RNS."""
+
     def __init__(
         self,
         identity,
@@ -36,15 +77,30 @@ class PageNode:
         page_refresh_interval=0,
         file_refresh_interval=0,
     ):
+        """Initialize the PageNode.
+
+        Args:
+            identity: RNS Identity for the node
+            pagespath: Path to directory containing .mu pages
+            filespath: Path to directory containing files to serve
+            announce_interval: Minutes between announcements (default: 360) == 6 hours
+            name: Display name for the node (optional)
+            page_refresh_interval: Seconds between page rescans (0 = disabled)
+            file_refresh_interval: Seconds between file rescans (0 = disabled)
+
+        """
         self._stop_event = threading.Event()
         self._lock = threading.Lock()
-        self.logger = logging.getLogger(f"{__name__}.PageNode")
         self.identity = identity
         self.name = name
         self.pagespath = pagespath
         self.filespath = filespath
         self.destination = RNS.Destination(
-            identity, RNS.Destination.IN, RNS.Destination.SINGLE, "nomadnetwork", "node"
+            identity,
+            RNS.Destination.IN,
+            RNS.Destination.SINGLE,
+            "nomadnetwork",
+            "node",
         )
         self.announce_interval = announce_interval
         self.last_announce = 0
@@ -59,27 +115,36 @@ class PageNode:
         self.destination.set_link_established_callback(self.on_connect)

         self._announce_thread = threading.Thread(
-            target=self._announce_loop, daemon=True
+            target=self._announce_loop,
+            daemon=True,
         )
         self._announce_thread.start()
         self._refresh_thread = threading.Thread(target=self._refresh_loop, daemon=True)
         self._refresh_thread.start()

     def register_pages(self):
-        with self._lock:
-            self.servedpages = []
-            self._scan_pages(self.pagespath)
-
-        if not os.path.isfile(os.path.join(self.pagespath, "index.mu")):
+        """Scan pages directory and register request handlers for all .mu files."""
+        pages = self._scan_pages(self.pagespath)
+
+        with self._lock:
+            self.servedpages = pages
+
+        pagespath = Path(self.pagespath).resolve()
+
+        if not (pagespath / "index.mu").is_file():
             self.destination.register_request_handler(
                 "/page/index.mu",
                 response_generator=self.serve_default_index,
                 allow=RNS.Destination.ALLOW_ALL,
             )

-        for full_path in self.servedpages:
-            rel = full_path[len(self.pagespath) :]
-            request_path = f"/page{rel}"
+        for full_path in pages:
+            page_path = Path(full_path).resolve()
+            try:
+                rel = page_path.relative_to(pagespath).as_posix()
+            except ValueError:
+                continue
+            request_path = f"/page/{rel}"
             self.destination.register_request_handler(
                 request_path,
                 response_generator=self.serve_page,
@@ -87,13 +152,21 @@ class PageNode:
             )

     def register_files(self):
-        with self._lock:
-            self.servedfiles = []
-            self._scan_files(self.filespath)
-
-        for full_path in self.servedfiles:
-            rel = full_path[len(self.filespath) :]
-            request_path = f"/file{rel}"
+        """Scan files directory and register request handlers for all files."""
+        files = self._scan_files(self.filespath)
+
+        with self._lock:
+            self.servedfiles = files
+
+        filespath = Path(self.filespath).resolve()
+
+        for full_path in files:
+            file_path = Path(full_path).resolve()
+            try:
+                rel = file_path.relative_to(filespath).as_posix()
+            except ValueError:
+                continue
+            request_path = f"/file/{rel}"
             self.destination.register_request_handler(
                 request_path,
                 response_generator=self.serve_file,
@@ -102,140 +175,278 @@ class PageNode:
             )

     def _scan_pages(self, base):
-        for entry in os.listdir(base):
-            if entry.startswith("."):
+        """Return a list of page paths under the given directory, excluding .allowed files."""
+        base_path = Path(base)
+        if not base_path.exists():
+            return []
+        served = []
+        for entry in base_path.iterdir():
+            if entry.name.startswith("."):
                 continue
-            path = os.path.join(base, entry)
-            if os.path.isdir(path):
-                self._scan_pages(path)
-            elif os.path.isfile(path) and not entry.endswith(".allowed"):
-                self.servedpages.append(path)
+            if entry.is_dir():
+                served.extend(self._scan_pages(entry))
+            elif entry.is_file() and not entry.name.endswith(".allowed"):
+                served.append(str(entry))
+        return served

     def _scan_files(self, base):
-        for entry in os.listdir(base):
-            if entry.startswith("."):
+        """Return all file paths under the given directory."""
+        base_path = Path(base)
+        if not base_path.exists():
+            return []
+        served = []
+        for entry in base_path.iterdir():
+            if entry.name.startswith("."):
                 continue
-            path = os.path.join(base, entry)
-            if os.path.isdir(path):
-                self._scan_files(path)
-            elif os.path.isfile(path):
-                self.servedfiles.append(path)
+            if entry.is_dir():
+                served.extend(self._scan_files(entry))
+            elif entry.is_file():
+                served.append(str(entry))
+        return served

     @staticmethod
     def serve_default_index(
-        path, data, request_id, link_id, remote_identity, requested_at
+        _path,
+        _data,
+        _request_id,
+        _link_id,
+        _remote_identity,
+        _requested_at,
     ):
+        """Serve the default index page when no index.mu file exists."""
         return DEFAULT_INDEX.encode("utf-8")

     def serve_page(
-        self, path, data, request_id, link_id, remote_identity, requested_at
+        self,
+        path,
+        data,
+        _request_id,
+        _link_id,
+        remote_identity,
+        _requested_at,
     ):
-        file_path = path.replace("/page", self.pagespath, 1)
+        """Serve a .mu page file, executing it as a script if it has a shebang."""
+        pagespath = Path(self.pagespath).resolve()
+        relative_path = path[6:] if path.startswith("/page/") else path[5:]
+        file_path = (pagespath / relative_path).resolve()
+
+        if not str(file_path).startswith(str(pagespath)):
+            return DEFAULT_NOTALLOWED.encode("utf-8")
+        is_script = False
+        file_content = None
         try:
-            with open(file_path, "rb") as _f:
-                first_line = _f.readline()
+            with file_path.open("rb") as file_handle:
+                first_line = file_handle.readline()
                 is_script = first_line.startswith(b"#!")
-        except Exception:
-            is_script = False
-        if is_script and os.access(file_path, os.X_OK):
-            # Note: The execution of file_path is intentional here, as some pages are designed to be executable scripts.
-            # This is acknowledged as a potential security risk if untrusted input can control file_path.
+                file_handle.seek(0)
+                if not is_script:
+                    return file_handle.read()
+                file_content = file_handle.read()
+        except FileNotFoundError:
+            return DEFAULT_NOTALLOWED.encode("utf-8")
+        except OSError as err:
+            RNS.log(f"Error reading page {file_path}: {err}", RNS.LOG_ERROR)
+            return DEFAULT_NOTALLOWED.encode("utf-8")
+
+        if is_script and os.access(str(file_path), os.X_OK):
             try:
-                result = subprocess.run([file_path], stdout=subprocess.PIPE, check=True)  # noqa: S603
+                env_map = os.environ.copy()
+                if _link_id is not None:
+                    env_map["link_id"] = RNS.hexrep(_link_id, delimit=False)
+                if remote_identity is not None:
+                    env_map["remote_identity"] = RNS.hexrep(
+                        remote_identity.hash,
+                        delimit=False,
+                    )
+                if data is not None and isinstance(data, dict):
+                    for e in data:
+                        if isinstance(e, str) and (
+                            e.startswith("field_") or e.startswith("var_")
+                        ):
+                            env_map[e] = data[e]
+                result = subprocess.run(  # noqa: S603
+                    [str(file_path)],
+                    stdout=subprocess.PIPE,
+                    check=True,
+                    env=env_map,
+                )
                 return result.stdout
-            except Exception:
-                self.logger.exception("Error executing script page")
-        with open(file_path, "rb") as f:
-            return f.read()
+            except Exception as e:
+                RNS.log(f"Error executing script page: {e}", RNS.LOG_ERROR)
+        if file_content is not None:
+            return file_content
+        try:
+            return self._read_file_bytes(file_path)
+        except FileNotFoundError:
+            return DEFAULT_NOTALLOWED.encode("utf-8")
+        except OSError as err:
+            RNS.log(f"Error reading page fallback {file_path}: {err}", RNS.LOG_ERROR)
+            return DEFAULT_NOTALLOWED.encode("utf-8")
+
+    @staticmethod
+    def _read_file_bytes(file_path):
+        """Read a file's bytes and return the contents."""
+        with file_path.open("rb") as file_handle:
+            return file_handle.read()

     def serve_file(
-        self, path, data, request_id, link_id, remote_identity, requested_at
+        self,
+        path,
+        _data,
+        _request_id,
+        _link_id,
+        _remote_identity,
+        _requested_at,
     ):
-        file_path = path.replace("/file", self.filespath, 1)
+        """Serve a file from the files directory."""
+        filespath = Path(self.filespath).resolve()
+        relative_path = path[6:] if path.startswith("/file/") else path[5:]
+        file_path = (filespath / relative_path).resolve()
+
+        if not str(file_path).startswith(str(filespath)):
+            return DEFAULT_NOTALLOWED.encode("utf-8")
+
         return [
-            open(file_path, "rb"),
-            {"name": os.path.basename(file_path).encode("utf-8")},
+            file_path.open("rb"),
+            {"name": file_path.name.encode("utf-8")},
         ]

     def on_connect(self, link):
-        pass
+        """Handle new link connections."""

     def _announce_loop(self):
+        """Periodically announce the node until shutdown is requested."""
+        interval_seconds = max(self.announce_interval, 0) * 60
         try:
             while not self._stop_event.is_set():
-                if time.time() - self.last_announce > self.announce_interval:
-                    if self.name:
-                        self.destination.announce(app_data=self.name.encode("utf-8"))
-                    else:
-                        self.destination.announce()
-                    self.last_announce = time.time()
-                time.sleep(1)
-        except Exception:
-            self.logger.exception("Error in announce loop")
+                now = time.time()
+                if (
+                    self.last_announce == 0
+                    or now - self.last_announce >= interval_seconds
+                ):
+                    try:
+                        if self.name:
+                            self.destination.announce(
+                                app_data=self.name.encode("utf-8"),
+                            )
+                        else:
+                            self.destination.announce()
+                        self.last_announce = time.time()
+                    except (TypeError, ValueError) as announce_error:
+                        RNS.log(
+                            f"Error during announce: {announce_error}",
+                            RNS.LOG_ERROR,
+                        )
+                wait_time = max(
+                    (self.last_announce + interval_seconds) - time.time()
+                    if self.last_announce
+                    else 0,
+                    1,
+                )
+                self._stop_event.wait(min(wait_time, 60))
+        except Exception as e:
+            RNS.log(f"Error in announce loop: {e}", RNS.LOG_ERROR)

     def _refresh_loop(self):
+        """Refresh page and file registrations at configured intervals."""
         try:
             while not self._stop_event.is_set():
                 now = time.time()
                 if (
                     self.page_refresh_interval > 0
-                    and now - self.last_page_refresh > self.page_refresh_interval
+                    and now - self.last_page_refresh >= self.page_refresh_interval
                 ):
                     self.register_pages()
-                    self.last_page_refresh = now
+                    self.last_page_refresh = time.time()
                 if (
                     self.file_refresh_interval > 0
-                    and now - self.last_file_refresh > self.file_refresh_interval
+                    and now - self.last_file_refresh >= self.file_refresh_interval
                 ):
                     self.register_files()
-                    self.last_file_refresh = now
-                time.sleep(1)
-        except Exception:
-            self.logger.exception("Error in refresh loop")
+                    self.last_file_refresh = time.time()
+
+                wait_candidates = []
+                if self.page_refresh_interval > 0:
+                    wait_candidates.append(
+                        max(
+                            (self.last_page_refresh + self.page_refresh_interval)
+                            - time.time(),
+                            0.5,
+                        ),
+                    )
+                if self.file_refresh_interval > 0:
+                    wait_candidates.append(
+                        max(
+                            (self.last_file_refresh + self.file_refresh_interval)
+                            - time.time(),
+                            0.5,
+                        ),
+                    )
+
+                wait_time = min(wait_candidates) if wait_candidates else 1.0
+                self._stop_event.wait(min(wait_time, 60))
+        except Exception as e:
+            RNS.log(f"Error in refresh loop: {e}", RNS.LOG_ERROR)

     def shutdown(self):
-        self.logger.info("Shutting down PageNode...")
+        """Gracefully shutdown the PageNode and cleanup resources."""
+        RNS.log("Shutting down PageNode...", RNS.LOG_INFO)
         self._stop_event.set()
         try:
             self._announce_thread.join(timeout=5)
             self._refresh_thread.join(timeout=5)
-        except Exception:
-            self.logger.exception("Error waiting for threads to shut down")
+        except Exception as e:
+            RNS.log(f"Error waiting for threads to shut down: {e}", RNS.LOG_ERROR)
         try:
             if hasattr(self.destination, "close"):
                 self.destination.close()
-        except Exception:
-            self.logger.exception("Error closing RNS destination")
+        except Exception as e:
+            RNS.log(f"Error closing RNS destination: {e}", RNS.LOG_ERROR)


 def main():
+    """Run the RNS page node application."""
     parser = argparse.ArgumentParser(description="Minimal Reticulum Page Node")
     parser.add_argument(
-        "-c", "--config", dest="configpath", help="Reticulum config path", default=None
+        "node_config",
+        nargs="?",
+        help="Path to rns-page-node config file",
+        default=None,
+    )
+    parser.add_argument(
+        "-c",
+        "--config",
+        dest="configpath",
+        help="Reticulum config path",
+        default=None,
     )
     parser.add_argument(
         "-p",
         "--pages-dir",
         dest="pages_dir",
         help="Pages directory",
-        default=os.path.join(os.getcwd(), "pages"),
+        default=str(Path.cwd() / "pages"),
     )
     parser.add_argument(
         "-f",
         "--files-dir",
         dest="files_dir",
         help="Files directory",
-        default=os.path.join(os.getcwd(), "files"),
+        default=str(Path.cwd() / "files"),
     )
     parser.add_argument(
-        "-n", "--node-name", dest="node_name", help="Node display name", default=None
+        "-n",
+        "--node-name",
+        dest="node_name",
+        help="Node display name",
+        default=None,
     )
     parser.add_argument(
         "-a",
         "--announce-interval",
         dest="announce_interval",
         type=int,
-        help="Announce interval in seconds",
+        help="Announce interval in minutes",
         default=360,
     )
     parser.add_argument(
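The rewritten `serve_page` and `serve_file` above resolve the requested path against the pages/files directory and reject anything that escapes it. A small self-contained illustration of that containment idea (a sketch only, not code from the diff):

```python
from pathlib import Path


def is_contained(base_dir, requested):
    # Mirror the check used by serve_page/serve_file: resolve both paths and
    # require the resolved target to stay under the resolved base directory.
    base = Path(base_dir).resolve()
    target = (base / requested).resolve()
    return str(target).startswith(str(base))


# A normal page path stays inside the directory...
print(is_contained("pages", "blog/post.mu"))      # True
# ...while a traversal attempt resolves outside it and is rejected.
print(is_contained("pages", "../../etc/passwd"))  # False
```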
@@ -243,7 +454,7 @@ def main():
         "--identity-dir",
         dest="identity_dir",
         help="Directory to store node identity",
-        default=os.path.join(os.getcwd(), "node-config"),
+        default=str(Path.cwd() / "node-config"),
     )
     parser.add_argument(
         "--page-refresh-interval",
@@ -269,30 +480,79 @@ def main():
     )
     args = parser.parse_args()

-    configpath = args.configpath
-    pages_dir = args.pages_dir
-    files_dir = args.files_dir
-    node_name = args.node_name
-    announce_interval = args.announce_interval
-    identity_dir = args.identity_dir
-    page_refresh_interval = args.page_refresh_interval
-    file_refresh_interval = args.file_refresh_interval
-    numeric_level = getattr(logging, args.log_level.upper(), logging.INFO)
-    logging.basicConfig(
-        level=numeric_level, format="%(asctime)s %(name)s [%(levelname)s] %(message)s"
-    )
+    config = {}
+    if args.node_config:
+        config = load_config(args.node_config)
+
+    def get_config_value(arg_value, arg_default, config_key, value_type=str):
+        """Get value from CLI args, config file, or default.
+
+        Priority: CLI arg > config file > default
+        """
+        if arg_value != arg_default:
+            return arg_value
+        if config_key in config:
+            try:
+                if value_type is int:
+                    return int(config[config_key])
+                return config[config_key]
+            except ValueError:
+                RNS.log(
+                    f"Invalid {value_type.__name__} value for {config_key}: {config[config_key]}",
+                    RNS.LOG_WARNING,
+                )
+        return arg_default
+
+    configpath = get_config_value(args.configpath, None, "reticulum-config")
+    pages_dir = get_config_value(args.pages_dir, str(Path.cwd() / "pages"), "pages-dir")
+    files_dir = get_config_value(args.files_dir, str(Path.cwd() / "files"), "files-dir")
+    node_name = get_config_value(args.node_name, None, "node-name")
+    announce_interval = get_config_value(
+        args.announce_interval,
+        360,
+        "announce-interval",
+        int,
+    )
+    identity_dir = get_config_value(
+        args.identity_dir,
+        str(Path.cwd() / "node-config"),
+        "identity-dir",
+    )
+    page_refresh_interval = get_config_value(
+        args.page_refresh_interval,
+        0,
+        "page-refresh-interval",
+        int,
+    )
+    file_refresh_interval = get_config_value(
+        args.file_refresh_interval,
+        0,
+        "file-refresh-interval",
+        int,
+    )
+    log_level = get_config_value(args.log_level, "INFO", "log-level")
+
+    # Set RNS log level based on command line argument
+    log_level_map = {
+        "CRITICAL": RNS.LOG_CRITICAL,
+        "ERROR": RNS.LOG_ERROR,
+        "WARNING": RNS.LOG_WARNING,
+        "INFO": RNS.LOG_INFO,
+        "DEBUG": RNS.LOG_DEBUG,
+    }
+    RNS.loglevel = log_level_map.get(log_level.upper(), RNS.LOG_INFO)
+
     RNS.Reticulum(configpath)
-    os.makedirs(identity_dir, exist_ok=True)
-    identity_file = os.path.join(identity_dir, "identity")
-    if os.path.isfile(identity_file):
-        identity = RNS.Identity.from_file(identity_file)
+    Path(identity_dir).mkdir(parents=True, exist_ok=True)
+    identity_file = Path(identity_dir) / "identity"
+    if identity_file.is_file():
+        identity = RNS.Identity.from_file(str(identity_file))
     else:
         identity = RNS.Identity()
-        identity.to_file(identity_file)
+        identity.to_file(str(identity_file))

-    os.makedirs(pages_dir, exist_ok=True)
-    os.makedirs(files_dir, exist_ok=True)
+    Path(pages_dir).mkdir(parents=True, exist_ok=True)
+    Path(files_dir).mkdir(parents=True, exist_ok=True)

     node = PageNode(
         identity,
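The nested `get_config_value` helper above applies the precedence documented in the README: command-line arguments win over the config file, which wins over defaults. A tiny standalone illustration of that ordering (hypothetical values, not from the diff):

```python
def resolve_setting(cli_value, cli_default, file_config, config_key):
    # CLI wins only when it differs from its own default; otherwise the
    # config file value is used; otherwise the default. Mirrors the
    # precedence implemented by get_config_value above.
    if cli_value != cli_default:
        return cli_value
    return file_config.get(config_key, cli_default)


file_config = {"announce-interval": "720"}

# CLI left at its default -> the config file value is used.
print(resolve_setting(360, 360, file_config, "announce-interval"))  # "720"
# CLI explicitly set -> it overrides the config file.
print(resolve_setting(60, 360, file_config, "announce-interval"))   # 60
```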
@@ -303,13 +563,14 @@ def main():
         page_refresh_interval,
         file_refresh_interval,
     )
-    logger.info("Page node running. Press Ctrl-C to exit.")
+    RNS.log("Page node running. Press Ctrl-C to exit.", RNS.LOG_INFO)
+    RNS.log(f"Node address: {RNS.prettyhexrep(node.destination.hash)}", RNS.LOG_INFO)

     try:
         while True:
             time.sleep(1)
     except KeyboardInterrupt:
-        logger.info("Keyboard interrupt received, shutting down...")
+        RNS.log("Keyboard interrupt received, shutting down...", RNS.LOG_INFO)
         node.shutdown()

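The run loop above sleeps in one-second steps and relies on KeyboardInterrupt for cleanup. A sketch of one common alternative, not part of this change, that lets SIGTERM (for example from a container runtime) reach the same shutdown path through a threading.Event:

import signal
import threading

stop = threading.Event()

def handle_term(signum, frame):
    stop.set()  # let SIGTERM trigger the same cleanup path as Ctrl-C

signal.signal(signal.SIGTERM, handle_term)

try:
    while not stop.wait(timeout=1):  # wakes once per second, like time.sleep(1)
        pass
except KeyboardInterrupt:
    pass
# node.shutdown() would run here in the real script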
31
setup.py
@@ -1,31 +0,0 @@
-from setuptools import find_packages, setup
-
-with open("README.md", encoding="utf-8") as fh:
-    long_description = fh.read()
-
-setup(
-    name="rns-page-node",
-    version="1.0.0",
-    author="Sudo-Ivan",
-    author_email="",
-    description="A simple way to serve pages and files over the Reticulum network.",
-    long_description=long_description,
-    long_description_content_type="text/markdown",
-    url="https://github.com/Sudo-Ivan/rns-page-node",
-    packages=find_packages(),
-    license="GPL-3.0",
-    python_requires=">=3.10",
-    install_requires=[
-        "rns>=1.0.0,<1.5.0",
-    ],
-    entry_points={
-        "console_scripts": [
-            "rns-page-node=rns_page_node.main:main",
-        ],
-    },
-    classifiers=[
-        "Programming Language :: Python :: 3",
-        "License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
-        "Operating System :: OS Independent",
-    ],
-)
28
tests/run_tests.sh
Normal file → Executable file
@@ -9,11 +9,33 @@ rm -rf config node-config pages files node.log
 mkdir -p config node-config pages files

 # Create a sample page and a test file
-cat > pages/index.mu << EOF
->Test Page
-This is a test page.
+cat > pages/index.mu << 'EOF'
+#!/usr/bin/env python3
+import os
+
+print("`F0f0`_`Test Page`_")
+print("This is a test page with environment variable support.")
+print()
+
+print("`F0f0`_`Environment Variables`_")
+params = []
+for key, value in os.environ.items():
+    if key.startswith(('field_', 'var_')):
+        params.append(f"- `Faaa`{key}`f: `F0f0`{value}`f")
+
+if params:
+    print("\n".join(params))
+else:
+    print("- No parameters received")
+
+print()
+print("`F0f0`_`Remote Identity`_")
+remote_id = os.environ.get('remote_identity', '33aff86b736acd47dca07e84630fd192')  # Mock for testing
+print(f"`Faaa`{remote_id}`f")
 EOF

+chmod +x pages/index.mu
+
 cat > files/text.txt << EOF
 This is a test file.
 EOF
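The rewritten index.mu is no longer a static page: it is an executable script that renders request parameters from field_- and var_-prefixed environment variables plus remote_identity. The node-side execution model is not shown in this diff; the sketch below illustrates the behaviour the test page assumes (executable pages run as subprocesses with the request data exported into the environment), and render_page is a hypothetical name, not PageNode's actual method.

import os
import stat
import subprocess

def render_page(path, request_data=None, remote_identity=None):
    # Illustrative only: export field_/var_ request keys and the remote identity
    # into the child environment, then serve the script's stdout.
    env = dict(os.environ)
    for key, value in (request_data or {}).items():
        if key.startswith(("field_", "var_")):
            env[key] = str(value)
    if remote_identity:
        env["remote_identity"] = remote_identity
    if os.stat(path).st_mode & stat.S_IXUSR:  # executable page: run it
        result = subprocess.run([path], env=env, capture_output=True, text=True, check=False)
        return result.stdout
    with open(path, "r", encoding="utf-8") as fh:  # static page: serve as-is
        return fh.read()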
@@ -20,7 +20,11 @@ server_identity = RNS.Identity.from_file(identity_file)

 # Create a destination to the server node
 destination = RNS.Destination(
-    server_identity, RNS.Destination.OUT, RNS.Destination.SINGLE, "nomadnetwork", "node"
+    server_identity,
+    RNS.Destination.OUT,
+    RNS.Destination.SINGLE,
+    "nomadnetwork",
+    "node",
 )

 # Ensure we know a path to the destination
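The hunk ends at the path-discovery step. In Reticulum clients this is usually done by asking Transport for a path before opening the link; a rough sketch of that pattern (the timeout value is arbitrary):

import time

import RNS

def wait_for_path(destination_hash, timeout=30):
    # Ask the network for a route if we do not already know one.
    if not RNS.Transport.has_path(destination_hash):
        RNS.Transport.request_path(destination_hash)
        deadline = time.time() + timeout
        while not RNS.Transport.has_path(destination_hash):
            if time.time() > deadline:
                raise TimeoutError("no path to destination")
            time.sleep(0.1)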
@@ -36,6 +40,18 @@ global_link = RNS.Link(destination)
 responses = {}
 done_event = threading.Event()

+# Test data for environment variables
+test_data_dict = {
+    "var_field_test": "dictionary_value",
+    "var_field_message": "hello_world",
+    "var_action": "test_action",
+}
+test_data_dict2 = {
+    "field_username": "testuser",
+    "field_message": "hello_from_form",
+    "var_action": "submit",
+}
+

 # Callback for page response
 def on_page(response):
@@ -44,10 +60,45 @@ def on_page(response):
         text = data.decode("utf-8")
     else:
         text = str(data)
-    print("Received page:")
+    print("Received page (no data):")
     print(text)
     responses["page"] = text
-    if "file" in responses:
+    check_responses()
+
+
+# Callback for page response with dictionary data
+def on_page_dict(response):
+    data = response.response
+    if isinstance(data, bytes):
+        text = data.decode("utf-8")
+    else:
+        text = str(data)
+    print("Received page (dict data):")
+    print(text)
+    responses["page_dict"] = text
+    check_responses()
+
+
+# Callback for page response with second dict data
+def on_page_dict2(response):
+    data = response.response
+    if isinstance(data, bytes):
+        text = data.decode("utf-8")
+    else:
+        text = str(data)
+    print("Received page (dict2 data):")
+    print(text)
+    responses["page_dict2"] = text
+    check_responses()
+
+
+def check_responses():
+    if (
+        "page" in responses
+        and "page_dict" in responses
+        and "page_dict2" in responses
+        and "file" in responses
+    ):
         done_event.set()


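The three page callbacks added here differ only in their label and the key they store the response under. A sketch of a small factory that would generate them, offered as a possible follow-up rather than as part of this change:

def make_page_callback(label, key, responses, on_done):
    # Build one callback per (label, response key) pair.
    def _callback(response):
        data = response.response
        text = data.decode("utf-8") if isinstance(data, bytes) else str(data)
        print(f"Received page ({label}):")
        print(text)
        responses[key] = text
        on_done()  # e.g. check_responses in the test script
    return _callback

# Wiring with the names used above:
# on_page = make_page_callback("no data", "page", responses, check_responses)
# on_page_dict = make_page_callback("dict data", "page_dict", responses, check_responses)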
@@ -78,27 +129,100 @@ def on_file(response):
     else:
         print("Received file (unhandled format):", data)
     responses["file"] = str(data)
-    if "page" in responses:
-        done_event.set()
+    check_responses()


-# Request the page and file once the link is established
+# Request the pages and file once the link is established
 def on_link_established(link):
+    # Test page without data
     link.request("/page/index.mu", None, response_callback=on_page)
+    # Test page with dictionary data (simulates var_ prefixed data)
+    link.request("/page/index.mu", test_data_dict, response_callback=on_page_dict)
+    # Test page with form field data (simulates field_ prefixed data)
+    link.request("/page/index.mu", test_data_dict2, response_callback=on_page_dict2)
+    # Test file serving
     link.request("/file/text.txt", None, response_callback=on_file)


 # Register callbacks
 global_link.set_link_established_callback(on_link_established)
-global_link.set_link_closed_callback(lambda l: done_event.set())
+global_link.set_link_closed_callback(lambda link: done_event.set())

 # Wait for responses or timeout
 if not done_event.wait(timeout=30):
     print("Test timed out.", file=sys.stderr)
     sys.exit(1)

-if responses.get("page") and responses.get("file"):
-    print("Tests passed!")
+# Validate test results
+def validate_test_results():
+    """Validate that all responses contain expected content"""
+    # Check basic page response (no data)
+    if "page" not in responses:
+        print("ERROR: No basic page response received", file=sys.stderr)
+        return False
+
+    page_content = responses["page"]
+    if "No parameters received" not in page_content:
+        print("ERROR: Basic page should show 'No parameters received'", file=sys.stderr)
+        return False
+    if "33aff86b736acd47dca07e84630fd192" not in page_content:
+        print("ERROR: Basic page should show mock remote identity", file=sys.stderr)
+        return False
+
+    # Check page with dictionary data
+    if "page_dict" not in responses:
+        print("ERROR: No dictionary data page response received", file=sys.stderr)
+        return False
+
+    dict_content = responses["page_dict"]
+    if "var_field_test" not in dict_content or "dictionary_value" not in dict_content:
+        print(
+            "ERROR: Dictionary data page should contain processed environment variables",
+            file=sys.stderr,
+        )
+        return False
+    if "33aff86b736acd47dca07e84630fd192" not in dict_content:
+        print(
+            "ERROR: Dictionary data page should show mock remote identity",
+            file=sys.stderr,
+        )
+        return False
+
+    # Check page with second dictionary data (form fields)
+    if "page_dict2" not in responses:
+        print("ERROR: No dict2 data page response received", file=sys.stderr)
+        return False
+
+    dict2_content = responses["page_dict2"]
+    if "field_username" not in dict2_content or "testuser" not in dict2_content:
+        print(
+            "ERROR: Dict2 data page should contain processed environment variables",
+            file=sys.stderr,
+        )
+        return False
+    if "33aff86b736acd47dca07e84630fd192" not in dict2_content:
+        print(
+            "ERROR: Dict2 data page should show mock remote identity",
+            file=sys.stderr,
+        )
+        return False
+
+    # Check file response
+    if "file" not in responses:
+        print("ERROR: No file response received", file=sys.stderr)
+        return False
+
+    file_content = responses["file"]
+    if "This is a test file" not in file_content:
+        print("ERROR: File content doesn't match expected content", file=sys.stderr)
+        return False
+
+    return True
+
+
+if validate_test_results():
+    print("All tests passed! Environment variable processing works correctly.")
     sys.exit(0)
 else:
     print("Tests failed.", file=sys.stderr)
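validate_test_results repeats the same substring-check-and-report shape for every expectation. A table-driven variant, sketched below and not part of this change, keeps each expectation on one line; the CHECKS rows mirror the assertions above.

import sys

CHECKS = [
    ("page", "No parameters received", "Basic page should show 'No parameters received'"),
    ("page", "33aff86b736acd47dca07e84630fd192", "Basic page should show mock remote identity"),
    ("page_dict", "dictionary_value", "Dictionary data page should contain processed environment variables"),
    ("page_dict2", "testuser", "Dict2 data page should contain processed environment variables"),
    ("file", "This is a test file", "File content doesn't match expected content"),
]

def validate_table_driven(responses):
    # Each row is (response key, required substring, error message).
    ok = True
    for key, needle, message in CHECKS:
        if needle not in responses.get(key, ""):
            print(f"ERROR: {message}", file=sys.stderr)
            ok = False
    return ok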
@@ -34,7 +34,11 @@ server_identity = RNS.Identity.recall(destination_hash)
 print(f"Recalled server identity for {DESTINATION_HEX}")

 destination = RNS.Destination(
-    server_identity, RNS.Destination.OUT, RNS.Destination.SINGLE, "nomadnetwork", "node"
+    server_identity,
+    RNS.Destination.OUT,
+    RNS.Destination.SINGLE,
+    "nomadnetwork",
+    "node",
 )
 link = RNS.Link(destination)

@@ -53,9 +57,9 @@ def on_page(response):


 link.set_link_established_callback(
-    lambda l: l.request("/page/index.mu", None, response_callback=on_page)
+    lambda link: link.request("/page/index.mu", None, response_callback=on_page),
 )
-link.set_link_closed_callback(lambda l: done_event.set())
+link.set_link_closed_callback(lambda link: done_event.set())

 if not done_event.wait(timeout=30):
     print("Timed out waiting for page", file=sys.stderr)
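Taken together, the client-side hunks follow one pattern: recall the server identity, build an OUT/SINGLE nomadnetwork.node destination, open a link, issue requests from the link-established callback, and wait on an event. A condensed sketch of that flow using only the calls shown above; the destination hash, page path, and timeout are placeholders.

import sys
import threading

import RNS

done = threading.Event()

def on_page(response):
    data = response.response
    print(data.decode("utf-8") if isinstance(data, bytes) else str(data))
    done.set()

RNS.Reticulum(None)  # default Reticulum config directory
destination_hash = bytes.fromhex("00" * 16)  # placeholder: the node's 16-byte address
server_identity = RNS.Identity.recall(destination_hash)  # assumes the node has been announced
destination = RNS.Destination(
    server_identity,
    RNS.Destination.OUT,
    RNS.Destination.SINGLE,
    "nomadnetwork",
    "node",
)
link = RNS.Link(destination)
link.set_link_established_callback(
    lambda link: link.request("/page/index.mu", None, response_callback=on_page),
)
link.set_link_closed_callback(lambda link: done.set())

if not done.wait(timeout=30):
    print("Timed out waiting for page", file=sys.stderr)
    sys.exit(1)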