Compare commits

27 Commits

| SHA1 |
|---|
| 7d50dcf5b4 |
| a8b09611a1 |
| d0dd9e88c4 |
| 09d84c2533 |
| 06ab592cb9 |
| 843c3a1a56 |
| 37ac95753c |
| a493c57ad2 |
| 698bfb2e81 |
| eaf2e544c4 |
| 89f88e24ea |
| a47b78c13d |
| 4831f5261d |
| 46f90e461f |
| a1bbe8bc8a |
| 7092883834 |
| 74f5174254 |
| f9699c060a |
| a3ccd49439 |
| ece0473beb |
| 89065f6e0a |
| e873d8e754 |
| 5561205b3e |
| d6601adb38 |
| 98c71a888e |
| 65bd70c05a |
| b77b73576f |
.deepsource.toml (new file, +7)

@@ -0,0 +1,7 @@
+version = 1
+
+[[analyzers]]
+name = "python"
+
+[analyzers.meta]
+runtime_version = "3.x.x"
.github/workflows/docker-test.yml (vendored, new file, +27)

@@ -0,0 +1,27 @@
+name: Docker Build Test
+
+on:
+  push:
+    branches:
+      - main
+  pull_request:
+    branches:
+      - main
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+    permissions:
+      contents: read
+    strategy:
+      matrix:
+        python-version: ["3.10", "3.11", "3.12", "3.13"]
+
+    steps:
+      - uses: actions/checkout@v4
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.python-version }}
+      - name: Build Docker Image
+        run: docker build . --file Dockerfile --build-arg PYTHON_VERSION=${{ matrix.python-version }} --tag lxmfy-test:${{ matrix.python-version }}
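The matrix build above can be reproduced locally with the same command the workflow runs; a minimal sketch, assuming Docker is installed (the `lxmfy-test` tag is copied verbatim from the workflow):

```bash
# Same invocation as the "Build Docker Image" step, pinned to one matrix entry
docker build . --file Dockerfile \
  --build-arg PYTHON_VERSION=3.12 \
  --tag lxmfy-test:3.12
```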
.github/workflows/docker.yml (vendored, 28 lines changed)

@@ -22,6 +22,11 @@ jobs:
       - name: Checkout repository
        uses: actions/checkout@v4

+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v3
+        with:
+          platforms: amd64,arm64
+
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

@@ -49,20 +54,33 @@ jobs:
        uses: docker/build-push-action@v5
        with:
          context: .
          file: Dockerfile
          platforms: linux/amd64,linux/arm64
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max

+      - name: Extract metadata (tags, labels) for Docker (rootless)
+        id: meta_rootless
+        uses: docker/metadata-action@v5
+        with:
+          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}-rootless
+          tags: |
+            type=raw,value=latest-rootless,enable={{is_default_branch}}
+            type=ref,event=branch,prefix=,suffix=-rootless,enable={{is_default_branch}}
+            type=semver,pattern={{version}},suffix=-rootless
+            type=semver,pattern={{major}}.{{minor}},suffix=-rootless
+            type=sha,format=short,suffix=-rootless
+
      - name: Build and push rootless Docker image
        uses: docker/build-push-action@v5
        with:
          context: .
-          file: Dockerfile.rootless
+          file: ./Dockerfile.rootless
          platforms: linux/amd64,linux/arm64
          push: ${{ github.event_name != 'pull_request' }}
-          tags: ${{ steps.meta.outputs.tags }}-rootless
-          labels: ${{ steps.meta.outputs.labels }}
+          tags: ${{ steps.meta_rootless.outputs.tags }}
+          labels: ${{ steps.meta_rootless.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
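The QEMU and Buildx setup steps are what let a single runner produce both `linux/amd64` and `linux/arm64` images. A rough local equivalent, assuming Docker with the buildx plugin is available (the builder name and tag are illustrative):

```bash
# Register QEMU emulators and create a multi-arch capable builder
docker run --privileged --rm tonistiigi/binfmt --install arm64
docker buildx create --name multiarch --use

# Build the same platform set the workflow targets
docker buildx build --platform linux/amd64,linux/arm64 \
  -f Dockerfile -t rns-page-node:multiarch .
```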
.github/workflows/publish.yml (vendored, new file, +100)

@@ -0,0 +1,100 @@
+name: Publish Python 🐍 distribution 📦 to PyPI
+
+on:
+  push:
+    tags:
+      - 'v*'
+  workflow_dispatch:
+    inputs:
+      version:
+        description: 'Version to release (e.g., 0.6.8)'
+        required: true
+        type: string
+
+permissions:
+  contents: read
+  id-token: write
+
+jobs:
+  build:
+    name: Build distribution 📦
+    runs-on: ubuntu-latest
+    permissions:
+      contents: read
+      id-token: write
+    steps:
+      - uses: actions/checkout@v4.2.2
+        with:
+          persist-credentials: false
+      - name: Set up Python
+        uses: actions/setup-python@v5.3.0
+        with:
+          python-version: "3.13"
+      - name: Install pypa/build
+        run: python3 -m pip install build --user
+      - name: Build a binary wheel and a source tarball
+        run: python3 -m build
+      - name: Store the distribution packages
+        uses: actions/upload-artifact@v4.5.0
+        with:
+          name: python-package-distributions
+          path: dist/
+
+  publish-to-pypi:
+    name: Publish Python 🐍 distribution 📦 to PyPI
+    if: startsWith(github.ref, 'refs/tags/')
+    needs:
+      - build
+    runs-on: ubuntu-latest
+    environment:
+      name: pypi
+      url: https://pypi.org/p/rns-page-node
+    permissions:
+      id-token: write
+      contents: read
+
+    steps:
+      - name: Download all the dists
+        uses: actions/download-artifact@v4.1.8
+        with:
+          name: python-package-distributions
+          path: dist/
+      - name: Publish distribution 📦 to PyPI
+        uses: pypa/gh-action-pypi-publish@v1.12.3
+
+  github-release:
+    name: Sign the Python 🐍 distribution 📦 and create GitHub Release
+    needs:
+      - publish-to-pypi
+    runs-on: ubuntu-latest
+    permissions:
+      contents: write
+      id-token: write
+
+    steps:
+      - name: Download all the dists
+        uses: actions/download-artifact@v4.1.8
+        with:
+          name: python-package-distributions
+          path: dist/
+      - name: Sign the dists with Sigstore
+        uses: sigstore/gh-action-sigstore-python@v3.0.0
+        with:
+          inputs: >-
+            ./dist/*.tar.gz
+            ./dist/*.whl
+      - name: Create GitHub Release
+        env:
+          GITHUB_TOKEN: ${{ github.token }}
+        run: >-
+          gh release create
+          "$GITHUB_REF_NAME"
+          --repo "$GITHUB_REPOSITORY"
+          --notes ""
+      - name: Upload artifact signatures to GitHub Release
+        env:
+          GITHUB_TOKEN: ${{ github.token }}
+        run: >-
+          gh release upload
+          "$GITHUB_REF_NAME" dist/**
+          --repo "$GITHUB_REPOSITORY"
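Publishing itself relies on PyPI trusted publishing (the `id-token: write` permission plus `pypa/gh-action-pypi-publish`), so no API token is stored in the repository. The build job is the standard pypa/build flow and can be reproduced locally; a sketch, assuming a recent Python with pip:

```bash
python3 -m pip install --user build
python3 -m build   # writes the sdist and wheel into dist/
ls dist/           # the artifacts the workflow uploads, signs, and publishes
```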
Dockerfile (17 lines changed)

@@ -1,4 +1,5 @@
-FROM python:3.13-alpine
+ARG PYTHON_VERSION=3.13
+FROM python:${PYTHON_VERSION}-alpine

 LABEL org.opencontainers.image.source="https://github.com/Sudo-Ivan/rns-page-node"
 LABEL org.opencontainers.image.description="A simple way to serve pages and files over the Reticulum network."
@@ -7,11 +8,17 @@ LABEL org.opencontainers.image.authors="Sudo-Ivan"

 WORKDIR /app

-COPY requirements.txt ./
-COPY setup.py ./
 RUN apk add --no-cache gcc python3-dev musl-dev linux-headers

+RUN pip install poetry
+ENV POETRY_VIRTUALENVS_IN_PROJECT=true
+
+COPY pyproject.toml poetry.lock* ./
 COPY README.md ./
 COPY rns_page_node ./rns_page_node

-RUN pip install --upgrade pip setuptools wheel && pip install -r requirements.txt .
+RUN poetry install --no-interaction --no-ansi

-ENTRYPOINT ["rns-page-node"]
+ENV PATH="/app/.venv/bin:$PATH"
+
+ENTRYPOINT ["poetry", "run", "rns-page-node"]
Dockerfile.build (18 lines changed)

@@ -1,18 +1,18 @@
 FROM python:3.13-alpine AS builder

 RUN apk update
-RUN apk add build-base libffi-dev cargo pkgconfig
+RUN apk add --no-cache build-base libffi-dev cargo pkgconfig gcc python3-dev musl-dev linux-headers

 WORKDIR /src

-COPY setup.py ./
+RUN pip install poetry
+
 COPY pyproject.toml ./
 COPY README.md ./
 COPY rns_page_node ./rns_page_node

-RUN pip install --upgrade pip setuptools wheel
-
-RUN pip wheel . --no-deps --wheel-dir /src/dist
+RUN poetry build --format wheel

 FROM scratch AS dist

 COPY --from=builder /src/dist .
Dockerfile.rootless (changed)

@@ -1,4 +1,5 @@
-FROM python:3.13-alpine
+ARG PYTHON_VERSION=3.13
+FROM python:${PYTHON_VERSION}-alpine

 LABEL org.opencontainers.image.source="https://github.com/Sudo-Ivan/rns-page-node"
 LABEL org.opencontainers.image.description="A simple way to serve pages and files over the Reticulum network."
@@ -9,17 +10,19 @@ RUN addgroup -g 1000 app && adduser -D -u 1000 -G app app

 WORKDIR /app

-COPY requirements.txt setup.py README.md ./
 RUN apk add --no-cache gcc python3-dev musl-dev linux-headers

+RUN pip install poetry
+ENV POETRY_VIRTUALENVS_IN_PROJECT=true
+
+COPY pyproject.toml poetry.lock* ./
+COPY README.md ./
 COPY rns_page_node ./rns_page_node

-RUN pip install --upgrade pip setuptools wheel && pip install -r requirements.txt .
+RUN poetry install --no-interaction --no-ansi
+
+ENV PATH="/app/.venv/bin:$PATH"

 USER app

-ENTRYPOINT ["rns-page-node"]
+ENTRYPOINT ["poetry", "run", "rns-page-node"]
Makefile (22 lines changed)

@@ -1,6 +1,9 @@
 # Makefile for rns-page-node

-.PHONY: all build sdist wheel clean install lint format docker-wheels docker-build docker-run docker-build-rootless docker-run-rootless help
+# Detect if docker buildx is available
+DOCKER_BUILD := $(shell docker buildx version >/dev/null 2>&1 && echo "docker buildx build" || echo "docker build")
+
+.PHONY: all build sdist wheel clean install lint format docker-wheels docker-build docker-run docker-build-rootless docker-run-rootless help test docker-test

 all: build

@@ -26,13 +29,13 @@ format:
 	ruff check --fix .

 docker-wheels:
-	docker build --target builder -f Dockerfile.build -t rns-page-node-builder .
+	$(DOCKER_BUILD) --target builder -f Dockerfile.build -t rns-page-node-builder .
 	docker create --name builder-container rns-page-node-builder true
 	docker cp builder-container:/src/dist ./dist
 	docker rm builder-container

 docker-build:
-	docker build -f Dockerfile -t rns-page-node:latest .
+	$(DOCKER_BUILD) $(BUILD_ARGS) -f Dockerfile -t rns-page-node:latest .

 docker-run:
 	docker run --rm -it \
@@ -47,7 +50,7 @@ docker-run:
 	--announce-interval 360

 docker-build-rootless:
-	docker build -f Dockerfile.rootless -t rns-page-node-rootless:latest .
+	$(DOCKER_BUILD) $(BUILD_ARGS) -f Dockerfile.rootless -t rns-page-node-rootless:latest .

 docker-run-rootless:
 	docker run --rm -it \
@@ -61,6 +64,13 @@ docker-run-rootless:
 	--identity-dir /app/node-config \
 	--announce-interval 360

+test:
+	bash tests/run_tests.sh
+
+docker-test:
+	$(DOCKER_BUILD) -f tests/Dockerfile.tests -t rns-page-node-tests .
+	docker run --rm rns-page-node-tests
+
 help:
 	@echo "Makefile commands:"
 	@echo " all - alias for build"
@@ -75,4 +85,6 @@ help:
 	@echo " docker-build - build runtime Docker image"
 	@echo " docker-run - run runtime Docker image"
 	@echo " docker-build-rootless - build rootless runtime Docker image"
 	@echo " docker-run-rootless - run rootless runtime Docker image"
+	@echo " test - run local integration tests"
+	@echo " docker-test - build and run integration tests in Docker"
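`DOCKER_BUILD` resolves to `docker buildx build` when the buildx plugin is present and falls back to plain `docker build` otherwise, and `$(BUILD_ARGS)` expands to nothing unless set on the command line, so extra flags can be threaded through to the image build; an illustrative invocation:

```bash
# Build the runtime image against a different Python base image
make docker-build BUILD_ARGS="--build-arg PYTHON_VERSION=3.12"
```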
README.md (75 lines changed)

@@ -5,11 +5,7 @@ A simple way to serve pages and files over the [Reticulum network](https://retic
 ## Usage

 ```bash
-pip install git+https://github.com/Sudo-Ivan/rns-page-node.git
-
-# or
-
-pipx install git+https://github.com/Sudo-Ivan/rns-page-node.git
+pip install rns-page-node
 ```

 ```bash
@@ -38,9 +34,65 @@ podman run -it --rm -v ./pages:/app/pages -v ./files:/app/files -v ./node-config

 Mounting volumes is optional; you can also copy pages and files into the container with `podman cp` or `docker cp`.

-## Page formats
+## Build

-- Micron `.mu`
+```bash
+make build
+```
+
+Build wheels:
+
+```bash
+make wheel
+```
+
+### Build Wheels in Docker
+
+```bash
+make docker-wheels
+```
+
+## Pages
+
+Supports Micron `.mu` pages, including dynamic pages marked with `#!` in the micron files.
+
+## Statistics Tracking
+
+The node now includes comprehensive statistics tracking for monitoring peer connections and page/file requests:
+
+### Command Line Options for Stats
+
+```bash
+# Print stats every 60 seconds
+rns-page-node --stats-interval 60
+
+# Save stats to JSON file on shutdown
+rns-page-node --save-stats node_stats.json
+
+# Actively write stats to file (live updates)
+rns-page-node --stats-file stats.json
+
+# Combined: live stats file + periodic display + final save
+rns-page-node --stats-file stats.json --stats-interval 300 --save-stats final_stats.json
+```
+
+### Docker Stats Usage
+
+```bash
+# With periodic stats display
+docker run -it --rm -v ./pages:/app/pages -v ./files:/app/files -v ./node-config:/app/node-config -v ./config:/app/config ghcr.io/sudo-ivan/rns-page-node:latest --stats-interval 60
+
+# Save stats to mounted volume
+docker run -it --rm -v ./pages:/app/pages -v ./files:/app/files -v ./node-config:/app/node-config -v ./config:/app/config -v ./stats:/app/stats ghcr.io/sudo-ivan/rns-page-node:latest --save-stats /app/stats/node_stats.json
+```
+
+### Tracked Metrics
+
+- **Connection Statistics**: Total connections, active connections, peer tracking
+- **Request Statistics**: Page requests, file requests, requests by path and peer
+- **Performance Metrics**: Requests per hour, uptime, response patterns
+- **Historical Data**: Recent request history, hourly/daily aggregations
+- **Top Content**: Most requested pages and files, most active peers
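A note on consuming these metrics: per `_init_stats_file` in `rns_page_node/main.py` below, the live `--stats-file` JSON has top-level `node_info`, `connections`, `requests`, and `summary` keys, so it can be inspected with standard tooling; a quick sketch assuming `jq` is installed:

```bash
# Running totals maintained by the node
jq '.summary' stats.json

# Page requests per path, from the event log
jq -r '.requests[] | select(.type == "page") | .path' stats.json | sort | uniq -c
```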
 ## Options

@@ -51,12 +103,13 @@ Mounting volumes are optional, you can also copy pages and files to the containe
 -f, --files-dir: The directory to serve files from.
 -i, --identity-dir: The directory to persist the node's identity.
 -a, --announce-interval: The interval to announce the node's presence.
 --page-refresh-interval: The interval to refresh pages (seconds, 0 disables).
 --file-refresh-interval: The interval to refresh files (seconds, 0 disables).
 -l, --log-level: The logging level.
+--stats-interval: Print stats every N seconds (0 disables).
+--save-stats: Save stats to JSON file on shutdown.
 ```

 ## To-Do

 - [ ] Pypi

 ## License

 This project incorporates portions of the [NomadNet](https://github.com/markqvist/NomadNet) codebase, which is licensed under the GNU General Public License v3.0 (GPL-3.0). As a derivative work, this project is also distributed under the terms of the GPL-3.0. See the [LICENSE](LICENSE) file for full license.
poetry.lock (generated, 1354 lines changed): file diff suppressed because it is too large.
pyproject.toml (changed)

@@ -1,15 +1,15 @@
 [project]
 name = "rns-page-node"
-version = "0.1.0"
-license = {file = "LICENSE"}
+version = "0.2.0"
+license = "GPL-3.0-only"
 description = "A simple way to serve pages and files over the Reticulum network."
 authors = [
     {name = "Sudo-Ivan"}
 ]
 readme = "README.md"
-requires-python = ">=3.9"
+requires-python = ">=3.10"
 dependencies = [
-    "rns (>=0.9.6,<0.10.0)"
+    "rns (>=1.0.0,<1.5.0)"
 ]

 [project.scripts]
@@ -20,5 +20,6 @@ requires = ["poetry-core>=2.0.0,<3.0.0"]
 build-backend = "poetry.core.masonry.api"

 [tool.poetry.group.dev.dependencies]
-ruff = "^0.11.11"
+ruff = "^0.12.3"
+safety = "^3.6.0"
requirements.txt (1 line changed)

@@ -1 +1 @@
-rns==0.9.6
+rns==1.0.0
rns_page_node/main.py (changed)

@@ -10,6 +10,12 @@ import threading
 import subprocess
 import RNS
 import argparse
 import logging
+import json
+from collections import defaultdict, deque
+from datetime import datetime
+
+logger = logging.getLogger(__name__)

 DEFAULT_INDEX = '''>Default Home Page
@@ -22,11 +28,16 @@ You are not authorised to carry out the request.
 '''

 class PageNode:
-    def __init__(self, identity, pagespath, filespath, announce_interval=360, name=None, page_refresh_interval=0, file_refresh_interval=0):
+    def __init__(self, identity, pagespath, filespath, announce_interval=360, name=None, page_refresh_interval=0, file_refresh_interval=0, stats_file=None):
+        self._stop_event = threading.Event()
+        self._lock = threading.Lock()
+        self._stats_lock = threading.Lock()
+        self.logger = logging.getLogger(f"{__name__}.PageNode")
         self.identity = identity
         self.name = name
         self.pagespath = pagespath
         self.filespath = filespath
+        self.stats_file = stats_file
         self.destination = RNS.Destination(
             identity,
             RNS.Destination.IN,
@@ -41,17 +52,23 @@ class PageNode:
         self.last_page_refresh = time.time()
         self.last_file_refresh = time.time()

+        # Initialize stats tracking
+        self._init_stats()
+
         self.register_pages()
         self.register_files()

         self.destination.set_link_established_callback(self.on_connect)

-        threading.Thread(target=self._announce_loop, daemon=True).start()
-        threading.Thread(target=self._refresh_loop, daemon=True).start()
+        self._announce_thread = threading.Thread(target=self._announce_loop, daemon=True)
+        self._announce_thread.start()
+        self._refresh_thread = threading.Thread(target=self._refresh_loop, daemon=True)
+        self._refresh_thread.start()

     def register_pages(self):
-        self.servedpages = []
-        self._scan_pages(self.pagespath)
+        with self._lock:
+            self.servedpages = []
+            self._scan_pages(self.pagespath)

         if not os.path.isfile(os.path.join(self.pagespath, "index.mu")):
             self.destination.register_request_handler(
@@ -70,8 +87,9 @@ class PageNode:
             )

     def register_files(self):
-        self.servedfiles = []
-        self._scan_files(self.filespath)
+        with self._lock:
+            self.servedfiles = []
+            self._scan_files(self.filespath)

         for full_path in self.servedfiles:
             rel = full_path[len(self.filespath):]
@@ -103,10 +121,163 @@ class PageNode:
         elif os.path.isfile(path):
             self.servedfiles.append(path)

+    def _init_stats(self):
+        """Initialize statistics tracking"""
+        self.stats = {
+            'start_time': time.time(),
+            'total_connections': 0,
+            'active_connections': 0,
+            'total_page_requests': 0,
+            'total_file_requests': 0,
+            'page_requests_by_path': defaultdict(int),
+            'file_requests_by_path': defaultdict(int),
+            'requests_by_peer': defaultdict(int),
+            'recent_requests': deque(maxlen=100),  # Keep last 100 requests
+            'connected_peers': {},  # link_id -> peer_info
+            'hourly_stats': defaultdict(lambda: {'pages': 0, 'files': 0}),
+            'daily_stats': defaultdict(lambda: {'pages': 0, 'files': 0}),
+        }
+
+        # Initialize stats file if specified
+        if self.stats_file:
+            self._init_stats_file()
+
+    def _init_stats_file(self):
+        """Initialize the stats file with basic structure"""
+        try:
+            # Ensure directory exists
+            dir_path = os.path.dirname(os.path.abspath(self.stats_file))
+            if dir_path:
+                os.makedirs(dir_path, exist_ok=True)
+
+            # Create initial stats file
+            initial_stats = {
+                'node_info': {
+                    'name': self.name or 'Unnamed',
+                    'hash': RNS.hexrep(self.destination.hash, delimit=False),
+                    'start_time': datetime.fromtimestamp(self.stats['start_time']).isoformat()
+                },
+                'connections': [],
+                'requests': [],
+                'summary': {
+                    'total_connections': 0,
+                    'total_page_requests': 0,
+                    'total_file_requests': 0,
+                    'last_updated': datetime.now().isoformat()
+                }
+            }
+
+            with open(self.stats_file, 'w') as f:
+                json.dump(initial_stats, f, indent=2)
+
+            self.logger.info(f"Initialized stats file: {self.stats_file}")
+        except Exception as e:
+            self.logger.error(f"Failed to initialize stats file {self.stats_file}: {e}")
+
+    def _write_stats_event(self, event_type, event_data):
+        """Write a single stats event to the file"""
+        if not self.stats_file:
+            return
+
+        try:
+            # Read current stats
+            try:
+                with open(self.stats_file, 'r') as f:
+                    stats_data = json.load(f)
+            except (FileNotFoundError, json.JSONDecodeError):
+                # If file doesn't exist or is corrupted, reinitialize
+                self._init_stats_file()
+                with open(self.stats_file, 'r') as f:
+                    stats_data = json.load(f)
+
+            # Add the new event
+            if event_type == 'connection':
+                stats_data['connections'].append(event_data)
+                stats_data['summary']['total_connections'] += 1
+            elif event_type == 'request':
+                stats_data['requests'].append(event_data)
+                if event_data['type'] == 'page':
+                    stats_data['summary']['total_page_requests'] += 1
+                elif event_data['type'] == 'file':
+                    stats_data['summary']['total_file_requests'] += 1
+
+            # Update last_updated timestamp
+            stats_data['summary']['last_updated'] = datetime.now().isoformat()
+
+            # Keep only the last 1000 events to prevent the file from growing too large
+            if len(stats_data['connections']) > 1000:
+                stats_data['connections'] = stats_data['connections'][-1000:]
+            if len(stats_data['requests']) > 1000:
+                stats_data['requests'] = stats_data['requests'][-1000:]
+
+            # Write back to file
+            with open(self.stats_file, 'w') as f:
+                json.dump(stats_data, f, indent=2, default=str)
+
+        except Exception as e:
+            self.logger.error(f"Failed to write stats event to {self.stats_file}: {e}")
+
+    def _record_request(self, request_type, path, remote_identity, requested_at):
+        """Record a request in statistics"""
+        with self._stats_lock:
+            # Get peer identity hash with better fallback
+            if remote_identity:
+                peer_hash = RNS.hexrep(remote_identity.hash, delimit=False)
+                # Try to get app_data name if available
+                try:
+                    app_data = RNS.Identity.recall_app_data(remote_identity.hash)
+                    if app_data:
+                        peer_display = app_data.decode('utf-8', errors='ignore')[:32]  # Limit length
+                    else:
+                        peer_display = peer_hash[:16] + "..."  # Show first 16 chars
+                except Exception:
+                    peer_display = peer_hash[:16] + "..."
+            else:
+                peer_hash = "anonymous"
+                peer_display = "anonymous"
+
+            # Record basic stats
+            if request_type == 'page':
+                self.stats['total_page_requests'] += 1
+                self.stats['page_requests_by_path'][path] += 1
+            elif request_type == 'file':
+                self.stats['total_file_requests'] += 1
+                self.stats['file_requests_by_path'][path] += 1
+
+            self.stats['requests_by_peer'][peer_hash] += 1
+
+            # Record recent request
+            request_info = {
+                'type': request_type,
+                'path': path,
+                'peer': peer_display,
+                'peer_hash': peer_hash,
+                'timestamp': requested_at,
+                'datetime': datetime.fromtimestamp(requested_at).isoformat()
+            }
+            self.stats['recent_requests'].append(request_info)
+
+            # Record hourly and daily stats
+            dt = datetime.fromtimestamp(requested_at)
+            hour_key = dt.strftime('%Y-%m-%d %H:00')
+            day_key = dt.strftime('%Y-%m-%d')
+
+            if request_type == 'page':
+                self.stats['hourly_stats'][hour_key]['pages'] += 1
+                self.stats['daily_stats'][day_key]['pages'] += 1
+            elif request_type == 'file':
+                self.stats['hourly_stats'][hour_key]['files'] += 1
+                self.stats['daily_stats'][day_key]['files'] += 1
+
+            # Write to stats file immediately
+            self._write_stats_event('request', request_info)
+
     def serve_default_index(self, path, data, request_id, link_id, remote_identity, requested_at):
+        self._record_request('page', path, remote_identity, requested_at)
         return DEFAULT_INDEX.encode('utf-8')

     def serve_page(self, path, data, request_id, link_id, remote_identity, requested_at):
+        self._record_request('page', path, remote_identity, requested_at)
         file_path = path.replace("/page", self.pagespath, 1)
         try:
             with open(file_path, 'rb') as _f:
@@ -115,7 +286,7 @@ class PageNode:
         except Exception:
             is_script = False
         if is_script and os.access(file_path, os.X_OK):
-            # Note: You can remove the following try-except block and just serve the page content statically
+            # Note: You can remove the following try-except block if you just serve static pages.
             try:
                 result = subprocess.run([file_path], stdout=subprocess.PIPE)
                 return result.stdout
@@ -125,32 +296,246 @@ class PageNode:
             return f.read()

     def serve_file(self, path, data, request_id, link_id, remote_identity, requested_at):
+        self._record_request('file', path, remote_identity, requested_at)
         file_path = path.replace("/file", self.filespath, 1)
         return [open(file_path, 'rb'), {"name": os.path.basename(file_path).encode('utf-8')}]

     def on_connect(self, link):
-        pass
+        """Called when a new link is established"""
+        connection_time = time.time()
+        with self._stats_lock:
+            self.stats['total_connections'] += 1
+            self.stats['active_connections'] += 1
+
+            # Get peer info with better identification
+            if link.get_remote_identity():
+                peer_hash = RNS.hexrep(link.get_remote_identity().hash, delimit=False)
+                # Try to get app_data name if available
+                try:
+                    app_data = RNS.Identity.recall_app_data(link.get_remote_identity().hash)
+                    if app_data:
+                        peer_display = app_data.decode('utf-8', errors='ignore')[:32]  # Limit length
+                    else:
+                        peer_display = peer_hash[:16] + "..."  # Show first 16 chars
+                except Exception:
+                    peer_display = peer_hash[:16] + "..."
+            else:
+                peer_hash = "anonymous"
+                peer_display = "anonymous"
+
+            # Convert link_id to hex string properly
+            link_id_hex = RNS.hexrep(link.link_id, delimit=False) if hasattr(link, 'link_id') else "unknown"
+
+            self.stats['connected_peers'][link_id_hex] = {
+                'peer_hash': peer_hash,
+                'peer_display': peer_display,
+                'connected_at': connection_time,
+                'link_id': link_id_hex
+            }
+
+            # Write connection event to stats file
+            connection_info = {
+                'event': 'connected',
+                'peer': peer_display,
+                'peer_hash': peer_hash,
+                'timestamp': connection_time,
+                'datetime': datetime.fromtimestamp(connection_time).isoformat(),
+                'link_id': link_id_hex
+            }
+            self._write_stats_event('connection', connection_info)
+
+        self.logger.info(f"New connection established from peer {peer_display}")
+
+        # Set callback for when link closes
+        link.set_link_closed_callback(self._on_link_closed)
+
+    def _on_link_closed(self, link):
+        """Called when a link is closed"""
+        with self._stats_lock:
+            # Look the link up by the same hex key used in on_connect
+            link_id_hex = RNS.hexrep(link.link_id, delimit=False) if hasattr(link, 'link_id') else "unknown"
+            if link_id_hex in self.stats['connected_peers']:
+                peer_info = self.stats['connected_peers'].pop(link_id_hex)
+                self.stats['active_connections'] = max(0, self.stats['active_connections'] - 1)
+                self.logger.info(f"Connection closed from peer {peer_info['peer_hash'][:16]}...")

     def _announce_loop(self):
-        while True:
-            if time.time() - self.last_announce > self.announce_interval:
-                if self.name:
-                    self.destination.announce(app_data=self.name.encode('utf-8'))
-                else:
-                    self.destination.announce()
-                self.last_announce = time.time()
-            time.sleep(1)
+        while not self._stop_event.is_set():
+            try:
+                if time.time() - self.last_announce > self.announce_interval:
+                    if self.name:
+                        self.destination.announce(app_data=self.name.encode('utf-8'))
+                    else:
+                        self.destination.announce()
+                    self.last_announce = time.time()
+                time.sleep(1)
+            except Exception:
+                self.logger.exception("Error in announce loop")

     def _refresh_loop(self):
-        while True:
-            now = time.time()
-            if self.page_refresh_interval > 0 and now - self.last_page_refresh > self.page_refresh_interval:
-                self.register_pages()
-                self.last_page_refresh = now
-            if self.file_refresh_interval > 0 and now - self.last_file_refresh > self.file_refresh_interval:
-                self.register_files()
-                self.last_file_refresh = now
-            time.sleep(1)
+        while not self._stop_event.is_set():
+            try:
+                now = time.time()
+                if self.page_refresh_interval > 0 and now - self.last_page_refresh > self.page_refresh_interval:
+                    self.register_pages()
+                    self.last_page_refresh = now
+                if self.file_refresh_interval > 0 and now - self.last_file_refresh > self.file_refresh_interval:
+                    self.register_files()
+                    self.last_file_refresh = now
+                time.sleep(1)
+            except Exception:
+                self.logger.exception("Error in refresh loop")
+
+    def get_stats(self):
+        """Get current statistics"""
+        with self._stats_lock:
+            # Calculate uptime
+            uptime = time.time() - self.stats['start_time']
+
+            # Get top requested pages and files
+            top_pages = sorted(self.stats['page_requests_by_path'].items(), key=lambda x: x[1], reverse=True)[:10]
+            top_files = sorted(self.stats['file_requests_by_path'].items(), key=lambda x: x[1], reverse=True)[:10]
+            top_peers = sorted(self.stats['requests_by_peer'].items(), key=lambda x: x[1], reverse=True)[:10]
+
+            return {
+                'uptime_seconds': uptime,
+                'uptime_formatted': self._format_duration(uptime),
+                'start_time': datetime.fromtimestamp(self.stats['start_time']).isoformat(),
+                'total_connections': self.stats['total_connections'],
+                'active_connections': self.stats['active_connections'],
+                'total_page_requests': self.stats['total_page_requests'],
+                'total_file_requests': self.stats['total_file_requests'],
+                'total_requests': self.stats['total_page_requests'] + self.stats['total_file_requests'],
+                'top_pages': top_pages,
+                'top_files': top_files,
+                'top_peers': [(peer[:16] + "..." if len(peer) > 16 else peer, count) for peer, count in top_peers],
+                'recent_requests': list(self.stats['recent_requests'])[-10:],  # Last 10 requests
+                'connected_peers': len(self.stats['connected_peers']),
+                'requests_per_hour': self._calculate_requests_per_hour(),
+            }
+
+    def _format_duration(self, seconds):
+        """Format duration in human readable format"""
+        days = int(seconds // 86400)
+        hours = int((seconds % 86400) // 3600)
+        minutes = int((seconds % 3600) // 60)
+        secs = int(seconds % 60)
+
+        if days > 0:
+            return f"{days}d {hours}h {minutes}m {secs}s"
+        elif hours > 0:
+            return f"{hours}h {minutes}m {secs}s"
+        elif minutes > 0:
+            return f"{minutes}m {secs}s"
+        else:
+            return f"{secs}s"
+
+    def _calculate_requests_per_hour(self):
+        """Calculate average requests per hour"""
+        uptime_hours = (time.time() - self.stats['start_time']) / 3600
+        if uptime_hours < 0.1:  # Less than 6 minutes
+            return 0
+        total_requests = self.stats['total_page_requests'] + self.stats['total_file_requests']
+        return round(total_requests / uptime_hours, 2)
+
+    def print_stats(self):
+        """Print formatted statistics to console"""
+        stats = self.get_stats()
+
+        print("\n" + "="*60)
+        print("RNS PAGE NODE STATISTICS")
+        print("="*60)
+        print(f"Node Name: {self.name or 'Unnamed'}")
+        print(f"Started: {stats['start_time']}")
+        print(f"Uptime: {stats['uptime_formatted']}")
+        print(f"Node Hash: {RNS.hexrep(self.destination.hash, delimit=False)}")
+        print()
+
+        print("CONNECTION STATS:")
+        print(f"  Total Connections: {stats['total_connections']}")
+        print(f"  Active Connections: {stats['active_connections']}")
+        print()
+
+        print("REQUEST STATS:")
+        print(f"  Total Requests: {stats['total_requests']}")
+        print(f"  Page Requests: {stats['total_page_requests']}")
+        print(f"  File Requests: {stats['total_file_requests']}")
+        print(f"  Requests/Hour: {stats['requests_per_hour']}")
+        print()
+
+        if stats['top_pages']:
+            print("TOP REQUESTED PAGES:")
+            for path, count in stats['top_pages']:
+                print(f"  {count:4d} - {path}")
+            print()
+
+        if stats['top_files']:
+            print("TOP REQUESTED FILES:")
+            for path, count in stats['top_files']:
+                print(f"  {count:4d} - {path}")
+            print()
+
+        if stats['top_peers']:
+            print("TOP REQUESTING PEERS:")
+            for peer, count in stats['top_peers']:
+                print(f"  {count:4d} - {peer}")
+            print()
+
+        if stats['recent_requests']:
+            print("RECENT REQUESTS:")
+            for req in stats['recent_requests']:
+                print(f"  {req['datetime']} - {req['type'].upper()} {req['path']} from {req['peer'][:16]}...")
+
+        print("="*60)
+
+    def save_stats_to_file(self, filepath):
+        """Save statistics to JSON file"""
+        try:
+            stats = self.get_stats()
+
+            # Ensure directory exists
+            dir_path = os.path.dirname(os.path.abspath(filepath))
+            if dir_path:
+                os.makedirs(dir_path, exist_ok=True)
+
+            # Convert defaultdict and other non-serializable objects to regular dicts
+            with self._stats_lock:
+                stats_copy = dict(stats)
+                stats_copy['page_requests_by_path'] = dict(self.stats['page_requests_by_path'])
+                stats_copy['file_requests_by_path'] = dict(self.stats['file_requests_by_path'])
+                stats_copy['requests_by_peer'] = dict(self.stats['requests_by_peer'])
+                stats_copy['hourly_stats'] = {k: dict(v) for k, v in self.stats['hourly_stats'].items()}
+                stats_copy['daily_stats'] = {k: dict(v) for k, v in self.stats['daily_stats'].items()}
+                stats_copy['connected_peers'] = dict(self.stats['connected_peers'])
+                stats_copy['recent_requests'] = list(self.stats['recent_requests'])
+
+            with open(filepath, 'w') as f:
+                json.dump(stats_copy, f, indent=2, default=str)
+            self.logger.info(f"Statistics saved to {filepath}")
+            return True
+        except Exception as e:
+            self.logger.error(f"Failed to save statistics to {filepath}: {e}")
+            import traceback
+            self.logger.error(f"Traceback: {traceback.format_exc()}")
+            return False
+
+    def reset_stats(self):
+        """Reset all statistics"""
+        with self._stats_lock:
+            self._init_stats()
+        self.logger.info("Statistics reset")
+
+    def shutdown(self):
+        self.logger.info("Shutting down PageNode...")
+        self._stop_event.set()
+        try:
+            self._announce_thread.join(timeout=5)
+            self._refresh_thread.join(timeout=5)
+        except Exception:
+            self.logger.exception("Error waiting for threads to shut down")
+        try:
+            if hasattr(self.destination, 'close'):
+                self.destination.close()
+        except Exception:
+            self.logger.exception("Error closing RNS destination")


 def main():
@@ -163,6 +548,10 @@ def main():
     parser.add_argument('-i', '--identity-dir', dest='identity_dir', help='Directory to store node identity', default=os.path.join(os.getcwd(), 'node-config'))
     parser.add_argument('--page-refresh-interval', dest='page_refresh_interval', type=int, default=0, help='Page refresh interval in seconds, 0 disables auto-refresh')
     parser.add_argument('--file-refresh-interval', dest='file_refresh_interval', type=int, default=0, help='File refresh interval in seconds, 0 disables auto-refresh')
     parser.add_argument('-l', '--log-level', dest='log_level', choices=['DEBUG','INFO','WARNING','ERROR','CRITICAL'], default='INFO', help='Logging level')
+    parser.add_argument('--stats-interval', dest='stats_interval', type=int, default=0, help='Print stats every N seconds (0 disables)')
+    parser.add_argument('--save-stats', dest='save_stats', help='Save stats to JSON file on shutdown')
+    parser.add_argument('--stats-file', dest='stats_file', help='Actively write stats to JSON file (live updates)')
     args = parser.parse_args()

     configpath = args.configpath
@@ -173,6 +562,11 @@ def main():
     identity_dir = args.identity_dir
     page_refresh_interval = args.page_refresh_interval
     file_refresh_interval = args.file_refresh_interval
+    stats_interval = args.stats_interval
+    save_stats_file = args.save_stats
+    stats_file = args.stats_file
     numeric_level = getattr(logging, args.log_level.upper(), logging.INFO)
     logging.basicConfig(level=numeric_level, format='%(asctime)s %(name)s [%(levelname)s] %(message)s')

     RNS.Reticulum(configpath)
     os.makedirs(identity_dir, exist_ok=True)
@@ -186,14 +580,46 @@ def main():
     os.makedirs(pages_dir, exist_ok=True)
     os.makedirs(files_dir, exist_ok=True)

-    node = PageNode(identity, pages_dir, files_dir, announce_interval, node_name, page_refresh_interval, file_refresh_interval)
-    print("Page node running. Press Ctrl-C to exit.")
+    node = PageNode(identity, pages_dir, files_dir, announce_interval, node_name, page_refresh_interval, file_refresh_interval, stats_file)
+    logger.info("Page node running. Press Ctrl-C to exit.")
+
+    if stats_interval > 0:
+        logger.info(f"Stats will be printed every {stats_interval} seconds")
+
+    last_stats_time = 0
     try:
         while True:
+            current_time = time.time()
+
+            # Print stats if interval is set and enough time has passed
+            if stats_interval > 0 and current_time - last_stats_time >= stats_interval:
+                node.print_stats()
+                last_stats_time = current_time
+
             time.sleep(1)
     except KeyboardInterrupt:
-        print("Shutting down.")
+        logger.info("Keyboard interrupt received, shutting down...")
+
+        # Print final stats
+        node.print_stats()
+
+        # Save stats if requested
+        if save_stats_file:
+            logger.info(f"Saving final statistics to {save_stats_file}")
+            if node.save_stats_to_file(save_stats_file):
+                logger.info(f"Statistics successfully saved to {save_stats_file}")
+            else:
+                logger.error(f"Failed to save statistics to {save_stats_file}")
+
+        node.shutdown()
+    finally:
+        # Ensure stats are saved even if something goes wrong
+        if save_stats_file and 'node' in locals():
+            try:
+                node.save_stats_to_file(save_stats_file)
+                logger.info(f"Final attempt: Statistics saved to {save_stats_file}")
+            except Exception as e:
+                logger.error(f"Final save attempt failed: {e}")

 if __name__ == '__main__':
     main()
setup.py (8 lines changed)

@@ -5,7 +5,7 @@ with open('README.md', 'r', encoding='utf-8') as fh:

 setup(
     name='rns-page-node',
-    version='0.1.0',
+    version='0.2.0',
     author='Sudo-Ivan',
     author_email='',
     description='A simple way to serve pages and files over the Reticulum network.',
@@ -13,16 +13,16 @@ setup(
     long_description_content_type='text/markdown',
     url='https://github.com/Sudo-Ivan/rns-page-node',
     packages=find_packages(),
-    python_requires='>=3.9',
+    license="GPL-3.0",
+    python_requires='>=3.10',
     install_requires=[
-        'rns>=0.9.6,<0.10.0',
+        'rns>=1.0.0,<1.5.0',
     ],
     entry_points={
         'console_scripts': [
             'rns-page-node=rns_page_node.main:main',
         ],
     },
-    license='GPL-3.0',
     classifiers=[
         'Programming Language :: Python :: 3',
         'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
tests/.gitignore (vendored, new file, +4)

@@ -0,0 +1,4 @@
+pages/
+node-config/
+node.log
+config/
tests/Dockerfile.tests (new file, +14)

@@ -0,0 +1,14 @@
+FROM python:3.10-slim
+
+RUN apt-get update && apt-get install -y build-essential libssl-dev && rm -rf /var/lib/apt/lists/*
+
+WORKDIR /app
+
+COPY . /app
+RUN pip install --no-cache-dir .
+
+WORKDIR /app/tests
+
+RUN chmod +x run_tests.sh
+
+CMD ["bash", "run_tests.sh"]
tests/run_tests.sh (new file, +44)

@@ -0,0 +1,44 @@
+#!/usr/bin/env bash
+set -e
+cd "$(dirname "${BASH_SOURCE[0]}")"
+
+# Remove previous test artifacts
+rm -rf config node-config pages files node.log
+
+# Create directories for config, node identity, pages, and files
+mkdir -p config node-config pages files
+
+# Create a sample page and a test file
+cat > pages/index.mu << EOF
+>Test Page
+This is a test page.
+EOF
+
+cat > files/text.txt << EOF
+This is a test file.
+EOF
+
+# Start the page node in the background
+python3 ../rns_page_node/main.py -c config -i node-config -p pages -f files > node.log 2>&1 &
+NODE_PID=$!
+
+# Wait for node to generate its identity file
+echo "Waiting for node identity..."
+for i in {1..40}; do
+    if [ -f node-config/identity ]; then
+        echo "Identity file found"
+        break
+    fi
+    sleep 0.25
+done
+if [ ! -f node-config/identity ]; then
+    echo "Error: node identity file not found" >&2
+    kill $NODE_PID
+    exit 1
+fi
+
+# Run the client test
+python3 test_client.py
+
+# Clean up
+kill $NODE_PID
tests/test_client.py (new file, +104)

@@ -0,0 +1,104 @@
+#!/usr/bin/env python3
+import os
+import sys
+import time
+import threading
+import RNS
+
+# Determine base directory for tests
+dir_path = os.path.abspath(os.path.dirname(__file__))
+config_dir = os.path.join(dir_path, 'config')
+identity_dir = os.path.join(dir_path, 'node-config')
+
+# Initialize Reticulum with shared config
+RNS.Reticulum(config_dir)
+
+# Load server identity (created by the page node)
+identity_file = os.path.join(identity_dir, 'identity')
+server_identity = RNS.Identity.from_file(identity_file)
+
+# Create a destination to the server node
+destination = RNS.Destination(
+    server_identity,
+    RNS.Destination.OUT,
+    RNS.Destination.SINGLE,
+    'nomadnetwork',
+    'node'
+)
+
+# Ensure we know a path to the destination
+if not RNS.Transport.has_path(destination.hash):
+    RNS.Transport.request_path(destination.hash)
+    while not RNS.Transport.has_path(destination.hash):
+        time.sleep(0.1)
+
+# Establish a link to the server
+global_link = RNS.Link(destination)
+
+# Containers for responses
+responses = {}
+done_event = threading.Event()
+
+# Callback for page response
+def on_page(response):
+    data = response.response
+    if isinstance(data, bytes):
+        text = data.decode('utf-8')
+    else:
+        text = str(data)
+    print('Received page:')
+    print(text)
+    responses['page'] = text
+    if 'file' in responses:
+        done_event.set()
+
+# Callback for file response
+def on_file(response):
+    data = response.response
+    # Handle response as [fileobj, headers]
+    if isinstance(data, list) and len(data) == 2 and hasattr(data[0], 'read'):
+        fileobj, headers = data
+        file_data = fileobj.read()
+        filename = headers.get(b'name', b'').decode('utf-8')
+        print(f'Received file ({filename}):')
+        print(file_data.decode('utf-8'))
+        responses['file'] = file_data.decode('utf-8')
+    # Handle response as a raw file object
+    elif hasattr(data, 'read'):
+        file_data = data.read()
+        filename = os.path.basename('text.txt')
+        print(f'Received file ({filename}):')
+        print(file_data.decode('utf-8'))
+        responses['file'] = file_data.decode('utf-8')
+    # Handle response as raw bytes
+    elif isinstance(data, bytes):
+        text = data.decode('utf-8')
+        print('Received file:')
+        print(text)
+        responses['file'] = text
+    else:
+        print('Received file (unhandled format):', data)
+        responses['file'] = str(data)
+    if 'page' in responses:
+        done_event.set()
+
+# Request the page and file once the link is established
+def on_link_established(link):
+    link.request('/page/index.mu', None, response_callback=on_page)
+    link.request('/file/text.txt', None, response_callback=on_file)
+
+# Register callbacks
+global_link.set_link_established_callback(on_link_established)
+global_link.set_link_closed_callback(lambda l: done_event.set())
+
+# Wait for responses or timeout
+if not done_event.wait(timeout=30):
+    print('Test timed out.', file=sys.stderr)
+    sys.exit(1)
+
+if responses.get('page') and responses.get('file'):
+    print('Tests passed!')
+    sys.exit(0)
+else:
+    print('Tests failed.', file=sys.stderr)
+    sys.exit(1)
tests/test_client2.py (new file, +59)

@@ -0,0 +1,59 @@
+#!/usr/bin/env python3
+import os
+import sys
+import time
+import threading
+import RNS
+
+dir_path = os.path.abspath(os.path.dirname(__file__))
+config_dir = os.path.join(dir_path, 'config')
+
+RNS.Reticulum(config_dir)
+
+DESTINATION_HEX = '49b2d959db8528347d0a38083aec1042'  # Ivan's node that runs rns-page-node
+
+dest_len = (RNS.Reticulum.TRUNCATED_HASHLENGTH // 8) * 2
+if len(DESTINATION_HEX) != dest_len:
+    print(f"Invalid destination length (got {len(DESTINATION_HEX)}, expected {dest_len})", file=sys.stderr)
+    sys.exit(1)
+destination_hash = bytes.fromhex(DESTINATION_HEX)
+
+if not RNS.Transport.has_path(destination_hash):
+    print("Requesting path to server...")
+    RNS.Transport.request_path(destination_hash)
+    while not RNS.Transport.has_path(destination_hash):
+        time.sleep(0.1)
+
+server_identity = RNS.Identity.recall(destination_hash)
+print(f"Recalled server identity for {DESTINATION_HEX}")
+
+destination = RNS.Destination(
+    server_identity,
+    RNS.Destination.OUT,
+    RNS.Destination.SINGLE,
+    'nomadnetwork',
+    'node'
+)
+link = RNS.Link(destination)
+
+done_event = threading.Event()
+
+def on_page(response):
+    data = response.response
+    if isinstance(data, bytes):
+        text = data.decode('utf-8')
+    else:
+        text = str(data)
+    print('Fetched page content:')
+    print(text)
+    done_event.set()
+
+link.set_link_established_callback(lambda l: l.request('/page/index.mu', None, response_callback=on_page))
+link.set_link_closed_callback(lambda l: done_event.set())
+
+if not done_event.wait(timeout=30):
+    print('Timed out waiting for page', file=sys.stderr)
+    sys.exit(1)
+
+print('Done fetching page.')
+sys.exit(0)