feat(version): update version to 3.0.0 and enhance cx_setup to include 'bin' directory if it exists

2026-01-01 18:01:07 -06:00
parent a526e88385
commit 147e945133
27 changed files with 622 additions and 328 deletions
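
The cx_setup change named in the commit title does not appear among the hunks below. As a sketch of what "include 'bin' directory if it exists" typically looks like with cx_Freeze (the file name cx_setup.py, option layout, and entry script are assumptions, not taken from this commit):

# Hypothetical sketch only; the real cx_setup diff is not shown on this page.
import os

from cx_Freeze import Executable, setup

include_files = []
if os.path.isdir("bin"):
    # Bundle bin/ (e.g. espeak-ng, ffmpeg) next to the frozen executable,
    # matching the lookup order _find_bundled_binary uses at runtime.
    include_files.append(("bin", "bin"))  # (source, destination)

setup(
    name="reticulum-meshchat",
    version="3.0.0",
    options={"build_exe": {"include_files": include_files}},
    executables=[Executable("meshchat.py")],
)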

View File

@@ -8,7 +8,12 @@ class ArchiverManager:
self.db = db
def archive_page(
self, destination_hash, page_path, content, max_versions=5, max_storage_gb=1
self,
destination_hash,
page_path,
content,
max_versions=5,
max_storage_gb=1,
):
content_hash = hashlib.sha256(content.encode("utf-8")).hexdigest()
@@ -30,23 +35,25 @@ class ArchiverManager:
to_delete = versions[max_versions:]
for version in to_delete:
self.db.provider.execute(
"DELETE FROM archived_pages WHERE id = ?", (version["id"],)
"DELETE FROM archived_pages WHERE id = ?",
(version["id"],),
)
# Enforce total storage limit (approximate)
total_size_row = self.db.provider.fetchone(
"SELECT SUM(LENGTH(content)) as total_size FROM archived_pages"
"SELECT SUM(LENGTH(content)) as total_size FROM archived_pages",
)
total_size = total_size_row["total_size"] or 0
max_bytes = max_storage_gb * 1024 * 1024 * 1024
while total_size > max_bytes:
oldest = self.db.provider.fetchone(
"SELECT id, LENGTH(content) as size FROM archived_pages ORDER BY created_at ASC LIMIT 1"
"SELECT id, LENGTH(content) as size FROM archived_pages ORDER BY created_at ASC LIMIT 1",
)
if oldest:
self.db.provider.execute(
"DELETE FROM archived_pages WHERE id = ?", (oldest["id"],)
"DELETE FROM archived_pages WHERE id = ?",
(oldest["id"],),
)
total_size -= oldest["size"]
else:
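
A note on the "(approximate)" comment in this hunk: SQLite's LENGTH() counts characters on TEXT values, not bytes, so multi-byte UTF-8 content is under-counted, and the loop decrements a Python-side running total instead of re-querying after each delete. If exact byte accounting were needed, one option (an assumption, not what this code does) is measuring the BLOB representation, since LENGTH() on a BLOB returns bytes:

total_size_row = self.db.provider.fetchone(
    "SELECT SUM(LENGTH(CAST(content AS BLOB))) as total_size FROM archived_pages",
)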

View File

@@ -6,10 +6,14 @@ class ConfigManager:
self.database_version = self.IntConfig(self, "database_version", None)
self.display_name = self.StringConfig(self, "display_name", "Anonymous Peer")
self.auto_announce_enabled = self.BoolConfig(
self, "auto_announce_enabled", False
self,
"auto_announce_enabled",
False,
)
self.auto_announce_interval_seconds = self.IntConfig(
self, "auto_announce_interval_seconds", 0
self,
"auto_announce_interval_seconds",
0,
)
self.last_announced_at = self.IntConfig(self, "last_announced_at", None)
self.theme = self.StringConfig(self, "theme", "light")
@@ -83,18 +87,26 @@ class ConfigManager:
16,
) # for propagation node messages
self.page_archiver_enabled = self.BoolConfig(
self, "page_archiver_enabled", True
self,
"page_archiver_enabled",
True,
)
self.page_archiver_max_versions = self.IntConfig(
self, "page_archiver_max_versions", 5
self,
"page_archiver_max_versions",
5,
)
self.archives_max_storage_gb = self.IntConfig(
self, "archives_max_storage_gb", 1
self,
"archives_max_storage_gb",
1,
)
self.crawler_enabled = self.BoolConfig(self, "crawler_enabled", False)
self.crawler_max_retries = self.IntConfig(self, "crawler_max_retries", 3)
self.crawler_retry_delay_seconds = self.IntConfig(
self, "crawler_retry_delay_seconds", 3600
self,
"crawler_retry_delay_seconds",
3600,
)
self.crawler_max_concurrent = self.IntConfig(self, "crawler_max_concurrent", 1)
self.auth_enabled = self.BoolConfig(self, "auth_enabled", False)
@@ -124,7 +136,9 @@ class ConfigManager:
self.map_offline_path = self.StringConfig(self, "map_offline_path", None)
self.map_mbtiles_dir = self.StringConfig(self, "map_mbtiles_dir", None)
self.map_tile_cache_enabled = self.BoolConfig(
self, "map_tile_cache_enabled", True
self,
"map_tile_cache_enabled",
True,
)
self.map_default_lat = self.StringConfig(self, "map_default_lat", "0.0")
self.map_default_lon = self.StringConfig(self, "map_default_lon", "0.0")
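
The BoolConfig/IntConfig/StringConfig wrappers instantiated throughout this file bind a key and a default to the manager; their internals are not part of this diff. A plausible minimal shape, purely illustrative, assuming a get/set pair on the config DAO:

class IntConfig:
    # Illustrative sketch; the real nested class is not shown in this commit.
    def __init__(self, manager, key, default):
        self.manager = manager
        self.key = key
        self.default = default

    def get(self):
        raw = self.manager.db.config.get(self.key)  # assumed DAO accessor
        return int(raw) if raw is not None else self.default

    def set(self, value):
        self.manager.db.config.set(self.key, value)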

View File

@@ -5,8 +5,8 @@ from .messages import MessageDAO
from .misc import MiscDAO
from .provider import DatabaseProvider
from .schema import DatabaseSchema
from .telephone import TelephoneDAO
from .telemetry import TelemetryDAO
from .telephone import TelephoneDAO
from .voicemails import VoicemailDAO
@@ -27,7 +27,9 @@ class Database:
def migrate_from_legacy(self, reticulum_config_dir, identity_hash_hex):
migrator = LegacyMigrator(
self.provider, reticulum_config_dir, identity_hash_hex
self.provider,
reticulum_config_dir,
identity_hash_hex,
)
if migrator.should_migrate():
return migrator.migrate()

View File

@@ -26,32 +26,40 @@ class AnnounceDAO:
columns = ", ".join(fields)
placeholders = ", ".join(["?"] * len(fields))
update_set = ", ".join(
[f"{f} = EXCLUDED.{f}" for f in fields if f != "destination_hash"]
[f"{f} = EXCLUDED.{f}" for f in fields if f != "destination_hash"],
)
query = (
f"INSERT INTO announces ({columns}, updated_at) VALUES ({placeholders}, ?) "
f"INSERT INTO announces ({columns}, created_at, updated_at) VALUES ({placeholders}, ?, ?) "
f"ON CONFLICT(destination_hash) DO UPDATE SET {update_set}, updated_at = EXCLUDED.updated_at"
) # noqa: S608
)
params = [data.get(f) for f in fields]
params.append(datetime.now(UTC))
now = datetime.now(UTC)
params.append(now)
params.append(now)
self.provider.execute(query, params)
def get_announces(self, aspect=None):
if aspect:
return self.provider.fetchall(
"SELECT * FROM announces WHERE aspect = ?", (aspect,)
"SELECT * FROM announces WHERE aspect = ?",
(aspect,),
)
return self.provider.fetchall("SELECT * FROM announces")
def get_announce_by_hash(self, destination_hash):
return self.provider.fetchone(
"SELECT * FROM announces WHERE destination_hash = ?", (destination_hash,)
"SELECT * FROM announces WHERE destination_hash = ?",
(destination_hash,),
)
def get_filtered_announces(
self, aspect=None, search_term=None, limit=None, offset=0
self,
aspect=None,
search_term=None,
limit=None,
offset=0,
):
query = "SELECT * FROM announces WHERE 1=1"
params = []
@@ -76,11 +84,11 @@ class AnnounceDAO:
now = datetime.now(UTC)
self.provider.execute(
"""
INSERT INTO custom_destination_display_names (destination_hash, display_name, updated_at)
VALUES (?, ?, ?)
INSERT INTO custom_destination_display_names (destination_hash, display_name, created_at, updated_at)
VALUES (?, ?, ?, ?)
ON CONFLICT(destination_hash) DO UPDATE SET display_name = EXCLUDED.display_name, updated_at = EXCLUDED.updated_at
""",
(destination_hash, display_name, now),
(destination_hash, display_name, now, now),
)
def get_custom_display_name(self, destination_hash):
@@ -101,17 +109,18 @@ class AnnounceDAO:
now = datetime.now(UTC)
self.provider.execute(
"""
INSERT INTO favourite_destinations (destination_hash, display_name, aspect, updated_at)
VALUES (?, ?, ?, ?)
INSERT INTO favourite_destinations (destination_hash, display_name, aspect, created_at, updated_at)
VALUES (?, ?, ?, ?, ?)
ON CONFLICT(destination_hash) DO UPDATE SET display_name = EXCLUDED.display_name, aspect = EXCLUDED.aspect, updated_at = EXCLUDED.updated_at
""",
(destination_hash, display_name, aspect, now),
(destination_hash, display_name, aspect, now, now),
)
def get_favourites(self, aspect=None):
if aspect:
return self.provider.fetchall(
"SELECT * FROM favourite_destinations WHERE aspect = ?", (aspect,)
"SELECT * FROM favourite_destinations WHERE aspect = ?",
(aspect,),
)
return self.provider.fetchall("SELECT * FROM favourite_destinations")
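
A pattern worth calling out across these DAO hunks: created_at is supplied on the INSERT side but deliberately omitted from the DO UPDATE SET list, so the first-write timestamp survives later upserts while updated_at always tracks the latest write through EXCLUDED. A self-contained sketch of that behavior (table and columns invented for illustration):

import sqlite3
from datetime import UTC, datetime

conn = sqlite3.connect(":memory:")
conn.execute(
    "CREATE TABLE peers (destination_hash TEXT PRIMARY KEY,"
    " display_name TEXT, created_at TEXT, updated_at TEXT)"
)

def upsert_peer(dest, name):
    now = datetime.now(UTC).isoformat()
    conn.execute(
        """
        INSERT INTO peers (destination_hash, display_name, created_at, updated_at)
        VALUES (?, ?, ?, ?)
        ON CONFLICT(destination_hash) DO UPDATE SET
            display_name = EXCLUDED.display_name,
            updated_at = EXCLUDED.updated_at
        """,
        (dest, name, now, now),  # created_at is ignored when the row exists
    )

upsert_peer("abc123", "first")
upsert_peer("abc123", "second")
# display_name is now "second"; created_at still holds the first timestamp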

View File

@@ -17,9 +17,16 @@ class ConfigDAO:
if value is None:
self.provider.execute("DELETE FROM config WHERE key = ?", (key,))
else:
now = datetime.now(UTC)
self.provider.execute(
"INSERT OR REPLACE INTO config (key, value, updated_at) VALUES (?, ?, ?)",
(key, str(value), datetime.now(UTC)),
"""
INSERT INTO config (key, value, created_at, updated_at)
VALUES (?, ?, ?, ?)
ON CONFLICT(key) DO UPDATE SET
value = EXCLUDED.value,
updated_at = EXCLUDED.updated_at
""",
(key, str(value), now, now),
)
def delete(self, key):
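
The move away from INSERT OR REPLACE here matters beyond style: OR REPLACE resolves a conflict by deleting the existing row and inserting a fresh one, which resets every column not named in the statement (including the new created_at) and can fire delete triggers, while ON CONFLICT ... DO UPDATE edits the row in place. A quick demonstration:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE config (key TEXT PRIMARY KEY, value TEXT, created_at TEXT)")
conn.execute("INSERT INTO config VALUES ('theme', 'light', '2026-01-01')")

# OR REPLACE deletes the old row first, silently resetting created_at to NULL
conn.execute("INSERT OR REPLACE INTO config (key, value) VALUES ('theme', 'dark')")
print(conn.execute("SELECT created_at FROM config WHERE key = 'theme'").fetchone())
# -> (None,)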

View File

@@ -21,7 +21,10 @@ class LegacyMigrator:
# Check each directory
for config_dir in possible_dirs:
legacy_path = os.path.join(
config_dir, "identities", self.identity_hash_hex, "database.db"
config_dir,
"identities",
self.identity_hash_hex,
"database.db",
)
if os.path.exists(legacy_path):
# Ensure it's not the same as our current DB path
@@ -103,13 +106,13 @@ class LegacyMigrator:
legacy_columns = [
row["name"]
for row in self.provider.fetchall(
f"PRAGMA {alias}.table_info({table})"
f"PRAGMA {alias}.table_info({table})",
)
]
current_columns = [
row["name"]
for row in self.provider.fetchall(
f"PRAGMA table_info({table})"
f"PRAGMA table_info({table})",
)
]
@@ -125,11 +128,11 @@ class LegacyMigrator:
migrate_query = f"INSERT OR IGNORE INTO {table} ({cols_str}) SELECT {cols_str} FROM {alias}.{table}" # noqa: S608
self.provider.execute(migrate_query)
print(
f" - Migrated table: {table} ({len(common_columns)} columns)"
f" - Migrated table: {table} ({len(common_columns)} columns)",
)
else:
print(
f" - Skipping table {table}: No common columns found"
f" - Skipping table {table}: No common columns found",
)
except Exception as e:
print(f" - Failed to migrate table {table}: {e}")

View File

@@ -39,9 +39,9 @@ class MessageDAO:
update_set = ", ".join([f"{f} = EXCLUDED.{f}" for f in fields if f != "hash"])
query = (
f"INSERT INTO lxmf_messages ({columns}, updated_at) VALUES ({placeholders}, ?) "
f"INSERT INTO lxmf_messages ({columns}, created_at, updated_at) VALUES ({placeholders}, ?, ?) "
f"ON CONFLICT(hash) DO UPDATE SET {update_set}, updated_at = EXCLUDED.updated_at"
) # noqa: S608
)
params = []
for f in fields:
@@ -49,18 +49,23 @@ class MessageDAO:
if f == "fields" and isinstance(val, dict):
val = json.dumps(val)
params.append(val)
params.append(datetime.now(UTC).isoformat())
now = datetime.now(UTC).isoformat()
params.append(now)
params.append(now)
self.provider.execute(query, params)
def get_lxmf_message_by_hash(self, message_hash):
return self.provider.fetchone(
"SELECT * FROM lxmf_messages WHERE hash = ?", (message_hash,)
"SELECT * FROM lxmf_messages WHERE hash = ?",
(message_hash,),
)
def delete_lxmf_message_by_hash(self, message_hash):
self.provider.execute(
"DELETE FROM lxmf_messages WHERE hash = ?", (message_hash,)
"DELETE FROM lxmf_messages WHERE hash = ?",
(message_hash,),
)
def get_conversation_messages(self, destination_hash, limit=100, offset=0):
@@ -88,8 +93,14 @@ class MessageDAO:
def mark_conversation_as_read(self, destination_hash):
now = datetime.now(UTC).isoformat()
self.provider.execute(
"INSERT OR REPLACE INTO lxmf_conversation_read_state (destination_hash, last_read_at, updated_at) VALUES (?, ?, ?)",
(destination_hash, now, now),
"""
INSERT INTO lxmf_conversation_read_state (destination_hash, last_read_at, created_at, updated_at)
VALUES (?, ?, ?, ?)
ON CONFLICT(destination_hash) DO UPDATE SET
last_read_at = EXCLUDED.last_read_at,
updated_at = EXCLUDED.updated_at
""",
(destination_hash, now, now, now),
)
def is_conversation_unread(self, destination_hash):
@@ -142,7 +153,10 @@ class MessageDAO:
# Forwarding Mappings
def get_forwarding_mapping(
self, alias_hash=None, original_sender_hash=None, final_recipient_hash=None
self,
alias_hash=None,
original_sender_hash=None,
final_recipient_hash=None,
):
if alias_hash:
return self.provider.fetchone(
@@ -181,16 +195,28 @@ class MessageDAO:
def mark_notification_as_viewed(self, destination_hash):
now = datetime.now(UTC).isoformat()
self.provider.execute(
"INSERT OR REPLACE INTO notification_viewed_state (destination_hash, last_viewed_at, updated_at) VALUES (?, ?, ?)",
(destination_hash, now, now),
"""
INSERT INTO notification_viewed_state (destination_hash, last_viewed_at, created_at, updated_at)
VALUES (?, ?, ?, ?)
ON CONFLICT(destination_hash) DO UPDATE SET
last_viewed_at = EXCLUDED.last_viewed_at,
updated_at = EXCLUDED.updated_at
""",
(destination_hash, now, now, now),
)
def mark_all_notifications_as_viewed(self, destination_hashes):
now = datetime.now(UTC).isoformat()
for destination_hash in destination_hashes:
self.provider.execute(
"INSERT OR REPLACE INTO notification_viewed_state (destination_hash, last_viewed_at, updated_at) VALUES (?, ?, ?)",
(destination_hash, now, now),
"""
INSERT INTO notification_viewed_state (destination_hash, last_viewed_at, created_at, updated_at)
VALUES (?, ?, ?, ?)
ON CONFLICT(destination_hash) DO UPDATE SET
last_viewed_at = EXCLUDED.last_viewed_at,
updated_at = EXCLUDED.updated_at
""",
(destination_hash, now, now, now),
)
def is_notification_viewed(self, destination_hash, message_timestamp):

View File

@@ -9,9 +9,10 @@ class MiscDAO:
# Blocked Destinations
def add_blocked_destination(self, destination_hash):
now = datetime.now(UTC)
self.provider.execute(
"INSERT OR IGNORE INTO blocked_destinations (destination_hash, updated_at) VALUES (?, ?)",
(destination_hash, datetime.now(UTC)),
"INSERT OR IGNORE INTO blocked_destinations (destination_hash, created_at, updated_at) VALUES (?, ?, ?)",
(destination_hash, now, now),
)
def is_destination_blocked(self, destination_hash):
@@ -34,9 +35,10 @@ class MiscDAO:
# Spam Keywords
def add_spam_keyword(self, keyword):
now = datetime.now(UTC)
self.provider.execute(
"INSERT OR IGNORE INTO spam_keywords (keyword, updated_at) VALUES (?, ?)",
(keyword, datetime.now(UTC)),
"INSERT OR IGNORE INTO spam_keywords (keyword, created_at, updated_at) VALUES (?, ?, ?)",
(keyword, now, now),
)
def get_spam_keywords(self):
@@ -55,20 +57,31 @@ class MiscDAO:
# User Icons
def update_lxmf_user_icon(
self, destination_hash, icon_name, foreground_colour, background_colour
self,
destination_hash,
icon_name,
foreground_colour,
background_colour,
):
now = datetime.now(UTC)
self.provider.execute(
"""
INSERT INTO lxmf_user_icons (destination_hash, icon_name, foreground_colour, background_colour, updated_at)
VALUES (?, ?, ?, ?, ?)
INSERT INTO lxmf_user_icons (destination_hash, icon_name, foreground_colour, background_colour, created_at, updated_at)
VALUES (?, ?, ?, ?, ?, ?)
ON CONFLICT(destination_hash) DO UPDATE SET
icon_name = EXCLUDED.icon_name,
foreground_colour = EXCLUDED.foreground_colour,
background_colour = EXCLUDED.background_colour,
updated_at = EXCLUDED.updated_at
""",
(destination_hash, icon_name, foreground_colour, background_colour, now),
(
destination_hash,
icon_name,
foreground_colour,
background_colour,
now,
now,
),
)
def get_user_icon(self, destination_hash):
@@ -89,23 +102,31 @@ class MiscDAO:
return self.provider.fetchall(query, params)
def create_forwarding_rule(
self, identity_hash, forward_to_hash, source_filter_hash, is_active=True
self,
identity_hash,
forward_to_hash,
source_filter_hash,
is_active=True,
name=None,
):
now = datetime.now(UTC)
self.provider.execute(
"INSERT INTO lxmf_forwarding_rules (identity_hash, forward_to_hash, source_filter_hash, is_active, updated_at) VALUES (?, ?, ?, ?, ?)",
"INSERT INTO lxmf_forwarding_rules (identity_hash, forward_to_hash, source_filter_hash, is_active, name, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?, ?)",
(
identity_hash,
forward_to_hash,
source_filter_hash,
1 if is_active else 0,
name,
now,
now,
),
)
def delete_forwarding_rule(self, rule_id):
self.provider.execute(
"DELETE FROM lxmf_forwarding_rules WHERE id = ?", (rule_id,)
"DELETE FROM lxmf_forwarding_rules WHERE id = ?",
(rule_id,),
)
def toggle_forwarding_rule(self, rule_id):
@@ -116,9 +137,10 @@ class MiscDAO:
# Archived Pages
def archive_page(self, destination_hash, page_path, content, page_hash):
now = datetime.now(UTC)
self.provider.execute(
"INSERT INTO archived_pages (destination_hash, page_path, content, hash) VALUES (?, ?, ?, ?)",
(destination_hash, page_path, content, page_hash),
"INSERT INTO archived_pages (destination_hash, page_path, content, hash, created_at) VALUES (?, ?, ?, ?, ?)",
(destination_hash, page_path, content, page_hash, now),
)
def get_archived_page_versions(self, destination_hash, page_path):
@@ -154,22 +176,27 @@ class MiscDAO:
# Crawl Tasks
def upsert_crawl_task(
self, destination_hash, page_path, status="pending", retry_count=0
self,
destination_hash,
page_path,
status="pending",
retry_count=0,
):
now = datetime.now(UTC)
self.provider.execute(
"""
INSERT INTO crawl_tasks (destination_hash, page_path, status, retry_count)
VALUES (?, ?, ?, ?)
INSERT INTO crawl_tasks (destination_hash, page_path, status, retry_count, created_at)
VALUES (?, ?, ?, ?, ?)
ON CONFLICT(destination_hash, page_path) DO UPDATE SET
status = EXCLUDED.status,
retry_count = EXCLUDED.retry_count
""",
(destination_hash, page_path, status, retry_count),
(destination_hash, page_path, status, retry_count, now),
)
def get_pending_crawl_tasks(self):
return self.provider.fetchall(
"SELECT * FROM crawl_tasks WHERE status = 'pending'"
"SELECT * FROM crawl_tasks WHERE status = 'pending'",
)
def update_crawl_task(self, task_id, **kwargs):
@@ -199,5 +226,6 @@ class MiscDAO:
def get_archived_page_by_id(self, archive_id):
return self.provider.fetchone(
"SELECT * FROM archived_pages WHERE id = ?", (archive_id,)
"SELECT * FROM archived_pages WHERE id = ?",
(archive_id,),
)
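
One detail in the upsert_crawl_task hunk: the conflict target is the composite pair (destination_hash, page_path), which only works because migration 10 (further down this page) created a matching unique index; SQLite rejects an ON CONFLICT target that does not correspond to a UNIQUE index or constraint. Reduced to a demonstration:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE tasks (dest TEXT, path TEXT, status TEXT)")
conn.execute("CREATE UNIQUE INDEX idx_tasks_dest_path ON tasks(dest, path)")

# Without the unique index above, this raises sqlite3.OperationalError
conn.execute(
    """
    INSERT INTO tasks (dest, path, status) VALUES (?, ?, ?)
    ON CONFLICT(dest, path) DO UPDATE SET status = EXCLUDED.status
    """,
    ("abc123", "/index.mu", "pending"),
)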

View File

@@ -24,7 +24,8 @@ class DatabaseProvider:
def connection(self):
if not hasattr(self._local, "connection"):
self._local.connection = sqlite3.connect(
self.db_path, check_same_thread=False
self.db_path,
check_same_thread=False,
)
self._local.connection.row_factory = sqlite3.Row
# Enable WAL mode for better concurrency
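
For context, the call being rewrapped sits inside a lazily created per-thread connection: sqlite3 connections should not be shared across threads, so the provider keeps one per thread in threading.local() and enables WAL so readers do not block the writer. The pattern, condensed:

import sqlite3
import threading

class Provider:
    def __init__(self, db_path):
        self.db_path = db_path
        self._local = threading.local()  # one connection per thread

    @property
    def connection(self):
        if not hasattr(self._local, "connection"):
            conn = sqlite3.connect(self.db_path, check_same_thread=False)
            conn.row_factory = sqlite3.Row  # name-based column access
            conn.execute("PRAGMA journal_mode=WAL")
            self._local.connection = conn
        return self._local.connection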

View File

@@ -2,7 +2,7 @@ from .provider import DatabaseProvider
class DatabaseSchema:
LATEST_VERSION = 15
LATEST_VERSION = 16
def __init__(self, provider: DatabaseProvider):
self.provider = provider
@@ -17,7 +17,8 @@ class DatabaseSchema:
def _get_current_version(self):
row = self.provider.fetchone(
"SELECT value FROM config WHERE key = ?", ("database_version",)
"SELECT value FROM config WHERE key = ?",
("database_version",),
)
if row:
return int(row["value"])
@@ -160,6 +161,7 @@ class DatabaseSchema:
"lxmf_forwarding_rules": """
CREATE TABLE IF NOT EXISTS lxmf_forwarding_rules (
id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT,
identity_hash TEXT,
forward_to_hash TEXT,
source_filter_hash TEXT,
@@ -232,42 +234,42 @@ class DatabaseSchema:
# Create indexes that were present
if table_name == "announces":
self.provider.execute(
"CREATE INDEX IF NOT EXISTS idx_announces_aspect ON announces(aspect)"
"CREATE INDEX IF NOT EXISTS idx_announces_aspect ON announces(aspect)",
)
self.provider.execute(
"CREATE INDEX IF NOT EXISTS idx_announces_identity_hash ON announces(identity_hash)"
"CREATE INDEX IF NOT EXISTS idx_announces_identity_hash ON announces(identity_hash)",
)
elif table_name == "lxmf_messages":
self.provider.execute(
"CREATE INDEX IF NOT EXISTS idx_lxmf_messages_source_hash ON lxmf_messages(source_hash)"
"CREATE INDEX IF NOT EXISTS idx_lxmf_messages_source_hash ON lxmf_messages(source_hash)",
)
self.provider.execute(
"CREATE INDEX IF NOT EXISTS idx_lxmf_messages_destination_hash ON lxmf_messages(destination_hash)"
"CREATE INDEX IF NOT EXISTS idx_lxmf_messages_destination_hash ON lxmf_messages(destination_hash)",
)
elif table_name == "blocked_destinations":
self.provider.execute(
"CREATE INDEX IF NOT EXISTS idx_blocked_destinations_hash ON blocked_destinations(destination_hash)"
"CREATE INDEX IF NOT EXISTS idx_blocked_destinations_hash ON blocked_destinations(destination_hash)",
)
elif table_name == "spam_keywords":
self.provider.execute(
"CREATE INDEX IF NOT EXISTS idx_spam_keywords_keyword ON spam_keywords(keyword)"
"CREATE INDEX IF NOT EXISTS idx_spam_keywords_keyword ON spam_keywords(keyword)",
)
elif table_name == "notification_viewed_state":
self.provider.execute(
"CREATE INDEX IF NOT EXISTS idx_notification_viewed_state_destination_hash ON notification_viewed_state(destination_hash)"
"CREATE INDEX IF NOT EXISTS idx_notification_viewed_state_destination_hash ON notification_viewed_state(destination_hash)",
)
self.provider.execute(
"CREATE UNIQUE INDEX IF NOT EXISTS idx_notification_viewed_state_dest_hash_unique ON notification_viewed_state(destination_hash)"
"CREATE UNIQUE INDEX IF NOT EXISTS idx_notification_viewed_state_dest_hash_unique ON notification_viewed_state(destination_hash)",
)
elif table_name == "lxmf_telemetry":
self.provider.execute(
"CREATE INDEX IF NOT EXISTS idx_lxmf_telemetry_destination_hash ON lxmf_telemetry(destination_hash)"
"CREATE INDEX IF NOT EXISTS idx_lxmf_telemetry_destination_hash ON lxmf_telemetry(destination_hash)",
)
self.provider.execute(
"CREATE INDEX IF NOT EXISTS idx_lxmf_telemetry_timestamp ON lxmf_telemetry(timestamp)"
"CREATE INDEX IF NOT EXISTS idx_lxmf_telemetry_timestamp ON lxmf_telemetry(timestamp)",
)
self.provider.execute(
"CREATE UNIQUE INDEX IF NOT EXISTS idx_lxmf_telemetry_dest_ts_unique ON lxmf_telemetry(destination_hash, timestamp)"
"CREATE UNIQUE INDEX IF NOT EXISTS idx_lxmf_telemetry_dest_ts_unique ON lxmf_telemetry(destination_hash, timestamp)",
)
def migrate(self, current_version):
@@ -283,13 +285,13 @@ class DatabaseSchema:
)
""")
self.provider.execute(
"CREATE INDEX IF NOT EXISTS idx_archived_pages_destination_hash ON archived_pages(destination_hash)"
"CREATE INDEX IF NOT EXISTS idx_archived_pages_destination_hash ON archived_pages(destination_hash)",
)
self.provider.execute(
"CREATE INDEX IF NOT EXISTS idx_archived_pages_page_path ON archived_pages(page_path)"
"CREATE INDEX IF NOT EXISTS idx_archived_pages_page_path ON archived_pages(page_path)",
)
self.provider.execute(
"CREATE INDEX IF NOT EXISTS idx_archived_pages_hash ON archived_pages(hash)"
"CREATE INDEX IF NOT EXISTS idx_archived_pages_hash ON archived_pages(hash)",
)
if current_version < 8:
@@ -306,16 +308,17 @@ class DatabaseSchema:
)
""")
self.provider.execute(
"CREATE INDEX IF NOT EXISTS idx_crawl_tasks_destination_hash ON crawl_tasks(destination_hash)"
"CREATE INDEX IF NOT EXISTS idx_crawl_tasks_destination_hash ON crawl_tasks(destination_hash)",
)
self.provider.execute(
"CREATE INDEX IF NOT EXISTS idx_crawl_tasks_page_path ON crawl_tasks(page_path)"
"CREATE INDEX IF NOT EXISTS idx_crawl_tasks_page_path ON crawl_tasks(page_path)",
)
if current_version < 9:
self.provider.execute("""
CREATE TABLE IF NOT EXISTS lxmf_forwarding_rules (
id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT,
identity_hash TEXT,
forward_to_hash TEXT,
source_filter_hash TEXT,
@@ -325,7 +328,7 @@ class DatabaseSchema:
)
""")
self.provider.execute(
"CREATE INDEX IF NOT EXISTS idx_lxmf_forwarding_rules_identity_hash ON lxmf_forwarding_rules(identity_hash)"
"CREATE INDEX IF NOT EXISTS idx_lxmf_forwarding_rules_identity_hash ON lxmf_forwarding_rules(identity_hash)",
)
self.provider.execute("""
@@ -340,13 +343,13 @@ class DatabaseSchema:
)
""")
self.provider.execute(
"CREATE INDEX IF NOT EXISTS idx_lxmf_forwarding_mappings_alias_hash ON lxmf_forwarding_mappings(alias_hash)"
"CREATE INDEX IF NOT EXISTS idx_lxmf_forwarding_mappings_alias_hash ON lxmf_forwarding_mappings(alias_hash)",
)
self.provider.execute(
"CREATE INDEX IF NOT EXISTS idx_lxmf_forwarding_mappings_sender_hash ON lxmf_forwarding_mappings(original_sender_hash)"
"CREATE INDEX IF NOT EXISTS idx_lxmf_forwarding_mappings_sender_hash ON lxmf_forwarding_mappings(original_sender_hash)",
)
self.provider.execute(
"CREATE INDEX IF NOT EXISTS idx_lxmf_forwarding_mappings_recipient_hash ON lxmf_forwarding_mappings(final_recipient_hash)"
"CREATE INDEX IF NOT EXISTS idx_lxmf_forwarding_mappings_recipient_hash ON lxmf_forwarding_mappings(final_recipient_hash)",
)
if current_version < 10:
@@ -356,54 +359,54 @@ class DatabaseSchema:
# Clean up duplicates before adding unique indexes
self.provider.execute(
"DELETE FROM announces WHERE id NOT IN (SELECT MAX(id) FROM announces GROUP BY destination_hash)"
"DELETE FROM announces WHERE id NOT IN (SELECT MAX(id) FROM announces GROUP BY destination_hash)",
)
self.provider.execute(
"DELETE FROM crawl_tasks WHERE id NOT IN (SELECT MAX(id) FROM crawl_tasks GROUP BY destination_hash, page_path)"
"DELETE FROM crawl_tasks WHERE id NOT IN (SELECT MAX(id) FROM crawl_tasks GROUP BY destination_hash, page_path)",
)
self.provider.execute(
"DELETE FROM custom_destination_display_names WHERE id NOT IN (SELECT MAX(id) FROM custom_destination_display_names GROUP BY destination_hash)"
"DELETE FROM custom_destination_display_names WHERE id NOT IN (SELECT MAX(id) FROM custom_destination_display_names GROUP BY destination_hash)",
)
self.provider.execute(
"DELETE FROM favourite_destinations WHERE id NOT IN (SELECT MAX(id) FROM favourite_destinations GROUP BY destination_hash)"
"DELETE FROM favourite_destinations WHERE id NOT IN (SELECT MAX(id) FROM favourite_destinations GROUP BY destination_hash)",
)
self.provider.execute(
"DELETE FROM lxmf_user_icons WHERE id NOT IN (SELECT MAX(id) FROM lxmf_user_icons GROUP BY destination_hash)"
"DELETE FROM lxmf_user_icons WHERE id NOT IN (SELECT MAX(id) FROM lxmf_user_icons GROUP BY destination_hash)",
)
self.provider.execute(
"DELETE FROM lxmf_conversation_read_state WHERE id NOT IN (SELECT MAX(id) FROM lxmf_conversation_read_state GROUP BY destination_hash)"
"DELETE FROM lxmf_conversation_read_state WHERE id NOT IN (SELECT MAX(id) FROM lxmf_conversation_read_state GROUP BY destination_hash)",
)
self.provider.execute(
"DELETE FROM lxmf_messages WHERE id NOT IN (SELECT MAX(id) FROM lxmf_messages GROUP BY hash)"
"DELETE FROM lxmf_messages WHERE id NOT IN (SELECT MAX(id) FROM lxmf_messages GROUP BY hash)",
)
self.provider.execute(
"CREATE UNIQUE INDEX IF NOT EXISTS idx_announces_destination_hash_unique ON announces(destination_hash)"
"CREATE UNIQUE INDEX IF NOT EXISTS idx_announces_destination_hash_unique ON announces(destination_hash)",
)
self.provider.execute(
"CREATE UNIQUE INDEX IF NOT EXISTS idx_crawl_tasks_destination_path_unique ON crawl_tasks(destination_hash, page_path)"
"CREATE UNIQUE INDEX IF NOT EXISTS idx_crawl_tasks_destination_path_unique ON crawl_tasks(destination_hash, page_path)",
)
self.provider.execute(
"CREATE UNIQUE INDEX IF NOT EXISTS idx_custom_display_names_dest_hash_unique ON custom_destination_display_names(destination_hash)"
"CREATE UNIQUE INDEX IF NOT EXISTS idx_custom_display_names_dest_hash_unique ON custom_destination_display_names(destination_hash)",
)
self.provider.execute(
"CREATE UNIQUE INDEX IF NOT EXISTS idx_favourite_destinations_dest_hash_unique ON favourite_destinations(destination_hash)"
"CREATE UNIQUE INDEX IF NOT EXISTS idx_favourite_destinations_dest_hash_unique ON favourite_destinations(destination_hash)",
)
self.provider.execute(
"CREATE UNIQUE INDEX IF NOT EXISTS idx_lxmf_messages_hash_unique ON lxmf_messages(hash)"
"CREATE UNIQUE INDEX IF NOT EXISTS idx_lxmf_messages_hash_unique ON lxmf_messages(hash)",
)
self.provider.execute(
"CREATE UNIQUE INDEX IF NOT EXISTS idx_lxmf_user_icons_dest_hash_unique ON lxmf_user_icons(destination_hash)"
"CREATE UNIQUE INDEX IF NOT EXISTS idx_lxmf_user_icons_dest_hash_unique ON lxmf_user_icons(destination_hash)",
)
self.provider.execute(
"CREATE UNIQUE INDEX IF NOT EXISTS idx_lxmf_conversation_read_state_dest_hash_unique ON lxmf_conversation_read_state(destination_hash)"
"CREATE UNIQUE INDEX IF NOT EXISTS idx_lxmf_conversation_read_state_dest_hash_unique ON lxmf_conversation_read_state(destination_hash)",
)
if current_version < 11:
# Add is_spam column to lxmf_messages if it doesn't exist
try:
self.provider.execute(
"ALTER TABLE lxmf_messages ADD COLUMN is_spam INTEGER DEFAULT 0"
"ALTER TABLE lxmf_messages ADD COLUMN is_spam INTEGER DEFAULT 0",
)
except Exception:
# Column might already exist if table was created with newest schema
@@ -423,10 +426,10 @@ class DatabaseSchema:
)
""")
self.provider.execute(
"CREATE INDEX IF NOT EXISTS idx_call_history_remote_hash ON call_history(remote_identity_hash)"
"CREATE INDEX IF NOT EXISTS idx_call_history_remote_hash ON call_history(remote_identity_hash)",
)
self.provider.execute(
"CREATE INDEX IF NOT EXISTS idx_call_history_timestamp ON call_history(timestamp)"
"CREATE INDEX IF NOT EXISTS idx_call_history_timestamp ON call_history(timestamp)",
)
if current_version < 13:
@@ -443,10 +446,10 @@ class DatabaseSchema:
)
""")
self.provider.execute(
"CREATE INDEX IF NOT EXISTS idx_voicemails_remote_hash ON voicemails(remote_identity_hash)"
"CREATE INDEX IF NOT EXISTS idx_voicemails_remote_hash ON voicemails(remote_identity_hash)",
)
self.provider.execute(
"CREATE INDEX IF NOT EXISTS idx_voicemails_timestamp ON voicemails(timestamp)"
"CREATE INDEX IF NOT EXISTS idx_voicemails_timestamp ON voicemails(timestamp)",
)
if current_version < 14:
@@ -460,10 +463,10 @@ class DatabaseSchema:
)
""")
self.provider.execute(
"CREATE INDEX IF NOT EXISTS idx_notification_viewed_state_destination_hash ON notification_viewed_state(destination_hash)"
"CREATE INDEX IF NOT EXISTS idx_notification_viewed_state_destination_hash ON notification_viewed_state(destination_hash)",
)
self.provider.execute(
"CREATE UNIQUE INDEX IF NOT EXISTS idx_notification_viewed_state_dest_hash_unique ON notification_viewed_state(destination_hash)"
"CREATE UNIQUE INDEX IF NOT EXISTS idx_notification_viewed_state_dest_hash_unique ON notification_viewed_state(destination_hash)",
)
if current_version < 15:
@@ -481,17 +484,31 @@ class DatabaseSchema:
)
""")
self.provider.execute(
"CREATE INDEX IF NOT EXISTS idx_lxmf_telemetry_destination_hash ON lxmf_telemetry(destination_hash)"
"CREATE INDEX IF NOT EXISTS idx_lxmf_telemetry_destination_hash ON lxmf_telemetry(destination_hash)",
)
self.provider.execute(
"CREATE INDEX IF NOT EXISTS idx_lxmf_telemetry_timestamp ON lxmf_telemetry(timestamp)"
"CREATE INDEX IF NOT EXISTS idx_lxmf_telemetry_timestamp ON lxmf_telemetry(timestamp)",
)
self.provider.execute(
"CREATE UNIQUE INDEX IF NOT EXISTS idx_lxmf_telemetry_dest_ts_unique ON lxmf_telemetry(destination_hash, timestamp)"
"CREATE UNIQUE INDEX IF NOT EXISTS idx_lxmf_telemetry_dest_ts_unique ON lxmf_telemetry(destination_hash, timestamp)",
)
if current_version < 16:
try:
self.provider.execute(
"ALTER TABLE lxmf_forwarding_rules ADD COLUMN name TEXT",
)
except Exception:
pass
# Update version in config
self.provider.execute(
"INSERT OR REPLACE INTO config (key, value, updated_at) VALUES (?, ?, CURRENT_TIMESTAMP)",
"""
INSERT INTO config (key, value, created_at, updated_at)
VALUES (?, ?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)
ON CONFLICT(key) DO UPDATE SET
value = EXCLUDED.value,
updated_at = EXCLUDED.updated_at
""",
("database_version", str(self.LATEST_VERSION)),
)
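
The schema class follows a linear, versioned migration scheme: bump LATEST_VERSION, append an if current_version < N: block, and wrap ALTER TABLE in try/except so databases created fresh from the newest CREATE TABLE definitions (which already include the column) stay idempotent. Reduced to its skeleton:

LATEST_VERSION = 16

def migrate(provider, current_version):
    # Each block runs at most once per database, in order
    if current_version < 16:
        try:
            provider.execute("ALTER TABLE lxmf_forwarding_rules ADD COLUMN name TEXT")
        except Exception:
            pass  # column already present on fresh databases
    # Record the new version; created_at survives because the conflict
    # branch only touches value and updated_at
    provider.execute(
        """
        INSERT INTO config (key, value, created_at, updated_at)
        VALUES (?, ?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)
        ON CONFLICT(key) DO UPDATE SET
            value = EXCLUDED.value, updated_at = EXCLUDED.updated_at
        """,
        ("database_version", str(LATEST_VERSION)),
    )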

View File

@@ -8,24 +8,26 @@ class TelemetryDAO:
def __init__(self, provider: DatabaseProvider):
self.provider = provider
def upsert_telemetry(self, destination_hash, timestamp, data, received_from=None, physical_link=None):
def upsert_telemetry(
self, destination_hash, timestamp, data, received_from=None, physical_link=None
):
now = datetime.now(UTC).isoformat()
# If physical_link is a dict, convert to json
if isinstance(physical_link, dict):
physical_link = json.dumps(physical_link)
self.provider.execute(
"""
INSERT INTO lxmf_telemetry (destination_hash, timestamp, data, received_from, physical_link, updated_at)
VALUES (?, ?, ?, ?, ?, ?)
INSERT INTO lxmf_telemetry (destination_hash, timestamp, data, received_from, physical_link, created_at, updated_at)
VALUES (?, ?, ?, ?, ?, ?, ?)
ON CONFLICT(destination_hash, timestamp) DO UPDATE SET
data = EXCLUDED.data,
received_from = EXCLUDED.received_from,
physical_link = EXCLUDED.physical_link,
updated_at = EXCLUDED.updated_at
""",
(destination_hash, timestamp, data, received_from, physical_link, now),
(destination_hash, timestamp, data, received_from, physical_link, now, now),
)
def get_latest_telemetry(self, destination_hash):
@@ -58,4 +60,3 @@ class TelemetryDAO:
"DELETE FROM lxmf_telemetry WHERE destination_hash = ?",
(destination_hash,),
)

View File

@@ -14,6 +14,9 @@ class TelephoneDAO:
duration_seconds,
timestamp,
):
from datetime import UTC, datetime
now = datetime.now(UTC)
self.provider.execute(
"""
INSERT INTO call_history (
@@ -22,8 +25,9 @@ class TelephoneDAO:
is_incoming,
status,
duration_seconds,
timestamp
) VALUES (?, ?, ?, ?, ?, ?)
timestamp,
created_at
) VALUES (?, ?, ?, ?, ?, ?, ?)
""",
(
remote_identity_hash,
@@ -32,6 +36,7 @@ class TelephoneDAO:
status,
duration_seconds,
timestamp,
now,
),
)

View File

@@ -13,6 +13,9 @@ class VoicemailDAO:
duration_seconds,
timestamp,
):
from datetime import UTC, datetime
now = datetime.now(UTC)
self.provider.execute(
"""
INSERT INTO voicemails (
@@ -20,8 +23,9 @@ class VoicemailDAO:
remote_identity_name,
filename,
duration_seconds,
timestamp
) VALUES (?, ?, ?, ?, ?)
timestamp,
created_at
) VALUES (?, ?, ?, ?, ?, ?)
""",
(
remote_identity_hash,
@@ -29,6 +33,7 @@ class VoicemailDAO:
filename,
duration_seconds,
timestamp,
now,
),
)
@@ -58,6 +63,6 @@ class VoicemailDAO:
def get_unread_count(self):
row = self.provider.fetchone(
"SELECT COUNT(*) as count FROM voicemails WHERE is_read = 0"
"SELECT COUNT(*) as count FROM voicemails WHERE is_read = 0",
)
return row["count"] if row else 0

View File

@@ -21,19 +21,22 @@ class ForwardingManager:
for mapping in mappings:
try:
private_key_bytes = base64.b64decode(
mapping["alias_identity_private_key"]
mapping["alias_identity_private_key"],
)
alias_identity = RNS.Identity.from_bytes(private_key_bytes)
alias_hash = mapping["alias_hash"]
# create temp router for this alias
router_storage_path = os.path.join(
self.storage_path, "forwarding", alias_hash
self.storage_path,
"forwarding",
alias_hash,
)
os.makedirs(router_storage_path, exist_ok=True)
router = LXMF.LXMRouter(
identity=alias_identity, storagepath=router_storage_path
identity=alias_identity,
storagepath=router_storage_path,
)
router.PROCESSING_INTERVAL = 1
if self.config:
@@ -44,7 +47,7 @@ class ForwardingManager:
router.register_delivery_callback(self.delivery_callback)
alias_destination = router.register_delivery_identity(
identity=alias_identity
identity=alias_identity,
)
self.forwarding_destinations[alias_hash] = alias_destination
@@ -54,7 +57,10 @@ class ForwardingManager:
print(f"Failed to load forwarding alias {mapping['alias_hash']}: {e}")
def get_or_create_mapping(
self, source_hash, final_recipient_hash, original_destination_hash
self,
source_hash,
final_recipient_hash,
original_destination_hash,
):
mapping = self.db.messages.get_forwarding_mapping(
original_sender_hash=source_hash,
@@ -67,12 +73,15 @@ class ForwardingManager:
# create temp router for this alias
router_storage_path = os.path.join(
self.storage_path, "forwarding", alias_hash
self.storage_path,
"forwarding",
alias_hash,
)
os.makedirs(router_storage_path, exist_ok=True)
router = LXMF.LXMRouter(
identity=alias_identity, storagepath=router_storage_path
identity=alias_identity,
storagepath=router_storage_path,
)
router.PROCESSING_INTERVAL = 1
if self.config:
@@ -83,7 +92,7 @@ class ForwardingManager:
router.register_delivery_callback(self.delivery_callback)
alias_destination = router.register_delivery_identity(
identity=alias_identity
identity=alias_identity,
)
self.forwarding_destinations[alias_hash] = alias_destination
@@ -91,7 +100,7 @@ class ForwardingManager:
data = {
"alias_identity_private_key": base64.b64encode(
alias_identity.get_private_key()
alias_identity.get_private_key(),
).decode(),
"alias_hash": alias_hash,
"original_sender_hash": source_hash,

View File

@@ -62,7 +62,7 @@ class MapManager:
"size": stats.st_size,
"mtime": stats.st_mtime,
"is_active": full_path == self.get_offline_path(),
}
},
)
return sorted(files, key=lambda x: x["mtime"], reverse=True)
@@ -182,10 +182,10 @@ class MapManager:
# create schema
cursor.execute("CREATE TABLE metadata (name text, value text)")
cursor.execute(
"CREATE TABLE tiles (zoom_level integer, tile_column integer, tile_row integer, tile_data blob)"
"CREATE TABLE tiles (zoom_level integer, tile_column integer, tile_row integer, tile_data blob)",
)
cursor.execute(
"CREATE UNIQUE INDEX tile_index on tiles (zoom_level, tile_column, tile_row)"
"CREATE UNIQUE INDEX tile_index on tiles (zoom_level, tile_column, tile_row)",
)
# insert metadata
@@ -235,7 +235,7 @@ class MapManager:
current_count += 1
self._export_progress[export_id]["current"] = current_count
self._export_progress[export_id]["progress"] = int(
(current_count / total_tiles) * 100
(current_count / total_tiles) * 100,
)
# commit after each zoom level
@@ -259,7 +259,7 @@ class MapManager:
y = int(
(1.0 - math.log(math.tan(lat_rad) + (1 / math.cos(lat_rad))) / math.pi)
/ 2.0
* n
* n,
)
return x, y
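
The y expression rewrapped in this last hunk is the standard Web Mercator (slippy-map) tile formula. For reference, both axes together; the x half is not shown in the hunk and follows the usual convention:

import math

def deg2num(lat_deg, lon_deg, zoom):
    # Convert WGS84 coordinates to XYZ tile indices at a zoom level
    n = 2**zoom  # tiles per axis
    lat_rad = math.radians(lat_deg)
    x = int((lon_deg + 180.0) / 360.0 * n)
    y = int(
        (1.0 - math.log(math.tan(lat_rad) + (1 / math.cos(lat_rad))) / math.pi)
        / 2.0
        * n,
    )
    return x, y

print(deg2num(51.5074, -0.1278, 10))  # central London at zoom 10 -> (511, 340)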

View File

@@ -40,7 +40,8 @@ class MessageHandler:
OR (destination_hash = ? AND source_hash = ?))
"""
self.db.provider.execute(
query, [local_hash, destination_hash, local_hash, destination_hash]
query,
[local_hash, destination_hash, local_hash, destination_hash],
)
def search_messages(self, local_hash, search_term):

View File

@@ -53,7 +53,7 @@ class RNCPHandler:
)
self.receive_destination.set_link_established_callback(
self._client_link_established
self._client_link_established,
)
if fetch_allowed:
@@ -97,7 +97,7 @@ class RNCPHandler:
if resource.metadata:
try:
filename = os.path.basename(
resource.metadata["name"].decode("utf-8")
resource.metadata["name"].decode("utf-8"),
)
save_dir = os.path.join(self.storage_dir, "rncp_received")
os.makedirs(save_dir, exist_ok=True)
@@ -118,7 +118,8 @@ class RNCPHandler:
counter += 1
base, ext = os.path.splitext(filename)
saved_filename = os.path.join(
save_dir, f"{base}.{counter}{ext}"
save_dir,
f"{base}.{counter}{ext}",
)
shutil.move(resource.data.name, saved_filename)
@@ -137,7 +138,13 @@ class RNCPHandler:
self.active_transfers[transfer_id]["status"] = "failed"
def _fetch_request(
self, path, data, request_id, link_id, remote_identity, requested_at
self,
path,
data,
request_id,
link_id,
remote_identity,
requested_at,
):
if self.fetch_jail:
if data.startswith(self.fetch_jail + "/"):
@@ -349,7 +356,7 @@ class RNCPHandler:
if resource.metadata:
try:
filename = os.path.basename(
resource.metadata["name"].decode("utf-8")
resource.metadata["name"].decode("utf-8"),
)
if save_path:
save_dir = os.path.abspath(os.path.expanduser(save_path))
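
The counter logic rewrapped a few hunks up implements collision-safe saving: if name.ext exists, try name.1.ext, name.2.ext, and so on. Distilled into a standalone helper (hypothetical name, same behavior):

import os

def unique_path(save_dir, filename):
    # Return a path under save_dir that does not collide with existing files
    candidate = os.path.join(save_dir, filename)
    base, ext = os.path.splitext(filename)
    counter = 0
    while os.path.exists(candidate):
        counter += 1
        candidate = os.path.join(save_dir, f"{base}.{counter}{ext}")
    return candidate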

View File

@@ -109,13 +109,13 @@ class RNProbeHandler:
reception_stats = {}
if self.reticulum.is_connected_to_shared_instance:
reception_rssi = self.reticulum.get_packet_rssi(
receipt.proof_packet.packet_hash
receipt.proof_packet.packet_hash,
)
reception_snr = self.reticulum.get_packet_snr(
receipt.proof_packet.packet_hash
receipt.proof_packet.packet_hash,
)
reception_q = self.reticulum.get_packet_q(
receipt.proof_packet.packet_hash
receipt.proof_packet.packet_hash,
)
if reception_rssi is not None:

View File

@@ -59,23 +59,28 @@ class RNStatusHandler:
sorting = sorting.lower()
if sorting in ("rate", "bitrate"):
interfaces.sort(
key=lambda i: i.get("bitrate", 0) or 0, reverse=sort_reverse
key=lambda i: i.get("bitrate", 0) or 0,
reverse=sort_reverse,
)
elif sorting == "rx":
interfaces.sort(
key=lambda i: i.get("rxb", 0) or 0, reverse=sort_reverse
key=lambda i: i.get("rxb", 0) or 0,
reverse=sort_reverse,
)
elif sorting == "tx":
interfaces.sort(
key=lambda i: i.get("txb", 0) or 0, reverse=sort_reverse
key=lambda i: i.get("txb", 0) or 0,
reverse=sort_reverse,
)
elif sorting == "rxs":
interfaces.sort(
key=lambda i: i.get("rxs", 0) or 0, reverse=sort_reverse
key=lambda i: i.get("rxs", 0) or 0,
reverse=sort_reverse,
)
elif sorting == "txs":
interfaces.sort(
key=lambda i: i.get("txs", 0) or 0, reverse=sort_reverse
key=lambda i: i.get("txs", 0) or 0,
reverse=sort_reverse,
)
elif sorting == "traffic":
interfaces.sort(
@@ -100,7 +105,8 @@ class RNStatusHandler:
)
elif sorting == "held":
interfaces.sort(
key=lambda i: i.get("held_announces", 0) or 0, reverse=sort_reverse
key=lambda i: i.get("held_announces", 0) or 0,
reverse=sort_reverse,
)
formatted_interfaces = []
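
Since every branch of this chain differs only by field name, the reflow could arguably go further. An illustrative, untested condensation using a lookup table; it covers only the single-field sorts, leaving composite ones such as traffic as explicit branches:

SORT_FIELDS = {
    "rate": "bitrate",
    "bitrate": "bitrate",
    "rx": "rxb",
    "tx": "txb",
    "rxs": "rxs",
    "txs": "txs",
    "held": "held_announces",
}

field = SORT_FIELDS.get(sorting)
if field is not None:
    interfaces.sort(key=lambda i: i.get(field, 0) or 0, reverse=sort_reverse)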

View File

@@ -1,35 +1,38 @@
import struct
import time
import RNS.vendor.umsgpack as umsgpack
from RNS.vendor import umsgpack
class Sensor:
SID_NONE = 0x00
SID_TIME = 0x01
SID_LOCATION = 0x02
SID_PRESSURE = 0x03
SID_BATTERY = 0x04
SID_PHYSICAL_LINK = 0x05
SID_ACCELERATION = 0x06
SID_TEMPERATURE = 0x07
SID_HUMIDITY = 0x08
SID_MAGNETIC_FIELD = 0x09
SID_AMBIENT_LIGHT = 0x0A
SID_GRAVITY = 0x0B
SID_ANGULAR_VELOCITY = 0x0C
SID_PROXIMITY = 0x0E
SID_INFORMATION = 0x0F
SID_RECEIVED = 0x10
SID_NONE = 0x00
SID_TIME = 0x01
SID_LOCATION = 0x02
SID_PRESSURE = 0x03
SID_BATTERY = 0x04
SID_PHYSICAL_LINK = 0x05
SID_ACCELERATION = 0x06
SID_TEMPERATURE = 0x07
SID_HUMIDITY = 0x08
SID_MAGNETIC_FIELD = 0x09
SID_AMBIENT_LIGHT = 0x0A
SID_GRAVITY = 0x0B
SID_ANGULAR_VELOCITY = 0x0C
SID_PROXIMITY = 0x0E
SID_INFORMATION = 0x0F
SID_RECEIVED = 0x10
SID_POWER_CONSUMPTION = 0x11
SID_POWER_PRODUCTION = 0x12
SID_PROCESSOR = 0x13
SID_RAM = 0x14
SID_NVM = 0x15
SID_TANK = 0x16
SID_FUEL = 0x17
SID_RNS_TRANSPORT = 0x19
SID_LXMF_PROPAGATION = 0x18
SID_CONNECTION_MAP = 0x1A
SID_CUSTOM = 0xff
SID_POWER_PRODUCTION = 0x12
SID_PROCESSOR = 0x13
SID_RAM = 0x14
SID_NVM = 0x15
SID_TANK = 0x16
SID_FUEL = 0x17
SID_RNS_TRANSPORT = 0x19
SID_LXMF_PROPAGATION = 0x18
SID_CONNECTION_MAP = 0x1A
SID_CUSTOM = 0xFF
class Telemeter:
@staticmethod
@@ -38,27 +41,35 @@ class Telemeter:
if packed is None:
return None
return {
"latitude": struct.unpack("!i", packed[0])[0]/1e6,
"longitude": struct.unpack("!i", packed[1])[0]/1e6,
"altitude": struct.unpack("!i", packed[2])[0]/1e2,
"speed": struct.unpack("!I", packed[3])[0]/1e2,
"bearing": struct.unpack("!i", packed[4])[0]/1e2,
"accuracy": struct.unpack("!H", packed[5])[0]/1e2,
"latitude": struct.unpack("!i", packed[0])[0] / 1e6,
"longitude": struct.unpack("!i", packed[1])[0] / 1e6,
"altitude": struct.unpack("!i", packed[2])[0] / 1e2,
"speed": struct.unpack("!I", packed[3])[0] / 1e2,
"bearing": struct.unpack("!i", packed[4])[0] / 1e2,
"accuracy": struct.unpack("!H", packed[5])[0] / 1e2,
"last_update": packed[6],
}
except Exception:
return None
@staticmethod
def pack_location(latitude, longitude, altitude=0, speed=0, bearing=0, accuracy=0, last_update=None):
def pack_location(
latitude,
longitude,
altitude=0,
speed=0,
bearing=0,
accuracy=0,
last_update=None,
):
try:
return [
struct.pack("!i", int(round(latitude, 6)*1e6)),
struct.pack("!i", int(round(longitude, 6)*1e6)),
struct.pack("!i", int(round(altitude, 2)*1e2)),
struct.pack("!I", int(round(speed, 2)*1e2)),
struct.pack("!i", int(round(bearing, 2)*1e2)),
struct.pack("!H", int(round(accuracy, 2)*1e2)),
struct.pack("!i", int(round(latitude, 6) * 1e6)),
struct.pack("!i", int(round(longitude, 6) * 1e6)),
struct.pack("!i", int(round(altitude, 2) * 1e2)),
struct.pack("!I", int(round(speed, 2) * 1e2)),
struct.pack("!i", int(round(bearing, 2) * 1e2)),
struct.pack("!H", int(round(accuracy, 2) * 1e2)),
int(last_update or time.time()),
]
except Exception:
@@ -85,4 +96,3 @@ class Telemeter:
if location:
p[Sensor.SID_LOCATION] = Telemeter.pack_location(**location)
return umsgpack.packb(p)
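
The arithmetic normalized above is fixed-point encoding: each float is scaled to an integer so struct.pack can store it in a fixed-width big-endian slot ("!i" is a signed 32-bit int, "!H" an unsigned 16-bit), and unpacking divides by the same factor. A minimal round trip for the latitude field:

import struct

lat = 51.5  # degrees, stored as micro-degrees in 4 bytes
packed = struct.pack("!i", int(round(lat, 6) * 1e6))
assert len(packed) == 4
assert struct.unpack("!i", packed)[0] / 1e6 == 51.5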

View File

@@ -77,7 +77,7 @@ class TelephoneManager:
if destination_identity is None:
# If not found by identity hash, try as destination hash
destination_identity = RNS.Identity.recall(
destination_hash
destination_hash,
) # Identity.recall takes identity hash
if destination_identity is None:

View File

@@ -66,7 +66,8 @@ LANGUAGE_CODE_TO_NAME = {
class TranslatorHandler:
def __init__(self, libretranslate_url: str | None = None):
self.libretranslate_url = libretranslate_url or os.getenv(
"LIBRETRANSLATE_URL", "http://localhost:5000"
"LIBRETRANSLATE_URL",
"http://localhost:5000",
)
self.has_argos = HAS_ARGOS
self.has_argos_lib = HAS_ARGOS_LIB
@@ -188,14 +189,18 @@ class TranslatorHandler:
return {
"translated_text": result.get("translatedText", ""),
"source_lang": result.get("detectedLanguage", {}).get(
"language", source_lang
"language",
source_lang,
),
"target_lang": target_lang,
"source": "libretranslate",
}
def _translate_argos(
self, text: str, source_lang: str, target_lang: str
self,
text: str,
source_lang: str,
target_lang: str,
) -> dict[str, Any]:
if source_lang == "auto":
if self.has_argos_lib:
@@ -220,7 +225,10 @@ class TranslatorHandler:
raise RuntimeError(msg)
def _translate_argos_lib(
self, text: str, source_lang: str, target_lang: str
self,
text: str,
source_lang: str,
target_lang: str,
) -> dict[str, Any]:
try:
installed_packages = package.get_installed_packages()
@@ -250,7 +258,10 @@ class TranslatorHandler:
raise RuntimeError(msg)
def _translate_argos_cli(
self, text: str, source_lang: str, target_lang: str
self,
text: str,
source_lang: str,
target_lang: str,
) -> dict[str, Any]:
if source_lang == "auto" or not source_lang:
msg = "Auto-detection is not supported with CLI. Please select a source language manually."
@@ -368,7 +379,8 @@ class TranslatorHandler:
return languages
def install_language_package(
self, package_name: str = "translate"
self,
package_name: str = "translate",
) -> dict[str, Any]:
argospm = shutil.which("argospm")
if not argospm:

View File

@@ -2,6 +2,7 @@ import os
import platform
import shutil
import subprocess
import sys
import threading
import time
@@ -53,9 +54,34 @@ class VoicemailManager:
def get_name_for_identity_hash(self, identity_hash):
"""Default implementation, should be patched by ReticulumMeshChat"""
return
def _find_bundled_binary(self, name):
if getattr(sys, "frozen", False):
exe_dir = os.path.dirname(sys.executable)
# Try in bin/ subdirectory of the executable
local_bin = os.path.join(exe_dir, "bin", name)
if platform.system() == "Windows":
local_bin += ".exe"
if os.path.exists(local_bin):
return local_bin
# Try in executable directory itself
local_bin = os.path.join(exe_dir, name)
if platform.system() == "Windows":
local_bin += ".exe"
if os.path.exists(local_bin):
return local_bin
return None
def _find_espeak(self):
# Try bundled first
bundled = self._find_bundled_binary("espeak-ng")
if bundled:
return bundled
bundled = self._find_bundled_binary("espeak")
if bundled:
return bundled
# Try standard name first
path = shutil.which("espeak-ng")
if path:
@@ -80,6 +106,11 @@ class VoicemailManager:
return None
def _find_ffmpeg(self):
# Try bundled first
bundled = self._find_bundled_binary("ffmpeg")
if bundled:
return bundled
path = shutil.which("ffmpeg")
if path:
return path
@@ -149,7 +180,10 @@ class VoicemailManager:
return True
def handle_incoming_call(self, caller_identity):
RNS.log(f"Voicemail: handle_incoming_call from {RNS.prettyhexrep(caller_identity.hash)}", RNS.LOG_DEBUG)
RNS.log(
f"Voicemail: handle_incoming_call from {RNS.prettyhexrep(caller_identity.hash)}",
RNS.LOG_DEBUG,
)
if not self.config.voicemail_enabled.get():
RNS.log("Voicemail: Voicemail is disabled", RNS.LOG_DEBUG)
return
@@ -158,7 +192,10 @@ class VoicemailManager:
RNS.log(f"Voicemail: Will auto-answer in {delay} seconds", RNS.LOG_DEBUG)
def voicemail_job():
RNS.log(f"Voicemail: Auto-answer timer started for {RNS.prettyhexrep(caller_identity.hash)}", RNS.LOG_DEBUG)
RNS.log(
f"Voicemail: Auto-answer timer started for {RNS.prettyhexrep(caller_identity.hash)}",
RNS.LOG_DEBUG,
)
time.sleep(delay)
# Check if still ringing and no other active call
@@ -167,12 +204,16 @@ class VoicemailManager:
RNS.log("Voicemail: No telephone object", RNS.LOG_ERROR)
return
RNS.log(f"Voicemail: Checking status. Call status: {telephone.call_status}, Active call: {telephone.active_call}", RNS.LOG_DEBUG)
RNS.log(
f"Voicemail: Checking status. Call status: {telephone.call_status}, Active call: {telephone.active_call}",
RNS.LOG_DEBUG,
)
if (
telephone
and telephone.active_call
and telephone.active_call.get_remote_identity().hash == caller_identity.hash
and telephone.active_call.get_remote_identity().hash
== caller_identity.hash
and telephone.call_status == LXST.Signalling.STATUS_RINGING
):
RNS.log(
@@ -181,9 +222,15 @@ class VoicemailManager:
)
self.start_voicemail_session(caller_identity)
else:
RNS.log("Voicemail: Auto-answer conditions not met after delay", RNS.LOG_DEBUG)
RNS.log(
"Voicemail: Auto-answer conditions not met after delay",
RNS.LOG_DEBUG,
)
if telephone.active_call:
RNS.log(f"Voicemail: Active call remote: {RNS.prettyhexrep(telephone.active_call.get_remote_identity().hash)}", RNS.LOG_DEBUG)
RNS.log(
f"Voicemail: Active call remote: {RNS.prettyhexrep(telephone.active_call.get_remote_identity().hash)}",
RNS.LOG_DEBUG,
)
threading.Thread(target=voicemail_job, daemon=True).start()
@@ -209,20 +256,28 @@ class VoicemailManager:
def session_job():
try:
# 1. Play greeting
greeting_source = OpusFileSource(greeting_path, target_frame_ms=60)
# Attach to transmit mixer
greeting_pipeline = Pipeline(
source=greeting_source, codec=Null(), sink=telephone.transmit_mixer
)
greeting_pipeline.start()
try:
greeting_source = OpusFileSource(greeting_path, target_frame_ms=60)
# Attach to transmit mixer
greeting_pipeline = Pipeline(
source=greeting_source,
codec=Null(),
sink=telephone.transmit_mixer,
)
greeting_pipeline.start()
# Wait for greeting to finish
while greeting_source.running:
time.sleep(0.1)
if not telephone.active_call:
return
# Wait for greeting to finish
while greeting_source.running:
time.sleep(0.1)
if not telephone.active_call:
return
greeting_pipeline.stop()
greeting_pipeline.stop()
except Exception as e:
RNS.log(
f"Voicemail: Could not play greeting (libs missing?): {e}",
RNS.LOG_ERROR,
)
# 2. Play beep
beep_source = LXST.ToneSource(
@@ -306,7 +361,7 @@ class VoicemailManager:
# Save to database if long enough
if duration >= 1:
remote_name = self.get_name_for_identity_hash(
self.recording_remote_identity.hash.hex()
self.recording_remote_identity.hash.hex(),
)
self.db.voicemails.add_voicemail(
remote_identity_hash=self.recording_remote_identity.hash.hex(),

View File

@@ -1,6 +1,5 @@
"""
Auto-generated helper so Python tooling and the Electron build
"""Auto-generated helper so Python tooling and the Electron build
share the same version string.
"""
__version__ = '2.50.0'
__version__ = "3.0.0"