feat(version): update version to 3.0.0 and enhance cx_setup to include 'bin' directory if it exists
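Besides the version bump and the cx_setup change named in the title, this commit bumps the database schema (LATEST_VERSION 15 to 16), adds created_at columns alongside updated_at across several tables, adds an optional name to LXMF forwarding rules, and reformats many call sites to one argument per line with trailing commas.

The cx_setup change follows the usual cx_Freeze include_files pattern. A minimal sketch, assuming ROOT is a pathlib.Path at the project root (ROOT's definition is not part of this diff):

    from pathlib import Path

    ROOT = Path(__file__).parent  # assumption: ROOT points at the project root

    include_files = [
        ("logo", "logo"),
    ]

    # only bundle the optional bin directory when it exists in the source tree
    if (ROOT / "bin").exists():
        include_files.append(("bin", "bin"))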
@@ -13,6 +13,9 @@ include_files = [
     ("logo", "logo"),
 ]
 
+if (ROOT / "bin").exists():
+    include_files.append(("bin", "bin"))
+
 packages = [
     "RNS",
     "RNS.Interfaces",
@@ -1,3 +1,3 @@
 """Reticulum MeshChatX - A mesh network communications app."""
 
-__version__ = "2.50.0"
+__version__ = "3.0.0"
@@ -55,13 +55,13 @@ from meshchatx.src.backend.lxmf_message_fields import (
     LxmfFileAttachmentsField,
     LxmfImageField,
 )
-from meshchatx.src.backend.telemetry_utils import Telemeter
 from meshchatx.src.backend.map_manager import MapManager
 from meshchatx.src.backend.message_handler import MessageHandler
 from meshchatx.src.backend.rncp_handler import RNCPHandler
 from meshchatx.src.backend.rnprobe_handler import RNProbeHandler
 from meshchatx.src.backend.rnstatus_handler import RNStatusHandler
 from meshchatx.src.backend.sideband_commands import SidebandCommands
+from meshchatx.src.backend.telemetry_utils import Telemeter
 from meshchatx.src.backend.telephone_manager import TelephoneManager
 from meshchatx.src.backend.translator_handler import TranslatorHandler
 from meshchatx.src.backend.voicemail_manager import VoicemailManager
@@ -367,7 +367,7 @@ class ReticulumMeshChat:
         # init Translator handler
         libretranslate_url = self.config.get("libretranslate_url", None)
         self.translator_handler = TranslatorHandler(
-            libretranslate_url=libretranslate_url
+            libretranslate_url=libretranslate_url,
         )
 
         # start background thread for auto announce loop
@@ -720,11 +720,12 @@ class ReticulumMeshChat:
                 # Proactively queue any known nodes from the database that haven't been queued yet
                 # get known propagation nodes from database
                 known_nodes = self.database.announces.get_announces(
-                    aspect="nomadnetwork.node"
+                    aspect="nomadnetwork.node",
                 )
                 for node in known_nodes:
                     self.queue_crawler_task(
-                        node["destination_hash"], "/page/index.mu"
+                        node["destination_hash"],
+                        "/page/index.mu",
                     )
 
                 # process pending or failed tasks
@@ -736,7 +737,7 @@ class ReticulumMeshChat:
 
                 # process tasks concurrently up to the limit
                 await asyncio.gather(
-                    *[self.process_crawler_task(task) for task in tasks]
+                    *[self.process_crawler_task(task) for task in tasks],
                 )
 
         except Exception as e:
@@ -749,14 +750,16 @@ class ReticulumMeshChat:
         # mark as crawling
         task_id = task["id"]
         self.database.misc.update_crawl_task(
-            task_id, status="crawling", last_retry_at=datetime.now(UTC)
+            task_id,
+            status="crawling",
+            last_retry_at=datetime.now(UTC),
         )
 
         destination_hash = task["destination_hash"]
         page_path = task["page_path"]
 
         print(
-            f"Crawler: Archiving {destination_hash}:{page_path} (Attempt {task['retry_count'] + 1})"
+            f"Crawler: Archiving {destination_hash}:{page_path} (Attempt {task['retry_count'] + 1})",
         )
 
         # completion event
@@ -802,7 +805,7 @@ class ReticulumMeshChat:
             await download_task
         except Exception as e:
             print(
-                f"Crawler: Error during download for {destination_hash}:{page_path}: {e}"
+                f"Crawler: Error during download for {destination_hash}:{page_path}: {e}",
             )
             failure_reason[0] = str(e)
             done_event.set()
@@ -810,13 +813,16 @@ class ReticulumMeshChat:
         if success[0]:
             print(f"Crawler: Successfully archived {destination_hash}:{page_path}")
             self.archive_page(
-                destination_hash, page_path, content_received[0], is_manual=False
+                destination_hash,
+                page_path,
+                content_received[0],
+                is_manual=False,
             )
             task.status = "completed"
             task.save()
         else:
             print(
-                f"Crawler: Failed to archive {destination_hash}:{page_path} - {failure_reason[0]}"
+                f"Crawler: Failed to archive {destination_hash}:{page_path} - {failure_reason[0]}",
             )
             task.retry_count += 1
             task.status = "failed"
@@ -957,7 +963,9 @@ class ReticulumMeshChat:
     def get_conversation_latest_message(self, destination_hash: str):
         local_hash = self.identity.hexhash
         messages = self.message_handler.get_conversation_messages(
-            local_hash, destination_hash, limit=1
+            local_hash,
+            destination_hash,
+            limit=1,
         )
         return messages[0] if messages else None
 
@@ -965,7 +973,8 @@ class ReticulumMeshChat:
     def conversation_has_attachments(self, destination_hash: str):
         local_hash = self.identity.hexhash
         messages = self.message_handler.get_conversation_messages(
-            local_hash, destination_hash
+            local_hash,
+            destination_hash,
         )
         for message in messages:
             if self.message_fields_have_attachments(message["fields"]):
@@ -1011,7 +1020,7 @@ class ReticulumMeshChat:
         )  # Or more specific if needed
         for announce in custom_names:
             custom_name = self.database.announces.get_custom_display_name(
-                announce["destination_hash"]
+                announce["destination_hash"],
            )
            if custom_name and search_term.lower() in custom_name.lower():
                matches.add(announce["destination_hash"])
@@ -1058,7 +1067,7 @@ class ReticulumMeshChat:
             self.voicemail_manager.stop_recording()
 
         print(
-            f"on_telephone_call_ended: {caller_identity.hash.hex() if caller_identity else 'Unknown'}"
+            f"on_telephone_call_ended: {caller_identity.hash.hex() if caller_identity else 'Unknown'}",
         )
 
         # Record call history
@@ -2536,7 +2545,8 @@ class ReticulumMeshChat:
                 "remote_identity_name": remote_identity_name,
                 "audio_profile_id": self.telephone_manager.telephone.transmit_codec.profile
                 if hasattr(
-                    self.telephone_manager.telephone.transmit_codec, "profile"
+                    self.telephone_manager.telephone.transmit_codec,
+                    "profile",
                 )
                 else None,
                 "tx_packets": getattr(telephone_active_call, "tx", 0),
@@ -2575,7 +2585,8 @@ class ReticulumMeshChat:
 
             # answer call
             await asyncio.to_thread(
-                self.telephone_manager.telephone.answer, caller_identity
+                self.telephone_manager.telephone.answer,
+                caller_identity,
             )
 
             return web.json_response(
@@ -2637,7 +2648,7 @@ class ReticulumMeshChat:
                     int(profile_id),
                 )
                 return web.json_response(
-                    {"message": f"Switched to profile {profile_id}"}
+                    {"message": f"Switched to profile {profile_id}"},
                )
            except Exception as e:
                return web.json_response({"message": str(e)}, status=500)
@@ -2752,7 +2763,8 @@ class ReticulumMeshChat:
         @routes.get("/api/v1/telephone/voicemail/status")
         async def telephone_voicemail_status(request):
             greeting_path = os.path.join(
-                self.voicemail_manager.greetings_dir, "greeting.opus"
+                self.voicemail_manager.greetings_dir,
+                "greeting.opus",
             )
             return web.json_response(
                 {
@@ -2769,7 +2781,8 @@ class ReticulumMeshChat:
             limit = int(request.query.get("limit", 50))
             offset = int(request.query.get("offset", 0))
             voicemails = self.database.voicemails.get_voicemails(
-                limit=limit, offset=offset
+                limit=limit,
+                offset=offset,
             )
             return web.json_response(
                 {
@@ -2792,7 +2805,8 @@ class ReticulumMeshChat:
             voicemail = self.database.voicemails.get_voicemail(voicemail_id)
             if voicemail:
                 filepath = os.path.join(
-                    self.voicemail_manager.recordings_dir, voicemail["filename"]
+                    self.voicemail_manager.recordings_dir,
+                    voicemail["filename"],
                 )
                 if os.path.exists(filepath):
                     os.remove(filepath)
@@ -2803,10 +2817,14 @@ class ReticulumMeshChat:
         # serve greeting audio
         @routes.get("/api/v1/telephone/voicemail/greeting/audio")
         async def telephone_voicemail_greeting_audio(request):
-            filepath = os.path.join(self.voicemail_manager.greetings_dir, "greeting.opus")
+            filepath = os.path.join(
+                self.voicemail_manager.greetings_dir, "greeting.opus"
+            )
             if os.path.exists(filepath):
                 return web.FileResponse(filepath)
-            return web.json_response({"message": "Greeting audio not found"}, status=404)
+            return web.json_response(
+                {"message": "Greeting audio not found"}, status=404
+            )
 
         # serve voicemail audio
         @routes.get("/api/v1/telephone/voicemails/{id}/audio")
@@ -2815,12 +2833,14 @@ class ReticulumMeshChat:
             voicemail = self.database.voicemails.get_voicemail(voicemail_id)
             if voicemail:
                 filepath = os.path.join(
-                    self.voicemail_manager.recordings_dir, voicemail["filename"]
+                    self.voicemail_manager.recordings_dir,
+                    voicemail["filename"],
                 )
                 if os.path.exists(filepath):
                     return web.FileResponse(filepath)
             return web.json_response(
-                {"message": "Voicemail audio not found"}, status=404
+                {"message": "Voicemail audio not found"},
+                status=404,
             )
 
         # generate greeting
@@ -2829,10 +2849,11 @@ class ReticulumMeshChat:
             try:
                 text = self.config.voicemail_greeting.get()
                 path = await asyncio.to_thread(
-                    self.voicemail_manager.generate_greeting, text
+                    self.voicemail_manager.generate_greeting,
+                    text,
                 )
                 return web.json_response(
-                    {"message": "Greeting generated", "path": path}
+                    {"message": "Greeting generated", "path": path},
                )
            except Exception as e:
                return web.json_response({"message": str(e)}, status=500)
@@ -2844,13 +2865,16 @@ class ReticulumMeshChat:
             reader = await request.multipart()
             field = await reader.next()
             if field.name != "file":
-                return web.json_response({"message": "File field required"}, status=400)
+                return web.json_response(
+                    {"message": "File field required"}, status=400
+                )
 
             filename = field.filename
             extension = os.path.splitext(filename)[1].lower()
             if extension not in [".mp3", ".ogg", ".wav", ".m4a", ".flac"]:
                 return web.json_response(
-                    {"message": f"Unsupported file type: {extension}"}, status=400
+                    {"message": f"Unsupported file type: {extension}"},
+                    status=400,
                )
 
             # Save temp file
@@ -2865,10 +2889,11 @@ class ReticulumMeshChat:
             try:
                 # Convert to greeting
                 path = await asyncio.to_thread(
-                    self.voicemail_manager.convert_to_greeting, temp_path
+                    self.voicemail_manager.convert_to_greeting,
+                    temp_path,
                 )
                 return web.json_response(
-                    {"message": "Greeting uploaded and converted", "path": path}
+                    {"message": "Greeting uploaded and converted", "path": path},
                )
            finally:
                if os.path.exists(temp_path):
@@ -3005,7 +3030,9 @@ class ReticulumMeshChat:
 
             # upsert favourite
             self.database.announces.upsert_favourite(
-                destination_hash, display_name, aspect
+                destination_hash,
+                display_name,
+                aspect,
             )
             return web.json_response(
                 {
@@ -3026,7 +3053,8 @@ class ReticulumMeshChat:
             # update display name if provided
             if len(display_name) > 0:
                 self.database.announces.upsert_custom_display_name(
-                    destination_hash, display_name
+                    destination_hash,
+                    display_name,
                 )
 
             return web.json_response(
@@ -3073,16 +3101,16 @@ class ReticulumMeshChat:
             for archive in archives_results:
                 # find node name from announces or custom display names
                 node_name = self.get_custom_destination_display_name(
-                    archive["destination_hash"]
+                    archive["destination_hash"],
                 )
                 if not node_name:
                     db_announce = self.database.announces.get_announce_by_hash(
-                        archive["destination_hash"]
+                        archive["destination_hash"],
                     )
                     if db_announce and db_announce["aspect"] == "nomadnetwork.node":
                         node_name = (
                             ReticulumMeshChat.parse_nomadnetwork_node_display_name(
-                                db_announce["app_data"]
+                                db_announce["app_data"],
                             )
                         )
 
@@ -3095,7 +3123,7 @@ class ReticulumMeshChat:
                     "content": archive["content"],
                     "hash": archive["hash"],
                     "created_at": archive["created_at"],
-                }
+                },
             )
 
         return web.json_response(
@@ -3107,7 +3135,7 @@ class ReticulumMeshChat:
                     "total_count": total_count,
                     "total_pages": total_pages,
                 },
-            }
+            },
         )
 
         @routes.get("/api/v1/lxmf/propagation-node/status")
@@ -3337,24 +3365,27 @@ class ReticulumMeshChat:
 
             # get latest announce from database for the provided destination hash
             latest_announce = self.database.announces.get_announce_by_hash(
-                destination_hash
+                destination_hash,
             )
 
             # get latest lxmf message from database sent to us from the provided destination hash
             local_hash = self.local_lxmf_destination.hexhash
             messages = self.message_handler.get_conversation_messages(
-                local_hash, destination_hash, limit=1
+                local_hash,
+                destination_hash,
+                limit=1,
             )
             # Filter for incoming messages only
             latest_lxmf_message = next(
-                (m for m in messages if m["source_hash"] == destination_hash), None
+                (m for m in messages if m["source_hash"] == destination_hash),
+                None,
             )
 
             # determine when latest announce was received
             latest_announce_at = None
             if latest_announce is not None:
                 latest_announce_at = datetime.fromisoformat(
-                    latest_announce["updated_at"]
+                    latest_announce["updated_at"],
                 )
                 if latest_announce_at.tzinfo is not None:
                     latest_announce_at = latest_announce_at.replace(tzinfo=None)
@@ -3704,7 +3735,7 @@ class ReticulumMeshChat:
             try:
                 libretranslate_url = request.query.get("libretranslate_url")
                 languages = self.translator_handler.get_supported_languages(
-                    libretranslate_url=libretranslate_url
+                    libretranslate_url=libretranslate_url,
                 )
                 return web.json_response(
                     {
@@ -3712,7 +3743,7 @@ class ReticulumMeshChat:
                         "has_argos": self.translator_handler.has_argos,
                         "has_argos_lib": self.translator_handler.has_argos_lib,
                         "has_argos_cli": self.translator_handler.has_argos_cli,
-                    }
+                    },
                )
            except Exception as e:
                return web.json_response(
@@ -3830,7 +3861,7 @@ class ReticulumMeshChat:
         announce = self.database.announces.get_announce_by_hash(destination_hash)
         if announce is not None:
             lxmf_stamp_cost = ReticulumMeshChat.parse_lxmf_stamp_cost(
-                announce["app_data"]
+                announce["app_data"],
             )
 
         # get outbound ticket expiry for this lxmf destination
@@ -4047,7 +4078,7 @@ class ReticulumMeshChat:
         # get lxmf message from database
         lxmf_message = None
         db_lxmf_message = self.database.messages.get_lxmf_message_by_hash(
-            message_hash
+            message_hash,
         )
         if db_lxmf_message is not None:
             lxmf_message = self.convert_db_lxmf_message_to_dict(db_lxmf_message)
@@ -4153,7 +4184,7 @@ class ReticulumMeshChat:
 
         # find message from database
         db_lxmf_message = self.database.messages.get_lxmf_message_by_hash(
-            message_hash
+            message_hash,
         )
         if db_lxmf_message is None:
             return web.json_response({"message": "Message not found"}, status=404)
@@ -4218,13 +4249,20 @@ class ReticulumMeshChat:
         # get query params
         search_query = request.query.get("q", None)
         filter_unread = ReticulumMeshChat.parse_bool_query_param(
-            request.query.get("unread", request.query.get("filter_unread", "false")),
+            request.query.get(
+                "unread", request.query.get("filter_unread", "false")
+            ),
         )
         filter_failed = ReticulumMeshChat.parse_bool_query_param(
-            request.query.get("failed", request.query.get("filter_failed", "false")),
+            request.query.get(
+                "failed", request.query.get("filter_failed", "false")
+            ),
         )
         filter_has_attachments = ReticulumMeshChat.parse_bool_query_param(
-            request.query.get("has_attachments", request.query.get("filter_has_attachments", "false")),
+            request.query.get(
+                "has_attachments",
+                request.query.get("filter_has_attachments", "false"),
+            ),
         )
 
         local_hash = self.local_lxmf_destination.hexhash
@@ -4250,14 +4288,15 @@ class ReticulumMeshChat:
                 latest_message_preview = db_message["content"]
                 latest_message_timestamp = db_message["timestamp"]
                 latest_message_has_attachments = self.message_fields_have_attachments(
-                    db_message["fields"]
+                    db_message["fields"],
                 )
 
                 # using timestamp (sent time) for updated_at as it is more reliable across restarts
                 # and represents the actual time the message was created by the sender.
                 # we convert it to ISO format for the frontend.
                 updated_at = datetime.fromtimestamp(
-                    latest_message_timestamp, UTC
+                    latest_message_timestamp,
+                    UTC,
                 ).isoformat()
 
             # check if conversation has attachments
@@ -4346,7 +4385,8 @@ class ReticulumMeshChat:
             for c in conversations:
                 message_timestamp = c["latest_message_created_at"]
                 if not self.database.messages.is_notification_viewed(
-                    c["destination_hash"], message_timestamp
+                    c["destination_hash"],
+                    message_timestamp,
                 ):
                     filtered_conversations.append(c)
             conversations = filtered_conversations
@@ -4586,7 +4626,7 @@ class ReticulumMeshChat:
                     {
                         "message": "Active map updated",
                         "metadata": self.map_manager.get_metadata(),
-                    }
+                    },
                 )
             return web.json_response({"error": "File not found"}, status=404)
 
@@ -4597,13 +4637,17 @@ class ReticulumMeshChat:
             telemetry_list = []
             for r in results:
                 unpacked = Telemeter.from_packed(r["data"])
-                telemetry_list.append({
-                    "destination_hash": r["destination_hash"],
-                    "timestamp": r["timestamp"],
-                    "telemetry": unpacked,
-                    "physical_link": json.loads(r["physical_link"]) if r["physical_link"] else None,
-                    "updated_at": r["updated_at"],
-                })
+                telemetry_list.append(
+                    {
+                        "destination_hash": r["destination_hash"],
+                        "timestamp": r["timestamp"],
+                        "telemetry": unpacked,
+                        "physical_link": json.loads(r["physical_link"])
+                        if r["physical_link"]
+                        else None,
+                        "updated_at": r["updated_at"],
+                    }
                )
            return web.json_response({"telemetry": telemetry_list})
 
        # get telemetry history for a destination
@@ -4612,18 +4656,24 @@ class ReticulumMeshChat:
             destination_hash = request.match_info.get("destination_hash")
             limit = int(request.query.get("limit", 100))
             offset = int(request.query.get("offset", 0))
 
-            results = self.database.telemetry.get_telemetry_history(destination_hash, limit, offset)
-
+            results = self.database.telemetry.get_telemetry_history(
+                destination_hash, limit, offset
+            )
             telemetry_list = []
             for r in results:
                 unpacked = Telemeter.from_packed(r["data"])
-                telemetry_list.append({
-                    "destination_hash": r["destination_hash"],
-                    "timestamp": r["timestamp"],
-                    "telemetry": unpacked,
-                    "physical_link": json.loads(r["physical_link"]) if r["physical_link"] else None,
-                    "updated_at": r["updated_at"],
-                })
+                telemetry_list.append(
+                    {
+                        "destination_hash": r["destination_hash"],
+                        "timestamp": r["timestamp"],
+                        "telemetry": unpacked,
+                        "physical_link": json.loads(r["physical_link"])
+                        if r["physical_link"]
+                        else None,
+                        "updated_at": r["updated_at"],
+                    }
                )
            return web.json_response({"telemetry": telemetry_list})
 
        # get latest telemetry for a destination
@@ -4633,15 +4683,19 @@ class ReticulumMeshChat:
             r = self.database.telemetry.get_latest_telemetry(destination_hash)
             if not r:
                 return web.json_response({"error": "No telemetry found"}, status=404)
 
 
             unpacked = Telemeter.from_packed(r["data"])
-            return web.json_response({
-                "destination_hash": r["destination_hash"],
-                "timestamp": r["timestamp"],
-                "telemetry": unpacked,
-                "physical_link": json.loads(r["physical_link"]) if r["physical_link"] else None,
-                "updated_at": r["updated_at"],
-            })
+            return web.json_response(
+                {
+                    "destination_hash": r["destination_hash"],
+                    "timestamp": r["timestamp"],
+                    "telemetry": unpacked,
+                    "physical_link": json.loads(r["physical_link"])
+                    if r["physical_link"]
+                    else None,
+                    "updated_at": r["updated_at"],
+                }
            )
 
        # upload offline map
        @routes.post("/api/v1/map/offline")
@@ -4655,7 +4709,8 @@ class ReticulumMeshChat:
             filename = field.filename
             if not filename.endswith(".mbtiles"):
                 return web.json_response(
-                    {"error": "Invalid file format, must be .mbtiles"}, status=400
+                    {"error": "Invalid file format, must be .mbtiles"},
+                    status=400,
                 )
 
             # save to mbtiles dir
@@ -4691,7 +4746,7 @@ class ReticulumMeshChat:
                     self.config.map_offline_enabled.set(False)
                     return web.json_response(
                         {
-                            "error": "Invalid MBTiles file or unsupported format (vector maps not supported)"
+                            "error": "Invalid MBTiles file or unsupported format (vector maps not supported)",
                         },
                         status=400,
                     )
@@ -4700,7 +4755,7 @@ class ReticulumMeshChat:
                 {
                     "message": "Map uploaded successfully",
                     "metadata": metadata,
-                }
+                },
            )
        except Exception as e:
            RNS.log(f"Error uploading map: {e}", RNS.LOG_ERROR)
@@ -4750,7 +4805,8 @@ class ReticulumMeshChat:
                 },
             )
             return web.json_response(
-                {"error": "File not ready or not found"}, status=404
+                {"error": "File not ready or not found"},
+                status=404,
             )
 
         # MIME type fix middleware - ensures JavaScript files have correct Content-Type
@@ -4825,7 +4881,7 @@ class ReticulumMeshChat:
 
         # add other middlewares
         app.middlewares.extend(
-            [auth_middleware, mime_type_middleware, security_middleware]
+            [auth_middleware, mime_type_middleware, security_middleware],
         )
 
         app.add_routes(routes)
@@ -5000,12 +5056,12 @@ class ReticulumMeshChat:
 
         if "page_archiver_max_versions" in data:
             self.config.page_archiver_max_versions.set(
-                int(data["page_archiver_max_versions"])
+                int(data["page_archiver_max_versions"]),
             )
 
         if "archives_max_storage_gb" in data:
             self.config.archives_max_storage_gb.set(
-                int(data["archives_max_storage_gb"])
+                int(data["archives_max_storage_gb"]),
             )
 
         # update crawler settings
@@ -5017,7 +5073,7 @@ class ReticulumMeshChat:
 
         if "crawler_retry_delay_seconds" in data:
             self.config.crawler_retry_delay_seconds.set(
-                int(data["crawler_retry_delay_seconds"])
+                int(data["crawler_retry_delay_seconds"]),
             )
 
         if "crawler_max_concurrent" in data:
@@ -5066,12 +5122,12 @@ class ReticulumMeshChat:
 
         if "voicemail_auto_answer_delay_seconds" in data:
             self.config.voicemail_auto_answer_delay_seconds.set(
-                int(data["voicemail_auto_answer_delay_seconds"])
+                int(data["voicemail_auto_answer_delay_seconds"]),
             )
 
         if "voicemail_max_recording_seconds" in data:
             self.config.voicemail_max_recording_seconds.set(
-                int(data["voicemail_max_recording_seconds"])
+                int(data["voicemail_max_recording_seconds"]),
             )
 
         # send config to websocket clients
@@ -5132,7 +5188,8 @@ class ReticulumMeshChat:
     # returns archived page versions for a given destination and path
     def get_archived_page_versions(self, destination_hash: str, page_path: str):
         return self.database.misc.get_archived_page_versions(
-            destination_hash, page_path
+            destination_hash,
+            page_path,
         )
 
     # flushes all archived pages
@@ -5560,6 +5617,7 @@ class ReticulumMeshChat:
                 forward_to_hash=rule_data["forward_to_hash"],
                 source_filter_hash=rule_data.get("source_filter_hash"),
                 is_active=rule_data.get("is_active", True),
+                name=rule_data.get("name"),
             )
             # notify updated
             AsyncUtils.run_async(
@@ -5675,7 +5733,9 @@ class ReticulumMeshChat:
         if identity is not None:
             # get lxmf.delivery destination hash
             lxmf_destination_hash = RNS.Destination.hash(
-                identity, "lxmf", "delivery"
+                identity,
+                "lxmf",
+                "delivery",
             ).hex()
 
             # use custom name if available
@@ -5948,7 +6008,7 @@ class ReticulumMeshChat:
         # find lxmf user icon from database
         lxmf_user_icon = None
         db_lxmf_user_icon = self.database.misc.get_user_icon(
-            announce["destination_hash"]
+            announce["destination_hash"],
         )
         if db_lxmf_user_icon:
             lxmf_user_icon = {
@@ -6218,7 +6278,7 @@ class ReticulumMeshChat:
             unpacked = Telemeter.from_packed(telemetry_data)
             if unpacked and "time" in unpacked:
                 timestamp = unpacked["time"]["utc"]
-
+
         # physical link info
         physical_link = {
             "rssi": self.reticulum.get_packet_rssi(lxmf_message.hash),
@@ -6270,7 +6330,7 @@ class ReticulumMeshChat:
 
         # find message from database
         db_lxmf_message = self.database.messages.get_lxmf_message_by_hash(
-            lxmf_message.hash.hex()
+            lxmf_message.hash.hex(),
         )
         if not db_lxmf_message:
             return
@@ -6322,7 +6382,7 @@ class ReticulumMeshChat:
 
         # check if this message is for an alias identity (REPLY PATH)
         mapping = self.database.messages.get_forwarding_mapping(
-            alias_hash=destination_hash
+            alias_hash=destination_hash,
         )
 
         if mapping:
@@ -6347,7 +6407,8 @@ class ReticulumMeshChat:
         # check if this message matches a forwarding rule (FORWARD PATH)
         # we check for rules that apply to the destination of this message
         rules = self.database.misc.get_forwarding_rules(
-            identity_hash=destination_hash, active_only=True
+            identity_hash=destination_hash,
+            active_only=True,
         )
 
         for rule in rules:
@@ -6532,7 +6593,8 @@ class ReticulumMeshChat:
         if sender_identity_hash is not None:
             if (
                 self.forwarding_manager
-                and sender_identity_hash in self.forwarding_manager.forwarding_destinations
+                and sender_identity_hash
+                in self.forwarding_manager.forwarding_destinations
             ):
                 source_destination = self.forwarding_manager.forwarding_destinations[
                     sender_identity_hash
@@ -6658,7 +6720,9 @@ class ReticulumMeshChat:
         lon = self.database.config.get("map_default_lon")
 
         if lat is None or lon is None:
-            print(f"Cannot respond to telemetry request from {to_addr_hash}: No location set")
+            print(
+                f"Cannot respond to telemetry request from {to_addr_hash}: No location set"
+            )
             return
 
         try:
@@ -6683,7 +6747,7 @@ class ReticulumMeshChat:
                     telemetry_data=telemetry_data,
                     delivery_method="opportunistic",
                     no_display=True,
-                )
+                ),
             )
         except Exception as e:
             print(f"Failed to respond to telemetry request: {e}")
@@ -6891,7 +6955,7 @@ class ReticulumMeshChat:
     async def resend_failed_messages_for_destination(self, destination_hash: str):
         # get messages that failed to send to this destination
         failed_messages = self.database.messages.get_failed_messages_for_destination(
-            destination_hash
+            destination_hash,
         )
 
         # resend failed messages
@@ -6951,7 +7015,7 @@ class ReticulumMeshChat:
 
                 # remove original failed message from database
                 self.database.messages.delete_lxmf_message_by_hash(
-                    failed_message["hash"]
+                    failed_message["hash"],
                 )
 
                 # tell all websocket clients that old failed message was deleted so it can remove from ui
@@ -7031,7 +7095,7 @@ class ReticulumMeshChat:
     # gets the custom display name a user has set for the provided destination hash
     def get_custom_destination_display_name(self, destination_hash: str):
         db_destination_display_name = self.database.announces.get_custom_display_name(
-            destination_hash
+            destination_hash,
         )
         if db_destination_display_name is not None:
             return db_destination_display_name.display_name
@@ -7042,12 +7106,15 @@ class ReticulumMeshChat:
     # currently, this will use the app data from the most recent announce
     # TODO: we should fetch this from our contacts database, when it gets implemented, and if not found, fallback to app data
     def get_lxmf_conversation_name(
-        self, destination_hash, default_name: str | None = "Anonymous Peer"
+        self,
+        destination_hash,
+        default_name: str | None = "Anonymous Peer",
     ):
         # get lxmf.delivery announce from database for the provided destination hash
         results = self.database.announces.get_announces(aspect="lxmf.delivery")
         lxmf_announce = next(
-            (a for a in results if a["destination_hash"] == destination_hash), None
+            (a for a in results if a["destination_hash"] == destination_hash),
+            None,
        )
 
        # if app data is available in database, it should be base64 encoded text that was announced
@@ -8,7 +8,12 @@ class ArchiverManager:
         self.db = db
 
     def archive_page(
-        self, destination_hash, page_path, content, max_versions=5, max_storage_gb=1
+        self,
+        destination_hash,
+        page_path,
+        content,
+        max_versions=5,
+        max_storage_gb=1,
     ):
         content_hash = hashlib.sha256(content.encode("utf-8")).hexdigest()
 
@@ -30,23 +35,25 @@ class ArchiverManager:
             to_delete = versions[max_versions:]
             for version in to_delete:
                 self.db.provider.execute(
-                    "DELETE FROM archived_pages WHERE id = ?", (version["id"],)
+                    "DELETE FROM archived_pages WHERE id = ?",
+                    (version["id"],),
                 )
 
         # Enforce total storage limit (approximate)
         total_size_row = self.db.provider.fetchone(
-            "SELECT SUM(LENGTH(content)) as total_size FROM archived_pages"
+            "SELECT SUM(LENGTH(content)) as total_size FROM archived_pages",
        )
        total_size = total_size_row["total_size"] or 0
        max_bytes = max_storage_gb * 1024 * 1024 * 1024
 
        while total_size > max_bytes:
            oldest = self.db.provider.fetchone(
-                "SELECT id, LENGTH(content) as size FROM archived_pages ORDER BY created_at ASC LIMIT 1"
+                "SELECT id, LENGTH(content) as size FROM archived_pages ORDER BY created_at ASC LIMIT 1",
            )
            if oldest:
                self.db.provider.execute(
-                    "DELETE FROM archived_pages WHERE id = ?", (oldest["id"],)
+                    "DELETE FROM archived_pages WHERE id = ?",
+                    (oldest["id"],),
                )
                total_size -= oldest["size"]
            else:
@@ -6,10 +6,14 @@ class ConfigManager:
         self.database_version = self.IntConfig(self, "database_version", None)
         self.display_name = self.StringConfig(self, "display_name", "Anonymous Peer")
         self.auto_announce_enabled = self.BoolConfig(
-            self, "auto_announce_enabled", False
+            self,
+            "auto_announce_enabled",
+            False,
         )
         self.auto_announce_interval_seconds = self.IntConfig(
-            self, "auto_announce_interval_seconds", 0
+            self,
+            "auto_announce_interval_seconds",
+            0,
         )
         self.last_announced_at = self.IntConfig(self, "last_announced_at", None)
         self.theme = self.StringConfig(self, "theme", "light")
@@ -83,18 +87,26 @@ class ConfigManager:
             16,
         )  # for propagation node messages
         self.page_archiver_enabled = self.BoolConfig(
-            self, "page_archiver_enabled", True
+            self,
+            "page_archiver_enabled",
+            True,
        )
        self.page_archiver_max_versions = self.IntConfig(
-            self, "page_archiver_max_versions", 5
+            self,
+            "page_archiver_max_versions",
+            5,
        )
        self.archives_max_storage_gb = self.IntConfig(
-            self, "archives_max_storage_gb", 1
+            self,
+            "archives_max_storage_gb",
+            1,
        )
        self.crawler_enabled = self.BoolConfig(self, "crawler_enabled", False)
        self.crawler_max_retries = self.IntConfig(self, "crawler_max_retries", 3)
        self.crawler_retry_delay_seconds = self.IntConfig(
-            self, "crawler_retry_delay_seconds", 3600
+            self,
+            "crawler_retry_delay_seconds",
+            3600,
        )
        self.crawler_max_concurrent = self.IntConfig(self, "crawler_max_concurrent", 1)
        self.auth_enabled = self.BoolConfig(self, "auth_enabled", False)
@@ -124,7 +136,9 @@ class ConfigManager:
         self.map_offline_path = self.StringConfig(self, "map_offline_path", None)
         self.map_mbtiles_dir = self.StringConfig(self, "map_mbtiles_dir", None)
         self.map_tile_cache_enabled = self.BoolConfig(
-            self, "map_tile_cache_enabled", True
+            self,
+            "map_tile_cache_enabled",
+            True,
         )
         self.map_default_lat = self.StringConfig(self, "map_default_lat", "0.0")
         self.map_default_lon = self.StringConfig(self, "map_default_lon", "0.0")
@@ -5,8 +5,8 @@ from .messages import MessageDAO
 from .misc import MiscDAO
 from .provider import DatabaseProvider
 from .schema import DatabaseSchema
-from .telephone import TelephoneDAO
 from .telemetry import TelemetryDAO
+from .telephone import TelephoneDAO
 from .voicemails import VoicemailDAO
 
 
@@ -27,7 +27,9 @@ class Database:
 
     def migrate_from_legacy(self, reticulum_config_dir, identity_hash_hex):
         migrator = LegacyMigrator(
-            self.provider, reticulum_config_dir, identity_hash_hex
+            self.provider,
+            reticulum_config_dir,
+            identity_hash_hex,
         )
         if migrator.should_migrate():
             return migrator.migrate()
@@ -26,32 +26,40 @@ class AnnounceDAO:
         columns = ", ".join(fields)
         placeholders = ", ".join(["?"] * len(fields))
         update_set = ", ".join(
-            [f"{f} = EXCLUDED.{f}" for f in fields if f != "destination_hash"]
+            [f"{f} = EXCLUDED.{f}" for f in fields if f != "destination_hash"],
         )
 
         query = (
-            f"INSERT INTO announces ({columns}, updated_at) VALUES ({placeholders}, ?) "
+            f"INSERT INTO announces ({columns}, created_at, updated_at) VALUES ({placeholders}, ?, ?) "
             f"ON CONFLICT(destination_hash) DO UPDATE SET {update_set}, updated_at = EXCLUDED.updated_at"
-        )  # noqa: S608
+        )
 
         params = [data.get(f) for f in fields]
-        params.append(datetime.now(UTC))
+        now = datetime.now(UTC)
+        params.append(now)
+        params.append(now)
         self.provider.execute(query, params)
 
     def get_announces(self, aspect=None):
         if aspect:
             return self.provider.fetchall(
-                "SELECT * FROM announces WHERE aspect = ?", (aspect,)
+                "SELECT * FROM announces WHERE aspect = ?",
+                (aspect,),
             )
         return self.provider.fetchall("SELECT * FROM announces")
 
     def get_announce_by_hash(self, destination_hash):
         return self.provider.fetchone(
-            "SELECT * FROM announces WHERE destination_hash = ?", (destination_hash,)
+            "SELECT * FROM announces WHERE destination_hash = ?",
+            (destination_hash,),
        )
 
    def get_filtered_announces(
-        self, aspect=None, search_term=None, limit=None, offset=0
+        self,
+        aspect=None,
+        search_term=None,
+        limit=None,
+        offset=0,
    ):
        query = "SELECT * FROM announces WHERE 1=1"
        params = []
@@ -76,11 +84,11 @@ class AnnounceDAO:
         now = datetime.now(UTC)
         self.provider.execute(
             """
-            INSERT INTO custom_destination_display_names (destination_hash, display_name, updated_at)
-            VALUES (?, ?, ?)
+            INSERT INTO custom_destination_display_names (destination_hash, display_name, created_at, updated_at)
+            VALUES (?, ?, ?, ?)
             ON CONFLICT(destination_hash) DO UPDATE SET display_name = EXCLUDED.display_name, updated_at = EXCLUDED.updated_at
             """,
-            (destination_hash, display_name, now),
+            (destination_hash, display_name, now, now),
         )
 
     def get_custom_display_name(self, destination_hash):
@@ -101,17 +109,18 @@ class AnnounceDAO:
         now = datetime.now(UTC)
         self.provider.execute(
             """
-            INSERT INTO favourite_destinations (destination_hash, display_name, aspect, updated_at)
-            VALUES (?, ?, ?, ?)
+            INSERT INTO favourite_destinations (destination_hash, display_name, aspect, created_at, updated_at)
+            VALUES (?, ?, ?, ?, ?)
             ON CONFLICT(destination_hash) DO UPDATE SET display_name = EXCLUDED.display_name, aspect = EXCLUDED.aspect, updated_at = EXCLUDED.updated_at
             """,
-            (destination_hash, display_name, aspect, now),
+            (destination_hash, display_name, aspect, now, now),
         )
 
     def get_favourites(self, aspect=None):
         if aspect:
             return self.provider.fetchall(
-                "SELECT * FROM favourite_destinations WHERE aspect = ?", (aspect,)
+                "SELECT * FROM favourite_destinations WHERE aspect = ?",
+                (aspect,),
             )
         return self.provider.fetchall("SELECT * FROM favourite_destinations")
@@ -17,9 +17,16 @@ class ConfigDAO:
         if value is None:
             self.provider.execute("DELETE FROM config WHERE key = ?", (key,))
         else:
+            now = datetime.now(UTC)
             self.provider.execute(
-                "INSERT OR REPLACE INTO config (key, value, updated_at) VALUES (?, ?, ?)",
-                (key, str(value), datetime.now(UTC)),
+                """
+                INSERT INTO config (key, value, created_at, updated_at)
+                VALUES (?, ?, ?, ?)
+                ON CONFLICT(key) DO UPDATE SET
+                    value = EXCLUDED.value,
+                    updated_at = EXCLUDED.updated_at
+                """,
+                (key, str(value), now, now),
             )
 
     def delete(self, key):
@@ -21,7 +21,10 @@ class LegacyMigrator:
         # Check each directory
         for config_dir in possible_dirs:
             legacy_path = os.path.join(
-                config_dir, "identities", self.identity_hash_hex, "database.db"
+                config_dir,
+                "identities",
+                self.identity_hash_hex,
+                "database.db",
             )
             if os.path.exists(legacy_path):
                 # Ensure it's not the same as our current DB path
@@ -103,13 +106,13 @@ class LegacyMigrator:
                     legacy_columns = [
                         row["name"]
                         for row in self.provider.fetchall(
-                            f"PRAGMA {alias}.table_info({table})"
+                            f"PRAGMA {alias}.table_info({table})",
                        )
                    ]
                    current_columns = [
                        row["name"]
                        for row in self.provider.fetchall(
-                            f"PRAGMA table_info({table})"
+                            f"PRAGMA table_info({table})",
                        )
                    ]
 
@@ -125,11 +128,11 @@ class LegacyMigrator:
                     migrate_query = f"INSERT OR IGNORE INTO {table} ({cols_str}) SELECT {cols_str} FROM {alias}.{table}"  # noqa: S608
                     self.provider.execute(migrate_query)
                     print(
-                        f" - Migrated table: {table} ({len(common_columns)} columns)"
+                        f" - Migrated table: {table} ({len(common_columns)} columns)",
                    )
                else:
                    print(
-                        f" - Skipping table {table}: No common columns found"
+                        f" - Skipping table {table}: No common columns found",
                    )
            except Exception as e:
                print(f" - Failed to migrate table {table}: {e}")
@@ -39,9 +39,9 @@ class MessageDAO:
         update_set = ", ".join([f"{f} = EXCLUDED.{f}" for f in fields if f != "hash"])
 
         query = (
-            f"INSERT INTO lxmf_messages ({columns}, updated_at) VALUES ({placeholders}, ?) "
+            f"INSERT INTO lxmf_messages ({columns}, created_at, updated_at) VALUES ({placeholders}, ?, ?) "
             f"ON CONFLICT(hash) DO UPDATE SET {update_set}, updated_at = EXCLUDED.updated_at"
-        )  # noqa: S608
+        )
 
         params = []
         for f in fields:
@@ -49,18 +49,23 @@ class MessageDAO:
             if f == "fields" and isinstance(val, dict):
                 val = json.dumps(val)
             params.append(val)
-        params.append(datetime.now(UTC).isoformat())
+
+        now = datetime.now(UTC).isoformat()
+        params.append(now)
+        params.append(now)
 
         self.provider.execute(query, params)
 
     def get_lxmf_message_by_hash(self, message_hash):
         return self.provider.fetchone(
-            "SELECT * FROM lxmf_messages WHERE hash = ?", (message_hash,)
+            "SELECT * FROM lxmf_messages WHERE hash = ?",
+            (message_hash,),
         )
 
     def delete_lxmf_message_by_hash(self, message_hash):
         self.provider.execute(
-            "DELETE FROM lxmf_messages WHERE hash = ?", (message_hash,)
+            "DELETE FROM lxmf_messages WHERE hash = ?",
+            (message_hash,),
        )
 
    def get_conversation_messages(self, destination_hash, limit=100, offset=0):
@@ -88,8 +93,14 @@ class MessageDAO:
     def mark_conversation_as_read(self, destination_hash):
         now = datetime.now(UTC).isoformat()
         self.provider.execute(
-            "INSERT OR REPLACE INTO lxmf_conversation_read_state (destination_hash, last_read_at, updated_at) VALUES (?, ?, ?)",
-            (destination_hash, now, now),
+            """
+            INSERT INTO lxmf_conversation_read_state (destination_hash, last_read_at, created_at, updated_at)
+            VALUES (?, ?, ?, ?)
+            ON CONFLICT(destination_hash) DO UPDATE SET
+                last_read_at = EXCLUDED.last_read_at,
+                updated_at = EXCLUDED.updated_at
+            """,
+            (destination_hash, now, now, now),
         )
 
     def is_conversation_unread(self, destination_hash):
@@ -142,7 +153,10 @@ class MessageDAO:
 
     # Forwarding Mappings
     def get_forwarding_mapping(
-        self, alias_hash=None, original_sender_hash=None, final_recipient_hash=None
+        self,
+        alias_hash=None,
+        original_sender_hash=None,
+        final_recipient_hash=None,
     ):
         if alias_hash:
             return self.provider.fetchone(
@@ -181,16 +195,28 @@ class MessageDAO:
     def mark_notification_as_viewed(self, destination_hash):
         now = datetime.now(UTC).isoformat()
         self.provider.execute(
-            "INSERT OR REPLACE INTO notification_viewed_state (destination_hash, last_viewed_at, updated_at) VALUES (?, ?, ?)",
-            (destination_hash, now, now),
+            """
+            INSERT INTO notification_viewed_state (destination_hash, last_viewed_at, created_at, updated_at)
+            VALUES (?, ?, ?, ?)
+            ON CONFLICT(destination_hash) DO UPDATE SET
+                last_viewed_at = EXCLUDED.last_viewed_at,
+                updated_at = EXCLUDED.updated_at
+            """,
+            (destination_hash, now, now, now),
        )
 
    def mark_all_notifications_as_viewed(self, destination_hashes):
        now = datetime.now(UTC).isoformat()
        for destination_hash in destination_hashes:
            self.provider.execute(
-                "INSERT OR REPLACE INTO notification_viewed_state (destination_hash, last_viewed_at, updated_at) VALUES (?, ?, ?)",
-                (destination_hash, now, now),
+                """
+                INSERT INTO notification_viewed_state (destination_hash, last_viewed_at, created_at, updated_at)
+                VALUES (?, ?, ?, ?)
+                ON CONFLICT(destination_hash) DO UPDATE SET
+                    last_viewed_at = EXCLUDED.last_viewed_at,
+                    updated_at = EXCLUDED.updated_at
+                """,
+                (destination_hash, now, now, now),
            )
 
    def is_notification_viewed(self, destination_hash, message_timestamp):
@@ -9,9 +9,10 @@ class MiscDAO:
 
     # Blocked Destinations
     def add_blocked_destination(self, destination_hash):
+        now = datetime.now(UTC)
         self.provider.execute(
-            "INSERT OR IGNORE INTO blocked_destinations (destination_hash, updated_at) VALUES (?, ?)",
-            (destination_hash, datetime.now(UTC)),
+            "INSERT OR IGNORE INTO blocked_destinations (destination_hash, created_at, updated_at) VALUES (?, ?, ?)",
+            (destination_hash, now, now),
         )
 
     def is_destination_blocked(self, destination_hash):
@@ -34,9 +35,10 @@ class MiscDAO:
 
     # Spam Keywords
     def add_spam_keyword(self, keyword):
+        now = datetime.now(UTC)
         self.provider.execute(
-            "INSERT OR IGNORE INTO spam_keywords (keyword, updated_at) VALUES (?, ?)",
-            (keyword, datetime.now(UTC)),
+            "INSERT OR IGNORE INTO spam_keywords (keyword, created_at, updated_at) VALUES (?, ?, ?)",
+            (keyword, now, now),
         )
 
     def get_spam_keywords(self):
@@ -55,20 +57,31 @@ class MiscDAO:
 
     # User Icons
     def update_lxmf_user_icon(
-        self, destination_hash, icon_name, foreground_colour, background_colour
+        self,
+        destination_hash,
+        icon_name,
+        foreground_colour,
+        background_colour,
     ):
         now = datetime.now(UTC)
         self.provider.execute(
             """
-            INSERT INTO lxmf_user_icons (destination_hash, icon_name, foreground_colour, background_colour, updated_at)
-            VALUES (?, ?, ?, ?, ?)
+            INSERT INTO lxmf_user_icons (destination_hash, icon_name, foreground_colour, background_colour, created_at, updated_at)
+            VALUES (?, ?, ?, ?, ?, ?)
             ON CONFLICT(destination_hash) DO UPDATE SET
                 icon_name = EXCLUDED.icon_name,
                 foreground_colour = EXCLUDED.foreground_colour,
                 background_colour = EXCLUDED.background_colour,
                 updated_at = EXCLUDED.updated_at
             """,
-            (destination_hash, icon_name, foreground_colour, background_colour, now),
+            (
+                destination_hash,
+                icon_name,
+                foreground_colour,
+                background_colour,
+                now,
+                now,
+            ),
        )
 
    def get_user_icon(self, destination_hash):
@@ -89,23 +102,31 @@ class MiscDAO:
         return self.provider.fetchall(query, params)
 
     def create_forwarding_rule(
-        self, identity_hash, forward_to_hash, source_filter_hash, is_active=True
+        self,
+        identity_hash,
+        forward_to_hash,
+        source_filter_hash,
+        is_active=True,
+        name=None,
     ):
         now = datetime.now(UTC)
         self.provider.execute(
-            "INSERT INTO lxmf_forwarding_rules (identity_hash, forward_to_hash, source_filter_hash, is_active, updated_at) VALUES (?, ?, ?, ?, ?)",
+            "INSERT INTO lxmf_forwarding_rules (identity_hash, forward_to_hash, source_filter_hash, is_active, name, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?, ?)",
             (
                 identity_hash,
                 forward_to_hash,
                 source_filter_hash,
                 1 if is_active else 0,
+                name,
+                now,
                 now,
             ),
        )
 
    def delete_forwarding_rule(self, rule_id):
        self.provider.execute(
-            "DELETE FROM lxmf_forwarding_rules WHERE id = ?", (rule_id,)
+            "DELETE FROM lxmf_forwarding_rules WHERE id = ?",
+            (rule_id,),
        )
 
    def toggle_forwarding_rule(self, rule_id):
@@ -116,9 +137,10 @@ class MiscDAO:
 
     # Archived Pages
     def archive_page(self, destination_hash, page_path, content, page_hash):
+        now = datetime.now(UTC)
         self.provider.execute(
-            "INSERT INTO archived_pages (destination_hash, page_path, content, hash) VALUES (?, ?, ?, ?)",
-            (destination_hash, page_path, content, page_hash),
+            "INSERT INTO archived_pages (destination_hash, page_path, content, hash, created_at) VALUES (?, ?, ?, ?, ?)",
+            (destination_hash, page_path, content, page_hash, now),
         )
 
     def get_archived_page_versions(self, destination_hash, page_path):
@@ -154,22 +176,27 @@ class MiscDAO:
 
     # Crawl Tasks
     def upsert_crawl_task(
-        self, destination_hash, page_path, status="pending", retry_count=0
+        self,
+        destination_hash,
+        page_path,
+        status="pending",
+        retry_count=0,
     ):
+        now = datetime.now(UTC)
         self.provider.execute(
             """
-            INSERT INTO crawl_tasks (destination_hash, page_path, status, retry_count)
-            VALUES (?, ?, ?, ?)
+            INSERT INTO crawl_tasks (destination_hash, page_path, status, retry_count, created_at)
+            VALUES (?, ?, ?, ?, ?)
             ON CONFLICT(destination_hash, page_path) DO UPDATE SET
                 status = EXCLUDED.status,
                 retry_count = EXCLUDED.retry_count
             """,
-            (destination_hash, page_path, status, retry_count),
+            (destination_hash, page_path, status, retry_count, now),
        )
 
    def get_pending_crawl_tasks(self):
        return self.provider.fetchall(
-            "SELECT * FROM crawl_tasks WHERE status = 'pending'"
+            "SELECT * FROM crawl_tasks WHERE status = 'pending'",
        )
 
    def update_crawl_task(self, task_id, **kwargs):
@@ -199,5 +226,6 @@ class MiscDAO:
 
     def get_archived_page_by_id(self, archive_id):
         return self.provider.fetchone(
-            "SELECT * FROM archived_pages WHERE id = ?", (archive_id,)
+            "SELECT * FROM archived_pages WHERE id = ?",
+            (archive_id,),
         )
@@ -24,7 +24,8 @@ class DatabaseProvider:
     def connection(self):
         if not hasattr(self._local, "connection"):
             self._local.connection = sqlite3.connect(
-                self.db_path, check_same_thread=False
+                self.db_path,
+                check_same_thread=False,
             )
             self._local.connection.row_factory = sqlite3.Row
             # Enable WAL mode for better concurrency
@@ -2,7 +2,7 @@ from .provider import DatabaseProvider
|
||||
|
||||
|
||||
class DatabaseSchema:
|
||||
LATEST_VERSION = 15
|
||||
LATEST_VERSION = 16
|
||||
|
||||
def __init__(self, provider: DatabaseProvider):
|
||||
self.provider = provider
|
||||
@@ -17,7 +17,8 @@ class DatabaseSchema:
|
||||
|
||||
def _get_current_version(self):
|
||||
row = self.provider.fetchone(
|
||||
"SELECT value FROM config WHERE key = ?", ("database_version",)
|
||||
"SELECT value FROM config WHERE key = ?",
|
||||
("database_version",),
|
||||
)
|
||||
if row:
|
||||
return int(row["value"])
|
||||
@@ -160,6 +161,7 @@ class DatabaseSchema:
|
||||
"lxmf_forwarding_rules": """
|
||||
CREATE TABLE IF NOT EXISTS lxmf_forwarding_rules (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
name TEXT,
|
||||
identity_hash TEXT,
|
||||
forward_to_hash TEXT,
|
||||
source_filter_hash TEXT,
|
||||
@@ -232,42 +234,42 @@ class DatabaseSchema:
|
||||
# Create indexes that were present
|
||||
if table_name == "announces":
|
||||
self.provider.execute(
|
||||
"CREATE INDEX IF NOT EXISTS idx_announces_aspect ON announces(aspect)"
|
||||
"CREATE INDEX IF NOT EXISTS idx_announces_aspect ON announces(aspect)",
|
||||
)
|
||||
self.provider.execute(
|
||||
"CREATE INDEX IF NOT EXISTS idx_announces_identity_hash ON announces(identity_hash)"
|
||||
"CREATE INDEX IF NOT EXISTS idx_announces_identity_hash ON announces(identity_hash)",
|
||||
)
|
||||
elif table_name == "lxmf_messages":
|
||||
self.provider.execute(
|
||||
"CREATE INDEX IF NOT EXISTS idx_lxmf_messages_source_hash ON lxmf_messages(source_hash)"
|
||||
"CREATE INDEX IF NOT EXISTS idx_lxmf_messages_source_hash ON lxmf_messages(source_hash)",
|
||||
)
|
||||
self.provider.execute(
|
||||
"CREATE INDEX IF NOT EXISTS idx_lxmf_messages_destination_hash ON lxmf_messages(destination_hash)"
|
||||
"CREATE INDEX IF NOT EXISTS idx_lxmf_messages_destination_hash ON lxmf_messages(destination_hash)",
|
||||
)
|
||||
elif table_name == "blocked_destinations":
|
||||
self.provider.execute(
|
||||
"CREATE INDEX IF NOT EXISTS idx_blocked_destinations_hash ON blocked_destinations(destination_hash)"
|
||||
"CREATE INDEX IF NOT EXISTS idx_blocked_destinations_hash ON blocked_destinations(destination_hash)",
|
||||
)
|
||||
elif table_name == "spam_keywords":
|
||||
self.provider.execute(
|
||||
"CREATE INDEX IF NOT EXISTS idx_spam_keywords_keyword ON spam_keywords(keyword)"
|
||||
"CREATE INDEX IF NOT EXISTS idx_spam_keywords_keyword ON spam_keywords(keyword)",
|
||||
)
|
||||
elif table_name == "notification_viewed_state":
|
||||
self.provider.execute(
|
||||
"CREATE INDEX IF NOT EXISTS idx_notification_viewed_state_destination_hash ON notification_viewed_state(destination_hash)"
|
||||
"CREATE INDEX IF NOT EXISTS idx_notification_viewed_state_destination_hash ON notification_viewed_state(destination_hash)",
|
||||
)
|
||||
self.provider.execute(
|
||||
"CREATE UNIQUE INDEX IF NOT EXISTS idx_notification_viewed_state_dest_hash_unique ON notification_viewed_state(destination_hash)"
|
||||
"CREATE UNIQUE INDEX IF NOT EXISTS idx_notification_viewed_state_dest_hash_unique ON notification_viewed_state(destination_hash)",
|
||||
)
|
||||
elif table_name == "lxmf_telemetry":
|
||||
self.provider.execute(
|
||||
"CREATE INDEX IF NOT EXISTS idx_lxmf_telemetry_destination_hash ON lxmf_telemetry(destination_hash)"
|
||||
"CREATE INDEX IF NOT EXISTS idx_lxmf_telemetry_destination_hash ON lxmf_telemetry(destination_hash)",
|
||||
)
|
||||
self.provider.execute(
|
||||
"CREATE INDEX IF NOT EXISTS idx_lxmf_telemetry_timestamp ON lxmf_telemetry(timestamp)"
|
||||
"CREATE INDEX IF NOT EXISTS idx_lxmf_telemetry_timestamp ON lxmf_telemetry(timestamp)",
|
||||
)
|
||||
self.provider.execute(
|
||||
"CREATE UNIQUE INDEX IF NOT EXISTS idx_lxmf_telemetry_dest_ts_unique ON lxmf_telemetry(destination_hash, timestamp)"
|
||||
"CREATE UNIQUE INDEX IF NOT EXISTS idx_lxmf_telemetry_dest_ts_unique ON lxmf_telemetry(destination_hash, timestamp)",
|
||||
)
|
||||
|
||||
def migrate(self, current_version):
|
||||
@@ -283,13 +285,13 @@ class DatabaseSchema:
                )
            """)
            self.provider.execute(
                "CREATE INDEX IF NOT EXISTS idx_archived_pages_destination_hash ON archived_pages(destination_hash)"
                "CREATE INDEX IF NOT EXISTS idx_archived_pages_destination_hash ON archived_pages(destination_hash)",
            )
            self.provider.execute(
                "CREATE INDEX IF NOT EXISTS idx_archived_pages_page_path ON archived_pages(page_path)"
                "CREATE INDEX IF NOT EXISTS idx_archived_pages_page_path ON archived_pages(page_path)",
            )
            self.provider.execute(
                "CREATE INDEX IF NOT EXISTS idx_archived_pages_hash ON archived_pages(hash)"
                "CREATE INDEX IF NOT EXISTS idx_archived_pages_hash ON archived_pages(hash)",
            )

        if current_version < 8:
@@ -306,16 +308,17 @@ class DatabaseSchema:
                )
            """)
            self.provider.execute(
                "CREATE INDEX IF NOT EXISTS idx_crawl_tasks_destination_hash ON crawl_tasks(destination_hash)"
                "CREATE INDEX IF NOT EXISTS idx_crawl_tasks_destination_hash ON crawl_tasks(destination_hash)",
            )
            self.provider.execute(
                "CREATE INDEX IF NOT EXISTS idx_crawl_tasks_page_path ON crawl_tasks(page_path)"
                "CREATE INDEX IF NOT EXISTS idx_crawl_tasks_page_path ON crawl_tasks(page_path)",
            )

        if current_version < 9:
            self.provider.execute("""
                CREATE TABLE IF NOT EXISTS lxmf_forwarding_rules (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    name TEXT,
                    identity_hash TEXT,
                    forward_to_hash TEXT,
                    source_filter_hash TEXT,
@@ -325,7 +328,7 @@ class DatabaseSchema:
                )
            """)
            self.provider.execute(
                "CREATE INDEX IF NOT EXISTS idx_lxmf_forwarding_rules_identity_hash ON lxmf_forwarding_rules(identity_hash)"
                "CREATE INDEX IF NOT EXISTS idx_lxmf_forwarding_rules_identity_hash ON lxmf_forwarding_rules(identity_hash)",
            )

            self.provider.execute("""
@@ -340,13 +343,13 @@ class DatabaseSchema:
                )
            """)
            self.provider.execute(
                "CREATE INDEX IF NOT EXISTS idx_lxmf_forwarding_mappings_alias_hash ON lxmf_forwarding_mappings(alias_hash)"
                "CREATE INDEX IF NOT EXISTS idx_lxmf_forwarding_mappings_alias_hash ON lxmf_forwarding_mappings(alias_hash)",
            )
            self.provider.execute(
                "CREATE INDEX IF NOT EXISTS idx_lxmf_forwarding_mappings_sender_hash ON lxmf_forwarding_mappings(original_sender_hash)"
                "CREATE INDEX IF NOT EXISTS idx_lxmf_forwarding_mappings_sender_hash ON lxmf_forwarding_mappings(original_sender_hash)",
            )
            self.provider.execute(
                "CREATE INDEX IF NOT EXISTS idx_lxmf_forwarding_mappings_recipient_hash ON lxmf_forwarding_mappings(final_recipient_hash)"
                "CREATE INDEX IF NOT EXISTS idx_lxmf_forwarding_mappings_recipient_hash ON lxmf_forwarding_mappings(final_recipient_hash)",
            )

        if current_version < 10:
@@ -356,54 +359,54 @@ class DatabaseSchema:

            # Clean up duplicates before adding unique indexes
            self.provider.execute(
                "DELETE FROM announces WHERE id NOT IN (SELECT MAX(id) FROM announces GROUP BY destination_hash)"
                "DELETE FROM announces WHERE id NOT IN (SELECT MAX(id) FROM announces GROUP BY destination_hash)",
            )
            self.provider.execute(
                "DELETE FROM crawl_tasks WHERE id NOT IN (SELECT MAX(id) FROM crawl_tasks GROUP BY destination_hash, page_path)"
                "DELETE FROM crawl_tasks WHERE id NOT IN (SELECT MAX(id) FROM crawl_tasks GROUP BY destination_hash, page_path)",
            )
            self.provider.execute(
                "DELETE FROM custom_destination_display_names WHERE id NOT IN (SELECT MAX(id) FROM custom_destination_display_names GROUP BY destination_hash)"
                "DELETE FROM custom_destination_display_names WHERE id NOT IN (SELECT MAX(id) FROM custom_destination_display_names GROUP BY destination_hash)",
            )
            self.provider.execute(
                "DELETE FROM favourite_destinations WHERE id NOT IN (SELECT MAX(id) FROM favourite_destinations GROUP BY destination_hash)"
                "DELETE FROM favourite_destinations WHERE id NOT IN (SELECT MAX(id) FROM favourite_destinations GROUP BY destination_hash)",
            )
            self.provider.execute(
                "DELETE FROM lxmf_user_icons WHERE id NOT IN (SELECT MAX(id) FROM lxmf_user_icons GROUP BY destination_hash)"
                "DELETE FROM lxmf_user_icons WHERE id NOT IN (SELECT MAX(id) FROM lxmf_user_icons GROUP BY destination_hash)",
            )
            self.provider.execute(
                "DELETE FROM lxmf_conversation_read_state WHERE id NOT IN (SELECT MAX(id) FROM lxmf_conversation_read_state GROUP BY destination_hash)"
                "DELETE FROM lxmf_conversation_read_state WHERE id NOT IN (SELECT MAX(id) FROM lxmf_conversation_read_state GROUP BY destination_hash)",
            )
            self.provider.execute(
                "DELETE FROM lxmf_messages WHERE id NOT IN (SELECT MAX(id) FROM lxmf_messages GROUP BY hash)"
                "DELETE FROM lxmf_messages WHERE id NOT IN (SELECT MAX(id) FROM lxmf_messages GROUP BY hash)",
            )

            self.provider.execute(
                "CREATE UNIQUE INDEX IF NOT EXISTS idx_announces_destination_hash_unique ON announces(destination_hash)"
                "CREATE UNIQUE INDEX IF NOT EXISTS idx_announces_destination_hash_unique ON announces(destination_hash)",
            )
            self.provider.execute(
                "CREATE UNIQUE INDEX IF NOT EXISTS idx_crawl_tasks_destination_path_unique ON crawl_tasks(destination_hash, page_path)"
                "CREATE UNIQUE INDEX IF NOT EXISTS idx_crawl_tasks_destination_path_unique ON crawl_tasks(destination_hash, page_path)",
            )
            self.provider.execute(
                "CREATE UNIQUE INDEX IF NOT EXISTS idx_custom_display_names_dest_hash_unique ON custom_destination_display_names(destination_hash)"
                "CREATE UNIQUE INDEX IF NOT EXISTS idx_custom_display_names_dest_hash_unique ON custom_destination_display_names(destination_hash)",
            )
            self.provider.execute(
                "CREATE UNIQUE INDEX IF NOT EXISTS idx_favourite_destinations_dest_hash_unique ON favourite_destinations(destination_hash)"
                "CREATE UNIQUE INDEX IF NOT EXISTS idx_favourite_destinations_dest_hash_unique ON favourite_destinations(destination_hash)",
            )
            self.provider.execute(
                "CREATE UNIQUE INDEX IF NOT EXISTS idx_lxmf_messages_hash_unique ON lxmf_messages(hash)"
                "CREATE UNIQUE INDEX IF NOT EXISTS idx_lxmf_messages_hash_unique ON lxmf_messages(hash)",
            )
            self.provider.execute(
                "CREATE UNIQUE INDEX IF NOT EXISTS idx_lxmf_user_icons_dest_hash_unique ON lxmf_user_icons(destination_hash)"
                "CREATE UNIQUE INDEX IF NOT EXISTS idx_lxmf_user_icons_dest_hash_unique ON lxmf_user_icons(destination_hash)",
            )
            self.provider.execute(
                "CREATE UNIQUE INDEX IF NOT EXISTS idx_lxmf_conversation_read_state_dest_hash_unique ON lxmf_conversation_read_state(destination_hash)"
                "CREATE UNIQUE INDEX IF NOT EXISTS idx_lxmf_conversation_read_state_dest_hash_unique ON lxmf_conversation_read_state(destination_hash)",
            )

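Note that ordering matters in this version-10 migration: the DELETE ... GROUP BY statements must run before the unique indexes are created, otherwise index creation fails on older databases that already contain duplicate rows. A minimal standalone sketch of the same keep-newest-row pattern, using Python's sqlite3 directly (the announces table is reduced to two columns for illustration):

import sqlite3

con = sqlite3.connect(":memory:")
con.execute("CREATE TABLE announces (id INTEGER PRIMARY KEY, destination_hash TEXT)")
con.executemany(
    "INSERT INTO announces (destination_hash) VALUES (?)",
    [("aa",), ("aa",), ("bb",)],  # "aa" is duplicated
)

# keep only the newest row (highest id) per destination_hash
con.execute(
    "DELETE FROM announces WHERE id NOT IN "
    "(SELECT MAX(id) FROM announces GROUP BY destination_hash)",
)

# the unique index can now be created without an IntegrityError
con.execute(
    "CREATE UNIQUE INDEX IF NOT EXISTS idx_announces_destination_hash_unique "
    "ON announces(destination_hash)",
)
print(con.execute("SELECT destination_hash FROM announces").fetchall())
# [('aa',), ('bb',)]
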
        if current_version < 11:
            # Add is_spam column to lxmf_messages if it doesn't exist
            try:
                self.provider.execute(
                    "ALTER TABLE lxmf_messages ADD COLUMN is_spam INTEGER DEFAULT 0"
                    "ALTER TABLE lxmf_messages ADD COLUMN is_spam INTEGER DEFAULT 0",
                )
            except Exception:
                # Column might already exist if table was created with newest schema
@@ -423,10 +426,10 @@ class DatabaseSchema:
                )
            """)
            self.provider.execute(
                "CREATE INDEX IF NOT EXISTS idx_call_history_remote_hash ON call_history(remote_identity_hash)"
                "CREATE INDEX IF NOT EXISTS idx_call_history_remote_hash ON call_history(remote_identity_hash)",
            )
            self.provider.execute(
                "CREATE INDEX IF NOT EXISTS idx_call_history_timestamp ON call_history(timestamp)"
                "CREATE INDEX IF NOT EXISTS idx_call_history_timestamp ON call_history(timestamp)",
            )

        if current_version < 13:
@@ -443,10 +446,10 @@ class DatabaseSchema:
                )
            """)
            self.provider.execute(
                "CREATE INDEX IF NOT EXISTS idx_voicemails_remote_hash ON voicemails(remote_identity_hash)"
                "CREATE INDEX IF NOT EXISTS idx_voicemails_remote_hash ON voicemails(remote_identity_hash)",
            )
            self.provider.execute(
                "CREATE INDEX IF NOT EXISTS idx_voicemails_timestamp ON voicemails(timestamp)"
                "CREATE INDEX IF NOT EXISTS idx_voicemails_timestamp ON voicemails(timestamp)",
            )

        if current_version < 14:
@@ -460,10 +463,10 @@ class DatabaseSchema:
                )
            """)
            self.provider.execute(
                "CREATE INDEX IF NOT EXISTS idx_notification_viewed_state_destination_hash ON notification_viewed_state(destination_hash)"
                "CREATE INDEX IF NOT EXISTS idx_notification_viewed_state_destination_hash ON notification_viewed_state(destination_hash)",
            )
            self.provider.execute(
                "CREATE UNIQUE INDEX IF NOT EXISTS idx_notification_viewed_state_dest_hash_unique ON notification_viewed_state(destination_hash)"
                "CREATE UNIQUE INDEX IF NOT EXISTS idx_notification_viewed_state_dest_hash_unique ON notification_viewed_state(destination_hash)",
            )

        if current_version < 15:
@@ -481,17 +484,31 @@ class DatabaseSchema:
                )
            """)
            self.provider.execute(
                "CREATE INDEX IF NOT EXISTS idx_lxmf_telemetry_destination_hash ON lxmf_telemetry(destination_hash)"
                "CREATE INDEX IF NOT EXISTS idx_lxmf_telemetry_destination_hash ON lxmf_telemetry(destination_hash)",
            )
            self.provider.execute(
                "CREATE INDEX IF NOT EXISTS idx_lxmf_telemetry_timestamp ON lxmf_telemetry(timestamp)"
                "CREATE INDEX IF NOT EXISTS idx_lxmf_telemetry_timestamp ON lxmf_telemetry(timestamp)",
            )
            self.provider.execute(
                "CREATE UNIQUE INDEX IF NOT EXISTS idx_lxmf_telemetry_dest_ts_unique ON lxmf_telemetry(destination_hash, timestamp)"
                "CREATE UNIQUE INDEX IF NOT EXISTS idx_lxmf_telemetry_dest_ts_unique ON lxmf_telemetry(destination_hash, timestamp)",
            )

        if current_version < 16:
            try:
                self.provider.execute(
                    "ALTER TABLE lxmf_forwarding_rules ADD COLUMN name TEXT",
                )
            except Exception:
                pass

        # Update version in config
        self.provider.execute(
            "INSERT OR REPLACE INTO config (key, value, updated_at) VALUES (?, ?, CURRENT_TIMESTAMP)",
            """
            INSERT INTO config (key, value, created_at, updated_at)
            VALUES (?, ?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)
            ON CONFLICT(key) DO UPDATE SET
                value = EXCLUDED.value,
                updated_at = EXCLUDED.updated_at
            """,
            ("database_version", str(self.LATEST_VERSION)),
        )

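The switch from INSERT OR REPLACE to INSERT ... ON CONFLICT(key) DO UPDATE above is behavioural, not just stylistic: INSERT OR REPLACE deletes the conflicting row and inserts a fresh one, which would discard the original created_at, while the upsert form updates the row in place. A minimal sketch of the difference (requires SQLite 3.24+; schema reduced for illustration):

import sqlite3

con = sqlite3.connect(":memory:")
con.execute(
    "CREATE TABLE config (key TEXT PRIMARY KEY, value TEXT, "
    "created_at TEXT, updated_at TEXT)",
)
con.execute(
    "INSERT INTO config VALUES ('database_version', '15', '2024-01-01', '2024-01-01')",
)

# upsert: the existing row is updated in place, so created_at survives
con.execute(
    """
    INSERT INTO config (key, value, created_at, updated_at)
    VALUES (?, ?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)
    ON CONFLICT(key) DO UPDATE SET
        value = EXCLUDED.value,
        updated_at = EXCLUDED.updated_at
    """,
    ("database_version", "16"),
)
print(con.execute("SELECT value, created_at FROM config").fetchone())
# ('16', '2024-01-01') -- INSERT OR REPLACE would have reset created_at
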
@@ -8,24 +8,26 @@ class TelemetryDAO:
    def __init__(self, provider: DatabaseProvider):
        self.provider = provider

    def upsert_telemetry(self, destination_hash, timestamp, data, received_from=None, physical_link=None):
    def upsert_telemetry(
        self, destination_hash, timestamp, data, received_from=None, physical_link=None
    ):
        now = datetime.now(UTC).isoformat()

        # If physical_link is a dict, convert to json
        if isinstance(physical_link, dict):
            physical_link = json.dumps(physical_link)

        self.provider.execute(
            """
            INSERT INTO lxmf_telemetry (destination_hash, timestamp, data, received_from, physical_link, updated_at)
            VALUES (?, ?, ?, ?, ?, ?)
            INSERT INTO lxmf_telemetry (destination_hash, timestamp, data, received_from, physical_link, created_at, updated_at)
            VALUES (?, ?, ?, ?, ?, ?, ?)
            ON CONFLICT(destination_hash, timestamp) DO UPDATE SET
                data = EXCLUDED.data,
                received_from = EXCLUDED.received_from,
                physical_link = EXCLUDED.physical_link,
                updated_at = EXCLUDED.updated_at
            """,
            (destination_hash, timestamp, data, received_from, physical_link, now),
            (destination_hash, timestamp, data, received_from, physical_link, now, now),
        )

    def get_latest_telemetry(self, destination_hash):
@@ -58,4 +60,3 @@ class TelemetryDAO:
            "DELETE FROM lxmf_telemetry WHERE destination_hash = ?",
            (destination_hash,),
        )

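The ON CONFLICT(destination_hash, timestamp) target in upsert_telemetry only works because the version-15 migration created a unique index on exactly that column pair; SQLite rejects an upsert whose conflict target matches no unique constraint. A small sketch of that dependency (illustrative schema):

import sqlite3

con = sqlite3.connect(":memory:")
con.execute(
    "CREATE TABLE lxmf_telemetry (destination_hash TEXT, timestamp INTEGER, data TEXT)",
)

upsert = (
    "INSERT INTO lxmf_telemetry VALUES ('aa', 1, 'x') "
    "ON CONFLICT(destination_hash, timestamp) DO UPDATE SET data = EXCLUDED.data"
)
try:
    con.execute(upsert)
except sqlite3.OperationalError as e:
    print(e)  # the conflict target matches no unique constraint yet

con.execute(
    "CREATE UNIQUE INDEX idx_dest_ts ON lxmf_telemetry(destination_hash, timestamp)",
)
con.execute(upsert)  # succeeds once the unique index exists
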
@@ -14,6 +14,9 @@ class TelephoneDAO:
        duration_seconds,
        timestamp,
    ):
        from datetime import UTC, datetime

        now = datetime.now(UTC)
        self.provider.execute(
            """
            INSERT INTO call_history (
@@ -22,8 +25,9 @@ class TelephoneDAO:
                is_incoming,
                status,
                duration_seconds,
                timestamp
            ) VALUES (?, ?, ?, ?, ?, ?)
                timestamp,
                created_at
            ) VALUES (?, ?, ?, ?, ?, ?, ?)
            """,
            (
                remote_identity_hash,
@@ -32,6 +36,7 @@ class TelephoneDAO:
                status,
                duration_seconds,
                timestamp,
                now,
            ),
        )

@@ -13,6 +13,9 @@ class VoicemailDAO:
        duration_seconds,
        timestamp,
    ):
        from datetime import UTC, datetime

        now = datetime.now(UTC)
        self.provider.execute(
            """
            INSERT INTO voicemails (
@@ -20,8 +23,9 @@ class VoicemailDAO:
                remote_identity_name,
                filename,
                duration_seconds,
                timestamp
            ) VALUES (?, ?, ?, ?, ?)
                timestamp,
                created_at
            ) VALUES (?, ?, ?, ?, ?, ?)
            """,
            (
                remote_identity_hash,
@@ -29,6 +33,7 @@ class VoicemailDAO:
                filename,
                duration_seconds,
                timestamp,
                now,
            ),
        )

@@ -58,6 +63,6 @@ class VoicemailDAO:

    def get_unread_count(self):
        row = self.provider.fetchone(
            "SELECT COUNT(*) as count FROM voicemails WHERE is_read = 0"
            "SELECT COUNT(*) as count FROM voicemails WHERE is_read = 0",
        )
        return row["count"] if row else 0

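Both DAOs above now stamp created_at from application code with a timezone-aware datetime rather than relying on a SQL column default. A quick illustration of the API used (datetime.UTC requires Python 3.11+):

from datetime import UTC, datetime

# timezone-aware; the deprecated datetime.utcnow() would return a naive value
now = datetime.now(UTC)
print(now.isoformat())  # e.g. 2025-01-01T12:00:00+00:00
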
@@ -21,19 +21,22 @@ class ForwardingManager:
        for mapping in mappings:
            try:
                private_key_bytes = base64.b64decode(
                    mapping["alias_identity_private_key"]
                    mapping["alias_identity_private_key"],
                )
                alias_identity = RNS.Identity.from_bytes(private_key_bytes)
                alias_hash = mapping["alias_hash"]

                # create temp router for this alias
                router_storage_path = os.path.join(
                    self.storage_path, "forwarding", alias_hash
                    self.storage_path,
                    "forwarding",
                    alias_hash,
                )
                os.makedirs(router_storage_path, exist_ok=True)

                router = LXMF.LXMRouter(
                    identity=alias_identity, storagepath=router_storage_path
                    identity=alias_identity,
                    storagepath=router_storage_path,
                )
                router.PROCESSING_INTERVAL = 1
                if self.config:
@@ -44,7 +47,7 @@ class ForwardingManager:
                router.register_delivery_callback(self.delivery_callback)

                alias_destination = router.register_delivery_identity(
                    identity=alias_identity
                    identity=alias_identity,
                )

                self.forwarding_destinations[alias_hash] = alias_destination
@@ -54,7 +57,10 @@ class ForwardingManager:
                print(f"Failed to load forwarding alias {mapping['alias_hash']}: {e}")

    def get_or_create_mapping(
        self, source_hash, final_recipient_hash, original_destination_hash
        self,
        source_hash,
        final_recipient_hash,
        original_destination_hash,
    ):
        mapping = self.db.messages.get_forwarding_mapping(
            original_sender_hash=source_hash,
@@ -67,12 +73,15 @@ class ForwardingManager:

        # create temp router for this alias
        router_storage_path = os.path.join(
            self.storage_path, "forwarding", alias_hash
            self.storage_path,
            "forwarding",
            alias_hash,
        )
        os.makedirs(router_storage_path, exist_ok=True)

        router = LXMF.LXMRouter(
            identity=alias_identity, storagepath=router_storage_path
            identity=alias_identity,
            storagepath=router_storage_path,
        )
        router.PROCESSING_INTERVAL = 1
        if self.config:
@@ -83,7 +92,7 @@ class ForwardingManager:
        router.register_delivery_callback(self.delivery_callback)

        alias_destination = router.register_delivery_identity(
            identity=alias_identity
            identity=alias_identity,
        )

        self.forwarding_destinations[alias_hash] = alias_destination
@@ -91,7 +100,7 @@ class ForwardingManager:

        data = {
            "alias_identity_private_key": base64.b64encode(
                alias_identity.get_private_key()
                alias_identity.get_private_key(),
            ).decode(),
            "alias_hash": alias_hash,
            "original_sender_hash": source_hash,

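The mapping rows persist each alias identity's private key as base64 text, which is what lets the load-time code above rebuild the same alias destination after a restart. A sketch of that round-trip, assuming an environment with RNS installed (both get_private_key() and Identity.from_bytes() appear in the code above):

import base64

import RNS

# generate an alias identity and store its private key as base64 text ...
alias_identity = RNS.Identity()
stored = base64.b64encode(alias_identity.get_private_key()).decode()

# ... then rebuild the identical identity later, as the loader does
restored = RNS.Identity.from_bytes(base64.b64decode(stored))
assert restored.hash == alias_identity.hash
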
@@ -62,7 +62,7 @@ class MapManager:
                    "size": stats.st_size,
                    "mtime": stats.st_mtime,
                    "is_active": full_path == self.get_offline_path(),
                }
                },
            )
        return sorted(files, key=lambda x: x["mtime"], reverse=True)

@@ -182,10 +182,10 @@ class MapManager:
        # create schema
        cursor.execute("CREATE TABLE metadata (name text, value text)")
        cursor.execute(
            "CREATE TABLE tiles (zoom_level integer, tile_column integer, tile_row integer, tile_data blob)"
            "CREATE TABLE tiles (zoom_level integer, tile_column integer, tile_row integer, tile_data blob)",
        )
        cursor.execute(
            "CREATE UNIQUE INDEX tile_index on tiles (zoom_level, tile_column, tile_row)"
            "CREATE UNIQUE INDEX tile_index on tiles (zoom_level, tile_column, tile_row)",
        )

        # insert metadata
@@ -235,7 +235,7 @@ class MapManager:
                    current_count += 1
                    self._export_progress[export_id]["current"] = current_count
                    self._export_progress[export_id]["progress"] = int(
                        (current_count / total_tiles) * 100
                        (current_count / total_tiles) * 100,
                    )

                # commit after each zoom level
@@ -259,7 +259,7 @@ class MapManager:
        y = int(
            (1.0 - math.log(math.tan(lat_rad) + (1 / math.cos(lat_rad))) / math.pi)
            / 2.0
            * n
            * n,
        )
        return x, y

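The y expression above is the standard Web Mercator (slippy-map) conversion from latitude to a tile row, with n = 2 ** zoom tiles per axis. For reference, the full lat/lon-to-tile conversion looks like this (a sketch following the OpenStreetMap wiki formula; the x half sits outside the hunk):

import math

def deg_to_tile(lat_deg, lon_deg, zoom):
    n = 2 ** zoom  # tiles per axis at this zoom level
    lat_rad = math.radians(lat_deg)
    x = int((lon_deg + 180.0) / 360.0 * n)
    y = int(
        (1.0 - math.log(math.tan(lat_rad) + (1 / math.cos(lat_rad))) / math.pi)
        / 2.0
        * n,
    )
    return x, y

print(deg_to_tile(0.0, 0.0, 1))  # (1, 1): the tile just south-east of (0, 0)
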
@@ -40,7 +40,8 @@ class MessageHandler:
            OR (destination_hash = ? AND source_hash = ?))
        """
        self.db.provider.execute(
            query, [local_hash, destination_hash, local_hash, destination_hash]
            query,
            [local_hash, destination_hash, local_hash, destination_hash],
        )

    def search_messages(self, local_hash, search_term):

@@ -53,7 +53,7 @@ class RNCPHandler:
        )

        self.receive_destination.set_link_established_callback(
            self._client_link_established
            self._client_link_established,
        )

        if fetch_allowed:
@@ -97,7 +97,7 @@ class RNCPHandler:
        if resource.metadata:
            try:
                filename = os.path.basename(
                    resource.metadata["name"].decode("utf-8")
                    resource.metadata["name"].decode("utf-8"),
                )
                save_dir = os.path.join(self.storage_dir, "rncp_received")
                os.makedirs(save_dir, exist_ok=True)
@@ -118,7 +118,8 @@ class RNCPHandler:
                    counter += 1
                    base, ext = os.path.splitext(filename)
                    saved_filename = os.path.join(
                        save_dir, f"{base}.{counter}{ext}"
                        save_dir,
                        f"{base}.{counter}{ext}",
                    )

                shutil.move(resource.data.name, saved_filename)
@@ -137,7 +138,13 @@ class RNCPHandler:
            self.active_transfers[transfer_id]["status"] = "failed"

    def _fetch_request(
        self, path, data, request_id, link_id, remote_identity, requested_at
        self,
        path,
        data,
        request_id,
        link_id,
        remote_identity,
        requested_at,
    ):
        if self.fetch_jail:
            if data.startswith(self.fetch_jail + "/"):
@@ -349,7 +356,7 @@ class RNCPHandler:
        if resource.metadata:
            try:
                filename = os.path.basename(
                    resource.metadata["name"].decode("utf-8")
                    resource.metadata["name"].decode("utf-8"),
                )
                if save_path:
                    save_dir = os.path.abspath(os.path.expanduser(save_path))

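The counter loop above probes {base}.{counter}{ext} until it finds an unused name, so a second download of report.pdf lands as report.1.pdf. The same pattern as a standalone helper (unique_path is a hypothetical name, not part of the handler):

import os

def unique_path(save_dir, filename):
    # probe name, name.1.ext, name.2.ext, ... until one is free
    candidate = os.path.join(save_dir, filename)
    counter = 0
    base, ext = os.path.splitext(filename)
    while os.path.exists(candidate):
        counter += 1
        candidate = os.path.join(save_dir, f"{base}.{counter}{ext}")
    return candidate
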
@@ -109,13 +109,13 @@ class RNProbeHandler:
        reception_stats = {}
        if self.reticulum.is_connected_to_shared_instance:
            reception_rssi = self.reticulum.get_packet_rssi(
                receipt.proof_packet.packet_hash
                receipt.proof_packet.packet_hash,
            )
            reception_snr = self.reticulum.get_packet_snr(
                receipt.proof_packet.packet_hash
                receipt.proof_packet.packet_hash,
            )
            reception_q = self.reticulum.get_packet_q(
                receipt.proof_packet.packet_hash
                receipt.proof_packet.packet_hash,
            )

            if reception_rssi is not None:

@@ -59,23 +59,28 @@ class RNStatusHandler:
        sorting = sorting.lower()
        if sorting in ("rate", "bitrate"):
            interfaces.sort(
                key=lambda i: i.get("bitrate", 0) or 0, reverse=sort_reverse
                key=lambda i: i.get("bitrate", 0) or 0,
                reverse=sort_reverse,
            )
        elif sorting == "rx":
            interfaces.sort(
                key=lambda i: i.get("rxb", 0) or 0, reverse=sort_reverse
                key=lambda i: i.get("rxb", 0) or 0,
                reverse=sort_reverse,
            )
        elif sorting == "tx":
            interfaces.sort(
                key=lambda i: i.get("txb", 0) or 0, reverse=sort_reverse
                key=lambda i: i.get("txb", 0) or 0,
                reverse=sort_reverse,
            )
        elif sorting == "rxs":
            interfaces.sort(
                key=lambda i: i.get("rxs", 0) or 0, reverse=sort_reverse
                key=lambda i: i.get("rxs", 0) or 0,
                reverse=sort_reverse,
            )
        elif sorting == "txs":
            interfaces.sort(
                key=lambda i: i.get("txs", 0) or 0, reverse=sort_reverse
                key=lambda i: i.get("txs", 0) or 0,
                reverse=sort_reverse,
            )
        elif sorting == "traffic":
            interfaces.sort(
@@ -100,7 +105,8 @@ class RNStatusHandler:
            )
        elif sorting == "held":
            interfaces.sort(
                key=lambda i: i.get("held_announces", 0) or 0, reverse=sort_reverse
                key=lambda i: i.get("held_announces", 0) or 0,
                reverse=sort_reverse,
            )

        formatted_interfaces = []

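Every branch in the sorting block above differs only in the dictionary key it sorts on, so the same behaviour could also be phrased as a lookup table; a sketch of that alternative shape (not the project's code, and the composite "traffic" sort would still need its own branch):

SORT_KEYS = {
    "rate": "bitrate",
    "bitrate": "bitrate",
    "rx": "rxb",
    "tx": "txb",
    "rxs": "rxs",
    "txs": "txs",
    "held": "held_announces",
}

def sort_interfaces(interfaces, sorting, sort_reverse=False):
    field = SORT_KEYS.get(sorting.lower())
    if field is not None:
        # "or 0" mirrors the branches above: treat None as zero
        interfaces.sort(key=lambda i: i.get(field, 0) or 0, reverse=sort_reverse)
    return interfaces
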
@@ -1,35 +1,38 @@
import struct
import time
import RNS.vendor.umsgpack as umsgpack

from RNS.vendor import umsgpack


class Sensor:
    SID_NONE = 0x00
    SID_TIME = 0x01
    SID_LOCATION = 0x02
    SID_PRESSURE = 0x03
    SID_BATTERY = 0x04
    SID_PHYSICAL_LINK = 0x05
    SID_ACCELERATION = 0x06
    SID_TEMPERATURE = 0x07
    SID_HUMIDITY = 0x08
    SID_MAGNETIC_FIELD = 0x09
    SID_AMBIENT_LIGHT = 0x0A
    SID_GRAVITY = 0x0B
    SID_ANGULAR_VELOCITY = 0x0C
    SID_PROXIMITY = 0x0E
    SID_INFORMATION = 0x0F
    SID_RECEIVED = 0x10
    SID_NONE = 0x00
    SID_TIME = 0x01
    SID_LOCATION = 0x02
    SID_PRESSURE = 0x03
    SID_BATTERY = 0x04
    SID_PHYSICAL_LINK = 0x05
    SID_ACCELERATION = 0x06
    SID_TEMPERATURE = 0x07
    SID_HUMIDITY = 0x08
    SID_MAGNETIC_FIELD = 0x09
    SID_AMBIENT_LIGHT = 0x0A
    SID_GRAVITY = 0x0B
    SID_ANGULAR_VELOCITY = 0x0C
    SID_PROXIMITY = 0x0E
    SID_INFORMATION = 0x0F
    SID_RECEIVED = 0x10
    SID_POWER_CONSUMPTION = 0x11
    SID_POWER_PRODUCTION = 0x12
    SID_PROCESSOR = 0x13
    SID_RAM = 0x14
    SID_NVM = 0x15
    SID_TANK = 0x16
    SID_FUEL = 0x17
    SID_RNS_TRANSPORT = 0x19
    SID_LXMF_PROPAGATION = 0x18
    SID_CONNECTION_MAP = 0x1A
    SID_CUSTOM = 0xff
    SID_POWER_PRODUCTION = 0x12
    SID_PROCESSOR = 0x13
    SID_RAM = 0x14
    SID_NVM = 0x15
    SID_TANK = 0x16
    SID_FUEL = 0x17
    SID_RNS_TRANSPORT = 0x19
    SID_LXMF_PROPAGATION = 0x18
    SID_CONNECTION_MAP = 0x1A
    SID_CUSTOM = 0xFF


class Telemeter:
    @staticmethod
@@ -38,27 +41,35 @@ class Telemeter:
            if packed is None:
                return None
            return {
                "latitude": struct.unpack("!i", packed[0])[0]/1e6,
                "longitude": struct.unpack("!i", packed[1])[0]/1e6,
                "altitude": struct.unpack("!i", packed[2])[0]/1e2,
                "speed": struct.unpack("!I", packed[3])[0]/1e2,
                "bearing": struct.unpack("!i", packed[4])[0]/1e2,
                "accuracy": struct.unpack("!H", packed[5])[0]/1e2,
                "latitude": struct.unpack("!i", packed[0])[0] / 1e6,
                "longitude": struct.unpack("!i", packed[1])[0] / 1e6,
                "altitude": struct.unpack("!i", packed[2])[0] / 1e2,
                "speed": struct.unpack("!I", packed[3])[0] / 1e2,
                "bearing": struct.unpack("!i", packed[4])[0] / 1e2,
                "accuracy": struct.unpack("!H", packed[5])[0] / 1e2,
                "last_update": packed[6],
            }
        except Exception:
            return None

    @staticmethod
    def pack_location(latitude, longitude, altitude=0, speed=0, bearing=0, accuracy=0, last_update=None):
    def pack_location(
        latitude,
        longitude,
        altitude=0,
        speed=0,
        bearing=0,
        accuracy=0,
        last_update=None,
    ):
        try:
            return [
                struct.pack("!i", int(round(latitude, 6)*1e6)),
                struct.pack("!i", int(round(longitude, 6)*1e6)),
                struct.pack("!i", int(round(altitude, 2)*1e2)),
                struct.pack("!I", int(round(speed, 2)*1e2)),
                struct.pack("!i", int(round(bearing, 2)*1e2)),
                struct.pack("!H", int(round(accuracy, 2)*1e2)),
                struct.pack("!i", int(round(latitude, 6) * 1e6)),
                struct.pack("!i", int(round(longitude, 6) * 1e6)),
                struct.pack("!i", int(round(altitude, 2) * 1e2)),
                struct.pack("!I", int(round(speed, 2) * 1e2)),
                struct.pack("!i", int(round(bearing, 2) * 1e2)),
                struct.pack("!H", int(round(accuracy, 2) * 1e2)),
                int(last_update or time.time()),
            ]
        except Exception:
@@ -85,4 +96,3 @@ class Telemeter:
        if location:
            p[Sensor.SID_LOCATION] = Telemeter.pack_location(**location)
        return umsgpack.packb(p)

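pack_location stores each reading as a big-endian fixed-point integer (degrees scaled by 1e6, the other fields by 1e2), which is why the unpack side divides by the same factors. A round-trip of a single field:

import struct

lat = 57.25  # degrees
packed = struct.pack("!i", int(round(lat, 6) * 1e6))  # big-endian int32, micro-degrees
print(struct.unpack("!i", packed)[0] / 1e6)           # 57.25
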
@@ -77,7 +77,7 @@ class TelephoneManager:
        if destination_identity is None:
            # If not found by identity hash, try as destination hash
            destination_identity = RNS.Identity.recall(
                destination_hash
                destination_hash,
            )  # Identity.recall takes identity hash

        if destination_identity is None:

@@ -66,7 +66,8 @@ LANGUAGE_CODE_TO_NAME = {
class TranslatorHandler:
    def __init__(self, libretranslate_url: str | None = None):
        self.libretranslate_url = libretranslate_url or os.getenv(
            "LIBRETRANSLATE_URL", "http://localhost:5000"
            "LIBRETRANSLATE_URL",
            "http://localhost:5000",
        )
        self.has_argos = HAS_ARGOS
        self.has_argos_lib = HAS_ARGOS_LIB
@@ -188,14 +189,18 @@ class TranslatorHandler:
        return {
            "translated_text": result.get("translatedText", ""),
            "source_lang": result.get("detectedLanguage", {}).get(
                "language", source_lang
                "language",
                source_lang,
            ),
            "target_lang": target_lang,
            "source": "libretranslate",
        }

    def _translate_argos(
        self, text: str, source_lang: str, target_lang: str
        self,
        text: str,
        source_lang: str,
        target_lang: str,
    ) -> dict[str, Any]:
        if source_lang == "auto":
            if self.has_argos_lib:
@@ -220,7 +225,10 @@ class TranslatorHandler:
        raise RuntimeError(msg)

    def _translate_argos_lib(
        self, text: str, source_lang: str, target_lang: str
        self,
        text: str,
        source_lang: str,
        target_lang: str,
    ) -> dict[str, Any]:
        try:
            installed_packages = package.get_installed_packages()
@@ -250,7 +258,10 @@ class TranslatorHandler:
        raise RuntimeError(msg)

    def _translate_argos_cli(
        self, text: str, source_lang: str, target_lang: str
        self,
        text: str,
        source_lang: str,
        target_lang: str,
    ) -> dict[str, Any]:
        if source_lang == "auto" or not source_lang:
            msg = "Auto-detection is not supported with CLI. Please select a source language manually."
@@ -368,7 +379,8 @@ class TranslatorHandler:
        return languages

    def install_language_package(
        self, package_name: str = "translate"
        self,
        package_name: str = "translate",
    ) -> dict[str, Any]:
        argospm = shutil.which("argospm")
        if not argospm:

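For context, the handler's LibreTranslate path posts JSON to the server's /translate endpoint; the response keys used above (translatedText, detectedLanguage) come straight from that API. A sketch with requests, assuming a server on the default URL:

import requests

resp = requests.post(
    "http://localhost:5000/translate",
    json={"q": "Hallo Welt", "source": "auto", "target": "en", "format": "text"},
    timeout=10,
)
result = resp.json()
print(result.get("translatedText"))        # "Hello world"
print(result.get("detectedLanguage", {}))  # e.g. {"confidence": 90, "language": "de"}
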
@@ -2,6 +2,7 @@ import os
import platform
import shutil
import subprocess
import sys
import threading
import time

@@ -53,9 +54,34 @@ class VoicemailManager:

    def get_name_for_identity_hash(self, identity_hash):
        """Default implementation, should be patched by ReticulumMeshChat"""
        return

    def _find_bundled_binary(self, name):
        if getattr(sys, "frozen", False):
            exe_dir = os.path.dirname(sys.executable)
            # Try in bin/ subdirectory of the executable
            local_bin = os.path.join(exe_dir, "bin", name)
            if platform.system() == "Windows":
                local_bin += ".exe"
            if os.path.exists(local_bin):
                return local_bin
            # Try in executable directory itself
            local_bin = os.path.join(exe_dir, name)
            if platform.system() == "Windows":
                local_bin += ".exe"
            if os.path.exists(local_bin):
                return local_bin
        return None

    def _find_espeak(self):
        # Try bundled first
        bundled = self._find_bundled_binary("espeak-ng")
        if bundled:
            return bundled
        bundled = self._find_bundled_binary("espeak")
        if bundled:
            return bundled

        # Try standard name first
        path = shutil.which("espeak-ng")
        if path:
@@ -80,6 +106,11 @@ class VoicemailManager:
        return None

    def _find_ffmpeg(self):
        # Try bundled first
        bundled = self._find_bundled_binary("ffmpeg")
        if bundled:
            return bundled

        path = shutil.which("ffmpeg")
        if path:
            return path
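_find_bundled_binary leans on the convention that frozen builds (cx_Freeze here; see the cx_setup change shipping a bin/ directory) set sys.frozen, and only then looks next to the executable. A condensed sketch of the full resolution order, including the PATH fallback the _find_* methods add (resolve_tool is an illustrative name):

import os
import platform
import shutil
import sys

def resolve_tool(name):
    # frozen builds ship helper binaries next to the executable (bin/ first)
    if getattr(sys, "frozen", False):
        exe_dir = os.path.dirname(sys.executable)
        suffix = ".exe" if platform.system() == "Windows" else ""
        for candidate in (
            os.path.join(exe_dir, "bin", name + suffix),
            os.path.join(exe_dir, name + suffix),
        ):
            if os.path.exists(candidate):
                return candidate
    # otherwise fall back to whatever is on PATH
    return shutil.which(name)
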
@@ -149,7 +180,10 @@ class VoicemailManager:
        return True

    def handle_incoming_call(self, caller_identity):
        RNS.log(f"Voicemail: handle_incoming_call from {RNS.prettyhexrep(caller_identity.hash)}", RNS.LOG_DEBUG)
        RNS.log(
            f"Voicemail: handle_incoming_call from {RNS.prettyhexrep(caller_identity.hash)}",
            RNS.LOG_DEBUG,
        )
        if not self.config.voicemail_enabled.get():
            RNS.log("Voicemail: Voicemail is disabled", RNS.LOG_DEBUG)
            return
@@ -158,7 +192,10 @@ class VoicemailManager:
        RNS.log(f"Voicemail: Will auto-answer in {delay} seconds", RNS.LOG_DEBUG)

        def voicemail_job():
            RNS.log(f"Voicemail: Auto-answer timer started for {RNS.prettyhexrep(caller_identity.hash)}", RNS.LOG_DEBUG)
            RNS.log(
                f"Voicemail: Auto-answer timer started for {RNS.prettyhexrep(caller_identity.hash)}",
                RNS.LOG_DEBUG,
            )
            time.sleep(delay)

            # Check if still ringing and no other active call
@@ -167,12 +204,16 @@ class VoicemailManager:
                RNS.log("Voicemail: No telephone object", RNS.LOG_ERROR)
                return

            RNS.log(f"Voicemail: Checking status. Call status: {telephone.call_status}, Active call: {telephone.active_call}", RNS.LOG_DEBUG)

            RNS.log(
                f"Voicemail: Checking status. Call status: {telephone.call_status}, Active call: {telephone.active_call}",
                RNS.LOG_DEBUG,
            )

            if (
                telephone
                and telephone.active_call
                and telephone.active_call.get_remote_identity().hash == caller_identity.hash
                and telephone.active_call.get_remote_identity().hash
                == caller_identity.hash
                and telephone.call_status == LXST.Signalling.STATUS_RINGING
            ):
                RNS.log(
@@ -181,9 +222,15 @@ class VoicemailManager:
                )
                self.start_voicemail_session(caller_identity)
            else:
                RNS.log("Voicemail: Auto-answer conditions not met after delay", RNS.LOG_DEBUG)
                RNS.log(
                    "Voicemail: Auto-answer conditions not met after delay",
                    RNS.LOG_DEBUG,
                )
                if telephone.active_call:
                    RNS.log(f"Voicemail: Active call remote: {RNS.prettyhexrep(telephone.active_call.get_remote_identity().hash)}", RNS.LOG_DEBUG)
                    RNS.log(
                        f"Voicemail: Active call remote: {RNS.prettyhexrep(telephone.active_call.get_remote_identity().hash)}",
                        RNS.LOG_DEBUG,
                    )

        threading.Thread(target=voicemail_job, daemon=True).start()

@@ -209,20 +256,28 @@ class VoicemailManager:
        def session_job():
            try:
                # 1. Play greeting
                greeting_source = OpusFileSource(greeting_path, target_frame_ms=60)
                # Attach to transmit mixer
                greeting_pipeline = Pipeline(
                    source=greeting_source, codec=Null(), sink=telephone.transmit_mixer
                )
                greeting_pipeline.start()
                try:
                    greeting_source = OpusFileSource(greeting_path, target_frame_ms=60)
                    # Attach to transmit mixer
                    greeting_pipeline = Pipeline(
                        source=greeting_source,
                        codec=Null(),
                        sink=telephone.transmit_mixer,
                    )
                    greeting_pipeline.start()

                # Wait for greeting to finish
                while greeting_source.running:
                    time.sleep(0.1)
                    if not telephone.active_call:
                        return
                    # Wait for greeting to finish
                    while greeting_source.running:
                        time.sleep(0.1)
                        if not telephone.active_call:
                            return

                greeting_pipeline.stop()
                    greeting_pipeline.stop()
                except Exception as e:
                    RNS.log(
                        f"Voicemail: Could not play greeting (libs missing?): {e}",
                        RNS.LOG_ERROR,
                    )

                # 2. Play beep
                beep_source = LXST.ToneSource(
@@ -306,7 +361,7 @@ class VoicemailManager:
        # Save to database if long enough
        if duration >= 1:
            remote_name = self.get_name_for_identity_hash(
                self.recording_remote_identity.hash.hex()
                self.recording_remote_identity.hash.hex(),
            )
            self.db.voicemails.add_voicemail(
                remote_identity_hash=self.recording_remote_identity.hash.hex(),

@@ -1,6 +1,5 @@
"""
Auto-generated helper so Python tooling and the Electron build
"""Auto-generated helper so Python tooling and the Electron build
share the same version string.
"""

__version__ = '2.50.0'
__version__ = "3.0.0"