mirror of https://github.com/rommapp/romm.git (synced 2025-12-22 10:27:13 +00:00)
Add scheduled tasks for library scan + titledb update
.gitignore (vendored, 3 lines changed)

@@ -52,3 +52,6 @@ backend/romm_test/logs
 
 # service worker
 frontend/dev-dist
+
+# outside data
+switch_titledb.json
@@ -1,15 +1,12 @@
 import emoji
 import socketio  # type: ignore
-from rq import Queue
 
 from logger.logger import log
 from utils import fs, fastapi
 from utils.exceptions import PlatformsNotFoundException, RomsNotFoundException
 from handler import dbh
 from utils.socket import socket_server
-from utils.cache import redis_client, redis_url, redis_connectable
-
-scan_queue = Queue(connection=redis_client)
+from utils.redis import high_prio_queue, redis_url, redis_connectable
 
 
 async def scan_platforms(paltforms: str, complete_rescan: bool):
@@ -83,6 +80,6 @@ async def scan_handler(_sid: str, platforms: str, complete_rescan: bool = True):
 
     # Run in worker if redis is available
     if redis_connectable:
-        return scan_queue.enqueue(scan_platforms, platforms, complete_rescan)
+        return high_prio_queue.enqueue(scan_platforms, platforms, complete_rescan)
     else:
         await scan_platforms(platforms, complete_rescan)
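For context, a minimal sketch (not the project's code, assuming a Redis server reachable on localhost:6379) of what enqueueing work onto a named RQ queue looks like; the patched endpoint does the same thing with high_prio_queue imported from utils.redis:

# Sketch only: enqueue a job onto the "high" queue, as the scan endpoint now does.
from redis import Redis
from rq import Queue

redis_client = Redis(host="localhost", port=6379, db=0)
high_prio_queue = Queue(name="high", connection=redis_client)


def scan_job(platforms: str, complete_rescan: bool) -> str:
    # Stand-in for the real scan coroutine; in practice the function must live
    # in a module the worker process can import.
    return f"scanned platforms={platforms!r}, complete_rescan={complete_rescan}"


job = high_prio_queue.enqueue(scan_job, "", True)
print(job.id)  # the job runs once a worker listening on the "high" queue picks it up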
@@ -1,4 +1,4 @@
-opl_index = {
+{
     "SLES_556.71": {
         "Name": "Fifa '14",
         "Region": "PAL"
@@ -4,23 +4,31 @@ import pydash
 import requests
 import re
 import time
+import os
+import json
 from unidecode import unidecode as uc
 from requests.exceptions import HTTPError, Timeout
 from typing import Optional
 
 from config import CLIENT_ID, CLIENT_SECRET
 from utils import get_file_name_with_no_tags as get_search_term
 from logger.logger import log
 from utils.cache import cache
-from .ps2_opl_index import opl_index
 
 MAIN_GAME_CATEGORY = 0
 EXPANDED_GAME_CATEGORY = 10
 
 N_SCREENSHOTS = 5
+PS2_IGDB_ID = 8
+SWITCH_IGDB_ID = 130
 
 ps2_opl_regex = r"^([A-Z]{4}_\d{3}\.\d{2})\..*$"
-PS2_IGDB_ID = 8
+ps2_opl_index_file = os.path.join(
+    os.path.dirname(__file__), "fixtures", "ps2_opl_index.json"
+)
+
+switch_titledb_regex = r"^(70[0-9]{12})$"
+switch_titledb_index_file = os.path.join(
+    os.path.dirname(__file__), "fixtures", "switch_titledb.json"
+)
 
 
 class IGDBHandler:
@@ -125,9 +133,23 @@
         match = re.match(ps2_opl_regex, search_term)
         if p_igdb_id == PS2_IGDB_ID and match:
             serial_code = match.group(1)
-            index_entry = opl_index.get(serial_code, None)
-            if index_entry:
-                search_term = index_entry["Name"]  # type: ignore
+
+            with open(ps2_opl_index_file, "r") as index_json:
+                opl_index = json.loads(index_json.read())
+                index_entry = opl_index.get(serial_code, None)
+                if index_entry:
+                    search_term = index_entry["Name"]  # type: ignore
+
+        # Patch support for switch titleID filename format
+        match = re.match(switch_titledb_regex, search_term)
+        if p_igdb_id == SWITCH_IGDB_ID and match:
+            title_id = match.group(1)
+
+            with open(switch_titledb_index_file, "r") as index_json:
+                titledb_index = json.loads(index_json.read())
+                index_entry = titledb_index.get(title_id, None)
+                if index_entry:
+                    search_term = index_entry["name"]  # type: ignore
 
         res = (
             self._search_rom(uc(search_term), p_igdb_id, MAIN_GAME_CATEGORY)
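For reference, the two new regexes key the index lookups: the PS2 OPL pattern captures the serial at the start of the filename, and the Switch pattern matches a 14-digit titledb ID. A quick illustration with hypothetical filenames (not from the commit):

# Sketch only: how the two filename formats are matched before the JSON lookup.
import re

ps2_opl_regex = r"^([A-Z]{4}_\d{3}\.\d{2})\..*$"
switch_titledb_regex = r"^(70[0-9]{12})$"

print(re.match(ps2_opl_regex, "SLES_556.71.Fifa 14.iso").group(1))  # SLES_556.71
print(re.match(switch_titledb_regex, "70010000000025").group(1))    # 70010000000025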
backend/tasks/__init__.py (new file, 5 lines)
@@ -0,0 +1,5 @@
+from rq_scheduler import Scheduler
+
+from utils.redis import low_prio_queue
+
+scheduler = Scheduler(queue=low_prio_queue, connection=low_prio_queue.connection)
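A minimal sketch of how this scheduler object behaves (assuming Redis on localhost:6379): jobs registered through it are pushed onto the "low" queue when due, and get_jobs() lists what is currently scheduled:

# Sketch only; mirrors the Scheduler created in backend/tasks/__init__.py.
from redis import Redis
from rq import Queue
from rq_scheduler import Scheduler

redis_client = Redis(host="localhost", port=6379, db=0)
low_prio_queue = Queue(name="low", connection=redis_client)
scheduler = Scheduler(queue=low_prio_queue, connection=low_prio_queue.connection)

# List every job currently registered with the scheduler and when it next runs.
for job, next_run in scheduler.get_jobs(with_times=True):
    print(job.id, job.func_name, next_run)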
backend/tasks/scan_library.py (new file, 36 lines)
@@ -0,0 +1,36 @@
+from utils.redis import redis_connectable
+from logger.logger import log
+from . import scheduler
+from utils.exceptions import SchedulerException
+
+
+async def run():
+    from endpoints.scan import scan_platforms
+
+    log.info("Scheduled library scan started...")
+    await scan_platforms("", False)
+    log.info("Scheduled library scan done.")
+
+
+def schedule():
+    if not redis_connectable:
+        raise SchedulerException("Redis not connectable, library scan not scheduled.")
+
+    existing_jobs = scheduler.get_jobs(func_name="tasks.scan_library.run")
+    if existing_jobs:
+        raise SchedulerException("Library scan already scheduled.")
+
+    return scheduler.cron(
+        "0 3 * * *",  # At 3:00 AM every day
+        func="tasks.scan_library.run",
+        repeat=None,
+    )
+
+
+def unschedule():
+    existing_jobs = scheduler.get_jobs(func_name="tasks.scan_library.run")
+
+    if not existing_jobs:
+        raise SchedulerException("No library scan scheduled.")
+
+    scheduler.cancel(*existing_jobs)
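A hypothetical call site for the new task module; the actual wiring is not part of this diff, so the names below simply mirror the new code and the project's existing logger:

# Sketch only: register the scheduled scan at application start-up and tolerate
# the case where it is already scheduled. Not taken from this commit.
from logger.logger import log
from tasks import scan_library
from utils.exceptions import SchedulerException

try:
    scan_library.schedule()
except SchedulerException as exc:
    log.warning(exc)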
backend/tasks/update_switch_titledb.py (new file, 59 lines)
@@ -0,0 +1,59 @@
+import requests
+import os
+from pathlib import Path
+
+from utils.redis import redis_connectable
+from logger.logger import log
+from typing import Final
+from . import scheduler
+from utils.exceptions import SchedulerException
+
+RAW_URL: Final = "https://raw.githubusercontent.com/blawar/titledb/master/US.en.json"
+FIXTURE_FILE_PATH = (
+    Path(os.path.dirname(__file__)).parent
+    / "handler"
+    / "fixtures"
+    / "switch_titledb.json"
+)
+
+print(FIXTURE_FILE_PATH)
+
+
+async def run():
+    log.info("Scheduled TitleDB update started...")
+
+    try:
+        response = requests.get(RAW_URL)
+        response.raise_for_status()
+
+        with open(FIXTURE_FILE_PATH, "wb") as fixture:
+            fixture.write(response.content)
+
+        log.info("TitleDB update done.")
+    except requests.exceptions.RequestException as e:
+        log.error("TitleDB update failed.", exc_info=True)
+        log.error(e)
+
+
+def schedule():
+    if not redis_connectable:
+        raise SchedulerException("Redis not connectable, titleDB update not scheduled.")
+
+    existing_jobs = scheduler.get_jobs(func_name="tasks.update_switch_titledb.run")
+    if existing_jobs:
+        raise SchedulerException("TitleDB update already scheduled.")
+
+    return scheduler.cron(
+        "0 3 * * *",  # At 3:00 AM every day
+        func="tasks.update_switch_titledb.run",
+        repeat=None,
+    )
+
+
+def unschedule():
+    existing_jobs = scheduler.get_jobs(func_name="tasks.update_switch_titledb.run")
+
+    if not existing_jobs:
+        raise SchedulerException("No TitleDB update scheduled.")
+
+    scheduler.cancel(*existing_jobs)
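One thing the diff does not show is the process that actually fires these cron entries: rq-scheduler needs its own polling loop that moves due jobs onto the queue. A minimal sketch of such a loop, assuming Redis on localhost:6379 (how romm runs it is not part of this commit):

# Sketch only: a scheduler loop that enqueues due cron jobs onto the "low" queue.
from redis import Redis
from rq import Queue
from rq_scheduler import Scheduler

redis_client = Redis(host="localhost", port=6379, db=0)
scheduler = Scheduler(
    queue=Queue(name="low", connection=redis_client),
    connection=redis_client,
    interval=60,  # poll Redis every 60 seconds
)
scheduler.run()  # blocks; enqueues jobs as their cron schedules come due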
@@ -1,15 +1,7 @@
-from redis import Redis, ConnectionError
+from redis import Redis
 
 from config import REDIS_HOST, REDIS_PORT
-
-redis_client = Redis(host=REDIS_HOST, port=int(REDIS_PORT), db=0)
-redis_url = f"redis://{REDIS_HOST}:{REDIS_PORT}"
-
-try:
-    redis_connectable = redis_client.ping()
-except ConnectionError:
-    redis_connectable = False
+from .redis import redis_connectable
 
 
 class FallbackCache:
     def __init__(self) -> None:
@@ -35,3 +35,12 @@ class RomAlreadyExistsException(Exception):
 
     def __repr__(self):
        return self.message
+
+
+class SchedulerException(Exception):
+    def __init__(self, message: str):
+        self.message = message
+        super().__init__(self.message)
+
+    def __repr__(self):
+        return self.message
backend/utils/redis.py (new file, 17 lines)
@@ -0,0 +1,17 @@
+from redis import Redis, ConnectionError
+from rq import Queue
+
+from config import REDIS_HOST, REDIS_PORT
+
+
+redis_client = Redis(host=REDIS_HOST, port=int(REDIS_PORT), db=0)
+redis_url = f"redis://{REDIS_HOST}:{REDIS_PORT}"
+
+try:
+    redis_connectable = redis_client.ping()
+except ConnectionError:
+    redis_connectable = False
+
+high_prio_queue = Queue(name="high", connection=redis_client)
+default_queue = Queue(name="default", connection=redis_client)
+low_prio_queue = Queue(name="low", connection=redis_client)
@@ -1,6 +1,6 @@
 import socketio  # type: ignore
 
-from utils.cache import redis_url, redis_connectable
+from utils.redis import redis_url, redis_connectable
 
 
 socket_server = socketio.AsyncServer(
@@ -1,7 +1,7 @@
 import sys
 from rq import Worker, Queue, Connection
 
-from utils.cache import redis_client, redis_connectable
+from utils.redis import redis_client, redis_connectable
 
 listen = ["high", "default", "low"]
 
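The worker change above only swaps the import. For completeness, a sketch of an RQ worker draining the three named queues in priority order, assuming Redis on localhost:6379 (the project's exact invocation is only partially shown in this hunk):

# Sketch only: a worker consuming the "high", "default" and "low" queues in order.
from redis import Redis
from rq import Connection, Queue, Worker

redis_client = Redis(host="localhost", port=6379, db=0)
listen = ["high", "default", "low"]

with Connection(redis_client):
    worker = Worker([Queue(name) for name in listen])
    worker.work()  # blocks, processing jobs as they arrive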
poetry.lock (generated, 43 lines changed)
@@ -319,6 +319,16 @@ files = [
     {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
 ]
 
+[[package]]
+name = "crontab"
+version = "1.0.1"
+description = "Parse and use crontab schedules in Python"
+optional = false
+python-versions = "*"
+files = [
+    {file = "crontab-1.0.1.tar.gz", hash = "sha256:89477e3f93c81365e738d5ee2659509e6373bb2846de13922663e79aa74c6b91"},
+]
+
 [[package]]
 name = "cryptography"
 version = "41.0.3"
@@ -469,6 +479,20 @@ sqlalchemy = ["SQLAlchemy (>=1.3.20)", "sqlakeyset (>=2.0.1680321678,<3.0.0)"]
 sqlmodel = ["sqlakeyset (>=2.0.1680321678,<3.0.0)", "sqlmodel (>=0.0.8,<0.0.9)"]
 tortoise = ["tortoise-orm (>=0.16.18,<0.20.0)"]
 
+[[package]]
+name = "freezegun"
+version = "1.2.2"
+description = "Let your Python tests travel through time"
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "freezegun-1.2.2-py3-none-any.whl", hash = "sha256:ea1b963b993cb9ea195adbd893a48d573fda951b0da64f60883d7e988b606c9f"},
+    {file = "freezegun-1.2.2.tar.gz", hash = "sha256:cd22d1ba06941384410cd967d8a99d5ae2442f57dfafeff2fda5de8dc5c05446"},
+]
+
+[package.dependencies]
+python-dateutil = ">=2.7"
+
 [[package]]
 name = "greenlet"
 version = "2.0.2"
@@ -1336,6 +1360,23 @@ files = [
 click = ">=5.0.0"
 redis = ">=4.0.0"
 
+[[package]]
+name = "rq-scheduler"
+version = "0.13.1"
+description = "Provides job scheduling capabilities to RQ (Redis Queue)"
+optional = false
+python-versions = "*"
+files = [
+    {file = "rq-scheduler-0.13.1.tar.gz", hash = "sha256:89d6a18f215536362b22c0548db7dbb8678bc520c18dc18a82fd0bb2b91695ce"},
+    {file = "rq_scheduler-0.13.1-py2.py3-none-any.whl", hash = "sha256:c2b19c3aedfc7de4d405183c98aa327506e423bf4cdc556af55aaab9bbe5d1a1"},
+]
+
+[package.dependencies]
+crontab = ">=0.23.0"
+freezegun = "*"
+python-dateutil = "*"
+rq = ">=0.13"
+
 [[package]]
 name = "six"
 version = "1.16.0"
@@ -1933,4 +1974,4 @@ multidict = ">=4.0"
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.10"
-content-hash = "c973b508a9bf80ff3875173c2c9b505e5c598fba6ef82f432936ce2a270cb292"
+content-hash = "4beccb9fef42c91d3c8391a8adde50b619a12538fad90bb317727c5257454beb"
@@ -34,6 +34,7 @@ types-requests = "^2.31.0.2"
 mypy = "^1.4.1"
 types-redis = "^4.6.0.3"
 stream-zip = "^0.0.67"
+rq-scheduler = "^0.13.1"
 
 [build-system]
 requires = ["poetry-core"]