Compare commits

...

3 Commits

Author SHA1 Message Date
skymike03
51ad08ff33 v2.5.0.4 (2026.02.15)
- add some new cool musics :P
2026-02-15 20:34:28 +01:00
skymike03
d6a5c4b27e Add type ignore comment for requests import in network.py 2026-02-08 18:13:03 +01:00
skymike03
2c7c3414a5 v2.5.0.3 (2026.02.08)
- add 7z support for extracting games
- add cookie test for archive.org downloads (new romhacks platforms added)
2026-02-08 16:58:11 +01:00
28 changed files with 308 additions and 22 deletions

View File

@@ -0,0 +1 @@
donation-identifier=39546f3b2d3f67a664818596d81a5bec; abtest-identifier=fee0e28eb6c8d0de147d19db4303ee84; logged-in-sig=1802098179%201770562179%20AKHN8aF4EsFeR%2FundhgQTu0j27ZdFZXmgyUiqnJvXq%2BwtDGVvapqhKUFhIlI9bXAMYLMHDRJoO76bsqXI662nrIsx58efihNrafdk285r8MAdotWx03usO30baYoNPoMMEaK8iuhtbfTEyfE7oTZwdO7wjxNUTm%2Bbjjm6kmUD3HSQRzPsc0oWrrnd8Wj2x3UiuZeRnBfC60OjJHcnKC2Xv7teS%2BBx3EdKAG1i739MxTzjtEfERWw83bnaV30827qaFhZ%2BDK3%2FwCGOUwtablPA%2B0EeLR9%2BoYeC6x5aaJMZHBMjBowSIEE4QAK9IG9haBsn7%2F1PCweYuLivMIZJeA7mA%3D%3D; logged-in-user=rgsx%40outlook.fr

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@@ -14,7 +14,7 @@ except Exception:
pygame = None # type: ignore
# Version actuelle de l'application
app_version = "2.5.0.2"
app_version = "2.5.0.4"
# Nombre de jours avant de proposer la mise à jour de la liste des jeux
GAMELIST_UPDATE_DAYS = 7
@@ -186,6 +186,7 @@ RGSX_SETTINGS_PATH = os.path.join(SAVE_FOLDER, "rgsx_settings.json")
API_KEY_1FICHIER_PATH = os.path.join(SAVE_FOLDER, "1FichierAPI.txt")
API_KEY_ALLDEBRID_PATH = os.path.join(SAVE_FOLDER, "AllDebridAPI.txt")
API_KEY_REALDEBRID_PATH = os.path.join(SAVE_FOLDER, "RealDebridAPI.txt")
ARCHIVE_ORG_COOKIE_PATH = os.path.join(APP_FOLDER, "assets", "ArchiveOrgCookie.txt")

View File

@@ -1,4 +1,4 @@
import requests
import requests # type: ignore
import subprocess
import os
import sys
@@ -15,7 +15,7 @@ try:
except Exception:
pygame = None # type: ignore
from config import OTA_VERSION_ENDPOINT,APP_FOLDER, UPDATE_FOLDER, OTA_UPDATE_ZIP
from utils import sanitize_filename, extract_zip, extract_rar, load_api_key_1fichier, load_api_key_alldebrid, normalize_platform_name, load_api_keys
from utils import sanitize_filename, extract_zip, extract_rar, extract_7z, load_api_key_1fichier, load_api_key_alldebrid, normalize_platform_name, load_api_keys, load_archive_org_cookie
from history import save_history
from display import show_toast
import logging
@@ -32,11 +32,45 @@ from language import _ # Import de la fonction de traduction
import re
import html as html_module
from urllib.parse import urljoin, unquote
import urllib.parse
logger = logging.getLogger(__name__)
def _redact_headers(headers: dict) -> dict:
"""Return a copy of headers with sensitive fields redacted for logs."""
if not isinstance(headers, dict):
return {}
safe = headers.copy()
if 'Cookie' in safe and safe['Cookie']:
safe['Cookie'] = '<redacted>'
return safe
def _split_archive_org_path(url: str):
"""Parse archive.org download URL and return (identifier, archive_name, inner_path)."""
try:
parsed = urllib.parse.urlsplit(url)
parts = parsed.path.split('/download/', 1)
if len(parts) != 2:
return None, None, None
after = parts[1]
identifier = after.split('/', 1)[0]
rest = after[len(identifier):]
if rest.startswith('/'):
rest = rest[1:]
rest_decoded = urllib.parse.unquote(rest)
if '/' not in rest_decoded:
return identifier, None, None
first_seg, remainder = rest_decoded.split('/', 1)
if first_seg.lower().endswith(('.zip', '.rar', '.7z')):
return identifier, first_seg, remainder
return identifier, None, None
except Exception:
return None, None, None
# --- File d'attente de téléchargements (worker) ---
def download_queue_worker():
"""Worker qui surveille la file d'attente et lance le prochain téléchargement si aucun n'est actif."""
@@ -821,6 +855,7 @@ async def download_rom(url, platform, game_name, is_zip_non_supported=False, tas
cancel_events[task_id] = threading.Event()
def download_thread():
nonlocal url
try:
# IMPORTANT: Créer l'entrée dans config.history dès le début avec status "Downloading"
# pour que l'interface web puisse afficher le téléchargement en cours
@@ -1059,14 +1094,67 @@ async def download_rom(url, platform, game_name, is_zip_non_supported=False, tas
download_headers = headers.copy()
download_headers['Accept'] = 'application/octet-stream, */*'
download_headers['Referer'] = 'https://myrient.erista.me/'
archive_cookie = load_archive_org_cookie()
archive_alt_urls = []
meta_json = None
# Préparation spécifique archive.org : récupérer quelques pages pour obtenir cookies éventuels
# Préparation spécifique archive.org : normaliser URL + récupérer cookies/metadata
if 'archive.org/download/' in url:
try:
pre_id = url.split('/download/')[1].split('/')[0]
session.get('https://archive.org/robots.txt', timeout=20)
session.get(f'https://archive.org/metadata/{pre_id}', timeout=20)
parsed = urllib.parse.urlsplit(url)
parts = parsed.path.split('/download/', 1)
pre_id = None
rest_decoded = None
if len(parts) == 2:
after = parts[1]
pre_id = after.split('/', 1)[0]
rest = after[len(pre_id):]
if rest.startswith('/'):
rest = rest[1:]
rest_decoded = urllib.parse.unquote(rest)
rest_encoded = urllib.parse.quote(rest_decoded, safe='/') if rest_decoded else ''
new_path = f"/download/{pre_id}/" + rest_encoded
url = urllib.parse.urlunsplit((parsed.scheme, parsed.netloc, new_path, parsed.query, parsed.fragment))
logger.debug(f"URL archive.org normalisée: {url}")
if not pre_id:
pre_id = url.split('/download/')[1].split('/')[0]
download_headers['Referer'] = f"https://archive.org/details/{pre_id}"
download_headers['Origin'] = 'https://archive.org'
if archive_cookie:
download_headers['Cookie'] = archive_cookie
if archive_cookie:
# Apply cookie to session for redirects to ia*.us.archive.org
for pair in archive_cookie.split(';'):
if '=' in pair:
name, value = pair.split('=', 1)
session.cookies.set(name.strip(), value.strip(), domain='.archive.org')
session.get('https://archive.org/robots.txt', timeout=20, headers={'Cookie': archive_cookie} if archive_cookie else None)
meta_resp = session.get(f'https://archive.org/metadata/{pre_id}', timeout=20, headers={'Cookie': archive_cookie} if archive_cookie else None)
if meta_resp.status_code == 200:
try:
meta_json = meta_resp.json()
except Exception:
meta_json = None
logger.debug(f"Pré-chargement cookies/metadata archive.org pour {pre_id}")
# Construire des URLs alternatives pour archive interne
identifier, archive_name, inner_path = _split_archive_org_path(url)
if identifier and archive_name and inner_path:
# Variante sans préfixe archive
archive_alt_urls.append(f"https://archive.org/download/{identifier}/" + urllib.parse.quote(inner_path, safe='/'))
# Variante filename
archive_alt_urls.append(f"https://archive.org/download/{identifier}/{archive_name}?filename=" + urllib.parse.quote(inner_path, safe='/'))
# Variante view_archive.php via serveur/dir metadata
if meta_json:
server = meta_json.get('server')
directory = meta_json.get('dir')
if server and directory:
archive_path = f"{directory}/{archive_name}"
view_url = f"https://{server}/view_archive.php?archive=" + urllib.parse.quote(archive_path, safe='/') + "&file=" + urllib.parse.quote(inner_path, safe='/')
# Prioriser view_archive.php (cas valide observe dans le navigateur)
archive_alt_urls.insert(0, view_url)
except Exception as e:
logger.debug(f"Pré-chargement archive.org ignoré: {e}")
@@ -1087,19 +1175,22 @@ async def download_rom(url, platform, game_name, is_zip_non_supported=False, tas
header_variants = [
download_headers,
{ # Variante sans Referer spécifique
'User-Agent': headers['User-Agent'],
'User-Agent': headers.get('User-Agent', download_headers.get('User-Agent', 'Mozilla/5.0')),
'Accept': 'application/octet-stream,*/*;q=0.8',
'Accept-Language': headers['Accept-Language'],
'Connection': 'keep-alive'
'Accept-Language': headers.get('Accept-Language', 'en-US,en;q=0.5'),
'Connection': 'keep-alive',
**({'Cookie': archive_cookie} if archive_cookie else {})
},
{ # Variante minimaliste type curl
'User-Agent': 'curl/8.4.0',
'Accept': '*/*'
'Accept': '*/*',
**({'Cookie': archive_cookie} if archive_cookie else {})
},
{ # Variante avec Referer archive.org
'User-Agent': headers['User-Agent'],
'User-Agent': headers.get('User-Agent', download_headers.get('User-Agent', 'Mozilla/5.0')),
'Accept': '*/*',
'Referer': 'https://archive.org/'
'Referer': 'https://archive.org/',
**({'Cookie': archive_cookie} if archive_cookie else {})
}
]
response = None
@@ -1117,7 +1208,7 @@ async def download_rom(url, platform, game_name, is_zip_non_supported=False, tas
# Mettre à jour le fichier web
# Plus besoin de update_web_progress
logger.debug(f"Tentative téléchargement {attempt}/{len(header_variants)} avec headers: {hv}")
logger.debug(f"Tentative téléchargement {attempt}/{len(header_variants)} avec headers: {_redact_headers(hv)}")
# Timeout plus long pour archive.org, avec tuple (connect_timeout, read_timeout)
timeout_val = (60, 90) if 'archive.org' in url else 30
r = session.get(url, stream=True, timeout=timeout_val, allow_redirects=True, headers=hv)
@@ -1161,13 +1252,36 @@ async def download_rom(url, platform, game_name, is_zip_non_supported=False, tas
time.sleep(2)
if response is None:
if archive_alt_urls and (last_status in (401, 403) or last_error_type in ("timeout", "connection", "request")):
for alt_url in archive_alt_urls:
try:
timeout_val = (45, 90)
logger.debug(f"Tentative archive.org alt URL: {alt_url}")
alt_headers = download_headers.copy()
try:
alt_host = urllib.parse.urlsplit(alt_url).netloc
if alt_host.startswith("ia") and alt_host.endswith(".archive.org"):
alt_headers["Referer"] = f"https://{alt_host}/"
alt_headers["Origin"] = "https://archive.org"
except Exception:
pass
r = session.get(alt_url, stream=True, timeout=timeout_val, allow_redirects=True, headers=alt_headers)
if r.status_code not in (401, 403):
r.raise_for_status()
response = r
url = alt_url
break
except Exception as e:
logger.debug(f"Alt URL archive.org échec: {e}")
# Fallback metadata archive.org pour message clair
if 'archive.org/download/' in url:
try:
identifier = url.split('/download/')[1].split('/')[0]
meta_resp = session.get(f'https://archive.org/metadata/{identifier}', timeout=30)
if meta_resp.status_code == 200:
meta_json = meta_resp.json()
if meta_json is None:
meta_resp = session.get(f'https://archive.org/metadata/{identifier}', timeout=30)
if meta_resp.status_code == 200:
meta_json = meta_resp.json()
if meta_json:
if meta_json.get('is_dark'):
raise requests.HTTPError(f"Item archive.org restreint (is_dark=true): {identifier}")
if not meta_json.get('files'):
@@ -1176,7 +1290,7 @@ async def download_rom(url, platform, game_name, is_zip_non_supported=False, tas
available = [f.get('name') for f in meta_json.get('files', [])][:10]
raise requests.HTTPError(f"Accès refusé (HTTP {last_status}). Fichiers disponibles exemples: {available}")
else:
raise requests.HTTPError(f"HTTP {last_status} & metadata {meta_resp.status_code} pour {identifier}")
raise requests.HTTPError(f"HTTP {last_status} & metadata indisponible pour {identifier}")
except requests.HTTPError:
raise
except Exception as e:
@@ -1365,6 +1479,21 @@ async def download_rom(url, platform, game_name, is_zip_non_supported=False, tas
logger.error(f"Exception lors de l'extraction RAR: {str(e)}")
result[0] = False
result[1] = f"Erreur extraction RAR {game_name}: {str(e)}"
elif extension == ".7z":
try:
success, msg = extract_7z(dest_path, dest_dir, url)
if success:
logger.debug(f"Extraction 7z réussie: {msg}")
result[0] = True
result[1] = _("network_download_extract_ok").format(game_name)
else:
logger.error(f"Erreur extraction 7z: {msg}")
result[0] = False
result[1] = _("network_extraction_failed").format(msg)
except Exception as e:
logger.error(f"Exception lors de l'extraction 7z: {str(e)}")
result[0] = False
result[1] = f"Erreur extraction 7z {game_name}: {str(e)}"
else:
logger.warning(f"Type d'archive non supporté: {extension}")
result[0] = True
@@ -2401,6 +2530,21 @@ async def download_from_1fichier(url, platform, game_name, is_zip_non_supported=
logger.error(f"Exception lors de l'extraction RAR: {str(e)}")
result[0] = False
result[1] = f"Erreur extraction RAR {game_name}: {str(e)}"
elif extension == ".7z":
try:
success, msg = extract_7z(dest_path, dest_dir, url)
logger.debug(f"Extraction 7z terminée: {msg}")
if success:
result[0] = True
result[1] = _("network_download_extract_ok").format(game_name)
else:
logger.error(f"Erreur extraction 7z: {msg}")
result[0] = False
result[1] = _("network_extraction_failed").format(msg)
except Exception as e:
logger.error(f"Exception lors de l'extraction 7z: {str(e)}")
result[0] = False
result[1] = f"Erreur extraction 7z {game_name}: {str(e)}"
else:
logger.warning(f"Type d'archive non supporté: {extension}")
result[0] = True

View File

@@ -1,4 +1,5 @@
import shutil
import requests # type: ignore
import re
import json
import os
@@ -648,7 +649,7 @@ def _check_url_connectivity(url: str, timeout: int = 6) -> bool:
headers = {"User-Agent": "RGSX-Connectivity/1.0"}
try:
try:
import requests # type: ignore
try:
response = requests.head(url, timeout=timeout, allow_redirects=True, headers=headers)
@@ -927,7 +928,7 @@ def check_extension_before_download(url, platform, game_name):
is_supported = is_extension_supported(sanitized_name, platform, extensions_data)
extension = os.path.splitext(sanitized_name)[1].lower()
is_archive = extension in (".zip", ".rar")
is_archive = extension in (".zip", ".rar", ".7z")
# Déterminer si le système (dossier) est connu dans extensions_data
dest_folder_name = _get_dest_folder_name(platform)
@@ -1838,6 +1839,95 @@ def extract_rar(rar_path, dest_dir, url):
except Exception as e:
logger.error(f"Erreur lors de la suppression de {rar_path}: {str(e)}")
def extract_7z(archive_path, dest_dir, url):
    """Extract a .7z archive into *dest_dir* using the bundled 7-Zip binary.

    Args:
        archive_path: Path to the downloaded .7z file.
        dest_dir: Destination directory (created if missing).
        url: Download URL, used as the key into config.download_progress.

    Returns:
        Tuple ``(success: bool, message: str)``.
    """
    try:
        os.makedirs(dest_dir, exist_ok=True)
        # Pick the platform-specific 7-Zip executable shipped with the app.
        if config.OPERATING_SYSTEM == "Windows":
            seven_z_cmd = config.SEVEN_Z_EXE
        else:
            seven_z_cmd = config.SEVEN_Z_LINUX
        try:
            # The bundled Linux binary may have lost its exec bit on copy.
            if os.path.exists(seven_z_cmd) and not os.access(seven_z_cmd, os.X_OK):
                logger.warning("7zz n'est pas exécutable, correction des permissions...")
                os.chmod(seven_z_cmd, 0o755)
        except Exception as e:
            logger.error(f"Erreur lors de la vérification des permissions de 7zz: {e}")
        if not os.path.exists(seven_z_cmd):
            return False, "7z non trouvé - vérifiez que 7z.exe (Windows) ou 7zz (Linux) est présent dans assets/progs"
        # Snapshot the destination before extraction so the special-platform
        # handler can tell which entries the archive actually produced.
        before_dirs = _capture_directories_before_extraction(dest_dir)
        before_items = _capture_all_items_before_extraction(dest_dir)
        iso_before = set()
        for root, dirs, files in os.walk(dest_dir):
            for file in files:
                if file.lower().endswith('.iso'):
                    iso_before.add(os.path.abspath(os.path.join(root, file)))
        # Best-effort total uncompressed size via `7z l -slt`, used only to
        # feed the progress UI; failures are logged and ignored.
        total_size = 0
        try:
            list_cmd = [seven_z_cmd, "l", "-slt", archive_path]
            result = subprocess.run(list_cmd, capture_output=True, text=True)
            if result.returncode == 0:
                current_size = None
                is_dir = False
                for line in result.stdout.splitlines():
                    line = line.strip()
                    if not line:
                        # Blank line terminates an -slt entry block; only
                        # count sizes of file (non-directory) entries.
                        if current_size is not None and not is_dir:
                            total_size += current_size
                        current_size = None
                        is_dir = False
                        continue
                    if line.startswith("Attributes ="):
                        attrs = line.split("=", 1)[1].strip()
                        if "D" in attrs:
                            is_dir = True
                    elif line.startswith("Size ="):
                        try:
                            current_size = int(line.split("=", 1)[1].strip())
                        except Exception:
                            current_size = None
                # Flush the last entry (output may not end with a blank line).
                if current_size is not None and not is_dir:
                    total_size += current_size
        except Exception as e:
            logger.debug(f"Impossible de calculer la taille 7z: {e}")
        if url not in getattr(config, 'download_progress', {}):
            config.download_progress[url] = {}
        config.download_progress[url].update({
            "downloaded_size": 0,
            "total_size": total_size,
            "status": "Extracting",
            "progress_percent": 0
        })
        config.needs_redraw = True
        extract_cmd = [seven_z_cmd, "x", archive_path, f"-o{dest_dir}", "-y"]
        logger.debug(f"Commande d'extraction 7z: {' '.join(extract_cmd)}")
        result = subprocess.run(extract_cmd, capture_output=True, text=True)
        # 7-Zip exit codes: 0 = OK, 1 = warning, 2 = fatal error,
        # 7/8/255 = other failures. The previous `> 2` check silently
        # treated fatal errors (code 2) as warnings and reported success;
        # anything above 1 must be a failure.
        if result.returncode > 1:
            error_msg = result.stderr.strip() or f"Erreur extraction 7z (code {result.returncode})"
            logger.error(error_msg)
            return False, error_msg
        if result.returncode != 0:
            logger.warning(f"7z a retourné un avertissement (code {result.returncode}): {result.stderr}")
        # Platform-specific post-processing (e.g. PS3 ISO handling).
        success, error_msg = _handle_special_platforms(dest_dir, archive_path, before_dirs, iso_before, url, before_items)
        if not success:
            return False, error_msg
        return _finalize_extraction(archive_path, dest_dir, url)
    except Exception as e:
        logger.error(f"Erreur lors de l'extraction 7z: {str(e)}")
        return False, _("utils_extraction_failed").format(str(e))
def handle_ps3(dest_dir, new_dirs=None, extracted_basename=None, url=None, archive_name=None):
"""Gère le traitement spécifique des jeux PS3.
PS3 Redump (ps3): Décryptage ISO + extraction dans dossier .ps3
@@ -1894,7 +1984,6 @@ def handle_ps3(dest_dir, new_dirs=None, extracted_basename=None, url=None, archi
key_zip_path = os.path.join(dest_dir, f"_temp_key_{key_zip_name}")
try:
import requests
response = requests.get(key_url, stream=True, timeout=30)
response.raise_for_status()
@@ -2690,6 +2779,57 @@ def load_api_keys(force: bool = False):
}
def load_archive_org_cookie(force: bool = False) -> str:
    """Load the Archive.org cookie from a plain-text file.

    - File: config.ARCHIVE_ORG_COOKIE_PATH (created empty when missing so
      users know where to paste their cookie).
    - Accepts either a raw cookie line or a full ``"Cookie: ..."`` header.
    - Caches the value keyed on the file's mtime to avoid re-reading.

    Args:
        force: When True, re-read the file even if the mtime is unchanged.

    Returns:
        The cookie string, or ``""`` when unavailable.
    """
    try:
        path = getattr(config, 'ARCHIVE_ORG_COOKIE_PATH', '')
        if not path:
            return ""
        cache_attr = '_archive_cookie_cache'
        if not hasattr(config, cache_attr):
            setattr(config, cache_attr, {'mtime': None, 'value': ''})
        cache_data = getattr(config, cache_attr)
        # Create an empty cookie file on first use.
        try:
            if not os.path.exists(path):
                os.makedirs(os.path.dirname(path), exist_ok=True)
                with open(path, 'w', encoding='utf-8') as f:
                    f.write("")
        except Exception as ce:
            logger.error(f"Impossible de préparer le fichier cookie archive.org: {ce}")
            return ""
        try:
            mtime = os.path.getmtime(path)
        except Exception:
            mtime = None
        if force or (mtime is not None and mtime != cache_data.get('mtime')):
            try:
                with open(path, 'r', encoding='utf-8') as f:
                    value = f.read().strip()
            except Exception as read_err:
                # Renamed from `re`, which shadowed the regex module name.
                logger.error(f"Erreur lecture cookie archive.org: {read_err}")
                value = ""
            # Allow a pasted "Cookie: ..." header line as well as a raw value.
            if value.lower().startswith("cookie:"):
                value = value.split(":", 1)[1].strip()
            cache_data['mtime'] = mtime
            cache_data['value'] = value
        return cache_data.get('value', '') or ""
    except Exception as e:
        logger.error(f"Erreur load_archive_org_cookie: {e}")
        return ""
def save_api_keys(api_keys: dict):
"""Sauvegarde les clés API (1fichier, AllDebrid, RealDebrid) dans leurs fichiers respectifs.

View File

@@ -1,3 +1,3 @@
{
"version": "2.5.0.2"
"version": "2.5.0.4"
}