diff --git a/main.py b/main.py
index 7d3745e..b7bec4b 100644
--- a/main.py
+++ b/main.py
@@ -1,6 +1,4 @@
-
-
-from PySide6.QtCore import QStandardPaths, QDataStream, QByteArray, QIODevice, Signal, Qt
+from PySide6.QtCore import QStandardPaths, QDataStream, QByteArray, QIODevice, Signal, Qt, QTimer, QCryptographicHash
from PySide6.QtNetwork import QLocalServer, QLocalSocket
from PySide6.QtWidgets import QApplication
@@ -10,6 +8,10 @@ import asyncio
import os
import platform
import argparse
+import hashlib
+import random
+import string
+import base64
from pathlib import Path
from src.logs import configure_logging
@@ -23,6 +25,7 @@ class SingleApplication(QApplication):
def __init__(self, app_id, args):
super().__init__(args)
self.app_id = app_id
+ self.shared_key = hashlib.sha256(app_id.encode()).hexdigest()[:16]
self.server = None
self.is_primary_instance = self.try_connect_to_primary()
@@ -34,6 +37,52 @@ class SingleApplication(QApplication):
# If an error occurs (the server already exists but is a zombie), remove it and retry
QLocalServer.removeServer(self.app_id)
self.server.listen(self.app_id)
+ else:
+ QTimer.singleShot(0, self.quit)
+
+ def encrypt_data(self, data_str):
+ """Méthode simple pour brouiller les données"""
+ # Générer une "nonce" aléatoire pour éviter que les mêmes données produisent le même résultat
+ nonce = ''.join(random.choice(string.ascii_letters + string.digits) for _ in range(8))
+
+ # Combine the nonce, the shared key and the data
+ combined = nonce + self.shared_key + data_str
+
+ # Hash the combination with SHA-256
+ hash_obj = QCryptographicHash(QCryptographicHash.Algorithm.Sha256)
+ hash_obj.addData(combined.encode())
+ signature = hash_obj.result().toHex().data().decode()[:16]
+
+ # Base64-encode nonce + signature + data
+ encoded = base64.b64encode((nonce + signature + data_str).encode()).decode()
+ return encoded
+
+ def decrypt_data(self, encoded_str):
+ """Déchiffre les données et vérifie leur intégrité"""
+ try:
+ # Decode from base64
+ decoded = base64.b64decode(encoded_str.encode()).decode()
+
+ # Extract the nonce, the signature and the data
+ nonce = decoded[:8]
+ signature = decoded[8:24]
+ data_str = decoded[24:]
+
+ # Verify the signature
+ combined = nonce + self.shared_key + data_str
+ hash_obj = QCryptographicHash(QCryptographicHash.Algorithm.Sha256)
+ hash_obj.addData(combined.encode())
+ expected_signature = hash_obj.result().toHex().data().decode()[:16]
+
+ if signature != expected_signature:
+ print("Signature invalide, données potentiellement corrompues ou falsifiées")
+ return None
+
+ return data_str
+
+ except Exception as e:
+ print(f"Erreur lors du déchiffrement: {e}")
+ return None
def try_connect_to_primary(self):
"""Essaie de se connecter à l'instance primaire de l'application"""
@@ -43,26 +92,35 @@ class SingleApplication(QApplication):
if socket.waitForConnected(500):
# Collect the arguments so they can be forwarded to the primary instance
args = sys.argv[1:] if len(sys.argv) > 1 else []
+ encrypt_args = self.encrypt_data(";".join(args))
# Send the arguments to the primary instance
stream = QDataStream(socket)
- stream.writeQString(";".join(args))
+ stream.writeQString(encrypt_args)
socket.flush()
+ socket.waitForBytesWritten(1000)
socket.disconnectFromServer()
+
+ QTimer.singleShot(0, self.quit)
+
return False # This is not the primary instance
return True # This is the primary instance
def handle_new_connection(self):
"""Gère une nouvelle connexion d'une instance secondaire"""
socket = self.server.nextPendingConnection()
- if socket.waitForReadyRead(1000):
+
+ if socket.waitForReadyRead(2000):
stream = QDataStream(socket)
- args_str = stream.readQString()
- args = args_str.split(";") if args_str else []
+
+ encrypted_args = stream.readQString()
+ args_str = self.decrypt_data(encrypted_args)
# Emit a signal to tell the application which files to open
- if args:
- self.files_received.emit(args)
+ if args_str:
+ args = args_str.split(";") if args_str else []
+ if args:
+ self.files_received.emit(args)
socket.disconnectFromServer()
@@ -84,6 +142,9 @@ if __name__ == "__main__":
app_id = "OxAPP25"
app = SingleApplication(app_id, sys.argv)
+ if not app.is_primary_instance:
+ sys.exit(0)
+
event_loop = qasync.QEventLoop(app)
asyncio.set_event_loop(event_loop)
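The nonce + signature + base64 framing used by encrypt_data/decrypt_data above can also be written with the standard library's hmac module and a constant-time comparison. The following is a minimal standalone sketch, not the project's code; the key derivation and the 8/16-character field widths are assumptions mirroring the diff:

    import base64
    import hashlib
    import hmac
    import secrets

    SHARED_KEY = hashlib.sha256(b"OxAPP25").hexdigest()[:16]  # hypothetical key derivation

    def encode_payload(data_str: str, key: str = SHARED_KEY) -> str:
        # Random 8-character nonce so that identical payloads differ on the wire
        nonce = secrets.token_hex(4)
        signature = hmac.new(key.encode(), (nonce + data_str).encode(), hashlib.sha256).hexdigest()[:16]
        return base64.b64encode((nonce + signature + data_str).encode()).decode()

    def decode_payload(encoded: str, key: str = SHARED_KEY) -> str | None:
        decoded = base64.b64decode(encoded).decode()
        nonce, signature, data_str = decoded[:8], decoded[8:24], decoded[24:]
        expected = hmac.new(key.encode(), (nonce + data_str).encode(), hashlib.sha256).hexdigest()[:16]
        # compare_digest avoids leaking how many characters of the signature matched
        return data_str if hmac.compare_digest(signature, expected) else None

Like the diff's scheme, this only authenticates the payload; the arguments themselves still travel in clear text over the local socket.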
diff --git a/src/conf.py b/src/conf.py
index 7e5437f..f556704 100644
--- a/src/conf.py
+++ b/src/conf.py
@@ -31,13 +31,6 @@ class ConfManager(QObject):
self.logger = logging.getLogger(__name__)
self.logger.setLevel(logging.DEBUG) # DEBUG level to capture every message
- # Make sure a log handler is present
- if not self.logger.handlers:
- handler = logging.StreamHandler()
- formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
- handler.setFormatter(formatter)
- self.logger.addHandler(handler)
-
self.logger.info("Initialisation de ConfManager")
# Verrou pour les opérations de fichier (thread-safety)
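Dropping the per-logger StreamHandler only makes sense if logging is configured once, centrally; src.logs.configure_logging is imported in main.py but not shown in this diff, so the following is merely a sketch of what such a central setup typically looks like, not the project's actual implementation:

    import logging

    def configure_logging(level: int = logging.DEBUG) -> None:
        # One handler on the root logger; loggers created with getLogger(__name__) propagate to it
        handler = logging.StreamHandler()
        handler.setFormatter(logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s"))
        root = logging.getLogger()
        root.setLevel(level)
        root.addHandler(handler)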
diff --git a/src/download.py b/src/download.py
index d34e1e8..0256137 100644
--- a/src/download.py
+++ b/src/download.py
@@ -1,3 +1,5 @@
+import traceback
+
from PySide6.QtCore import QObject, Signal, QTimer
from PySide6.QtNetwork import QNetworkCookie
@@ -13,6 +15,7 @@ import httpx
from src.datatypes import FileType, FileStatsType
from src.async_file import async_open
+from src.utils import aexec_in
if TYPE_CHECKING:
from windows.main_window import MainWindow
@@ -36,7 +39,7 @@ class DownloadManager(QObject):
self.task_stats: dict[str, FileStatsType] = {}
self.waiter = asyncio.Event()
self.client_session: None|httpx.AsyncClient = None
- self.cookies = []
+ self.cookies = {}
# slots
# self.status_updated.connect(lambda data: self.conf.set_value("files", self.files))
@@ -55,21 +58,26 @@ class DownloadManager(QObject):
self.status = {}
self.update_status()
- async def initialize(self):
- self.client_session = httpx.AsyncClient(
- timeout=httpx.Timeout(None),
- follow_redirects=True,
- verify=False,
- # http2=True,
- )
-
- for cookie in self.cookies:
- await self.add_cookie(cookie)
- self.logger.info("Session aiohttp initialisée")
+ # async def initialize(self):
+ # self.client_session = httpx.AsyncClient(
+ # timeout=httpx.Timeout(
+ # connect=5.0, # 5 seconds to establish the connection
+ # read=None, # no timeout while reading the data (the download itself)
+ # write=60.0, # 60 seconds to send data
+ # pool=5.0 # 5 seconds to get a connection from the pool
+ # ),
+ # follow_redirects=True,
+ # verify=False,
+ # # http2=True,
+ # )
+ #
+ # for cookie in self.cookies:
+ # await self.add_cookie(cookie)
+ # self.logger.info("Session aiohttp initialisée")
async def loop_queue(self):
- if self.client_session is None:
- await self.initialize()
+ # if self.client_session is None:
+ # await self.initialize()
self.logger.info("Démarrage de la boucle de téléchargement")
while True:
@@ -78,6 +86,8 @@ class DownloadManager(QObject):
else:
file = await self.next_file()
if file is None:
+ if not self.tasks:
+ await self.set_pause(True)
await self.wait()
else:
self.tasks[file] = asyncio.create_task(self.download_file(file))
@@ -89,22 +99,52 @@ class DownloadManager(QObject):
self.logger.info("loop queue resumed")
- def set_pause(self, value):
+ async def set_pause(self, value):
if self.pause == value:
return
self.pause = value
- if self.pause:
- for file_id, task in self.tasks.items():
- if not task.done():
- task.cancel()
- self.logger.info("Tous les téléchargements ont été mis en pause")
- else:
- self.pause = False
- self.waiter.set()
- self.logger.info("Reprise des téléchargements")
+ try:
+ if self.pause:
+ self.logger.info("Essaie de la mise en pause")
+
+ # on attend 0.5 sec pour voir si le téléchargement s'interrompt proprement avec le self.pause.
+ asyncio.create_task(aexec_in(1, self._clean_connections))
+ # await asyncio.sleep(0.5)
+ # for task in self.tasks.values():
+ # if task and not task.done():
+ # task.cancel()
+ # # Wait for the tasks to finish cleanly
+ # await asyncio.gather(*[t for t in self.tasks.values() if t and not t.done()],
+ # return_exceptions=True)
+ else:
+ self.pause = False
+ self.waiter.set()
+ self.logger.info("Reprise des téléchargements")
+ except Exception as e:
+ self.logger.error(f"Erreur lors de la mise en pause: {e}")
+
self.update_status()
+ async def _clean_connections(self):
+ """Fonction asynchrone interne pour nettoyer les connexions lors de la pause"""
+ self.logger.debug("cleaning connections")
+ try:
+ # Cancel the running tasks cleanly
+ for file, task in self.tasks.items():
+ self.logger.debug(f"pending task for {file.target}: {task}")
+ if task and not task.done():
+ task.cancel()
+ self.logger.debug(f"cancelling task for {file.target}")
+
+
+ # Wait for the tasks to finish; asyncio.wait() raises if given an empty collection
+ pending = [t for t in self.tasks.values() if t and not t.done()]
+ if pending:
+ await asyncio.wait(pending, timeout=2.0)
+ except Exception as e:
+ self.logger.error(f"Erreur lors du nettoyage des connexions: {e}")
+
async def next_file(self) -> FileType | None:
self.logger.debug("Recherche du prochain fichier à télécharger")
for file in self.files.values():
@@ -138,43 +178,60 @@ class DownloadManager(QObject):
headers.update({"Range": f"bytes={stats.downloaded_size}-{stats.total_size}"})
mode: Literal["ab", "wb"] = "ab" if stats.downloaded_size > 0 else "wb"
+
+ # Initialize the HTTP session:
try:
- async with self.client_session.stream("GET", file.url, headers=headers) as response:
- async with await anyio.open_file(file_path, mode) as f:
+ async with httpx.AsyncClient(
+ timeout=httpx.Timeout(connect=5.0, read=None, write=60.0, pool=5.0),
+ follow_redirects=True,
+ verify=False,
+ cookies=self.cookies,
+ ) as client:
+ # download request
+ async with client.stream("GET", file.url, headers=headers) as response:
+ # raise on bad HTTP status codes
+ response.raise_for_status()
+ # open the target file and start writing
+ async with await anyio.open_file(file_path, mode) as f:
- last_update_time = time.monotonic()
- last_downloaded_size = stats.downloaded_size
- async for chunk in response.aiter_bytes(self.chunk_size):
- if not chunk:
- break
- await f.write(chunk)
- if self.pause:
- break
- chunk_size = len(chunk)
- stats.downloaded_size += chunk_size
+ last_update_time = time.monotonic()
+ last_downloaded_size = stats.downloaded_size
- current_time = time.monotonic()
- elapsed_time = current_time - last_update_time
+ async for chunk in response.aiter_bytes(self.chunk_size):
+ if self.pause:
+ await response.aclose()
+ break
- if elapsed_time >= 1.0:
- bytes_downloaded = stats.downloaded_size - last_downloaded_size
- current_speed = bytes_downloaded / elapsed_time
- if stats.speed > 0:
- stats.speed = round(0.7 * current_speed + 0.3 * stats.speed)
+ if not chunk:
+ break
+
+ await f.write(chunk)
+ chunk_size = len(chunk)
+ stats.downloaded_size += chunk_size
+
+ current_time = time.monotonic()
+ elapsed_time = current_time - last_update_time
+
+ if elapsed_time >= 1.0:
+ bytes_downloaded = stats.downloaded_size - last_downloaded_size
+ current_speed = bytes_downloaded / elapsed_time
+ if stats.speed > 0:
+ stats.speed = round(0.7 * current_speed + 0.3 * stats.speed)
+ else:
+ stats.speed = round(current_speed)
+
+ last_update_time = current_time
+ last_downloaded_size = stats.downloaded_size
else:
- stats.speed = round(current_speed)
-
- last_update_time = current_time
- last_downloaded_size = stats.downloaded_size
- else:
- await asyncio.sleep(0.005)
+ await asyncio.sleep(0.005)
except httpx.HTTPStatusError as e:
- self.logger.error(f"Erreur HTTP lors du téléchargement de {file.target}: {e.response.status_code} - {e}")
+ self.logger.error(
+ f"Erreur HTTP lors du téléchargement de {file.target}: {e.response.status_code} - {e}")
file.error = f"Erreur HTTP {e.response.status_code}: {str(e)}"
except httpx.TimeoutException as e:
- self.logger.error(f"Délai d'attente dépassé lors du téléchargement de {file.target}: {str(e)}")
+ self.logger.error(
+ f"Délai d'attente dépassé lors du téléchargement de {file.target}: {str(e)}")
file.error = f"Délai d'attente dépassé: {str(e)}"
except httpx.ConnectError as e:
@@ -198,13 +255,17 @@ class DownloadManager(QObject):
file.error = f"Erreur d'E/S: {str(e)}"
except Exception as e:
- self.logger.error(f"Erreur inattendue lors du téléchargement de {file.target}: {type(e).__name__} - {str(e)}")
+ self.logger.error(
+ f"Erreur inattendue lors du téléchargement de {file.target}: {type(e).__name__} - {str(e)}")
+ print(traceback.format_exc())
file.error = f"Erreur inattendue: {str(e)}"
else:
- file.downloaded = True
- self.logger.info(f"Téléchargement de {file.target} terminé avec succès")
-
+ if self.pause:
+ self.logger.info(f"Téléchargement de {file.target} mis en pause")
+ else:
+ file.downloaded = True
+ self.logger.info(f"Téléchargement de {file.target} terminé avec succès")
finally:
await self.task_ended(file)
@@ -253,29 +314,21 @@ class DownloadManager(QObject):
self.files_updated.emit(self.files)
def update_status(self):
- self.status = {
+ new_status = {
"pause": self.pause,
"max_worker": self.max_worker,
"total_files": len(self.files),
"downloaded_files": sum(file.downloaded for file in self.files.values() if file.downloaded),
"downloading": [task.id for task in self.tasks.keys()],
"total_size": sum(file.total_size for file in self.files.values()),
- "downloaded_size": sum(file.total_size for file in self.files.values() if file.downloaded) + sum((dl_stat.downloaded_size for dl_stat in self.task_stats.values()), 0),
+ # "downloaded_size": sum(file.total_size for file in self.files.values() if file.downloaded) + sum((dl_stat.downloaded_size for dl_stat in self.task_stats.values()), 0),
+ "downloaded_size": sum(file.size_downloaded for file in self.files.values()),
"speed": sum((dl_stat.speed for dl_stat in self.task_stats.values()), 0),
"downloader_stats": {key: dl_stat.to_dict() for key, dl_stat in self.task_stats.items()}
}
- self.status_updated.emit(self.status)
-
- # def update_dl_stats(self):
- # old_stats = deepcopy(self.dl_stats)
- # self.dl_stats = {
- # "speed": sum((dl_stat.speed for dl_stat in self.task_stats.values()), 0),
- # "downloaded_size": sum((dl_stat.downloaded_size for dl_stat in self.task_stats.values()), 0),
- # "downloading_stats": {key: dl_stat.to_dict() for key, dl_stat in self.task_stats.items()},
- # }
- # if old_stats != self.dl_stats:
- # self.stats_updated.emit(self.dl_stats)
- # return self.dl_stats
+ if self.status != new_status:
+ self.status = new_status
+ self.status_updated.emit(self.status)
async def add_cookie(self, cookie: QNetworkCookie):
"""
@@ -284,16 +337,8 @@ class DownloadManager(QObject):
Args:
cookie: A QNetworkCookie object from PySide6
"""
- if self.client_session is None:
- # If the session is not initialized yet, store the cookie for later
- self.cookies.append(cookie)
- return
-
# Extract the essential information from the QNetworkCookie
name = cookie.name().data().decode()
value = cookie.value().data().decode()
-
- # Add the cookie directly, without any extra attributes
- self.client_session.cookies[name] = value
-
+ self.cookies.update({name: value})
self.logger.info(f"Cookie ajouté: {name}={value}")
diff --git a/src/handler.py b/src/handler.py
index 1e9e9e0..e2abf79 100644
--- a/src/handler.py
+++ b/src/handler.py
@@ -1,3 +1,5 @@
+import asyncio
+
from PySide6.QtCore import QObject, Slot, Signal, QTimer, Property
import json
@@ -51,12 +53,12 @@ class WebHandler(QObject):
@Slot(result=str)
def on_site_ready(self):
- self.site_ready = True
- self.on_site_ready.emit()
+ self.site_loaded = True
+ self.site_ready.emit()
@Slot(bool)
def set_pause(self, value):
- self.download_manager.set_pause(value)
+ asyncio.create_task(self.download_manager.set_pause(value))
@Slot(list)
@Slot(str)
@@ -70,7 +72,7 @@ class WebHandler(QObject):
@Slot(list)
def del_files(self, file_ids):
if isinstance(file_ids, str):
- file_ids = json.loads(file_ids)
+ file_ids = [file_ids]
self.download_manager.del_files(file_ids)
@Slot()
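set_pause above is a synchronous Qt slot that now schedules a coroutine with asyncio.create_task; that only works because qasync runs the Qt application inside an asyncio loop (see main.py). A minimal self-contained sketch of the pattern, with an illustrative Bridge class standing in for WebHandler and a sleep standing in for DownloadManager.set_pause:

    import asyncio

    import qasync
    from PySide6.QtCore import QObject, QTimer, Slot
    from PySide6.QtWidgets import QApplication

    class Bridge(QObject):
        @Slot(bool)
        def set_pause(self, value: bool) -> None:
            # The qasync loop is already running when the slot fires, so create_task is safe here
            asyncio.create_task(self._set_pause(value))

        async def _set_pause(self, value: bool) -> None:
            await asyncio.sleep(0.1)  # stand-in for the real async work
            print("pause set to", value)
            QApplication.instance().quit()

    if __name__ == "__main__":
        app = QApplication([])
        loop = qasync.QEventLoop(app)
        asyncio.set_event_loop(loop)
        bridge = Bridge()
        QTimer.singleShot(0, lambda: bridge.set_pause(True))
        with loop:
            loop.run_forever()

If the slot can ever be invoked before the loop is running, wrapping the create_task call in a try/except RuntimeError (or deferring it with QTimer.singleShot) avoids a crash.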
diff --git a/src/utils.py b/src/utils.py
index 28d3f2b..4b259c9 100644
--- a/src/utils.py
+++ b/src/utils.py
@@ -1,3 +1,5 @@
+import asyncio
+
from PySide6.QtNetwork import QNetworkCookie
import time
@@ -94,3 +96,7 @@ class RestrictedUnpickler(pickle.Unpickler):
raise pickle.UnpicklingError(f"Accès refusé à la classe {module}.{name} pour des raisons de sécurité")
+async def aexec_in(secs, func):
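+ """Wait secs seconds, then await func() and return its result."""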
+ await asyncio.sleep(secs)
+ return await func()
+
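aexec_in is a small fire-and-forget helper: the caller schedules it with asyncio.create_task and keeps going, and the wrapped coroutine runs after the delay (this is how set_pause defers _clean_connections by one second). A plain-asyncio usage sketch, independent of Qt:

    import asyncio

    async def aexec_in(secs, func):
        """Wait secs seconds, then await func() and return its result."""
        await asyncio.sleep(secs)
        return await func()

    async def cleanup():
        print("cleaning connections")

    async def main():
        # Schedule cleanup() one second from now without blocking the caller
        asyncio.create_task(aexec_in(1, cleanup))
        await asyncio.sleep(2)  # keep the loop alive long enough for the callback to fire

    asyncio.run(main())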
diff --git a/windows/site_window.py b/windows/site_window.py
index ac35c1b..b2a9311 100644
--- a/windows/site_window.py
+++ b/windows/site_window.py
@@ -34,7 +34,6 @@ class SiteWindow(QWebEngineView):
self.persistent_profile = QWebEngineProfile("OxAppProfile", parent=self)
self.cookie_store = self.persistent_profile.cookieStore()
self.cookie_store.cookieAdded.connect(self.on_cookie_added.emit)
- self.cookie_store.cookieAdded.connect(self.test_cookie)
self.persistent_profile.setHttpCacheType(QWebEngineProfile.HttpCacheType.MemoryHttpCache)
self.persistent_profile.setPersistentStoragePath(str(self.conf.app_config_path / "web_cache"))
self.persistent_profile.setPersistentCookiesPolicy(QWebEngineProfile.PersistentCookiesPolicy.AllowPersistentCookies)
@@ -60,14 +59,9 @@ class SiteWindow(QWebEngineView):
self.load(parent.url)
- def test_cookie(self, *args, **kwargs):
- print("cook", *args, **kwargs)
- # self.cookie_store.loadAllCookies()
-
@Slot(bool)
def on_load_finished(self, is_success):
- print("load finished")
if is_success:
api_file = QFile(":/qtwebchannel/qwebchannel.js")
api_file.open(QIODevice.OpenModeFlag.ReadOnly)
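The hunk above ends just after qwebchannel.js is opened from the Qt resource system; the diff does not show the rest of on_load_finished. For reference, a typical completion of that injection pattern reads the script, runs it in the page, and registers the Python handler on a QWebChannel; the object name "handler" and the function name are assumptions, not taken from this repository:

    from PySide6.QtCore import QFile, QIODevice
    from PySide6.QtWebChannel import QWebChannel

    def inject_webchannel(view, handler) -> None:
        # Read Qt's bundled qwebchannel.js from the resource system
        api_file = QFile(":/qtwebchannel/qwebchannel.js")
        if not api_file.open(QIODevice.OpenModeFlag.ReadOnly):
            return
        api_script = api_file.readAll().data().decode()
        api_file.close()
        # Make the QWebChannel JavaScript API available in the page
        view.page().runJavaScript(api_script)
        # Expose the Python-side handler object to the page
        channel = QWebChannel(view.page())
        channel.registerObject("handler", handler)
        view.page().setWebChannel(channel)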