Compare commits
3 Commits: last_async...master

| Author | SHA1 | Date |
|---|---|---|
|  | ee5e1e2af4 |  |
|  | 01d598684c |  |
|  | f442e334d0 |  |
.idea/misc.xml (generated): 2 changes

@@ -3,7 +3,7 @@
   <component name="Black">
     <option name="sdkName" value="Python 3.13 (oxapp25)" />
   </component>
-  <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.13 (oxapp25)" project-jdk-type="Python SDK" />
+  <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.12 (oxapp25)" project-jdk-type="Python SDK" />
   <component name="PythonCompatibilityInspectionAdvertiser">
     <option name="version" value="3" />
   </component>
.idea/oxapp25.iml (generated): 2 changes

@@ -4,7 +4,7 @@
   <content url="file://$MODULE_DIR$">
     <excludeFolder url="file://$MODULE_DIR$/.venv" />
   </content>
-  <orderEntry type="jdk" jdkName="Python 3.13 (oxapp25)" jdkType="Python SDK" />
+  <orderEntry type="jdk" jdkName="Python 3.12 (oxapp25)" jdkType="Python SDK" />
   <orderEntry type="sourceFolder" forTests="false" />
 </component>
 </module>
build.bat

@@ -7,10 +7,15 @@ if "%1"=="resources" (
     pyinstaller OxApp.spec --noconfirm
 ) else if "%1"=="installer" (
     "C:\Program Files (x86)\Inno Setup 6\ISCC.exe" "setup.iss"
+) else if "%1"=="clean" (
+    echo Nettoyage des packages non requis...
+    pip freeze > unins && pip uninstall -y -r unins && pip install -r requirements.txt && del unins
+    echo Nettoyage terminé.
 ) else (
     echo Commandes disponibles:
     echo build.bat resources - Compile les ressources
     echo build.bat deploy - Déploie l'application
     echo build.bat pyinstaller - Crée l'exécutable avec PyInstaller
     echo build.bat installer - Crée l'installateur avec Inno Setup
+    echo build.bat clean - Nettoie les packages non requis dans requirements.txt
 )
main.py: 17 changes

@@ -3,9 +3,7 @@ from PySide6.QtGui import QPalette, QColor
 from PySide6.QtNetwork import QLocalServer, QLocalSocket
 from PySide6.QtWidgets import QApplication
 
-import qasync
 import sys
-import asyncio
 import os
 import argparse
 import logging
@@ -189,15 +187,6 @@ if __name__ == "__main__":
         logger.info("Instance secondaire détectée, fermeture de l'application")
         sys.exit(0)
 
-    # Configuration de la boucle d'événements asyncio
-    logger.debug("Configuration de la boucle d'événements asyncio")
-    event_loop = qasync.QEventLoop(app)
-    asyncio.set_event_loop(event_loop)
-
-    app_close_event = asyncio.Event()
-    app.aboutToQuit.connect(app_close_event.set)
-    logger.debug("Signal aboutToQuit connecté à l'événement de fermeture")
-
     # Création de la fenêtre principale
     logger.info("Création de la fenêtre principale")
     window = MainWindow()
@@ -215,9 +204,5 @@ if __name__ == "__main__":
     window.show()
     logger.info("Fenêtre principale affichée")
 
-    # Exécution de la boucle d'événements
-    logger.debug("Démarrage de la boucle d'événements")
-    with event_loop:
-        event_loop.run_until_complete(app_close_event.wait())
-
     logger.info("Application terminée")
+    sys.exit(app.exec())
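For reference, the removed block followed the standard qasync startup pattern. A minimal, self-contained sketch of that pattern is shown below; the window here is a placeholder QLabel, not the project's MainWindow.

```python
import asyncio
import sys

import qasync
from PySide6.QtWidgets import QApplication, QLabel


def main() -> None:
    app = QApplication(sys.argv)

    # qasync drives the Qt event loop and the asyncio loop together.
    event_loop = qasync.QEventLoop(app)
    asyncio.set_event_loop(event_loop)

    # Closing the app sets this event, which ends run_until_complete below.
    app_close_event = asyncio.Event()
    app.aboutToQuit.connect(app_close_event.set)

    window = QLabel("hello")  # placeholder for the real main window
    window.show()

    with event_loop:
        event_loop.run_until_complete(app_close_event.wait())


if __name__ == "__main__":
    main()
```

After this change, main.py drops all of that wiring and ends with the stock `sys.exit(app.exec())` call, since no asyncio loop is needed anymore.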
PySide6 deployment spec:

@@ -43,7 +43,7 @@ qml_files =
 excluded_qml_plugins =
 
 # qt modules used. comma separated
-modules = Network,WebEngineCore,Widgets,Gui,WebEngineWidgets,Core,WebChannel
+modules = WebChannel,Core,WebEngineCore,WebEngineWidgets,Gui,Widgets,Network
 
 # qt plugins used by the application. only relevant for desktop deployment. for qt plugins used
 # in android application see [android][plugins]
@@ -71,7 +71,7 @@ macos.permissions =
 mode = standalone
 
 # (str) specify any extra nuitka arguments
-extra_args = --quiet --noinclude-qt-translations --windows-console-mode=disable --output-filename=oxapp --company-name=Oxpanel --product-name=OxApp --file-description="Application légitime OxApp" --copyright="Oxpanel (c) 2023" --file-version=1.0.0 --product-version=1.0.0 --unstripped --no-deployment-flag=self-execution --disable-ccache --disable-console
+extra_args = --quiet --noinclude-qt-translations --windows-console-mode=disable --output-filename=oxapp --company-name=Oxpanel --product-name=OxApp --file-description="OxApp" --copyright="Oxpanel (c) 2023" --file-version=1.0.0 --product-version=1.0.0 --unstripped --no-deployment-flag=self-execution --disable-ccache --disable-console
 
 [buildozer]
 
@@ -99,3 +99,4 @@ local_libs =
 # architecture of deployed platform
 # possible values = ["aarch64", "armv7a", "i686", "x86_64"]
 arch =
+
requirements.txt:

@@ -1,4 +1 @@
 PySide6<6.9,>=6.8.0
-qasync>=0.27.1
-httpx[http2]
-anyio
src/async_file.py (deleted)

@@ -1,65 +0,0 @@
The entire file is removed. Its former content:

import asyncio
from functools import partial
from concurrent.futures import ThreadPoolExecutor

# Créer un executor global pour toutes les opérations fichier
_executor = ThreadPoolExecutor()


class AsyncFile:
    """Un wrapper pour les opérations de fichier asynchrones basé sur run_in_executor."""

    def __init__(self, file_path, mode='r', *args, **kwargs):
        self.file_path = file_path
        self.mode = mode
        self.args = args
        self.kwargs = kwargs
        self.file = None
        self._loop = asyncio.get_running_loop()

    async def __aenter__(self):
        # Ouvrir le fichier de façon asynchrone
        open_func = partial(open, self.file_path, self.mode, *self.args, **self.kwargs)
        self.file = await self._loop.run_in_executor(_executor, open_func)
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        # Fermer le fichier de façon asynchrone
        if self.file:
            await self._loop.run_in_executor(_executor, self.file.close)

    async def write(self, data):
        """Écrire des données dans le fichier de façon asynchrone."""
        if not self.file:
            raise ValueError("Le fichier n'est pas ouvert")
        await self._loop.run_in_executor(_executor, self.file.write, data)

    async def read(self, size=-1):
        """Lire des données depuis le fichier de façon asynchrone."""
        if not self.file:
            raise ValueError("Le fichier n'est pas ouvert")
        return await self._loop.run_in_executor(_executor, self.file.read, size)

    async def seek(self, offset, whence=0):
        """Déplacer le curseur dans le fichier de façon asynchrone."""
        if not self.file:
            raise ValueError("Le fichier n'est pas ouvert")
        return await self._loop.run_in_executor(_executor, self.file.seek, offset, whence)

    async def tell(self):
        """Obtenir la position actuelle dans le fichier de façon asynchrone."""
        if not self.file:
            raise ValueError("Le fichier n'est pas ouvert")
        return await self._loop.run_in_executor(_executor, self.file.tell)

    async def flush(self):
        """Forcer l'écriture des données en mémoire tampon sur le disque."""
        if not self.file:
            raise ValueError("Le fichier n'est pas ouvert")
        await self._loop.run_in_executor(_executor, self.file.flush)


# Fonction helper pour simplifier l'utilisation (comme aiofiles.open)
async def async_open(file_path, mode='r', *args, **kwargs):
    """Ouvre un fichier en mode asynchrone, similaire à aiofiles.open."""
    return AsyncFile(file_path, mode, *args, **kwargs)
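The removed wrapper was meant to be used the way aiofiles is used. A minimal usage sketch, assuming the AsyncFile/async_open definitions above (the file paths are illustrative, not from the repository):

```python
import asyncio

from src.async_file import async_open  # the module deleted above


async def copy_first_kilobyte(src_path: str, dst_path: str) -> None:
    # async_open is itself a coroutine that returns an AsyncFile; entering the
    # context opens the underlying file inside the shared ThreadPoolExecutor.
    async with await async_open(src_path, "rb") as src:
        data = await src.read(1024)
    async with await async_open(dst_path, "wb") as dst:
        await dst.write(data)
        await dst.flush()


if __name__ == "__main__":
    asyncio.run(copy_first_kilobyte("example.bin", "example_copy.bin"))
```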
(file name not captured)

@@ -1,4 +1,5 @@
 from PySide6.QtCore import QObject, QStandardPaths, Signal
+
 from pathlib import Path
 import json
 import os
src/datatypes.py:

@@ -1,9 +1,9 @@
+from PySide6.QtCore import QStandardPaths
+
 import inspect
 from argparse import FileType
 from pathlib import Path
 
-from PySide6.QtCore import QStandardPaths
-
 from dataclasses import dataclass, asdict
 
 
@@ -57,7 +57,7 @@ class ConfType:
 class FileType:
     id: str
     torrent_id: str
-    target: str|Path
+    target: Path
     url: str
     rel_path: str
     total_size: int
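The second hunk narrows FileType.target from str|Path to Path, so callers can rely on Path methods such as .parent and .exists(). A small sketch of how such a dataclass can be round-tripped through a JSON-style config store; the to_dict/from_dict helpers and the downloaded default are assumptions for illustration, not the project's code:

```python
from dataclasses import dataclass, asdict
from pathlib import Path


@dataclass
class FileType:
    id: str
    torrent_id: str
    target: Path          # always a Path now, never a bare str
    url: str
    rel_path: str
    total_size: int
    downloaded: bool = False

    def to_dict(self) -> dict:
        # Path is not JSON-serialisable, so store it as a string.
        data = asdict(self)
        data["target"] = str(self.target)
        return data

    @classmethod
    def from_dict(cls, data: dict) -> "FileType":
        data = dict(data)
        data["target"] = Path(data["target"])  # restore the Path on load
        return cls(**data)
```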
src/download.py: 462 changes

@@ -1,21 +1,15 @@
-import traceback
-
-from PySide6.QtCore import QObject, Signal, QTimer
-from PySide6.QtNetwork import QNetworkCookie
+from PySide6.QtCore import QObject, Signal, QTimer, QUrl, Slot, QThread
+from PySide6.QtNetwork import QNetworkCookie, QNetworkAccessManager, QNetworkRequest, QNetworkReply, QNetworkCookieJar
 
-import asyncio
 import logging
 import time
-from typing import Literal, TYPE_CHECKING
 from urllib.parse import urljoin
 from pathlib import Path
-import anyio
+from typing import Literal, TYPE_CHECKING, BinaryIO, Optional
 
-import httpx
+import traceback
 
 from src.datatypes import FileType, FileStatsType
-from src.async_file import async_open
-from src.utils import aexec_in
 if TYPE_CHECKING:
     from windows.main_window import MainWindow
 
@@ -31,14 +25,15 @@ class DownloadManager(QObject):
         self.conf = parent.conf
         self.base_url = parent.url
 
-        self.max_worker = 3
-        self.chunk_size = 512 * 1024
+        self.max_worker = 5
+        self.worker_pool = [
+            DownloaderWorker(self) for _ in range(self.max_worker)
+        ]
         self.pause = True
+        self.stop = False
         self.files: dict[str, FileType] = self.conf.get_value("files", {})
-        self.tasks: dict[FileType, asyncio.Task] = {}
+        self.tasks: dict[FileType, DownloaderWorker] = {}
         self.task_stats: dict[str, FileStatsType] = {}
-        self.waiter = asyncio.Event()
-        self.client_session: None|httpx.AsyncClient = None
         self.cookies = {}
 
         # slots
@@ -53,224 +48,49 @@ class DownloadManager(QObject):
         # Création d'un logger spécifique pour cette classe
         self.logger = logging.getLogger('DownloadManager')
         self.logger.info("Initialisation du gestionnaire de téléchargement")
-        self.logger.debug(f"Paramètres: max_worker={self.max_worker}, chunk_size={self.chunk_size}, pause={self.pause}")
+        self.logger.debug(f"Paramètres: max_worker={self.max_worker}, pause={self.pause}")
 
         self.status = {}
         self.update_status()
 
-    # async def initialize(self):
-    #     self.client_session = httpx.AsyncClient(
-    #         timeout=httpx.Timeout(
-    #             connect=5.0,  # 5 secondes pour établir la connexion
-    #             read=None,  # Pas de timeout pour la lecture des données (téléchargement)
-    #             write=60.0,  # 60 secondes pour envoyer des données
-    #             pool=5.0  # 5 secondes pour obtenir une connexion du pool
-    #         ),
-    #         follow_redirects=True,
-    #         verify=False,
-    #         # http2=True,
-    #     )
-    #
-    #     for cookie in self.cookies:
-    #         await self.add_cookie(cookie)
-    #     self.logger.info("Session aiohttp initialisée")
+        self.queue_timer = QTimer(self)
+        self.queue_timer.timeout.connect(self.process_queue)
+        self.queue_timer.start(1)
 
-    async def loop_queue(self):
-        # if self.client_session is None:
-        #     await self.initialize()
-
-        self.logger.info("Démarrage de la boucle de téléchargement")
-        while True:
-            if len(self.tasks) >= self.max_worker or self.pause:
-                await self.wait()
-            else:
-                file = await self.next_file()
-                if file is None:
-                    if not self.tasks:
-                        await self.set_pause(True)
-                    await self.wait()
-                else:
-                    self.tasks[file] = asyncio.create_task(self.download_file(file))
-
-    async def wait(self):
-        self.logger.info("loop queue paused, waiting for tasks to finish...")
-        self.waiter.clear()
-        await self.waiter.wait()
-        self.logger.info("loop queue resumed")
+    def process_queue(self):
+        if self.pause:
+            return
+
+        available_worker = next((worker for worker in self.worker_pool if worker.available), None)
+        file = next((file for file in self.files.values() if not file.downloaded and file not in self.tasks), None)
+        if available_worker and file:
+            self.logger.info(f"Assignation du fichier {file.id} à un worker disponible")
+            self.tasks[file] = available_worker
+            available_worker.start_download(file)
+        else:
+            self.queue_timer.stop()
+            if all(file.downloaded for file in self.files.values()):
+                self.logger.info("Tous les fichiers ont été téléchargés")
+                self.set_pause(True)
+            self.logger.info("Arrêt de la boucle de téléchargement")
 
-    async def set_pause(self, value):
+    def set_pause(self, value):
         if self.pause == value:
             return
 
         self.pause = value
-        try:
-            if self.pause:
-                self.logger.info("Essaie de la mise en pause")
-                # on attend 0.5 sec pour voir si le téléchargement s'interrompt proprement avec le self.pause.
-                asyncio.create_task(aexec_in(1, self._clean_connections))
-                # await asyncio.sleep(0.5)
-                # for task in self.tasks.values():
-                #     if task and not task.done():
-                #         task.cancel()
-                # # Attendre que les tâches se terminent proprement
-                # await asyncio.gather(*[t for t in self.tasks.values() if t and not t.done()],
-                #                      return_exceptions=True)
-            else:
-                self.pause = False
-                self.waiter.set()
-                self.logger.info("Reprise des téléchargements")
-        except Exception as e:
-            self.logger.error(f"Erreur lors de la mise en pause: {e}")
-
-        self.update_status()
-
-    async def _clean_connections(self):
-        """Fonction asynchrone interne pour nettoyer les connexions lors de la pause"""
-        self.logger.debug("cleaning connections")
-        try:
-            # Annuler proprement les tâches en cours
-            for file, task in self.tasks.items():
-                self.logger.error(f"task for {file.target} not cancelled: {task}")
-                if task and not task.done():
-                    task.cancel()
-                    self.logger.error(f"trying to cancel task for {file.target}")
-
-            # Attendre que les tâches se terminent
-            if self.tasks.values():
-                await asyncio.wait([t for t in self.tasks.values() if t and not t.done()],
-                                   timeout=2.0)
-        except Exception as e:
-            self.logger.error(f"Erreur lors du nettoyage des connexions: {e}")
-
-    async def next_file(self) -> FileType | None:
-        self.logger.debug("Recherche du prochain fichier à télécharger")
-        for file in self.files.values():
-            if not file.downloaded and file not in self.tasks:
-                self.logger.debug(f"picking file {file}")
-                return file
-        self.logger.debug("No file found to download, waiting for tasks to finish...")
-        return None
-
-    async def download_file(self, file: FileType):
-        self.logger.info(f"Début du téléchargement: {vars(file)}")
-
-        # construction des stats + vérification si le téléchargement est déjà terminé
-        file_path = anyio.Path(file.target)
-        stats = FileStatsType()
-        stats.total_size = file.total_size
-        file_stats = await file_path.stat() if await file_path.exists() else None
-        stats.downloaded_size = file_stats.st_size if file_stats else 0
-        if stats.downloaded_size >= stats.total_size:
-            file.downloaded = True
-            await self.task_ended(file)
-            return
-
-        await file_path.parent.mkdir(parents=True, exist_ok=True)
-
-        self.task_stats[file.id] = stats
-
-        # construction du header
-        headers = {}
-        if stats.downloaded_size > 0:
-            headers.update({"Range": f"bytes={stats.downloaded_size}-{stats.total_size}"})
-
-        mode: Literal["ab", "wb"] = "ab" if stats.downloaded_size > 0 else "wb"
-
-        # Initilisation de la session:
-        try:
-            async with httpx.AsyncClient(
-                timeout=httpx.Timeout(connect=5.0, read=None, write=60.0, pool=5.0),
-                follow_redirects=True,
-                verify=False,
-                cookies=self.cookies,
-                http2=True,
-            ) as client:
-                # requête pour le téléchargement
-                async with client.stream("GET", file.url, headers=headers) as response:
-                    # on trigger les bad requests
-                    response.raise_for_status()
-                    # on ouvre le fichier pour commencer à écrire
-                    async with await anyio.open_file(file_path, mode) as f:
-
-                        last_update_time = time.monotonic()
-                        last_downloaded_size = stats.downloaded_size
-
-                        async for chunk in response.aiter_bytes(self.chunk_size):
-                            if self.pause:
-                                await response.aclose()
-                                break
-
-                            if not chunk:
-                                break
-
-                            await f.write(chunk)
-                            chunk_size = len(chunk)
-                            stats.downloaded_size += chunk_size
-
-                            current_time = time.monotonic()
-                            elapsed_time = current_time - last_update_time
-
-                            if elapsed_time >= 1.0:
-                                bytes_downloaded = stats.downloaded_size - last_downloaded_size
-                                current_speed = bytes_downloaded / elapsed_time
-                                if stats.speed > 0:
-                                    stats.speed = round(0.7 * current_speed + 0.3 * stats.speed)
-                                else:
-                                    stats.speed = round(current_speed)
-
-                                last_update_time = current_time
-                                last_downloaded_size = stats.downloaded_size
-                            else:
-                                await asyncio.sleep(0.005)
-        except httpx.HTTPStatusError as e:
-            self.logger.error(
-                f"Erreur HTTP lors du téléchargement de {file.target}: {e.response.status_code} - {e}")
-            file.error = f"Erreur HTTP {e.response.status_code}: {str(e)}"
-
-        except httpx.TimeoutException as e:
-            self.logger.error(
-                f"Délai d'attente dépassé lors du téléchargement de {file.target}: {str(e)}")
-            file.error = f"Délai d'attente dépassé: {str(e)}"
-
-        except httpx.ConnectError as e:
-            self.logger.error(f"Erreur de connexion lors du téléchargement de {file.target}: {str(e)}")
-            file.error = f"Erreur de connexion: {str(e)}"
-
-        except httpx.NetworkError as e:
-            self.logger.error(f"Erreur réseau lors du téléchargement de {file.target}: {str(e)}")
-            file.error = f"Erreur réseau: {str(e)}"
-
-        except httpx.RequestError as e:
-            self.logger.error(f"Erreur de requête lors du téléchargement de {file.target}: {str(e)}")
-            file.error = f"Erreur de requête: {str(e)}"
-
-        except asyncio.CancelledError:
-            self.logger.warning(f"Téléchargement de {file.target} annulé")
-            file.error = "Téléchargement annulé"
-
-        except IOError as e:
-            self.logger.error(f"Erreur d'E/S lors de l'écriture du fichier {file.target}: {str(e)}")
-            file.error = f"Erreur d'E/S: {str(e)}"
-
-        except Exception as e:
-            self.logger.error(
-                f"Erreur inattendue lors du téléchargement de {file.target}: {type(e).__name__} - {str(e)}")
-            print(traceback.format_exc())
-            file.error = f"Erreur inattendue: {str(e)}"
-
-        else:
-            if self.pause:
-                self.logger.info(f"Téléchargement de {file.target} mis en pause")
-            else:
-                file.downloaded = True
-                self.logger.info(f"Téléchargement de {file.target} terminé avec succès")
-        finally:
-            await self.task_ended(file)
-
-    async def task_ended(self, file):
+        self.logger.info(f"État de pause modifié à: {value}")
+        if self.pause:
+            self.logger.info("Arrêt de tous les téléchargements actifs")
+            for worker in self.tasks.copy().values():
+                worker.stop_download()
+        else:
+            self.logger.info("Reprise des téléchargements")
+            self.queue_timer.start(1)
+        QTimer.singleShot(100, self.update_status)
+
+    def task_ended(self, file):
         self.logger.debug(f"Fin de la tâche pour le fichier {file.id}")
         self.tasks.pop(file)
         self.logger.debug(f"Tâche supprimée du dictionnaire des tâches actives")
@@ -278,18 +98,14 @@ class DownloadManager(QObject):
         if file.id in self.task_stats:
             self.logger.debug(f"Suppression des statistiques pour le fichier {file.id}")
             self.task_stats.pop(file.id)
-        else:
-            self.logger.debug(f"Aucune statistique trouvée pour le fichier {file.id}")
-
-        # self.logger.debug("Mise à jour du statut du gestionnaire de téléchargement")
-        # self.update_status()
 
         self.logger.debug("Notification du waiter pour traiter le prochain fichier")
-        self.waiter.set()
+        self.queue_timer.start(1)
 
         self.files_updated.emit(self.files)
 
     def add_files(self, files: list[dict]):
+        self.logger.info(f"Ajout de {len(files)} fichiers à la file d'attente")
         base_target_path = Path(self.conf.get_value("download_location"))
         for file in files:
             filetype = FileType(
@@ -305,14 +121,22 @@ class DownloadManager(QObject):
                 else False
             )
             self.files.setdefault(filetype.id, filetype)
+            self.logger.debug(f"Fichier ajouté: ID={filetype.id}, URL={filetype.url}, Taille={filetype.total_size}, Déjà téléchargé={filetype.downloaded}")
         self.update_status()
         self.files_updated.emit(self.files)
+        self.logger.info(f"Total des fichiers après ajout: {len(self.files)}")
 
     def del_files(self, file_ids: list[str]):
+        self.logger.info(f"Suppression de {len(file_ids)} fichiers de la file d'attente")
         for file_id in file_ids:
+            self.logger.debug(f"Suppression du fichier avec ID: {file_id}")
+            if file_id in self.files:
                 self.files.pop(file_id)
+            else:
+                self.logger.warning(f"Tentative de suppression d'un fichier inexistant: {file_id}")
         self.update_status()
         self.files_updated.emit(self.files)
+        self.logger.info(f"Total des fichiers après suppression: {len(self.files)}")
 
     def update_status(self):
         new_status = {
@@ -328,18 +152,200 @@ class DownloadManager(QObject):
             "downloader_stats": {key: dl_stat.to_dict() for key, dl_stat in self.task_stats.items()}
         }
         if self.status != new_status:
+            print(new_status["downloader_stats"])
             self.status = new_status
             self.status_updated.emit(self.status)
+            self.logger.debug(f"Mise à jour du statut: {len(self.files)} fichiers, {new_status['downloaded_files']} téléchargés, Vitesse: {new_status['speed']/1024:.2f} Ko/s")
 
-    async def add_cookie(self, cookie: QNetworkCookie):
+    def close_thread_workers(self):
+        self.logger.info("Fermeture de tous les workers de téléchargement")
+        for worker in self.worker_pool:
+            worker.close()
+        self.logger.info("Tous les workers ont été fermés")
+
+    def add_cookie(self, cookie: QNetworkCookie):
         """
-        Ajoute un QNetworkCookie à la session httpx sans se préoccuper du domaine
+        Ajoute un QNetworkCookie aux sessions QNetworkAccessManager sans se préoccuper du domaine
 
         Args:
             cookie: Un objet QNetworkCookie de PySide6
         """
         # Extraction des informations essentielles du QNetworkCookie
-        name = cookie.name().data().decode()
-        value = cookie.value().data().decode()
-        self.cookies.update({name: value})
-        self.logger.info(f"Cookie ajouté: {name}={value}")
+        for worker in self.worker_pool:
+            worker.add_cookie(cookie)
+        self.logger.info(f"Cookie ajouté: {cookie.name().data()}={cookie.value().data()}")
+
+
+class DownloaderWorker(QObject):
+    request_download = Signal(str, str)  # url, destination
+    download_finished = Signal(FileType)
+
+    def __init__(self, parent: DownloadManager):
+        super().__init__(parent)
+        self.download_manager = parent
+        self.logger = logging.getLogger('DownloaderWorker')
+        self.logger.info("Initialisation d'un nouveau worker de téléchargement")
+
+        self.dl_thread = QThread(self)
+
+        self.manager = QNetworkAccessManager(self)
+        self.manager.moveToThread(self.dl_thread)
+        if not self.manager.cookieJar():
+            self.manager.setCookieJar(QNetworkCookieJar())
+        self.manager.finished.connect(self._on_finished)
+
+        self.available = True
+        self.reply: Optional[QNetworkReply] = None
+        self.file_io: Optional[BinaryIO] = None
+        self.file: Optional[FileType] = None
+        self.stats: Optional[FileStatsType] = None
+
+        self.last_update_time = 0
+        self.accumulated_bytes = 0
+
+        self.dl_thread.start()
+        self.logger.debug("Worker de téléchargement initialisé et prêt")
+
+    @Slot(FileType)
+    def start_download(self, file: FileType):
+        if not self.available:
+            self.logger.warning("Tentative de démarrage d'un téléchargement alors que le worker est déjà occupé")
+            return
+
+        self.logger.info(f"Démarrage du téléchargement pour le fichier {file.id}: {file.rel_path}")
+        self.available = False
+
+        # construction des stats + vérification si le téléchargement est déjà terminé
+        self.file = file
+        self.stats = FileStatsType()
+        self.stats.total_size = file.total_size
+        self.stats.downloaded_size = file.size_downloaded
+        if self.stats.downloaded_size >= file.total_size:
+            self.logger.info(f"Le fichier {file.id} est déjà téléchargé")
+            self.file.downloaded = True
+            self.available = True
+            self.download_manager.task_ended(file)
+            return
+
+        self.logger.debug(f"Création du répertoire parent pour {file.target}")
+        self.file.target.parent.mkdir(parents=True, exist_ok=True)
+        self.download_manager.task_stats[file.id] = self.stats
+
+        request = QNetworkRequest(QUrl(file.url))
+        if self.stats.downloaded_size > 0:
+            range_header = f"bytes={self.stats.downloaded_size}-{file.total_size}"
+            request.setRawHeader(b"Range", range_header.encode())
+            mode = "ab"
+            self.logger.info(f"Reprise du téléchargement à partir de {self.stats.downloaded_size} octets")
+        else:
+            mode = "wb"
+            self.logger.info(f"Nouveau téléchargement pour {file.id}")
+
+        self.reply = self.manager.get(request)
+        self.reply.readyRead.connect(self._on_data_ready)
+        self.reply.downloadProgress.connect(self._on_progress)
+        self.file_io = open(file.target, mode)
+        self.logger.debug(f"Fichier ouvert en mode {mode}: {file.target}")
+
+        self.last_update_time = time.monotonic()
+        self.accumulated_bytes = 0
+
+    def _on_data_ready(self):
+        chunk_size = 0
+        while self.reply.bytesAvailable():
+            chunk = self.reply.read(64 * 1024)
+            self.file_io.write(chunk.data())
+            chunk_size += chunk.size()
+
+        self.stats.downloaded_size += chunk_size
+        self.accumulated_bytes += chunk_size
+        now = time.monotonic()
+        elapsed = now - self.last_update_time
+
+        if elapsed >= 1.0:
+            # EMA lissage court terme (optionnel : 0.7 nouveau / 0.3 ancien)
+            new_speed = self.accumulated_bytes / elapsed
+            if self.stats.speed > 0:
+                self.stats.speed = round(0.7 * new_speed + 0.3 * self.stats.speed)
+            else:
+                self.stats.speed = round(new_speed)
+
+            self.accumulated_bytes = 0
+            self.last_update_time = now
+            # self.logger.info(f"Données reçues: {chunk_size} octets, vitesse: {self.stats.speed/1024:.2f} Ko/s")
+
+    def _on_progress(self, bytes_received, bytes_total):
+        pass
+        # self.logger.debug(f"Progression du téléchargement pour {self.file.id}: {bytes_received}/{bytes_total} octets ({(bytes_received/max(bytes_total, 1))*100:.1f}%)")
+
+    def _on_finished(self, reply: QNetworkReply):
+        if not self.file:
+            self.logger.error("Réponse reçue mais aucun fichier associé")
+            return
+
+        if reply.error() == QNetworkReply.NetworkError.NoError:
+            self.logger.info(f"Téléchargement terminé avec succès pour {self.file.id}")
+            self.file.downloaded = True
+        else:
+            error_msg = f"Erreur HTTP {reply.error()}: {reply.errorString()}"
+            self.logger.error(f"Échec du téléchargement pour {self.file.id}: {error_msg}")
+            self.file.error = error_msg
+
+        self.download_manager.task_ended(self.file)
+        self.clear()
+
+    def stop_download(self):
+        if self.file:
+            self.logger.info(f"Arrêt du téléchargement pour {self.file.id}")
+        else:
+            self.logger.info("Arrêt du téléchargement (aucun fichier associé)")
+        self.clear()
+
+    def clear(self):
+        self.logger.debug("Nettoyage des ressources de téléchargement")
+        if self.file_io:
+            self.file_io.close()
+            self.file_io = None
+            self.logger.debug("Fichier fermé")
+
+        if self.reply:
+            if not self.reply.isFinished():
+                self.logger.debug("Annulation de la requête réseau")
+                self.reply.abort()
+            if self.reply:
+                self.reply.deleteLater()
+            self.reply = None
+            self.logger.debug("Requête réseau nettoyée")
+
+        # if self.stats:
+        #     self.stats = None
+        #     self.logger.debug("Statistiques réinitialisées")
+        #
+        # if self.file:
+        #     self.file = None
+        #     self.logger.debug("Référence au fichier supprimée")
+
+        self.available = True
+        self.logger.debug("Worker marqué comme disponible")
+
+    def add_cookie(self, cookie: QNetworkCookie):
+        cookie_jar = self.manager.cookieJar()
+        all_cookies = cookie_jar.allCookies()
+        all_cookies.append(cookie)
+        cookie_jar.setAllCookies(all_cookies)
+        self.logger.debug(f"Cookie ajouté au worker: {cookie.name().data()}={cookie.value().data()}")
+
+    def close(self):
+        self.logger.info("Fermeture du worker de téléchargement")
+        if self.reply:
+            self.logger.debug("Annulation de la requête en cours")
+            self.reply.abort()
+            self.reply.deleteLater()
+        self.manager.deleteLater()
+
+        self.dl_thread.quit()
+        self.dl_thread.wait()
+        self.logger.debug("Arrêt du thread de téléchargement")
+        self.logger.info("Worker fermé")
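The rewrite above swaps the asyncio/httpx pipeline for plain Qt networking: a QTimer polls the queue, and each worker streams a QNetworkReply to disk, using a Range header to resume partial files. A stripped-down sketch of that idea, independent of the project's classes (the URL, the path and the error handling are simplified placeholders):

```python
import sys
from pathlib import Path

from PySide6.QtCore import QUrl
from PySide6.QtNetwork import QNetworkAccessManager, QNetworkReply, QNetworkRequest
from PySide6.QtWidgets import QApplication


class SimpleDownloader:
    """Minimal resumable download built on QNetworkAccessManager."""

    def __init__(self, url: str, target: Path):
        self.manager = QNetworkAccessManager()
        self.target = target
        already = target.stat().st_size if target.exists() else 0

        request = QNetworkRequest(QUrl(url))
        if already > 0:
            # Ask the server for the remaining bytes only.
            request.setRawHeader(b"Range", f"bytes={already}-".encode())
        self.file_io = open(target, "ab" if already else "wb")

        self.reply = self.manager.get(request)
        self.reply.readyRead.connect(self._on_ready_read)
        self.reply.finished.connect(self._on_finished)

    def _on_ready_read(self):
        # Drain whatever has arrived so far straight to disk.
        self.file_io.write(self.reply.readAll().data())

    def _on_finished(self):
        self.file_io.close()
        ok = self.reply.error() == QNetworkReply.NetworkError.NoError
        print("done" if ok else f"failed: {self.reply.errorString()}")
        QApplication.quit()


if __name__ == "__main__":
    app = QApplication(sys.argv)
    dl = SimpleDownloader("https://example.com/file.bin", Path("file.bin"))
    sys.exit(app.exec())
```

With this approach, pause and resume reduce to aborting the reply and later re-issuing the request with an updated Range header, which is roughly what DownloaderWorker.stop_download and start_download do in the diff above.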
Deleted file (326 lines removed): an earlier aiohttp-based version of DownloadManager.

@@ -1,326 +0,0 @@
The entire file is removed. Its former content:

from PySide6.QtCore import QObject, Signal, QTimer

import asyncio
import logging
import time
from copy import deepcopy
from typing import Literal, TYPE_CHECKING
from urllib.parse import urljoin
from pathlib import Path
from http.cookies import SimpleCookie

import aiofiles
import aiopath
import aiohttp
from PySide6.QtNetwork import QNetworkCookie

from src.datatypes import FileType, FileStatsType
if TYPE_CHECKING:
    from windows.main_window import MainWindow


class DownloadManager(QObject):
    status_updated = Signal(dict)
    stats_updated = Signal(dict)
    files_updated = Signal(dict)

    def __init__(self, parent: "MainWindow" = None):
        super().__init__(parent)

        self.conf = parent.conf
        self.base_url = parent.url

        self.max_worker = 2
        self.chunk_size = 1024 * 1024
        self.pause = True
        self.files: dict[str, FileType] = self.conf.get_value("files", {})
        self.tasks: dict[FileType, asyncio.Task] = {}
        self.task_stats: dict[str, FileStatsType] = {}
        self.waiter = asyncio.Event()
        self.client_session: None|aiohttp.ClientSession = None
        self.cookies = {}

        # slots
        # self.status_updated.connect(lambda data: self.conf.set_value("files", self.files))
        self.files_updated.connect(lambda data: self.conf.set_value("files", data))

        # stats timer
        self.timer_dl_stats = QTimer(self)
        self.timer_dl_stats.timeout.connect(self.update_status)
        self.timer_dl_stats.start(2000)

        # Création d'un logger spécifique pour cette classe
        self.logger = logging.getLogger('DownloadManager')
        self.logger.info("Initialisation du gestionnaire de téléchargement")
        self.logger.debug(f"Paramètres: max_worker={self.max_worker}, chunk_size={self.chunk_size}, pause={self.pause}")

        self.status = {}
        self.update_status()

    async def initialize(self):
        self.client_session = aiohttp.ClientSession()
        self.logger.info("Session aiohttp initialisée")

    async def loop_queue(self):
        if self.client_session is None:
            await self.initialize()

        self.logger.info("Démarrage de la boucle de téléchargement")
        while True:
            if len(self.tasks) >= self.max_worker or self.pause:
                await self.wait()
            else:
                file = await self.next_file()
                if file is None:
                    await self.wait()
                else:
                    self.tasks[file] = asyncio.create_task(self.download_file(file))

    async def wait(self):
        self.logger.info("loop queue paused, waiting for tasks to finish...")
        self.waiter.clear()
        await self.waiter.wait()
        self.logger.info("loop queue resumed")

    def set_pause(self, value):
        if self.pause == value:
            return

        self.pause = value
        if self.pause:
            for file_id, task in self.tasks.items():
                if not task.done():
                    task.cancel()
            self.logger.info("Tous les téléchargements ont été mis en pause")
        else:
            self.pause = False
            self.waiter.set()
            self.logger.info("Reprise des téléchargements")
        self.update_status()

    async def next_file(self) -> FileType | None:
        self.logger.debug("Recherche du prochain fichier à télécharger")
        for file in self.files.values():
            if not file.downloaded and file not in self.tasks:
                self.logger.debug(f"picking file {file}")
                return file
        self.logger.debug("No file found to download, waiting for tasks to finish...")
        return None

    async def download_file(self, file: FileType):
        self.logger.info(f"Début du téléchargement: {vars(file)}")

        # construction des stats + vérification si le téléchargement est déjà terminé
        file_path = aiopath.AsyncPath(file.target)
        stats = FileStatsType()
        stats.total_size = file.total_size
        stats.downloaded_size = await file_path.stat().st_size if await file_path.exists() else 0
        if stats.downloaded_size >= stats.total_size:
            file.downloaded = True
            await self.task_ended(file)
            return

        await file_path.parent.mkdir(parents=True, exist_ok=True)

        self.task_stats[file.id] = stats

        # construction du header
        headers = {}
        if stats.downloaded_size > 0:
            headers.update({"Range": f"bytes={stats.downloaded_size}-{stats.total_size}"})

        mode: Literal["ab", "wb"] = "ab" if stats.downloaded_size > 0 else "wb"
        try:
            async with aiofiles.open(file_path, mode) as f:
                async with self.client_session.get(file.url, cookies=self.cookies) as response:
                    print("Content-Encoding:", response.headers.get('Content-Encoding'))
                    last_update_time = time.monotonic()
                    last_downloaded_size = stats.downloaded_size
                    async for chunk in response.content.iter_chunked(self.chunk_size):
                        if not chunk:
                            break
                        await f.write(chunk)
                        if self.pause:
                            break
                        chunk_size = len(chunk)
                        stats.downloaded_size += chunk_size

                        current_time = time.monotonic()
                        elapsed_time = current_time - last_update_time

                        if elapsed_time >= 0.5:
                            bytes_downloaded = stats.downloaded_size - last_downloaded_size
                            current_speed = bytes_downloaded / elapsed_time
                            if stats.speed > 0:
                                stats.speed = round(0.7 * current_speed + 0.3 * stats.speed)
                            else:
                                stats.speed = round(current_speed)

                            last_update_time = current_time
                            last_downloaded_size = stats.downloaded_size
        except aiohttp.ClientResponseError as e:
            self.logger.error(f"Erreur HTTP lors du téléchargement de {file.target}: {e.status} - {e.message}")
            file.error = f"Erreur HTTP {e.status}: {e.message}"

        except aiohttp.ClientError as e:
            self.logger.error(f"Erreur de connexion lors du téléchargement de {file.target}: {str(e)}")
            file.error = f"Erreur de connexion: {str(e)}"

        except asyncio.CancelledError:
            self.logger.warning(f"Téléchargement de {file.target} annulé")
            file.error = "Téléchargement annulé"
            # raise # Propager l'exception pour une annulation propre

        except IOError as e:
            self.logger.error(f"Erreur d'E/S lors de l'écriture du fichier {file.target}: {str(e)}")
            file.error = f"Erreur d'E/S: {str(e)}"

        except Exception as e:
            self.logger.error(f"Erreur inattendue lors du téléchargement de {file.target}: {type(e).__name__} - {str(e)}")
            file.error = f"Erreur inattendue: {str(e)}"

        else:
            file.downloaded = True
            self.logger.info(f"Téléchargement de {file.target} terminé avec succès")

        finally:
            await self.task_ended(file)

    async def task_ended(self, file):
        self.logger.debug(f"Fin de la tâche pour le fichier {file.id}")
        self.tasks.pop(file)
        self.logger.debug(f"Tâche supprimée du dictionnaire des tâches actives")

        if file.id in self.task_stats:
            self.logger.debug(f"Suppression des statistiques pour le fichier {file.id}")
            self.task_stats.pop(file.id)
        else:
            self.logger.debug(f"Aucune statistique trouvée pour le fichier {file.id}")

        # self.logger.debug("Mise à jour du statut du gestionnaire de téléchargement")
        # self.update_status()

        self.logger.debug("Notification du waiter pour traiter le prochain fichier")
        self.waiter.set()

        self.files_updated.emit(self.files)

    def add_files(self, files: list[dict]):
        base_target_path = Path(self.conf.get_value("download_location"))
        for file in files:
            filetype = FileType(
                id=file["id"],
                torrent_id=file["torrent"],
                target=base_target_path / file["rel_name"],
                url=urljoin(self.base_url, file["download_url"]),
                rel_path=file["rel_name"],
                total_size=file["size"],
            )
            filetype.downloaded = (
                True if filetype.target.exists() and filetype.target.stat().st_size == filetype.total_size
                else False
            )
            self.files.setdefault(filetype.id, filetype)
        self.update_status()
        self.files_updated.emit(self.files)

    def del_files(self, file_ids: list[str]):
        for file_id in file_ids:
            self.files.pop(file_id)
        self.update_status()
        self.files_updated.emit(self.files)

    def update_status(self):
        self.status = {
            "pause": self.pause,
            "max_worker": self.max_worker,
            "total_files": len(self.files),
            "downloaded_files": sum(file.downloaded for file in self.files.values() if file.downloaded),
            "downloading": [task.id for task in self.tasks.keys()],
            "total_size": sum(file.total_size for file in self.files.values()),
            "downloaded_size": sum(file.total_size for file in self.files.values() if file.downloaded) + sum((dl_stat.downloaded_size for dl_stat in self.task_stats.values()), 0),
            "speed": sum((dl_stat.speed for dl_stat in self.task_stats.values()), 0),
            "downloader_stats": {key: dl_stat.to_dict() for key, dl_stat in self.task_stats.items()}
        }
        self.status_updated.emit(self.status)

    # def update_dl_stats(self):
    #     old_stats = deepcopy(self.dl_stats)
    #     self.dl_stats = {
    #         "speed": sum((dl_stat.speed for dl_stat in self.task_stats.values()), 0),
    #         "downloaded_size": sum((dl_stat.downloaded_size for dl_stat in self.task_stats.values()), 0),
    #         "downloading_stats": {key: dl_stat.to_dict() for key, dl_stat in self.task_stats.items()},
    #     }
    #     if old_stats != self.dl_stats:
    #         self.stats_updated.emit(self.dl_stats)
    #     return self.dl_stats

    async def add_cookie(self, cookie):
        """
        Ajoute un QNetworkCookie à la session client.

        Args:
            cookie (QNetworkCookie): Le cookie PySide6 à ajouter à la session
        """

        try:
            cookie_name = cookie.name().data().decode() if cookie.name() else "Inconnu"
            self.logger.debug(f"Tentative d'ajout d'un cookie: {cookie_name}")

            if not self.client_session:
                self.logger.warning("Impossible d'ajouter le cookie: la session client n'est pas initialisée")
                return

            # Vérification que c'est bien un QNetworkCookie
            from PySide6.QtNetwork import QNetworkCookie
            if not isinstance(cookie, QNetworkCookie):
                self.logger.error(f"Format de cookie invalide: {type(cookie)}, un QNetworkCookie est attendu")
                return

            # Extraction des informations du QNetworkCookie
            cookie_name = cookie.name().data().decode()
            cookie_value = cookie.value().data().decode()
            cookie_domain = cookie.domain()
            cookie_path = cookie.path()

            self.logger.debug(
                f"Informations du cookie - Nom: {cookie_name}, Valeur: {cookie_value[:10]}..., Domaine: {cookie_domain}, Chemin: {cookie_path}")

            # Création du cookie pour aiohttp

            simple_cookie = SimpleCookie()
            simple_cookie[cookie_name] = cookie_value

            # Ajout des attributs du cookie
            # if cookie_domain:
            #     self.logger.debug(f"Définition du domaine du cookie: {cookie_domain}")
            #     simple_cookie[cookie_name]['domain'] = cookie_domain

            if cookie_path:
                simple_cookie[cookie_name]['path'] = cookie_path

            if cookie.isSecure():
                self.logger.debug(f"Le cookie '{cookie_name}' est marqué comme sécurisé")
                simple_cookie[cookie_name]['secure'] = True

            if cookie.isHttpOnly():
                self.logger.debug(f"Le cookie '{cookie_name}' est marqué comme HttpOnly")
                simple_cookie[cookie_name]['httponly'] = True

            # Ajout de l'expiration si ce n'est pas un cookie de session
            if not cookie.isSessionCookie():
                expiration = cookie.expirationDate().toString("yyyy-MM-dd hh:mm:ss")
                self.logger.debug(f"Le cookie expirera le: {expiration}")
                # La conversion de date pourrait nécessiter plus de code ici

            # Ajout du cookie à la session aiohttp - la méthode correcte
            self.client_session.cookie_jar.update_cookies(simple_cookie)

            self.logger.info(f"Cookie '{cookie_name}' ajouté avec succès à la session")

        except Exception as e:
            self.logger.exception(f"Erreur lors de l'ajout du cookie: {str(e)}")
@@ -1,173 +0,0 @@
|
|||||||
import aiopath
|
|
||||||
from PySide6.QtCore import Signal, QObject, QTimer, Slot
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import aiohttp
|
|
||||||
import aiofiles
|
|
||||||
import time
|
|
||||||
from typing import TYPE_CHECKING, Dict, Any
|
|
||||||
|
|
||||||
from src.conf import ConfManager
|
|
||||||
from src.datatypes import FileType, FileStatsType
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
|
||||||
from windows.main_window import MainWindow
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
class DownloadManager(QObject):
|
|
||||||
max_worker = 5
|
|
||||||
downloading: dict[str, "Downloader"] = dict()
|
|
||||||
pause = True
|
|
||||||
chunk_size = 128 * 1024 # 128KB
|
|
||||||
finished = Signal(str)
|
|
||||||
|
|
||||||
file_update = Signal(dict)
|
|
||||||
stats_update = Signal(dict)
|
|
||||||
status_updated = Signal(dict)
|
|
||||||
|
|
||||||
client_session: aiohttp.ClientSession = None
|
|
||||||
|
|
||||||
def __init__(self, parent=None):
|
|
||||||
super().__init__(parent)
|
|
||||||
self.conf: "ConfManager" = parent.conf
|
|
||||||
|
|
||||||
self.files: dict[str, FileType] = self.conf.get_value("files", {})
|
|
||||||
|
|
||||||
self.waiter = asyncio.Event()
|
|
||||||
# self.downloads = downloads
|
|
||||||
|
|
||||||
self.timer_stats = QTimer(self)
|
|
||||||
self.timer_stats.timeout.connect(self.update_stats)
|
|
||||||
self.timer_stats.start(500)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def status(self):
|
|
||||||
return {
|
|
||||||
"pause": self.pause,
|
|
||||||
"max_worker": self.max_worker,
|
|
||||||
"len_total": len(self.files),
|
|
||||||
"len_downloaded": sum(file.downloaded for file in self.files.values()),
|
|
||||||
"downloading": list(self.downloading.keys()),
|
|
||||||
}
|
|
||||||
|
|
||||||
async def read_queue(self):
|
|
||||||
while True:
|
|
||||||
if len(self.downloading) >= self.max_worker or self.pause:
|
|
||||||
await self.waiter.wait()
|
|
||||||
else:
|
|
||||||
file = await self.next_file()
|
|
||||||
if file is None:
|
|
||||||
await self.waiter.wait()
|
|
||||||
else:
|
|
||||||
downloader = Downloader(file, self)
|
|
||||||
downloader.finished.connect(self.file_update.emit)
|
|
||||||
self.downloading[file.id] = downloader
|
|
||||||
asyncio.create_task(downloader.start())
|
|
||||||
|
|
||||||
async def next_file(self):
|
|
||||||
for file in self.files.values():
|
|
||||||
if not file.downloaded and file not in self.downloading:
|
|
||||||
return file
|
|
||||||
|
|
||||||
async def file_finished(self, file):
|
|
        self.downloading.pop(file)
        self.waiter.set()

    async def add_files(self, files: list[dict]):
        for file in files:
            self.files.setdefault(file["id"], FileType)  # add the entry if the key does not exist; if it already exists, do nothing
        self.conf.set_value("files", self.files)
        self.file_update.emit(self.files.items())

    async def del_files(self, file: str | list[str]):
        pass

    # @Slot()
    # def update_stats(self):
    #     if len(self.downloading):
    #         file_stats = {file_id: worker.stats for file_id, worker in self.downloading.items()}
    #         global_stats = {
    #             "speed": sum(stat.speed for stat in file_stats.values()),
    #             "total_size": sum(file.total_size for file in self.files.values()),
    #             "size_downloaded": sum(file.total_size for file in self.files.values() if file.downloaded) + sum(stat.downloaded_size for stat in file_stats.values()),
    #         }
    #
    #         return {
    #             "global": global_stats,
    #             "files": file_stats
    #         }


class Downloader(QObject):
    finished = Signal(FileType)

    stats = FileStatsType()

    def __init__(self, file: FileType, manager: DownloadManager):
        super().__init__(manager)
        self.file = file
        self.manager = manager
        self.target_path = aiopath.AsyncPath(self.file.target)
        # queue capped at roughly 200 MB (maxsize counts queued chunks, not bytes)
        self.download_queue = asyncio.Queue(maxsize=1024)

        # limit the buffer

    async def start(self):
        # todo: support resuming after a failure
        self.stats.total_size = self.file.total_size
        self.stats.downloaded_size = (await self.target_path.stat()).st_size if await self.target_path.exists() else 0
        if self.stats.downloaded_size >= self.stats.total_size:
            self.file.downloaded = True
            self.finished.emit(self.file)
        else:
            download_task = asyncio.create_task(self.download_data())
            write_task = asyncio.create_task(self.write_to_disk())
            await asyncio.gather(download_task, write_task)
            self.finished.emit(self.file)

    async def download_data(self):
        async with self.manager.client_session.get(self.file.url) as response:
            last_update_time = time.monotonic()
            last_downloaded_size = self.stats.downloaded_size

            async for chunk in response.content.iter_chunked(self.manager.chunk_size):
                await self.download_queue.put(chunk)
                # while True:
                #     chunk = await response.content.read(self.manager.chunk_size)
                #     if not chunk:
                #         await self.download_queue.put(None)
                #         break
                #     await self.download_queue.put(chunk)

                chunk_size = len(chunk)
                self.stats.downloaded_size += chunk_size

                # simple computation of the current speed
                current_time = time.monotonic()
                elapsed_time = current_time - last_update_time

                # update the speed every 0.5 second
                if elapsed_time >= 0.5:
                    bytes_downloaded = self.stats.downloaded_size - last_downloaded_size
                    current_speed = bytes_downloaded / elapsed_time
                    # simple smoothing: 70% new value + 30% old value
                    if self.stats.speed > 0:
                        self.stats.speed = round(0.7 * current_speed + 0.3 * self.stats.speed)
                    else:
                        self.stats.speed = round(current_speed)

                    last_update_time = current_time
                    last_downloaded_size = self.stats.downloaded_size
            await self.download_queue.put(None)

    async def write_to_disk(self):
        async with aiofiles.open(self.target_path, "wb") as f:
            while True:
                chunk = await self.download_queue.get()
                if chunk is None:
                    break
                await f.write(chunk)
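The Downloader above is a bounded producer/consumer: download_data pushes chunks into an asyncio.Queue and ends the stream with a None sentinel, while write_to_disk drains the queue to disk. A minimal, self-contained sketch of that pattern follows (plain asyncio, no aiohttp or aiofiles; the names produce, consume and main are illustrative only):

import asyncio

async def produce(queue: asyncio.Queue, chunks: list[bytes]) -> None:
    # put() blocks once the queue is full, so a slow consumer
    # naturally throttles the producer and bounds memory use
    for chunk in chunks:
        await queue.put(chunk)
    await queue.put(None)  # sentinel: no more data

async def consume(queue: asyncio.Queue, sink: list[bytes]) -> None:
    while True:
        chunk = await queue.get()
        if chunk is None:  # sentinel received, stop draining
            break
        sink.append(chunk)

async def main() -> None:
    queue: asyncio.Queue = asyncio.Queue(maxsize=8)  # cap the number of buffered chunks
    written: list[bytes] = []
    await asyncio.gather(produce(queue, [b"a" * 4, b"b" * 4]), consume(queue, written))
    print(b"".join(written))

asyncio.run(main())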
@@ -1,88 +0,0 @@
-import logging
-import threading
-
-from PySide6.QtCore import QObject, Signal, QTimer
-
-from src.datatypes import FileType, FileStatsType
-
-
-class DownloadManager(QObject):
-    status_updated = Signal(dict)
-    stats_updated = Signal(dict)
-    files_updated = Signal(dict)
-
-    def __init__(self, parent=None):
-        super().__init__(parent)
-
-        self.conf = parent.conf
-        self.base_url = parent.url
-
-        self.max_worker = 2
-        self.chunk_size = 128 * 1024
-        self.pause = True
-        self.files: dict[str, FileType] = self.conf.get_value("files", {})
-        self.tasks: dict[FileType, threading.Thread] = {}
-        self.task_stats: dict[str, FileStatsType] = {}
-        self.waiter = threading.Event()
-        self.cookies = []
-        self.lock = threading.Lock()
-        self.stop = False
-
-        # slots
-        self.files_updated.connect(lambda data: self.conf.set_value("files", data))
-
-        # stats timer
-        self.timer_dl_stats = QTimer(self)
-        self.timer_dl_stats.timeout.connect(self.update_status)
-        self.timer_dl_stats.start(2000)
-
-        # dedicated logger for this class
-        self.logger = logging.getLogger('DownloadManager')
-        self.logger.info("Initialisation du gestionnaire de téléchargement")
-        self.logger.debug(f"Paramètres: max_worker={self.max_worker}, chunk_size={self.chunk_size}, pause={self.pause}")
-
-        self.status = {}
-        self.update_status()
-
-    def loop_queue(self):
-        self.logger.info("Démarrage de la boucle de téléchargement")
-        while True:
-            if len(self.tasks) >= self.max_worker or self.pause:
-                self.wait()
-            else:
-                file = self.next_file()
-                if file is None:
-                    self.wait()
-                else:
-                    # todo: start the task
-                    pass
-
-    def wait(self):
-        self.logger.info("loop queue paused, waiting for tasks to finish...")
-        self.waiter.clear()
-        self.waiter.wait()
-        self.logger.info("loop queue resumed")
-
-    def set_pause(self, value):
-        if self.pause == value:
-            return
-
-        if value:
-            for file_id, task in self.tasks.items():
-                # todo: cancel the tasks
-                pass
-            self.logger.info("Tous les téléchargements ont été mis en pause")
-        else:
-            self.waiter.set()
-            self.logger.info("Reprise des téléchargements")
-        self.pause = value
-        self.update_status()
-
-    def next_file(self):
-        self.logger.debug("Recherche du prochain fichier à télécharger")
-        for file in self.files.values():
-            if not file.downloaded and file not in self.tasks:
-                self.logger.debug(f"picking file {file}")
-                return file
-        self.logger.debug("No file found to download, waiting for tasks to finish...")
-        return None
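The deleted manager paused its scheduling loop with a threading.Event: wait() clears the event and blocks, and set_pause(False) calls set() to wake the loop. A tiny standalone sketch of that handshake (the names loop and t are illustrative; the real wait() also calls clear() first so an earlier set() cannot leak through):

import threading
import time

waiter = threading.Event()  # starts cleared, so wait() will block

def loop():
    # stand-in for DownloadManager.wait(): park until someone resumes us
    print("loop paused, waiting...")
    waiter.wait()
    print("loop resumed")

t = threading.Thread(target=loop)
t.start()
time.sleep(0.1)  # pretend the user resumes a bit later
waiter.set()     # what set_pause(False) does
t.join()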
@@ -1,12 +1,9 @@
-import asyncio
-import logging
-
 from PySide6.QtCore import QObject, Slot, Signal, QTimer, Property
 
-import json
-
 from PySide6.QtWidgets import QFileDialog
 
+import logging
+import json
 
 from src.datatypes import FileType
 from src.download import DownloadManager
@@ -80,7 +77,7 @@ class WebHandler(QObject):
     def set_pause(self, value):
         """Définit l'état de pause du gestionnaire de téléchargement"""
         self.logger.info(f"Demande de changement d'état de pause: {value}")
-        asyncio.create_task(self.download_manager.set_pause(value))
+        self.download_manager.set_pause(value)
         self.logger.debug("Tâche asynchrone de changement d'état créée")
 
     @Slot(list)
@@ -19,14 +19,14 @@ def configure_logging(debug_mode=False):
     # Create a common formatter for all handlers
     formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
 
-    # Add a console handler only in debug mode
-    if debug_mode:
-        handler = logging.StreamHandler(sys.stdout)
-        handler.setLevel(logging.INFO)
+    handler = logging.StreamHandler(sys.stdout)
+    handler.setLevel(logging.INFO)
 
-        handler.setFormatter(formatter)
-        root.addHandler(handler)
-    else:
+    handler.setFormatter(formatter)
+    root.addHandler(handler)
+
+    # Add a console handler only in debug mode
+    if not debug_mode:
         # In production mode, we can optionally configure a logger to a file
         # or simply add nothing so that no logs are displayed
         log_dir = Path(QStandardPaths.writableLocation(QStandardPaths.StandardLocation.AppConfigLocation)) / "logs"
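In the new version only log_dir is computed inside this hunk; whatever handler gets attached there is outside the diff. Purely as an assumed illustration (not the project's code), a rotating file handler for that directory could look like this, with attach_file_handler and oxapp.log being made-up names:

import logging
import logging.handlers
from pathlib import Path

def attach_file_handler(root: logging.Logger, log_dir: Path) -> None:
    # hypothetical helper: write rotating log files under log_dir in production
    log_dir.mkdir(parents=True, exist_ok=True)
    handler = logging.handlers.RotatingFileHandler(
        log_dir / "oxapp.log", maxBytes=1_000_000, backupCount=3, encoding="utf-8"
    )
    handler.setLevel(logging.INFO)
    handler.setFormatter(logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
    root.addHandler(handler)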
@@ -1,5 +1,3 @@
-import asyncio
-
 from PySide6.QtNetwork import QNetworkCookie
 
 import time
@@ -8,6 +6,7 @@ import pickle
 import pathlib
 
 
+
 def cookie_qt_to_py(cookie: QNetworkCookie):
     return Cookie(
         version=0,
@@ -94,9 +93,3 @@ class RestrictedUnpickler(pickle.Unpickler):
 
         # If not allowed, raise an exception with a detailed message
         raise pickle.UnpicklingError(f"Accès refusé à la classe {module}.{name} pour des raisons de sécurité")
-
-
-async def aexec_in(secs, func):
-    await asyncio.sleep(secs)
-    return await func()
-
@@ -1,13 +1,12 @@
-import asyncio
+from PySide6.QtCore import QTimer
+from PySide6.QtGui import QIcon
+from PySide6.QtWidgets import QMainWindow
+
 import sys
 from pathlib import Path
 import logging
 import base64
 
-from PySide6.QtCore import QTimer
-from PySide6.QtGui import QIcon
-from PySide6.QtWidgets import QMainWindow
-
 from src.conf import ConfManager
 from src.download import DownloadManager
 from src.handler import WebHandler
@@ -42,20 +41,11 @@ class MainWindow(QMainWindow):
         self.setCentralWidget(self.site_window)
 
         # connect the signals
-        self.site_window.on_cookie_added.connect(lambda cookie: asyncio.ensure_future(self.download_manager.add_cookie(cookie)))
+        self.site_window.on_cookie_added.connect(self.download_manager.add_cookie)
         self.logger.debug("Signaux connectés")
 
-        # initialize the download manager
-        self.dm_loop = None
-        QTimer.singleShot(0, self.setup_async_tasks)
-
         self.logger.info("Fenêtre principale initialisée avec succès")
 
-    def setup_async_tasks(self):
-        # start the asyncio tasks once the application is ready
-        self.logger.debug("Configuration des tâches asynchrones")
-        self.dm_loop = asyncio.ensure_future(self.download_manager.loop_queue())
-        self.logger.debug("File d'attente de téléchargement démarrée")
-
+
     def handle_files(self, file_paths):
         """
@@ -89,3 +79,11 @@ class MainWindow(QMainWindow):
                     self.logger.error(f"Erreur lors de la lecture ou de l'encodage du fichier {file_path}: {e}")
             else:
                 self.logger.warning(f"Le fichier {file_path} n'existe pas")
+
+    def closeEvent(self, event, /):
+        self.download_manager.pause = True
+        self.download_manager.stop = True
+
+        self.download_manager.close_thread_workers()
+
+        event.accept()
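close_thread_workers() itself does not appear in this diff, so the following is only an assumed sketch of the usual shutdown sequence behind the pause and stop flags set in closeEvent above: mark the manager as stopping, wake anything blocked on the Event, then join the worker threads with a timeout. WorkerPool and threads are illustrative names:

import threading

class WorkerPool:
    # illustrative stand-in, not the project's DownloadManager
    def __init__(self) -> None:
        self.stop = False
        self.waiter = threading.Event()
        self.threads: list[threading.Thread] = []

    def close_thread_workers(self) -> None:
        # assumed sketch: wake any loop parked in waiter.wait(), then join the workers
        self.stop = True
        self.waiter.set()
        for t in self.threads:
            t.join(timeout=5)  # give each worker a bounded time to exit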
@@ -1,19 +1,13 @@
-import os
-from pathlib import Path
-import sqlite3
-import logging
-
-from PySide6.QtWebEngineCore import QWebEngineSettings, QWebEngineProfile, QWebEnginePage, QWebEngineCookieStore
-from PySide6.QtWidgets import QWidget
+from PySide6.QtWebEngineCore import QWebEngineSettings, QWebEngineProfile, QWebEnginePage
 from PySide6.QtWebEngineWidgets import QWebEngineView
 from PySide6.QtWebChannel import QWebChannel
 from PySide6.QtCore import Slot, QFile, QIODevice, Signal, Qt, QStandardPaths, QTimer, QDateTime
 from PySide6.QtNetwork import QNetworkCookie
 
-import sys
-from src.conf import ConfManager
-from src.handler import WebHandler
-
+import os
+from pathlib import Path
+import sqlite3
+import logging
 
 
 # https://gitea.devpanel.fr/oxpanel/app/src/branch/master/window/site.py